CombinedText stringlengths 4 3.42M |
|---|
i686-elf-gcc 9.3.0 (new formula)
The formula provides GNU C/C++ compilers for the generic ELF target on Intel
32-bit CPUs. The canonical target name is i686-pc-elf.
It's different from x86_64-elf-gcc. Certain operations, like division of
signed and unsigned long, are not native for i686 and are provided by libgcc:
https://gcc.gnu.org/onlinedocs/gccint/Integer-library-routines.html
For example, libgcc in x86_64-elf-gcc does not provide the functions:
__umoddi3
__udivdi3
Therefore an attempt to compile code with -m32 that uses division of
unsigned long will fail on x86_64-elf-gcc.
Closes #54946.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula: GCC 9.3.0 cross-compiler for the bare-metal i686-elf target.
class I686ElfGcc < Formula
  desc "The GNU compiler collection for i686-elf"
  homepage "https://gcc.gnu.org"
  url "https://ftp.gnu.org/gnu/gcc/gcc-9.3.0/gcc-9.3.0.tar.xz"
  mirror "https://ftpmirror.gnu.org/gcc/gcc-9.3.0/gcc-9.3.0.tar.xz"
  sha256 "71e197867611f6054aa1119b13a0c0abac12834765fe2d81f35ac57f84f742d1"

  depends_on "gmp"
  depends_on "i686-elf-binutils"
  depends_on "libmpc"
  depends_on "mpfr"

  def install
    # GCC requires an out-of-source build.
    mkdir "i686-elf-gcc-build" do
      # --without-headers: freestanding target with no host C library.
      # The assembler and linker are pinned to the i686-elf-binutils keg so
      # the cross toolchain never picks up the host binutils.
      system "../configure", "--target=i686-elf",
                             "--prefix=#{prefix}",
                             "--infodir=#{info}/i686-elf-gcc",
                             "--disable-nls",
                             "--without-isl",
                             "--without-headers",
                             "--with-as=#{Formula["i686-elf-binutils"].bin}/i686-elf-as",
                             "--with-ld=#{Formula["i686-elf-binutils"].bin}/i686-elf-ld",
                             "--enable-languages=c,c++",
                             "SED=/usr/bin/sed"
      # Build and install only the compiler plus libgcc for the target;
      # the full target runtime is not needed (and cannot build headerless).
      system "make", "all-gcc"
      system "make", "install-gcc"
      system "make", "all-target-libgcc"
      system "make", "install-target-libgcc"
      # FSF-related man pages may conflict with native gcc
      (share/"man/man7").rmtree
    end
  end

  test do
    # Compile a trivial translation unit and confirm the object file is
    # 32-bit ELF, proving the cross target is wired up correctly.
    (testpath/"test-c.c").write <<~EOS
      int main(void)
      {
      int i=0;
      while(i<10) i++;
      return i;
      }
    EOS
    system "#{bin}/i686-elf-gcc", "-c", "-o", "test-c.o", "test-c.c"
    assert_match "file format elf32-i386",
      shell_output("#{Formula["i686-elf-binutils"].bin}/i686-elf-objdump -a test-c.o")
  end
end
|
class JettyRunner < Formula
  desc "Use Jetty without an installed distribution"
  homepage "https://www.eclipse.org/jetty/"
  url "https://search.maven.org/remotecontent?filepath=org/eclipse/jetty/jetty-runner/9.4.37.v20210219/jetty-runner-9.4.37.v20210219.jar"
  # The version cannot be parsed from the Maven URL, so it is set explicitly.
  version "9.4.37.v20210219"
  sha256 "819fe2a5fcc6c4444b8711c9f3d8c90d4ecc8893245dd84c8ba96b69623c4545"
  license any_of: ["Apache-2.0", "EPL-1.0"]

  # New versions are announced on the Jetty distribution download page.
  livecheck do
    url "https://www.eclipse.org/jetty/download.php"
    regex(/href=.*?jetty-distribution[._-]v?(\d+(?:\.\d+)+(?:\.v\d+)?)\.t/i)
  end

  # Pure-Java jar download: nothing to compile, so no bottle is needed.
  bottle :unneeded

  depends_on "openjdk"

  def install
    libexec.install Dir["*"]
    # Shell wrapper in bin/ that runs the jar with the formula's JDK.
    bin.write_jar_script libexec/"jetty-runner-#{version}.jar", "jetty-runner"
  end

  test do
    ENV.append "_JAVA_OPTIONS", "-Djava.io.tmpdir=#{testpath}"
    touch "#{testpath}/test.war"
    port = free_port
    # Start the server in a child process, then probe it over HTTP.
    pid = fork do
      exec "#{bin}/jetty-runner --port #{port} test.war"
    end
    # Give the JVM time to start listening.
    # NOTE(review): a fixed sleep can be flaky on slow machines.
    sleep 10
    begin
      output = shell_output("curl -I http://localhost:#{port}")
      assert_match %r{HTTP/1\.1 200 OK}, output
    ensure
      Process.kill 9, pid
      Process.wait pid
    end
  end
end
jetty-runner 9.4.38.v20210224 (#72048)
Co-authored-by: Alexander Zigelski <bd3df3fa7735ba1d6bd0ae59e2f107fff939fb5d@users.noreply.github.com>
class JettyRunner < Formula
  desc "Use Jetty without an installed distribution"
  homepage "https://www.eclipse.org/jetty/"
  url "https://search.maven.org/remotecontent?filepath=org/eclipse/jetty/jetty-runner/9.4.38.v20210224/jetty-runner-9.4.38.v20210224.jar"
  # The version cannot be parsed from the Maven URL, so it is set explicitly.
  version "9.4.38.v20210224"
  sha256 "03daee0ff6c3de949060d1f4198d01e89dc8f0222bb8a983b5eae7470e19b778"
  license any_of: ["Apache-2.0", "EPL-1.0"]

  # New versions are announced on the Jetty distribution download page.
  livecheck do
    url "https://www.eclipse.org/jetty/download.php"
    regex(/href=.*?jetty-distribution[._-]v?(\d+(?:\.\d+)+(?:\.v\d+)?)\.t/i)
  end

  # Pure-Java jar download: nothing to compile, so no bottle is needed.
  bottle :unneeded

  depends_on "openjdk"

  def install
    libexec.install Dir["*"]
    # Shell wrapper in bin/ that runs the jar with the formula's JDK.
    bin.write_jar_script libexec/"jetty-runner-#{version}.jar", "jetty-runner"
  end

  test do
    ENV.append "_JAVA_OPTIONS", "-Djava.io.tmpdir=#{testpath}"
    touch "#{testpath}/test.war"
    port = free_port
    # Start the server in a child process, then probe it over HTTP.
    pid = fork do
      exec "#{bin}/jetty-runner --port #{port} test.war"
    end
    # Give the JVM time to start listening.
    # NOTE(review): a fixed sleep can be flaky on slow machines.
    sleep 10
    begin
      output = shell_output("curl -I http://localhost:#{port}")
      assert_match %r{HTTP/1\.1 200 OK}, output
    ensure
      Process.kill 9, pid
      Process.wait pid
    end
  end
end
|
class JsonrpcGlib < Formula
  desc "GNOME library to communicate with JSON-RPC based peers"
  homepage "https://gitlab.gnome.org/GNOME/jsonrpc-glib"
  url "https://download.gnome.org/sources/jsonrpc-glib/3.34/jsonrpc-glib-3.34.0.tar.xz"
  sha256 "d1ceb24b503e49e7bfe6e44630c03abc65f2d047a68271eb62e332b13be90548"
  revision 1

  bottle do
    cellar :any
    sha256 "1c302e75c1276b6fac906158727353657f8a6a353ec9a395ecbae99a5025dd18" => :catalina
    sha256 "42081e5890e3bfe1be1d29bc08d2ca9376d9e2493c0a7f1828497e59ffa8cc54" => :mojave
    sha256 "7fc3931e31121ca0f83710193591bc1ffe2fed9c9300aedead32db047b39e940" => :high_sierra
    sha256 "b47f0d67cfe99c39b1d789aabcc4e19384afc7477273687b46e359064f79fa32" => :sierra
  end

  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  depends_on "glib"
  depends_on "json-glib"

  def install
    # Standard Meson out-of-tree build; also generate the Vala bindings.
    mkdir "build" do
      system "meson", *std_meson_args, "-Dwith_vapi=true", ".."
      system "ninja", "-v"
      system "ninja", "install", "-v"
    end
  end

  test do
    (testpath/"test.c").write <<~EOS
      #include <jsonrpc-glib.h>
      int main(int argc, char *argv[]) {
      JsonrpcInputStream *stream = jsonrpc_input_stream_new(NULL);
      return 0;
      }
    EOS
    # Compile a minimal client against the installed headers and libraries;
    # the flag list spells out every transitive include/lib directory rather
    # than relying on pkg-config.
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    json_glib = Formula["json-glib"]
    pcre = Formula["pcre"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/jsonrpc-glib-1.0
      -I#{json_glib.opt_include}/json-glib-1.0
      -I#{pcre.opt_include}
      -D_REENTRANT
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{json_glib.opt_lib}
      -L#{lib}
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lintl
      -ljson-glib-1.0
      -ljsonrpc-glib-1.0
      -Wl,-framework
      -Wl,CoreFoundation
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
jsonrpc-glib: update 3.34.0_1 bottle.
class JsonrpcGlib < Formula
  desc "GNOME library to communicate with JSON-RPC based peers"
  homepage "https://gitlab.gnome.org/GNOME/jsonrpc-glib"
  url "https://download.gnome.org/sources/jsonrpc-glib/3.34/jsonrpc-glib-3.34.0.tar.xz"
  sha256 "d1ceb24b503e49e7bfe6e44630c03abc65f2d047a68271eb62e332b13be90548"
  revision 1

  bottle do
    cellar :any
    sha256 "a51dd608adcefb9e3514534e49a5db64ad1e38b407b8d9349df7f3eb08bc5fe7" => :catalina
    sha256 "b0ef1160532600a75471bb0e4cc3f48e7f092111ffa7d494534e46db43accabc" => :mojave
    sha256 "298d4a37194ab19bc753bb4f7022ec0d34d12a30dd4860806582cda4406b62ec" => :high_sierra
  end

  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  depends_on "glib"
  depends_on "json-glib"

  def install
    # Standard Meson out-of-tree build; also generate the Vala bindings.
    mkdir "build" do
      system "meson", *std_meson_args, "-Dwith_vapi=true", ".."
      system "ninja", "-v"
      system "ninja", "install", "-v"
    end
  end

  test do
    (testpath/"test.c").write <<~EOS
      #include <jsonrpc-glib.h>
      int main(int argc, char *argv[]) {
      JsonrpcInputStream *stream = jsonrpc_input_stream_new(NULL);
      return 0;
      }
    EOS
    # Compile a minimal client against the installed headers and libraries;
    # the flag list spells out every transitive include/lib directory rather
    # than relying on pkg-config.
    gettext = Formula["gettext"]
    glib = Formula["glib"]
    json_glib = Formula["json-glib"]
    pcre = Formula["pcre"]
    flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
    flags += %W[
      -I#{gettext.opt_include}
      -I#{glib.opt_include}/glib-2.0
      -I#{glib.opt_lib}/glib-2.0/include
      -I#{include}/jsonrpc-glib-1.0
      -I#{json_glib.opt_include}/json-glib-1.0
      -I#{pcre.opt_include}
      -D_REENTRANT
      -L#{gettext.opt_lib}
      -L#{glib.opt_lib}
      -L#{json_glib.opt_lib}
      -L#{lib}
      -lgio-2.0
      -lglib-2.0
      -lgobject-2.0
      -lintl
      -ljson-glib-1.0
      -ljsonrpc-glib-1.0
      -Wl,-framework
      -Wl,CoreFoundation
    ]
    system ENV.cc, "test.c", "-o", "test", *flags
    system "./test"
  end
end
|
class KitchenSync < Formula
  desc "Fast efficiently sync database without dumping & reloading"
  homepage "https://github.com/willbryant/kitchen_sync"
  url "https://github.com/willbryant/kitchen_sync/archive/v2.1.tar.gz"
  sha256 "932649aa76b05f44bf39ff69d3a8934aed5546ff5f472fd9dedbcc11a69c4334"
  head "https://github.com/willbryant/kitchen_sync.git"

  bottle do
    cellar :any
    sha256 "6676b783d9f06d0283d5fd0ade5e34fced18f25009c78aab4d6482978658ff78" => :catalina
    sha256 "f9ef3897384ba8cca73a74b39718035568599ebe63b094352b3c927eb06cd6c8" => :mojave
    sha256 "21f25b77bdb986066ecb4f8d6bbf0f487c1252e7697a56dd25557089c4385aa8" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "libpq"
  depends_on "mysql-client"

  def install
    # Point CMake at the keg-only mysql-client and libpq kegs, which are
    # not on the default search path.
    system "cmake", ".",
           "-DMySQL_INCLUDE_DIR=#{Formula["mysql-client"].opt_include}/mysql",
           "-DMySQL_LIBRARY_DIR=#{Formula["mysql-client"].opt_lib}",
           "-DPostgreSQL_INCLUDE_DIR=#{Formula["libpq"].opt_include}",
           "-DPostgreSQL_LIBRARY_DIR=#{Formula["libpq"].opt_lib}",
           *std_cmake_args
    system "make", "install"
  end

  test do
    # The endpoints are bogus; the command is still expected to print its
    # completion banner. 2>&1 folds stderr into the captured output.
    output = shell_output("#{bin}/ks --from a://b/ --to c://d/ 2>&1")
    assert_match "Finished Kitchen Syncing", output
  end
end
kitchen-sync: update 2.1 bottle.
class KitchenSync < Formula
  desc "Fast efficiently sync database without dumping & reloading"
  homepage "https://github.com/willbryant/kitchen_sync"
  url "https://github.com/willbryant/kitchen_sync/archive/v2.1.tar.gz"
  sha256 "932649aa76b05f44bf39ff69d3a8934aed5546ff5f472fd9dedbcc11a69c4334"
  head "https://github.com/willbryant/kitchen_sync.git"

  bottle do
    cellar :any
    sha256 "112496221f7512274f46170b41765a2e933802e0e8bbc612715fb30dcffb54f9" => :catalina
    sha256 "62925a19ef8b0bc3dd701cb392cc41b21079af0b47ea978d9c9e2870ec725d05" => :mojave
    sha256 "9ccae36b21cc293c6db76a5974c4dc313d2e9e25659c5b067cc56082cb0b2ff6" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "libpq"
  depends_on "mysql-client"

  def install
    # Point CMake at the keg-only mysql-client and libpq kegs, which are
    # not on the default search path.
    system "cmake", ".",
           "-DMySQL_INCLUDE_DIR=#{Formula["mysql-client"].opt_include}/mysql",
           "-DMySQL_LIBRARY_DIR=#{Formula["mysql-client"].opt_lib}",
           "-DPostgreSQL_INCLUDE_DIR=#{Formula["libpq"].opt_include}",
           "-DPostgreSQL_LIBRARY_DIR=#{Formula["libpq"].opt_lib}",
           *std_cmake_args
    system "make", "install"
  end

  test do
    # The endpoints are bogus; the command is still expected to print its
    # completion banner. 2>&1 folds stderr into the captured output.
    output = shell_output("#{bin}/ks --from a://b/ --to c://d/ 2>&1")
    assert_match "Finished Kitchen Syncing", output
  end
end
|
class KitchenSync < Formula
  desc "Fast efficiently sync database without dumping & reloading"
  homepage "https://github.com/willbryant/kitchen_sync"
  url "https://github.com/willbryant/kitchen_sync/archive/0.99.1.tar.gz"
  sha256 "895b710f10e9399ce8e49efe1b24ca925b4a62510d817a0e4b60e11c4cefcaf6"
  revision 2
  head "https://github.com/willbryant/kitchen_sync.git"

  bottle do
    cellar :any
    sha256 "49ad47bba862b6c73727cb6f0d0d72725c961e6003eca4a81633b31d1f0b5e99" => :high_sierra
    sha256 "305a5c37197e113074c4ba61265ea28fde9c7560531032611e380ca3204d1d34" => :sierra
    sha256 "30846fdf73486d98d69d3361a7329a89717e2cb81f3531241ebc4b2dfa2174a4" => :el_capitan
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "boost"
  depends_on "yaml-cpp"
  depends_on "mysql" => :recommended
  depends_on "postgresql" => :optional

  # The sources require a C++11-capable compiler.
  needs :cxx11

  def install
    ENV.cxx11
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # The endpoints are bogus; the command is still expected to print its
    # completion banner. 2>&1 folds stderr into the captured output.
    output = shell_output("#{bin}/ks --from a://b/ --to c://d/ 2>&1")
    assert_match "Finished Kitchen Syncing", output
  end
end
kitchen-sync: update 0.99.1_2 bottle for Linuxbrew.
class KitchenSync < Formula
  desc "Fast efficiently sync database without dumping & reloading"
  homepage "https://github.com/willbryant/kitchen_sync"
  url "https://github.com/willbryant/kitchen_sync/archive/0.99.1.tar.gz"
  sha256 "895b710f10e9399ce8e49efe1b24ca925b4a62510d817a0e4b60e11c4cefcaf6"
  revision 2
  head "https://github.com/willbryant/kitchen_sync.git"

  bottle do
    cellar :any
    sha256 "49ad47bba862b6c73727cb6f0d0d72725c961e6003eca4a81633b31d1f0b5e99" => :high_sierra
    sha256 "305a5c37197e113074c4ba61265ea28fde9c7560531032611e380ca3204d1d34" => :sierra
    sha256 "30846fdf73486d98d69d3361a7329a89717e2cb81f3531241ebc4b2dfa2174a4" => :el_capitan
    sha256 "5350c92e888f403c50c36cc3184426184b8a39035a9739a773373801745e045e" => :x86_64_linux
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "boost"
  depends_on "yaml-cpp"
  depends_on "mysql" => :recommended
  depends_on "postgresql" => :optional

  # The sources require a C++11-capable compiler.
  needs :cxx11

  def install
    ENV.cxx11
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # The endpoints are bogus; the command is still expected to print its
    # completion banner. 2>&1 folds stderr into the captured output.
    output = shell_output("#{bin}/ks --from a://b/ --to c://d/ 2>&1")
    assert_match "Finished Kitchen Syncing", output
  end
end
|
class Languagetool < Formula
  desc "Style and grammar checker"
  homepage "https://www.languagetool.org/"
  url "https://languagetool.org/download/LanguageTool-3.4.zip"
  sha256 "ff36380c5807c5bdc67d222c5f2adeeb0a047a02661885c29cd8297443846c9c"

  # Pure-Java distribution: nothing to compile, so no bottle is needed.
  bottle :unneeded

  # Returns the text of a shell wrapper that launches the LanguageTool HTTP
  # server from the given jar, forwarding all command-line arguments.
  def server_script(server_jar); <<-EOS.undent
    #!/bin/bash
    exec java -cp #{server_jar} org.languagetool.server.HTTPServer "$@"
    EOS
  end

  def install
    libexec.install Dir["*"]
    # Install three launchers: the CLI checker, the HTTP server wrapper,
    # and the Swing GUI.
    bin.write_jar_script libexec/"languagetool-commandline.jar", "languagetool"
    (bin+"languagetool-server").write server_script(libexec/"languagetool-server.jar")
    bin.write_jar_script libexec/"languagetool.jar", "languagetool-gui"
  end

  test do
    # Feed a sentence through the CLI checker reading from stdin ("-").
    pipe_output("#{bin}/languagetool -l en-US -", "This is a test.")
  end
end
languagetool 3.5 (#6556)
class Languagetool < Formula
  desc "Style and grammar checker"
  homepage "https://www.languagetool.org/"
  url "https://languagetool.org/download/LanguageTool-3.5.zip"
  sha256 "643029da89041ccfb48772deaf0dd0e16942de2d381c5dc8ae9372f1b043bcab"

  # Pure-Java distribution: nothing to compile, so no bottle is needed.
  bottle :unneeded

  # Returns the text of a shell wrapper that launches the LanguageTool HTTP
  # server from the given jar, forwarding all command-line arguments.
  def server_script(server_jar); <<-EOS.undent
    #!/bin/bash
    exec java -cp #{server_jar} org.languagetool.server.HTTPServer "$@"
    EOS
  end

  def install
    libexec.install Dir["*"]
    # Install three launchers: the CLI checker, the HTTP server wrapper,
    # and the Swing GUI.
    bin.write_jar_script libexec/"languagetool-commandline.jar", "languagetool"
    (bin+"languagetool-server").write server_script(libexec/"languagetool-server.jar")
    bin.write_jar_script libexec/"languagetool.jar", "languagetool-gui"
  end

  test do
    # Feed a sentence through the CLI checker reading from stdin ("-").
    pipe_output("#{bin}/languagetool -l en-US -", "This is a test.")
  end
end
|
class Libcouchbase < Formula
  desc "C library for Couchbase"
  homepage "https://docs.couchbase.com/c-sdk/3.0/hello-world/start-using-sdk.html"
  url "https://packages.couchbase.com/clients/c/libcouchbase-3.0.6.tar.gz"
  sha256 "5e5879207cfe655759ba12378be6e5500546a52aae06ce446d86e639c3a40ab9"
  license "Apache-2.0"
  head "https://github.com/couchbase/libcouchbase.git"

  bottle do
    sha256 "cac82c27a7b0fffddaa3d5ff60fdd13a8436f3e5870c39fc74355fb6afc166fa" => :catalina
    sha256 "0b065a2160a48ee2f9d5085180842dbc27976152ed503b24af93abfba500cd9f" => :mojave
    sha256 "01935cfa1eed4ca89fa1247a501093d647f9a88749c4209976f5babdc6f3c677" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "libev"
  depends_on "libevent"
  depends_on "libuv"
  depends_on "openssl@1.1"

  def install
    # Out-of-tree CMake build; enable all three supported event-loop plugins
    # and skip building the test suite.
    mkdir "build" do
      system "cmake", "..", *std_cmake_args,
             "-DLCB_NO_TESTS=1",
             "-DLCB_BUILD_LIBEVENT=ON",
             "-DLCB_BUILD_LIBEV=ON",
             "-DLCB_BUILD_LIBEV=ON".sub("LIBEV", "LIBUV")
      system "make", "install"
    end
  end

  test do
    # Port 1 is never a Couchbase node, so `cbc` must exit 1 with a
    # connection-refused error.
    # Fix: parenthesize the call — a bare /regexp/ literal as the first
    # argument to assert_match triggers Ruby's "ambiguous first argument"
    # warning.
    assert_match(/LCB_ERR_CONNECTION_REFUSED/,
                 shell_output("#{bin}/cbc cat document_id -U couchbase://localhost:1 2>&1", 1).strip)
  end
end
libcouchbase: update 3.0.6 bottle.
class Libcouchbase < Formula
  desc "C library for Couchbase"
  homepage "https://docs.couchbase.com/c-sdk/3.0/hello-world/start-using-sdk.html"
  url "https://packages.couchbase.com/clients/c/libcouchbase-3.0.6.tar.gz"
  sha256 "5e5879207cfe655759ba12378be6e5500546a52aae06ce446d86e639c3a40ab9"
  license "Apache-2.0"
  head "https://github.com/couchbase/libcouchbase.git"

  bottle do
    sha256 "cac82c27a7b0fffddaa3d5ff60fdd13a8436f3e5870c39fc74355fb6afc166fa" => :catalina
    sha256 "0b065a2160a48ee2f9d5085180842dbc27976152ed503b24af93abfba500cd9f" => :mojave
    sha256 "01935cfa1eed4ca89fa1247a501093d647f9a88749c4209976f5babdc6f3c677" => :high_sierra
    sha256 "249a68ff5b9d5eb6ddece163482f81727a6be69d4195e8eabe9084c478c5137e" => :x86_64_linux
  end

  depends_on "cmake" => :build
  depends_on "libev"
  depends_on "libevent"
  depends_on "libuv"
  depends_on "openssl@1.1"

  def install
    # Out-of-tree CMake build; enable all three supported event-loop plugins
    # and skip building the test suite.
    mkdir "build" do
      system "cmake", "..", *std_cmake_args,
             "-DLCB_NO_TESTS=1",
             "-DLCB_BUILD_LIBEVENT=ON",
             "-DLCB_BUILD_LIBEV=ON",
             "-DLCB_BUILD_LIBUV=ON"
      system "make", "install"
    end
  end

  test do
    # Port 1 is never a Couchbase node, so `cbc` must exit 1 with a
    # connection-refused error.
    # Fix: parenthesize the call — a bare /regexp/ literal as the first
    # argument to assert_match triggers Ruby's "ambiguous first argument"
    # warning.
    assert_match(/LCB_ERR_CONNECTION_REFUSED/,
                 shell_output("#{bin}/cbc cat document_id -U couchbase://localhost:1 2>&1", 1).strip)
  end
end
|
class Libextractor < Formula
  desc "Library to extract meta data from files"
  homepage "https://www.gnu.org/software/libextractor/"
  url "https://ftp.gnu.org/gnu/libextractor/libextractor-1.10.tar.gz"
  mirror "https://ftpmirror.gnu.org/libextractor/libextractor-1.10.tar.gz"
  sha256 "9eed11b5ddc7c929ba112c50de8cfaa379f1d99a0c8e064101775837cf432357"

  bottle do
    sha256 "1f9781fe4c690eca0d719016cd4f23bd94890ae69cc30c4c1caa47d919286483" => :catalina
    sha256 "0929de2de549d871c775fb2b3aaf22dc52377b504b8ed3d01ca9350a52704e39" => :mojave
    sha256 "5a30c428cb327ef0bfd2458feeeb638200df28acf63b688d598a79591cb1c812" => :high_sierra
  end

  depends_on "pkg-config" => :build
  depends_on "libtool"

  conflicts_with "csound", :because => "both install `extract` binaries"
  conflicts_with "pkcrack", :because => "both install `extract` binaries"

  def install
    # The build is not parallel-safe.
    ENV.deparallelize
    system "./configure", "--disable-silent-rules",
                          "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    fixture = test_fixtures("test.png")
    # Fix: parenthesize the call — a bare /regexp/ literal as the first
    # argument to assert_match triggers Ruby's "ambiguous first argument"
    # warning.
    assert_match(/Keywords for file/, shell_output("#{bin}/extract #{fixture}"))
  end
end
libextractor: add license
class Libextractor < Formula
  desc "Library to extract meta data from files"
  homepage "https://www.gnu.org/software/libextractor/"
  url "https://ftp.gnu.org/gnu/libextractor/libextractor-1.10.tar.gz"
  mirror "https://ftpmirror.gnu.org/libextractor/libextractor-1.10.tar.gz"
  sha256 "9eed11b5ddc7c929ba112c50de8cfaa379f1d99a0c8e064101775837cf432357"
  license "GPL-3.0"

  bottle do
    sha256 "1f9781fe4c690eca0d719016cd4f23bd94890ae69cc30c4c1caa47d919286483" => :catalina
    sha256 "0929de2de549d871c775fb2b3aaf22dc52377b504b8ed3d01ca9350a52704e39" => :mojave
    sha256 "5a30c428cb327ef0bfd2458feeeb638200df28acf63b688d598a79591cb1c812" => :high_sierra
  end

  depends_on "pkg-config" => :build
  depends_on "libtool"

  conflicts_with "csound", :because => "both install `extract` binaries"
  conflicts_with "pkcrack", :because => "both install `extract` binaries"

  def install
    # The build is not parallel-safe.
    ENV.deparallelize
    system "./configure", "--disable-silent-rules",
                          "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    fixture = test_fixtures("test.png")
    # Fix: parenthesize the call — a bare /regexp/ literal as the first
    # argument to assert_match triggers Ruby's "ambiguous first argument"
    # warning.
    assert_match(/Keywords for file/, shell_output("#{bin}/extract #{fixture}"))
  end
end
|
class LiterateGit < Formula
  include Language::Python::Virtualenv

  desc "Render hierarchical git repositories into HTML"
  homepage "https://github.com/bennorth/literate-git"
  url "https://github.com/bennorth/literate-git/archive/v0.3.1.tar.gz"
  sha256 "f1dec77584236a5ab2bcee9169e16b5d976e83cd53d279512136bdc90b04940a"
  license "GPL-3.0-or-later"
  revision 11

  bottle do
    sha256 cellar: :any, arm64_monterey: "c2031c7652e019684c8a4fe2e4002df04a6026c12cbb39f8590283094d0f26f3"
    sha256 cellar: :any, arm64_big_sur: "b331d1904320a141a66c84a3995671a33215bc27ed0843ed841ea00083ef505b"
    sha256 cellar: :any, monterey: "cc84a4cdf421f1a5295ded1b57b7b44c51ed95b7488c539c8cb5c579b27f1153"
    sha256 cellar: :any, big_sur: "ec1b9726828e6759864d461fcc8feadc4df90d6739aa8571a3d7e4d931bc31d1"
    sha256 cellar: :any, catalina: "baa921185d969af27ffda77668f241a8a60c36f11bcaa8d3e054e3429eb732ec"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e46878c8fd6fb42c5689456331292833ab9bff08bf2a035a872760cd9250d41d"
  end

  depends_on "libgit2"
  depends_on "python@3.10"

  uses_from_macos "libffi"

  on_linux do
    depends_on "pkg-config" => :build
  end

  # Python dependencies below are vendored into the formula's virtualenv.
  resource "cached-property" do
    url "https://files.pythonhosted.org/packages/57/8e/0698e10350a57d46b3bcfe8eff1d4181642fd1724073336079cb13c5cf7f/cached-property-1.5.1.tar.gz"
    sha256 "9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/66/6a/98e023b3d11537a5521902ac6b50db470c826c682be6a8c661549cb7717a/cffi-1.14.4.tar.gz"
    sha256 "1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"
  end

  resource "docopt" do
    url "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz"
    sha256 "49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
  end

  resource "Jinja2" do
    url "https://files.pythonhosted.org/packages/d8/03/e491f423379ea14bb3a02a5238507f7d446de639b623187bccc111fbecdf/Jinja2-2.11.1.tar.gz"
    sha256 "93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250"
  end

  resource "markdown2" do
    url "https://files.pythonhosted.org/packages/e3/93/d37055743009d1a492b2670cc215831a388b3d6e4a28b7672fdf0f7854f5/markdown2-2.3.8.tar.gz"
    sha256 "7ff88e00b396c02c8e1ecd8d176cfa418fb01fe81234dcea77803e7ce4f05dbe"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
    sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
    sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
  end

  resource "pygit2" do
    url "https://files.pythonhosted.org/packages/e7/8a/e52a1c8b9878e9d9743089393f8289bb9c8a81eaab722df22df46a38b9e9/pygit2-1.10.0.tar.gz"
    sha256 "7c751eee88c731b922e4e487ee287e2e40906b2bd32d0bfd2105947f63e867de"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/cb/9f/27d4844ac5bf158a33900dbad7985951e2910397998e85712da03ce125f0/Pygments-2.5.2.tar.gz"
    sha256 "98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"
  end

  def install
    # Install the package and every resource above into a private virtualenv.
    virtualenv_install_with_resources
  end

  test do
    # Build a tiny two-branch repository, then render the range between the
    # branches and check that HTML is produced.
    system "git", "init"
    (testpath/"foo.txt").write "Hello"
    system "git", "add", "foo.txt"
    system "git", "commit", "-m", "foo"
    system "git", "branch", "one"
    (testpath/"bar.txt").write "World"
    system "git", "add", "bar.txt"
    system "git", "commit", "-m", "bar"
    system "git", "branch", "two"
    # Minimal URL-provider class required by literate-git's CLI.
    (testpath/"create_url.py").write <<~EOS
      class CreateUrl:
      @staticmethod
      def result_url(sha1):
      return ''
      @staticmethod
      def source_url(sha1):
      return ''
    EOS
    assert_match "<!DOCTYPE html>",
      shell_output("git literate-render test one two create_url.CreateUrl")
  end
end
literate-git: update 0.3.1_11 bottle.
class LiterateGit < Formula
  include Language::Python::Virtualenv

  desc "Render hierarchical git repositories into HTML"
  homepage "https://github.com/bennorth/literate-git"
  url "https://github.com/bennorth/literate-git/archive/v0.3.1.tar.gz"
  sha256 "f1dec77584236a5ab2bcee9169e16b5d976e83cd53d279512136bdc90b04940a"
  license "GPL-3.0-or-later"
  revision 11

  bottle do
    sha256 cellar: :any, arm64_monterey: "237bff42ded8e1cffc7bfcb259bb8a91e3af32241cd72635497fa9beeed27723"
    sha256 cellar: :any, arm64_big_sur: "0e27e1e5a4e8499984159799c38aa36dade73429cd4a397bb859a46c8c62d6bf"
    sha256 cellar: :any, monterey: "3025ca493f1f1ec2881d3524e0adaf55b8bbb205cadba82b6aaa6ae41bbcb595"
    sha256 cellar: :any, big_sur: "151d700dc1d150e2bf6c8389a3945deeee61630d764373a435182503091acb5e"
    sha256 cellar: :any, catalina: "168f27ec64117729e279c9cc57a9367ec74b1b80d6ef23ea36e8db966e7a5383"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "f23a0c1c7abb6f1b9b686354877026dfffbe52bdbbebd50c48dcd52698702eff"
  end

  depends_on "libgit2"
  depends_on "python@3.10"

  uses_from_macos "libffi"

  on_linux do
    depends_on "pkg-config" => :build
  end

  # Python dependencies below are vendored into the formula's virtualenv.
  resource "cached-property" do
    url "https://files.pythonhosted.org/packages/57/8e/0698e10350a57d46b3bcfe8eff1d4181642fd1724073336079cb13c5cf7f/cached-property-1.5.1.tar.gz"
    sha256 "9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
  end

  resource "cffi" do
    url "https://files.pythonhosted.org/packages/66/6a/98e023b3d11537a5521902ac6b50db470c826c682be6a8c661549cb7717a/cffi-1.14.4.tar.gz"
    sha256 "1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"
  end

  resource "docopt" do
    url "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz"
    sha256 "49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
  end

  resource "Jinja2" do
    url "https://files.pythonhosted.org/packages/d8/03/e491f423379ea14bb3a02a5238507f7d446de639b623187bccc111fbecdf/Jinja2-2.11.1.tar.gz"
    sha256 "93187ffbc7808079673ef52771baa950426fd664d3aad1d0fa3e95644360e250"
  end

  resource "markdown2" do
    url "https://files.pythonhosted.org/packages/e3/93/d37055743009d1a492b2670cc215831a388b3d6e4a28b7672fdf0f7854f5/markdown2-2.3.8.tar.gz"
    sha256 "7ff88e00b396c02c8e1ecd8d176cfa418fb01fe81234dcea77803e7ce4f05dbe"
  end

  resource "MarkupSafe" do
    url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
    sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
  end

  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
    sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
  end

  resource "pygit2" do
    url "https://files.pythonhosted.org/packages/e7/8a/e52a1c8b9878e9d9743089393f8289bb9c8a81eaab722df22df46a38b9e9/pygit2-1.10.0.tar.gz"
    sha256 "7c751eee88c731b922e4e487ee287e2e40906b2bd32d0bfd2105947f63e867de"
  end

  resource "Pygments" do
    url "https://files.pythonhosted.org/packages/cb/9f/27d4844ac5bf158a33900dbad7985951e2910397998e85712da03ce125f0/Pygments-2.5.2.tar.gz"
    sha256 "98c8aa5a9f778fcd1026a17361ddaf7330d1b7c62ae97c3bb0ae73e0b9b6b0fe"
  end

  def install
    # Install the package and every resource above into a private virtualenv.
    virtualenv_install_with_resources
  end

  test do
    # Build a tiny two-branch repository, then render the range between the
    # branches and check that HTML is produced.
    system "git", "init"
    (testpath/"foo.txt").write "Hello"
    system "git", "add", "foo.txt"
    system "git", "commit", "-m", "foo"
    system "git", "branch", "one"
    (testpath/"bar.txt").write "World"
    system "git", "add", "bar.txt"
    system "git", "commit", "-m", "bar"
    system "git", "branch", "two"
    # Minimal URL-provider class required by literate-git's CLI.
    (testpath/"create_url.py").write <<~EOS
      class CreateUrl:
      @staticmethod
      def result_url(sha1):
      return ''
      @staticmethod
      def source_url(sha1):
      return ''
    EOS
    assert_match "<!DOCTYPE html>",
      shell_output("git literate-render test one two create_url.CreateUrl")
  end
end
|
require 'formula'
# Homebrew Requirement that locates a full JDK (not just a JRE).
class JavaJDK < Requirement
  fatal true

  # Returns the first candidate home directory that contains bin/javac and
  # either an include/ or a bundle/ subdirectory, or nil if none qualifies.
  # Candidates: `/usr/libexec/java_home` output, then $JAVA_HOME.
  # NB: `chomp!` returns nil when the command printed nothing, and nil
  # candidates are skipped by the `dir &&` guard.
  def self.home
    [
      `/usr/libexec/java_home`.chomp!,
      ENV['JAVA_HOME']
    ].find { |dir| dir && File.exist?("#{dir}/bin/javac") &&
      # Bug fix: the original wrote File.exist?("#{dir}/include" || ...),
      # applying || to the string argument, so "#{dir}/bundle" was never
      # actually tested. Each File.exist? call now gets its own argument.
      (File.exist?("#{dir}/include") || File.exist?("#{dir}/bundle")) }
  end

  satisfy :build_env => false do
    self.class.home
  end

  def message; <<-EOS.undent
    Could not find a JDK (i.e. not a JRE)
    Do one of the following:
    - install a JDK that is detected with /usr/libexec/java_home
    - set the JAVA_HOME environment variable
    - specify --without-java
    EOS
  end
end
class Mapserver64 < Formula
homepage 'http://mapserver.org/'
url 'http://download.osgeo.org/mapserver/mapserver-6.4.1.tar.gz'
sha1 'f7d2e7f44cd9a4ff5d9483d21bb71c1cc28e09ab'
bottle do
root_url "http://qgis.dakotacarto.com/osgeo4mac/bottles"
sha1 "aeba2a7532b88599892bff09b9aa02bcd28f801b" => :mavericks
end
head do
url 'https://github.com/mapserver/mapserver.git', :branch => 'master'
depends_on 'harfbuzz'
depends_on 'v8' => :optional
end
conflicts_with 'mapserver', :because => 'mapserver is in main tap'
option 'without-php', 'Build PHP MapScript module'
option 'without-rpath', "Don't embed rpath to installed libmapserver in modules"
option 'without-geos', 'Build without GEOS spatial operations support'
option 'without-postgresql', 'Build without PostgreSQL data source support'
option 'without-xml-mapfile', 'Build with native XML mapfile support'
option 'with-java', 'Build Java MapScript module'
option 'with-gd', 'Build with GD support (deprecated)' unless build.head?
option 'with-librsvg', 'Build with SVG symbology support'
option 'with-docs', 'Download and generate HTML documentation'
option "with-unit-tests", "Download and install full unit test suite"
depends_on 'cmake' => :build
depends_on :freetype
depends_on :libpng
depends_on :python
depends_on 'swig' => :build
depends_on JavaJDK if build.with? 'java'
depends_on 'giflib'
depends_on 'gd' => :optional unless build.head?
depends_on 'proj'
depends_on 'geos' => :recommended
depends_on 'gdal'
depends_on :postgresql => :recommended
depends_on :mysql => :optional
depends_on 'fcgi' => :recommended
depends_on 'cairo' => :recommended
depends_on 'libxml2' if build.with? 'xml-mapfile' or MacOS.version < :mountain_lion
depends_on 'libxslt' if build.with? 'xml-mapfile'
depends_on 'librsvg' => :optional
depends_on 'fribidi'
depends_on :python => %w[sphinx] if build.with? 'docs'
resource "sphinx" do
url "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.2.2.tar.gz"
sha1 "9e424b03fe1f68e0326f3905738adcf27782f677"
end
resource 'docs' do
# NOTE: seems to be no tagged releases for `docs`, just active branches
url 'https://github.com/mapserver/docs.git', :branch => 'branch-6-4'
version '6.4'
end
resource "unittests" do
url "https://github.com/mapserver/msautotest.git",
:revision => "b0ba5ccbfb6b0395820f492eb5a190cf643b5ed8"
version "6.4"
end
def png_prefix
png = Formula['libpng']
(png.installed? or MacOS.version >= :mountain_lion) ? png.opt_prefix : MacOS::X11.prefix
end
def freetype_prefix
ft = Formula['freetype']
(ft.installed? or MacOS.version >= :mountain_lion) ? ft.opt_prefix : MacOS::X11.prefix
end
def install
# install unit tests
(prefix/"msautotest").install resource("unittests") if build.with? "unit-tests"
ENV.prepend_path 'CMAKE_PREFIX_PATH', freetype_prefix
ENV.prepend_path 'CMAKE_PREFIX_PATH', png_prefix
args = std_cmake_args
if MacOS.prefer_64_bit?
args << "-DCMAKE_OSX_ARCHITECTURES=#{Hardware::CPU.arch_64_bit}"
else
args << '-DCMAKE_OSX_ARCHITECTURES=i386'
end
# defaults different than CMakeLists.txt (they don't incur extra dependencies)
args.concat %W[
-DWITH_KML=ON
-DWITH_CURL=ON
-DWITH_CLIENT_WMS=ON
-DWITH_CLIENT_WFS=ON
-DWITH_SOS=ON
]
args << '-DWITH_XMLMAPFILE=ON' if build.with? 'xml-mapfile'
args << '-DWITH_MYSQL=ON' if build.with? 'mysql'
args << '-DWITH_GD=ON' if build.with? 'gd' && !build.head?
args << '-DWITH_RSVG=ON' if build.with? 'librsvg'
mapscr_dir = prefix/'mapscript'
mapscr_dir.mkpath
rpath = %Q{-Wl,-rpath,"#{opt_prefix/'lib'}"}
use_rpath = build.with? 'rpath'
cd 'mapscript' do
args << '-DWITH_PYTHON=ON'
inreplace 'python/CMakeLists.txt' do |s|
s.gsub! '${PYTHON_SITE_PACKAGES}', %Q{"#{lib/which_python/'site-packages'}"}
s.sub! '${MAPSERVER_LIBMAPSERVER}',
"#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
end
# override language extension install locations, e.g. install to prefix/'mapscript/lang'
args << '-DWITH_RUBY=ON'
(mapscr_dir/'ruby').mkpath
inreplace 'ruby/CMakeLists.txt' do |s|
s.gsub! '${RUBY_SITEARCHDIR}', %Q{"#{mapscr_dir}/ruby"}
s.sub! '${MAPSERVER_LIBMAPSERVER}',
"#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
end
if build.with? 'php'
args << '-DWITH_PHP=ON'
(mapscr_dir/'php').mkpath
inreplace 'php/CMakeLists.txt' do |s|
s.gsub! '${PHP5_EXTENSION_DIR}', %Q{"#{mapscr_dir}/php"}
s.sub! '${MAPSERVER_LIBMAPSERVER}',
"#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
end
end
args << '-DWITH_PERL=ON'
(mapscr_dir/'perl').mkpath
args << "-DCUSTOM_PERL_SITE_ARCH_DIR=#{mapscr_dir}/perl"
inreplace 'perl/CMakeLists.txt', '${MAPSERVER_LIBMAPSERVER}',
"#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
if build.with? 'java'
args << '-DWITH_JAVA=ON'
ENV['JAVA_HOME'] = JavaJDK.home
(mapscr_dir/'java').mkpath
inreplace 'java/CMakeLists.txt' do |s|
s.gsub! 'DESTINATION ${CMAKE_INSTALL_LIBDIR}',
%Q|${CMAKE_CURRENT_BINARY_DIR}/mapscript.jar DESTINATION "#{mapscr_dir}/java"|
s.sub! '${MAPSERVER_LIBMAPSERVER}',
"#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
end
end
end
mkdir 'build' do
system 'cmake', '..', *args
system 'make', 'install'
end
# install devel headers
(include/'mapserver').install Dir['*.h']
prefix.install 'tests'
(mapscr_dir/"python").install "mapscript/python/tests"
cd 'mapscript' do
%w[python ruby perl].each {|x|(mapscr_dir/"#{x}").install "#{x}/examples"}
(mapscr_dir/'php').install 'php/examples' if build.with? 'php'
(mapscr_dir/'java').install 'java/examples' if build.with? 'java'
end
# write install instructions for modules
s = ''
mapscr_opt_dir = opt_prefix/'mapscript'
if build.with? 'php'
s += <<-EOS.undent
Using the built PHP module:
* Add the following line to php.ini:
extension="#{mapscr_opt_dir}/php/php_mapscript.so"
* Execute "php -m"
* You should see MapScript in the module list
EOS
end
%w[ruby perl java].each do |m|
if m != 'java' or build.with? m
cmd = []
case m
when 'ruby'
ruby_site = %x[ruby -r rbconfig -e 'puts RbConfig::CONFIG["sitearchdir"]'].chomp
cmd << "sudo cp -f mapscript.bundle #{ruby_site}/"
when 'perl'
perl_site = %x[perl -MConfig -e 'print $Config{"sitearch"};'].chomp
cmd << "sudo cp -f mapscript.pm #{perl_site}/"
cmd << "sudo cp -fR auto/mapscript #{perl_site}/auto/"
when 'java'
cmd << 'sudo cp -f libjavamapscript.jnilib mapscript.jar /Library/Java/Extensions/'
else
end
s += <<-EOS.undent
Install the built #{m.upcase} module with:
cd #{mapscr_opt_dir}/#{m}
#{cmd[0]}
#{cmd[1] + "\n" if cmd[1]}
EOS
end
end
(mapscr_dir/'Install_Modules.txt').write s
if build.with? 'docs'
unless which("sphinx-build")
# vendor a local sphinx install
sphinx_site = libexec/"lib/python2.7/site-packages"
ENV.prepend_create_path "PYTHONPATH", sphinx_site
resource("sphinx").stage {quiet_system "python2.7", "setup.py", "install", "--prefix=#{libexec}"}
ENV.prepend_path "PATH", libexec/"bin"
end
resource('docs').stage do
# just build the en docs
inreplace "Makefile", "$(TRANSLATIONS_I18N) $(TRANSLATIONS_STATIC)", ""
system 'make', 'html'
doc.install 'build/html' => 'html'
end
end
end
# Post-install notes: where the CGI binary lives and where to find the
# generated instructions for manually installing any mapscript modules
# that were built but intentionally left uninstalled.
def caveats; <<-EOS.undent
The Mapserver CGI executable is #{opt_prefix}/bin/mapserv
Instructions for installing any built, but uninstalled, mapscript modules:
#{opt_prefix}/mapscript/Install_Modules.txt
EOS
end
# Smoke tests: run the CGI binary, then actually load each built mapscript
# module. Kernel#system with multiple arguments passes argv verbatim (no
# shell), so the code handed to -c/-e must NOT carry an extra literal layer
# of quotes -- the original quoted strings made python/ruby/perl evaluate a
# bare string literal (a no-op) instead of importing the module.
test do
  mapscr_opt_dir = opt_prefix/'mapscript'
  system "#{bin}/mapserv", '-v'
  system 'python', '-c', 'import mapscript'
  system 'ruby', '-e', "require '#{mapscr_opt_dir}/ruby/mapscript'"
  system 'perl', "-I#{mapscr_opt_dir}/perl", '-e', 'use mapscript;'
  cd "#{mapscr_opt_dir}/java/examples" do
    system "#{JavaJDK.home}/bin/javac",
           '-classpath', '../', '-Djava.ext.dirs=../', 'RFC24.java'
    system "#{JavaJDK.home}/bin/java",
           '-classpath', '../', '-Djava.library.path=../', '-Djava.ext.dirs=../',
           'RFC24', '../../../tests/test.map'
  end if build.with? 'java'
end
private
# Name of the python interpreter binary including its major.minor version
# (e.g. "python2.7"), derived from whichever `python` is first on PATH.
def which_python
  version = %x(python -c "import sys;print(sys.version[:3])").strip
  "python#{version}"
end
end
mapserver-64: convert quotes
require "formula"
# Build-time requirement ensuring a full JDK (not merely a JRE) is present.
class JavaJDK < Requirement
  fatal true

  # Locates a JDK home directory: prefers `/usr/libexec/java_home`, falling
  # back to $JAVA_HOME. A candidate qualifies only if it contains bin/javac
  # plus either an include/ directory (Oracle/OpenJDK 7+) or a bundle/
  # directory (legacy Apple JDK 6). Returns nil when nothing qualifies.
  # Note: `chomp!` yields nil for empty output, which the `dir &&` guard
  # filters out.
  def self.home
    [
      `/usr/libexec/java_home`.chomp!,
      ENV["JAVA_HOME"]
    ].find do |dir|
      # FIX: the original wrote File.exist?("#{dir}/include" || File.exist?("#{dir}/bundle")),
      # which passed the always-truthy string "#{dir}/include" to a single
      # File.exist? call, so the bundle/ fallback was never consulted.
      dir && File.exist?("#{dir}/bin/javac") &&
        (File.exist?("#{dir}/include") || File.exist?("#{dir}/bundle"))
    end
  end

  satisfy :build_env => false do
    self.class.home
  end

  # Guidance shown when no suitable JDK can be found.
  def message; <<-EOS.undent
    Could not find a JDK (i.e. not a JRE)
    Do one of the following:
    - install a JDK that is detected with /usr/libexec/java_home
    - set the JAVA_HOME environment variable
    - specify --without-java
    EOS
  end
end
# MapServer 6.4 (versioned formula; conflicts with the main-tap mapserver).
# Builds the CGI plus MapScript bindings (python/ruby/perl always; php/java
# on request). Bindings are staged under prefix/"mapscript" with manual
# install instructions written to Install_Modules.txt.
class Mapserver64 < Formula
  homepage "http://mapserver.org/"
  url "http://download.osgeo.org/mapserver/mapserver-6.4.1.tar.gz"
  sha1 "f7d2e7f44cd9a4ff5d9483d21bb71c1cc28e09ab"

  bottle do
    root_url "http://qgis.dakotacarto.com/osgeo4mac/bottles"
    sha1 "aeba2a7532b88599892bff09b9aa02bcd28f801b" => :mavericks
  end

  head do
    url "https://github.com/mapserver/mapserver.git", :branch => "master"
    depends_on "harfbuzz"
    depends_on "v8" => :optional
  end

  conflicts_with "mapserver", :because => "mapserver is in main tap"

  option "without-php", "Build PHP MapScript module"
  option "without-rpath", "Don't embed rpath to installed libmapserver in modules"
  option "without-geos", "Build without GEOS spatial operations support"
  option "without-postgresql", "Build without PostgreSQL data source support"
  option "without-xml-mapfile", "Build with native XML mapfile support"
  option "with-java", "Build Java MapScript module"
  option "with-gd", "Build with GD support (deprecated)" unless build.head?
  option "with-librsvg", "Build with SVG symbology support"
  option "with-docs", "Download and generate HTML documentation"
  option "with-unit-tests", "Download and install full unit test suite"

  depends_on "cmake" => :build
  depends_on :freetype
  depends_on :libpng
  depends_on :python
  depends_on "swig" => :build
  depends_on JavaJDK if build.with? "java"
  depends_on "giflib"
  depends_on "gd" => :optional unless build.head?
  depends_on "proj"
  depends_on "geos" => :recommended
  depends_on "gdal"
  depends_on :postgresql => :recommended
  depends_on :mysql => :optional
  depends_on "fcgi" => :recommended
  depends_on "cairo" => :recommended
  depends_on "libxml2" if build.with? "xml-mapfile" or MacOS.version < :mountain_lion
  depends_on "libxslt" if build.with? "xml-mapfile"
  depends_on "librsvg" => :optional
  depends_on "fribidi"
  depends_on :python => %w[sphinx] if build.with? "docs"

  resource "sphinx" do
    url "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.2.2.tar.gz"
    sha1 "9e424b03fe1f68e0326f3905738adcf27782f677"
  end

  resource "docs" do
    # NOTE: seems to be no tagged releases for `docs`, just active branches
    url "https://github.com/mapserver/docs.git", :branch => "branch-6-4"
    version "6.4"
  end

  resource "unittests" do
    url "https://github.com/mapserver/msautotest.git",
        :revision => "b0ba5ccbfb6b0395820f492eb5a190cf643b5ed8"
    version "6.4"
  end

  # Prefer the brewed libpng (or the system one on >= Mountain Lion);
  # otherwise fall back to X11's copy.
  def png_prefix
    png = Formula["libpng"]
    (png.installed? or MacOS.version >= :mountain_lion) ? png.opt_prefix : MacOS::X11.prefix
  end

  # Same selection logic as png_prefix, for freetype.
  def freetype_prefix
    ft = Formula["freetype"]
    (ft.installed? or MacOS.version >= :mountain_lion) ? ft.opt_prefix : MacOS::X11.prefix
  end

  def install
    # install unit tests
    (prefix/"msautotest").install resource("unittests") if build.with? "unit-tests"

    ENV.prepend_path "CMAKE_PREFIX_PATH", freetype_prefix
    ENV.prepend_path "CMAKE_PREFIX_PATH", png_prefix

    args = std_cmake_args
    if MacOS.prefer_64_bit?
      args << "-DCMAKE_OSX_ARCHITECTURES=#{Hardware::CPU.arch_64_bit}"
    else
      args << "-DCMAKE_OSX_ARCHITECTURES=i386"
    end

    # defaults different than CMakeLists.txt (they don't incur extra dependencies)
    args.concat %W[
      -DWITH_KML=ON
      -DWITH_CURL=ON
      -DWITH_CLIENT_WMS=ON
      -DWITH_CLIENT_WFS=ON
      -DWITH_SOS=ON
    ]
    args << "-DWITH_XMLMAPFILE=ON" if build.with? "xml-mapfile"
    args << "-DWITH_MYSQL=ON" if build.with? "mysql"
    # FIX: was `if build.with? "gd" && !build.head?`, which Ruby parses as
    # build.with?("gd" && !build.head?) == build.with?(!build.head?), so the
    # GD flag was never appended. Parenthesize the call for the intended test.
    args << "-DWITH_GD=ON" if build.with?("gd") && !build.head?
    args << "-DWITH_RSVG=ON" if build.with? "librsvg"

    mapscr_dir = prefix/"mapscript"
    mapscr_dir.mkpath
    rpath = %Q{-Wl,-rpath,"#{opt_prefix/"lib"}"}
    use_rpath = build.with? "rpath"
    cd "mapscript" do
      args << "-DWITH_PYTHON=ON"
      inreplace "python/CMakeLists.txt" do |s|
        s.gsub! "${PYTHON_SITE_PACKAGES}", %Q{"#{lib/which_python/"site-packages"}"}
        s.sub! "${MAPSERVER_LIBMAPSERVER}",
               "#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
      end

      # override language extension install locations, e.g. install to prefix/"mapscript/lang"
      args << "-DWITH_RUBY=ON"
      (mapscr_dir/"ruby").mkpath
      inreplace "ruby/CMakeLists.txt" do |s|
        s.gsub! "${RUBY_SITEARCHDIR}", %Q{"#{mapscr_dir}/ruby"}
        s.sub! "${MAPSERVER_LIBMAPSERVER}",
               "#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
      end

      if build.with? "php"
        args << "-DWITH_PHP=ON"
        (mapscr_dir/"php").mkpath
        inreplace "php/CMakeLists.txt" do |s|
          s.gsub! "${PHP5_EXTENSION_DIR}", %Q{"#{mapscr_dir}/php"}
          s.sub! "${MAPSERVER_LIBMAPSERVER}",
                 "#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
        end
      end

      args << "-DWITH_PERL=ON"
      (mapscr_dir/"perl").mkpath
      args << "-DCUSTOM_PERL_SITE_ARCH_DIR=#{mapscr_dir}/perl"
      inreplace "perl/CMakeLists.txt", "${MAPSERVER_LIBMAPSERVER}",
                "#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath

      if build.with? "java"
        args << "-DWITH_JAVA=ON"
        ENV["JAVA_HOME"] = JavaJDK.home
        (mapscr_dir/"java").mkpath
        inreplace "java/CMakeLists.txt" do |s|
          s.gsub! "DESTINATION ${CMAKE_INSTALL_LIBDIR}",
                  %Q|${CMAKE_CURRENT_BINARY_DIR}/mapscript.jar DESTINATION "#{mapscr_dir}/java"|
          s.sub! "${MAPSERVER_LIBMAPSERVER}",
                 "#{rpath} ${MAPSERVER_LIBMAPSERVER}" if use_rpath
        end
      end
    end

    mkdir "build" do
      system "cmake", "..", *args
      system "make", "install"
    end

    # install devel headers
    (include/"mapserver").install Dir["*.h"]

    prefix.install "tests"
    (mapscr_dir/"python").install "mapscript/python/tests"
    cd "mapscript" do
      %w[python ruby perl].each { |x| (mapscr_dir/"#{x}").install "#{x}/examples" }
      (mapscr_dir/"php").install "php/examples" if build.with? "php"
      (mapscr_dir/"java").install "java/examples" if build.with? "java"
    end

    # write install instructions for modules
    s = ""
    mapscr_opt_dir = opt_prefix/"mapscript"
    if build.with? "php"
      s += <<-EOS.undent
        Using the built PHP module:
        * Add the following line to php.ini:
        extension="#{mapscr_opt_dir}/php/php_mapscript.so"
        * Execute "php -m"
        * You should see MapScript in the module list
      EOS
    end
    %w[ruby perl java].each do |m|
      if m != "java" or build.with? m
        cmd = []
        case m
        when "ruby"
          ruby_site = %x[ruby -r rbconfig -e 'puts RbConfig::CONFIG["sitearchdir"]'].chomp
          cmd << "sudo cp -f mapscript.bundle #{ruby_site}/"
        when "perl"
          perl_site = %x[perl -MConfig -e 'print $Config{"sitearch"};'].chomp
          cmd << "sudo cp -f mapscript.pm #{perl_site}/"
          cmd << "sudo cp -fR auto/mapscript #{perl_site}/auto/"
        when "java"
          cmd << "sudo cp -f libjavamapscript.jnilib mapscript.jar /Library/Java/Extensions/"
        else
        end
        s += <<-EOS.undent
          Install the built #{m.upcase} module with:
          cd #{mapscr_opt_dir}/#{m}
          #{cmd[0]}
          #{cmd[1] + "\n" if cmd[1]}
        EOS
      end
    end
    (mapscr_dir/"Install_Modules.txt").write s

    if build.with? "docs"
      unless which("sphinx-build")
        # vendor a local sphinx install
        sphinx_site = libexec/"lib/python2.7/site-packages"
        ENV.prepend_create_path "PYTHONPATH", sphinx_site
        resource("sphinx").stage { quiet_system "python2.7", "setup.py", "install", "--prefix=#{libexec}" }
        ENV.prepend_path "PATH", libexec/"bin"
      end
      resource("docs").stage do
        # just build the en docs
        inreplace "Makefile", "$(TRANSLATIONS_I18N) $(TRANSLATIONS_STATIC)", ""
        system "make", "html"
        doc.install "build/html" => "html"
      end
    end
  end

  # Post-install notes pointing at the CGI and the module install instructions.
  def caveats; <<-EOS.undent
    The Mapserver CGI executable is #{opt_prefix}/bin/mapserv
    Instructions for installing any built, but uninstalled, mapscript modules:
    #{opt_prefix}/mapscript/Install_Modules.txt
    EOS
  end

  test do
    mapscr_opt_dir = opt_prefix/"mapscript"
    system "#{bin}/mapserv", "-v"
    # FIX: system() with multiple arguments passes argv verbatim (no shell),
    # so the code given to -c/-e must not be wrapped in a second layer of
    # quotes; the quoted originals merely evaluated a string literal (a
    # no-op) instead of importing the freshly built modules.
    system "python", "-c", "import mapscript"
    system "ruby", "-e", "require '#{mapscr_opt_dir}/ruby/mapscript'"
    system "perl", "-I#{mapscr_opt_dir}/perl", "-e", "use mapscript;"
    cd "#{mapscr_opt_dir}/java/examples" do
      system "#{JavaJDK.home}/bin/javac",
             "-classpath", "../", "-Djava.ext.dirs=../", "RFC24.java"
      system "#{JavaJDK.home}/bin/java",
             "-classpath", "../", "-Djava.library.path=../", "-Djava.ext.dirs=../",
             "RFC24", "../../../tests/test.map"
    end if build.with? "java"
  end

  private

  # Versioned python binary name, e.g. "python2.7", from the python on PATH.
  def which_python
    "python" + %x(python -c "import sys;print(sys.version[:3])").strip
  end
end
|
# Homebrew formula for mathlibtools, the Python package providing the
# `leanproject` helper for Lean's mathlib. Installed into an isolated
# virtualenv via Language::Python::Virtualenv.
class Mathlibtools < Formula
include Language::Python::Virtualenv
desc "Lean prover mathlib supporting tools"
homepage "https://pypi.org/project/mathlibtools"
url "https://files.pythonhosted.org/packages/72/42/9631b9379c7fe5b6c6fb5c45d11910d58bfe28b5c7cfbe9e9e62de5546b0/mathlibtools-1.3.0.tar.gz"
sha256 "88161287963c318ab38de36def479446be344922a1d31e35a657a679a68e7f2f"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "19ce453b66d0d8e8428ddfd2f4b976b05f8516800a794ea6a0b690c56f409c95"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e81e5813036af22b1e81198268924b4a977614cdc427700906312f684199a0bf"
sha256 cellar: :any_skip_relocation, monterey: "df14818088786e7c06e9209a2d3e4f8a832bdab9f39ed7a5d1397852c338b3a4"
sha256 cellar: :any_skip_relocation, big_sur: "769ee87daf4f1076b01e4eb0cfcceb392b2b5226643d702b1e16c721d407e04b"
sha256 cellar: :any_skip_relocation, catalina: "e25e751b4cb1250616311e974b19157ff8c69183a2e63ab3137c625f2b510620"
sha256 cellar: :any_skip_relocation, x86_64_linux: "550412db6e15ee8571e33f895b69bd2b82d0858fdcdfb420cce5d6507d580c8d"
end
# lean is only needed to exercise the `test do` block below.
depends_on "lean" => :test
depends_on "python@3.11"
depends_on "pyyaml"
depends_on "six"
# Pinned PyPI sdists; virtualenv_install_with_resources installs each of
# these into the virtualenv before installing mathlibtools itself.
resource "atomicwrites" do
url "https://files.pythonhosted.org/packages/87/c6/53da25344e3e3a9c01095a89f16dbcda021c609ddb42dd6d7c0528236fb2/atomicwrites-1.4.1.tar.gz"
sha256 "81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/cb/a4/7de7cd59e429bd0ee6521ba58a75adaec136d32f91a761b28a11d8088d44/certifi-2022.9.24.tar.gz"
sha256 "0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2b/a8/050ab4f0c3d4c1b8aaa805f70e26e84d0e27004907c5b8ecc1d31815f92a/cffi-1.15.1.tar.gz"
sha256 "d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"
end
resource "charset-normalizer" do
url "https://files.pythonhosted.org/packages/a1/34/44964211e5410b051e4b8d2869c470ae8a68ae274953b1c7de6d98bbcf94/charset-normalizer-2.1.1.tar.gz"
sha256 "5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"
end
resource "click" do
url "https://files.pythonhosted.org/packages/59/87/84326af34517fca8c58418d148f2403df25303e02736832403587318e9e8/click-8.1.3.tar.gz"
sha256 "7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"
end
resource "Deprecated" do
url "https://files.pythonhosted.org/packages/c8/d1/e412abc2a358a6b9334250629565fe12697ca1cdee4826239eddf944ddd0/Deprecated-1.2.13.tar.gz"
sha256 "43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"
end
resource "gitdb" do
url "https://files.pythonhosted.org/packages/fc/44/64e02ef96f20b347385f0e9c03098659cb5a1285d36c3d17c56e534d80cf/gitdb-4.0.9.tar.gz"
sha256 "bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"
end
resource "GitPython" do
url "https://files.pythonhosted.org/packages/22/ab/3dd8b8a24399cee9c903d5f7600d20e8703d48904020f46f7fa5ac5474e9/GitPython-3.1.29.tar.gz"
sha256 "cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/8b/e1/43beb3d38dba6cb420cefa297822eac205a277ab43e5ba5d5c46faf96438/idna-3.4.tar.gz"
sha256 "814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"
end
resource "networkx" do
url "https://files.pythonhosted.org/packages/cd/16/c44e8550012735b8f21b3df7f39e8ba5a987fb764ac017ad5f3589735889/networkx-2.8.8.tar.gz"
sha256 "230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/5e/0b/95d387f5f4433cb0f53ff7ad859bd2c6051051cebbb564f139a999ab46de/pycparser-2.21.tar.gz"
sha256 "e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
end
resource "pydot" do
url "https://files.pythonhosted.org/packages/13/6e/916cdf94f9b38ae0777b254c75c3bdddee49a54cc4014aac1460a7a172b3/pydot-1.4.2.tar.gz"
sha256 "248081a39bcb56784deb018977e428605c1c758f10897a339fce1dd728ff007d"
end
resource "PyGithub" do
url "https://files.pythonhosted.org/packages/6d/57/1c8a10e67e50cbee3c721bb554ac9a422f9f0accfc8f6e16a3fe9e65d387/PyGithub-1.57.tar.gz"
sha256 "c273f252b278fb81f1769505cc6921bdb6791e1cebd6ac850cc97dad13c31ff3"
end
resource "PyJWT" do
url "https://files.pythonhosted.org/packages/75/65/db64904a7f23e12dbf0565b53de01db04d848a497c6c9b87e102f74c9304/PyJWT-2.6.0.tar.gz"
sha256 "69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"
end
resource "PyNaCl" do
url "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz"
sha256 "8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/71/22/207523d16464c40a0310d2d4d8926daffa00ac1f5b1576170a32db749636/pyparsing-3.0.9.tar.gz"
sha256 "2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/a5/61/a867851fd5ab77277495a8709ddda0861b28163c4613b011bc00228cc724/requests-2.28.1.tar.gz"
sha256 "7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"
end
resource "smmap" do
url "https://files.pythonhosted.org/packages/21/2d/39c6c57032f786f1965022563eec60623bb3e1409ade6ad834ff703724f3/smmap-5.0.0.tar.gz"
sha256 "c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"
end
resource "toml" do
url "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz"
sha256 "b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
end
resource "tqdm" do
url "https://files.pythonhosted.org/packages/c1/c2/d8a40e5363fb01806870e444fc1d066282743292ff32a9da54af51ce36a2/tqdm-4.64.1.tar.gz"
sha256 "5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/b2/56/d87d6d3c4121c0bcec116919350ca05dc3afd2eeb7dc88d07e8083f8ea94/urllib3-1.26.12.tar.gz"
sha256 "3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"
end
resource "wrapt" do
url "https://files.pythonhosted.org/packages/11/eb/e06e77394d6cf09977d92bff310cb0392930c08a338f99af6066a5a98f92/wrapt-1.14.1.tar.gz"
sha256 "380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"
end
# Builds a virtualenv under libexec, installs every resource above and then
# mathlibtools itself; scripts (leanproject) are linked into bin.
def install
virtualenv_install_with_resources
end
# Creates a fresh Lean project and verifies leanpkg.toml was generated and
# references the project name.
test do
system bin/"leanproject", "new", "my_project"
project_toml = testpath/"my_project/leanpkg.toml"
assert_predicate project_toml, :exist?, "leanpkg.toml should have been created"
assert_includes project_toml.read, "my_project"
end
end
mathlibtools: update 1.3.0 bottle.
# Homebrew formula for mathlibtools, the Python package providing the
# `leanproject` helper for Lean's mathlib. Installed into an isolated
# virtualenv via Language::Python::Virtualenv.
class Mathlibtools < Formula
include Language::Python::Virtualenv
desc "Lean prover mathlib supporting tools"
homepage "https://pypi.org/project/mathlibtools"
url "https://files.pythonhosted.org/packages/72/42/9631b9379c7fe5b6c6fb5c45d11910d58bfe28b5c7cfbe9e9e62de5546b0/mathlibtools-1.3.0.tar.gz"
sha256 "88161287963c318ab38de36def479446be344922a1d31e35a657a679a68e7f2f"
license "Apache-2.0"
bottle do
sha256 cellar: :any_skip_relocation, arm64_ventura: "a797bd3200b10e37c73fa8fd0cb021185deed87ba8f45d2a6902dc7d3a6e7aaa"
sha256 cellar: :any_skip_relocation, arm64_monterey: "19ce453b66d0d8e8428ddfd2f4b976b05f8516800a794ea6a0b690c56f409c95"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "e81e5813036af22b1e81198268924b4a977614cdc427700906312f684199a0bf"
sha256 cellar: :any_skip_relocation, monterey: "df14818088786e7c06e9209a2d3e4f8a832bdab9f39ed7a5d1397852c338b3a4"
sha256 cellar: :any_skip_relocation, big_sur: "769ee87daf4f1076b01e4eb0cfcceb392b2b5226643d702b1e16c721d407e04b"
sha256 cellar: :any_skip_relocation, catalina: "e25e751b4cb1250616311e974b19157ff8c69183a2e63ab3137c625f2b510620"
sha256 cellar: :any_skip_relocation, x86_64_linux: "550412db6e15ee8571e33f895b69bd2b82d0858fdcdfb420cce5d6507d580c8d"
end
# lean is only needed to exercise the `test do` block below.
depends_on "lean" => :test
depends_on "python@3.11"
depends_on "pyyaml"
depends_on "six"
# Pinned PyPI sdists; virtualenv_install_with_resources installs each of
# these into the virtualenv before installing mathlibtools itself.
resource "atomicwrites" do
url "https://files.pythonhosted.org/packages/87/c6/53da25344e3e3a9c01095a89f16dbcda021c609ddb42dd6d7c0528236fb2/atomicwrites-1.4.1.tar.gz"
sha256 "81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/cb/a4/7de7cd59e429bd0ee6521ba58a75adaec136d32f91a761b28a11d8088d44/certifi-2022.9.24.tar.gz"
sha256 "0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2b/a8/050ab4f0c3d4c1b8aaa805f70e26e84d0e27004907c5b8ecc1d31815f92a/cffi-1.15.1.tar.gz"
sha256 "d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"
end
resource "charset-normalizer" do
url "https://files.pythonhosted.org/packages/a1/34/44964211e5410b051e4b8d2869c470ae8a68ae274953b1c7de6d98bbcf94/charset-normalizer-2.1.1.tar.gz"
sha256 "5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"
end
resource "click" do
url "https://files.pythonhosted.org/packages/59/87/84326af34517fca8c58418d148f2403df25303e02736832403587318e9e8/click-8.1.3.tar.gz"
sha256 "7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"
end
resource "Deprecated" do
url "https://files.pythonhosted.org/packages/c8/d1/e412abc2a358a6b9334250629565fe12697ca1cdee4826239eddf944ddd0/Deprecated-1.2.13.tar.gz"
sha256 "43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"
end
resource "gitdb" do
url "https://files.pythonhosted.org/packages/fc/44/64e02ef96f20b347385f0e9c03098659cb5a1285d36c3d17c56e534d80cf/gitdb-4.0.9.tar.gz"
sha256 "bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"
end
resource "GitPython" do
url "https://files.pythonhosted.org/packages/22/ab/3dd8b8a24399cee9c903d5f7600d20e8703d48904020f46f7fa5ac5474e9/GitPython-3.1.29.tar.gz"
sha256 "cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/8b/e1/43beb3d38dba6cb420cefa297822eac205a277ab43e5ba5d5c46faf96438/idna-3.4.tar.gz"
sha256 "814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"
end
resource "networkx" do
url "https://files.pythonhosted.org/packages/cd/16/c44e8550012735b8f21b3df7f39e8ba5a987fb764ac017ad5f3589735889/networkx-2.8.8.tar.gz"
sha256 "230d388117af870fce5647a3c52401fcf753e94720e6ea6b4197a5355648885e"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/5e/0b/95d387f5f4433cb0f53ff7ad859bd2c6051051cebbb564f139a999ab46de/pycparser-2.21.tar.gz"
sha256 "e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
end
resource "pydot" do
url "https://files.pythonhosted.org/packages/13/6e/916cdf94f9b38ae0777b254c75c3bdddee49a54cc4014aac1460a7a172b3/pydot-1.4.2.tar.gz"
sha256 "248081a39bcb56784deb018977e428605c1c758f10897a339fce1dd728ff007d"
end
resource "PyGithub" do
url "https://files.pythonhosted.org/packages/6d/57/1c8a10e67e50cbee3c721bb554ac9a422f9f0accfc8f6e16a3fe9e65d387/PyGithub-1.57.tar.gz"
sha256 "c273f252b278fb81f1769505cc6921bdb6791e1cebd6ac850cc97dad13c31ff3"
end
resource "PyJWT" do
url "https://files.pythonhosted.org/packages/75/65/db64904a7f23e12dbf0565b53de01db04d848a497c6c9b87e102f74c9304/PyJWT-2.6.0.tar.gz"
sha256 "69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"
end
resource "PyNaCl" do
url "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz"
sha256 "8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/71/22/207523d16464c40a0310d2d4d8926daffa00ac1f5b1576170a32db749636/pyparsing-3.0.9.tar.gz"
sha256 "2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/a5/61/a867851fd5ab77277495a8709ddda0861b28163c4613b011bc00228cc724/requests-2.28.1.tar.gz"
sha256 "7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"
end
resource "smmap" do
url "https://files.pythonhosted.org/packages/21/2d/39c6c57032f786f1965022563eec60623bb3e1409ade6ad834ff703724f3/smmap-5.0.0.tar.gz"
sha256 "c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"
end
resource "toml" do
url "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz"
sha256 "b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"
end
resource "tqdm" do
url "https://files.pythonhosted.org/packages/c1/c2/d8a40e5363fb01806870e444fc1d066282743292ff32a9da54af51ce36a2/tqdm-4.64.1.tar.gz"
sha256 "5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/b2/56/d87d6d3c4121c0bcec116919350ca05dc3afd2eeb7dc88d07e8083f8ea94/urllib3-1.26.12.tar.gz"
sha256 "3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"
end
resource "wrapt" do
url "https://files.pythonhosted.org/packages/11/eb/e06e77394d6cf09977d92bff310cb0392930c08a338f99af6066a5a98f92/wrapt-1.14.1.tar.gz"
sha256 "380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"
end
# Builds a virtualenv under libexec, installs every resource above and then
# mathlibtools itself; scripts (leanproject) are linked into bin.
def install
virtualenv_install_with_resources
end
# Creates a fresh Lean project and verifies leanpkg.toml was generated and
# references the project name.
test do
system bin/"leanproject", "new", "my_project"
project_toml = testpath/"my_project/leanpkg.toml"
assert_predicate project_toml, :exist?, "leanpkg.toml should have been created"
assert_includes project_toml.read, "my_project"
end
end
|
# Homebrew formula for the New Relic CLI v0.52.4 (Go project built via make).
class NewrelicCli < Formula
  desc "Command-line interface for New Relic"
  homepage "https://github.com/newrelic/newrelic-cli"
  url "https://github.com/newrelic/newrelic-cli/archive/v0.52.4.tar.gz"
  sha256 "7a27b42d09195d1641a1deb91649be34faf5523851da0c32f97fe1281d2dd59c"
  license "Apache-2.0"
  head "https://github.com/newrelic/newrelic-cli.git", branch: "main"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "eaae35581267c73684dbc56322d42eac7d576b895af5eb843516c6458274481e"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "7a48dbab158c9d0d739498b8844c3a8c13230a8c3602fc2134988e2d93f86f2d"
    sha256 cellar: :any_skip_relocation, monterey: "5c8dc80591eef3483371a8dcf766372ad8a6396d98bbe3b0e1dab260c421e40e"
    sha256 cellar: :any_skip_relocation, big_sur: "5d9d629fdf572aec9648053f322d1a978174ece8e0f5798269e10e5f48992e7a"
    sha256 cellar: :any_skip_relocation, catalina: "4c8ccbc64e052ea1c95ae5b518a699b089fa75e0649bf5bf143c42cb9588e6cd"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "8f3a869317f3509b9de279b09ae7ecdf74bdd899d7fbdb73fa6871cbad298a93"
  end

  depends_on "go" => :build

  # Builds the binary with the project Makefile (PROJECT_VER stamps the
  # version), installs it from the per-OS output directory, then generates
  # bash/zsh/fish completion scripts from the freshly built binary.
  def install
    ENV["PROJECT_VER"] = version
    system "make", "compile-only"
    bin.install "bin/#{OS.kernel_name.downcase}/newrelic"

    {
      "bash" => bash_completion/"newrelic",
      "zsh"  => zsh_completion/"_newrelic",
      "fish" => fish_completion/"newrelic.fish",
    }.each do |shell, completion_file|
      script = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", shell)
      completion_file.write script
    end
  end

  # Smoke test: the config listing mentions known settings and the binary
  # reports the packaged version.
  test do
    config_listing = shell_output("#{bin}/newrelic config list")
    assert_match "loglevel", config_listing
    assert_match "plugindir", config_listing
    assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
  end
end
newrelic-cli: update 0.52.4 bottle.
# Homebrew formula for the New Relic CLI v0.52.4 (bottle-rebuild revision).
class NewrelicCli < Formula
desc "Command-line interface for New Relic"
homepage "https://github.com/newrelic/newrelic-cli"
url "https://github.com/newrelic/newrelic-cli/archive/v0.52.4.tar.gz"
sha256 "7a27b42d09195d1641a1deb91649be34faf5523851da0c32f97fe1281d2dd59c"
license "Apache-2.0"
head "https://github.com/newrelic/newrelic-cli.git", branch: "main"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "72ce36b3891674a12ffbfe820666d1e5cc3f79781ac755453893ec2d3d9c0357"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "639c34ba9a9a99fbc9d713f19148fc381b2292fa00739ae8af8844b8b5ad6b1f"
sha256 cellar: :any_skip_relocation, monterey: "4d7bf63d7c7724faf5353cfbf8cc00d4fec83bbc70ec565b286ef68d8c35f825"
sha256 cellar: :any_skip_relocation, big_sur: "4f2e5a2e0a290d07972b21fe67281dbbfd44c0452117c7c13053c635b02e2c87"
sha256 cellar: :any_skip_relocation, catalina: "5aeaf119e2e308782957440c67fbd01e4d7a54668d00d8d063b4573655e41277"
sha256 cellar: :any_skip_relocation, x86_64_linux: "1bfaa3f8a52f2ebcc42f13f328f8b8f5a3202167a195059e77891788f595dff1"
end
depends_on "go" => :build
# Builds via the project Makefile (PROJECT_VER stamps the version into the
# binary), installs from the per-OS output directory, then generates
# bash/zsh/fish completion scripts from the built binary.
def install
ENV["PROJECT_VER"] = version
system "make", "compile-only"
bin.install "bin/#{OS.kernel_name.downcase}/newrelic"
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "bash")
(bash_completion/"newrelic").write output
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "zsh")
(zsh_completion/"_newrelic").write output
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "fish")
(fish_completion/"newrelic.fish").write output
end
# Smoke test: config listing mentions known settings; version output matches.
test do
output = shell_output("#{bin}/newrelic config list")
assert_match "loglevel", output
assert_match "plugindir", output
assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
end
end
|
# Homebrew formula for the New Relic CLI v0.11.0 (Go project built via make).
class NewrelicCli < Formula
  desc "The New Relic Command-line Interface"
  homepage "https://github.com/newrelic/newrelic-cli"
  url "https://github.com/newrelic/newrelic-cli/archive/v0.11.0.tar.gz"
  sha256 "0be136042bb49a89a5206d8a7b5428062c4d2cf8819ccfdc416be17eb07ac25d"
  license "Apache-2.0"
  head "https://github.com/newrelic/newrelic-cli.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "8538e04f0ce509ad5d7eb7c26980ec4b2f0137b877d7d5b6938cbc1b4964c8aa" => :catalina
    sha256 "51cad2fac2d69d7f374db8040e29c45c13f3f8ee30679ad5d74a76fc058366c0" => :mojave
    sha256 "2146e9d28d8738b2381b8566dfc1407655b60de1fe6cf0ba235ba8928492a539" => :high_sierra
  end

  depends_on "go" => :build

  # Builds the binary with the project Makefile (PROJECT_VER stamps the
  # version), installs it, then generates bash/zsh completion scripts.
  def install
    ENV["PROJECT_VER"] = version
    system "make", "compile-only"
    # The Makefile drops the binary under bin/<os>; this formula is macOS-only.
    bin.install "bin/darwin/newrelic"
    output = Utils.safe_popen_read("#{bin}/newrelic", "completion", "--shell", "bash")
    (bash_completion/"newrelic").write output
    output = Utils.safe_popen_read("#{bin}/newrelic", "completion", "--shell", "zsh")
    (zsh_completion/"_newrelic").write output
  end

  # Smoke test. Run `config list` once and check all expected keys in that
  # single capture (the original shelled out three times); regex arguments to
  # assert_match are parenthesized to silence Ruby's "ambiguous first
  # argument" warning for bare /regex/ literals.
  test do
    config_list = shell_output("#{bin}/newrelic config list")
    assert_match(/pluginDir/, config_list)
    assert_match(/logLevel/, config_list)
    assert_match(/sendUsageData/, config_list)
    assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
  end
end
newrelic-cli: update 0.11.0 bottle.
# Homebrew formula for the New Relic CLI v0.11.0 (bottle-update revision).
class NewrelicCli < Formula
  desc "The New Relic Command-line Interface"
  homepage "https://github.com/newrelic/newrelic-cli"
  url "https://github.com/newrelic/newrelic-cli/archive/v0.11.0.tar.gz"
  sha256 "0be136042bb49a89a5206d8a7b5428062c4d2cf8819ccfdc416be17eb07ac25d"
  license "Apache-2.0"
  head "https://github.com/newrelic/newrelic-cli.git"

  bottle do
    cellar :any_skip_relocation
    sha256 "71f762e112d6c8873afbc798ee241dca22dc590fed421c66cccd2a305e2b9151" => :catalina
    sha256 "81cbb0996f1aeb7c85705cec2a0d95f6be33e504cb3b564521fdf09e07bcce70" => :mojave
    sha256 "429c194a582d97a7c422ac1b7d53a0547c2b0c19ac91d5d7488c64d08afc793f" => :high_sierra
  end

  depends_on "go" => :build

  # Builds the binary with the project Makefile (PROJECT_VER stamps the
  # version), installs it, then generates bash/zsh completion scripts.
  def install
    ENV["PROJECT_VER"] = version
    system "make", "compile-only"
    # The Makefile drops the binary under bin/<os>; this formula is macOS-only.
    bin.install "bin/darwin/newrelic"
    output = Utils.safe_popen_read("#{bin}/newrelic", "completion", "--shell", "bash")
    (bash_completion/"newrelic").write output
    output = Utils.safe_popen_read("#{bin}/newrelic", "completion", "--shell", "zsh")
    (zsh_completion/"_newrelic").write output
  end

  # Smoke test. Run `config list` once and check all expected keys in that
  # single capture (the original shelled out three times); regex arguments to
  # assert_match are parenthesized to silence Ruby's "ambiguous first
  # argument" warning for bare /regex/ literals.
  test do
    config_list = shell_output("#{bin}/newrelic config list")
    assert_match(/pluginDir/, config_list)
    assert_match(/logLevel/, config_list)
    assert_match(/sendUsageData/, config_list)
    assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
  end
end
|
# Homebrew formula for the New Relic CLI v0.41.22 (Go project built via make).
class NewrelicCli < Formula
  desc "Command-line interface for New Relic"
  homepage "https://github.com/newrelic/newrelic-cli"
  url "https://github.com/newrelic/newrelic-cli/archive/v0.41.22.tar.gz"
  sha256 "3a8fbf3d4f4e0a09782f909b001d758744719f59795c4abf0bfe232ff48428ae"
  license "Apache-2.0"
  head "https://github.com/newrelic/newrelic-cli.git", branch: "main"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "12b91ba1a4a000b872c6dd0778a1be8936114cc51c4a65c99600ed37a775a337"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "befc16005363a4608907a61570d270ad4edfae7a92dec9f0390fe5f6f05cad61"
    sha256 cellar: :any_skip_relocation, monterey: "7e86af4023650714e9a277cca80eac906b1e47baa1c84b7385c60b3c9a375c2f"
    sha256 cellar: :any_skip_relocation, big_sur: "2a240525cc39182290cac1699ec67ae7eadf826be7e390844d9f69d6d6e186f1"
    sha256 cellar: :any_skip_relocation, catalina: "fc777c91f05999ee765eb4da6ece93ba364292bf3a188522f27d1e54aeab6677"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "aa61eb3cf35e5cf433538b568432178c123c8be954a0b0a7f0421e47bf73a15b"
  end

  depends_on "go" => :build

  # Builds the binary with the project Makefile (PROJECT_VER stamps the
  # version), installs it from the per-OS output directory, then generates
  # bash/zsh/fish completion scripts from the freshly built binary.
  def install
    ENV["PROJECT_VER"] = version
    system "make", "compile-only"
    bin.install "bin/#{OS.kernel_name.downcase}/newrelic"

    {
      "bash" => bash_completion/"newrelic",
      "zsh"  => zsh_completion/"_newrelic",
      "fish" => fish_completion/"newrelic.fish",
    }.each do |shell, completion_file|
      script = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", shell)
      completion_file.write script
    end
  end

  # Smoke test: the config listing mentions known settings and the binary
  # reports the packaged version.
  test do
    config_listing = shell_output("#{bin}/newrelic config list")
    assert_match "loglevel", config_listing
    assert_match "plugindir", config_listing
    assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
  end
end
newrelic-cli 0.41.24
Closes #95198.
Signed-off-by: Nanda H Krishna <b1c1d8736f20db3fb6c1c66bb1455ed43909f0d8@nandahkrishna.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for the New Relic CLI v0.41.24 (version-bump revision;
# the bottle block still carries the previous build's checksums until the
# new bottles are published).
class NewrelicCli < Formula
desc "Command-line interface for New Relic"
homepage "https://github.com/newrelic/newrelic-cli"
url "https://github.com/newrelic/newrelic-cli/archive/v0.41.24.tar.gz"
sha256 "6f864384a92c996569b4774c50778f9bc2ba5dcd288ad4ab489c06acb1e3fbed"
license "Apache-2.0"
head "https://github.com/newrelic/newrelic-cli.git", branch: "main"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "12b91ba1a4a000b872c6dd0778a1be8936114cc51c4a65c99600ed37a775a337"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "befc16005363a4608907a61570d270ad4edfae7a92dec9f0390fe5f6f05cad61"
sha256 cellar: :any_skip_relocation, monterey: "7e86af4023650714e9a277cca80eac906b1e47baa1c84b7385c60b3c9a375c2f"
sha256 cellar: :any_skip_relocation, big_sur: "2a240525cc39182290cac1699ec67ae7eadf826be7e390844d9f69d6d6e186f1"
sha256 cellar: :any_skip_relocation, catalina: "fc777c91f05999ee765eb4da6ece93ba364292bf3a188522f27d1e54aeab6677"
sha256 cellar: :any_skip_relocation, x86_64_linux: "aa61eb3cf35e5cf433538b568432178c123c8be954a0b0a7f0421e47bf73a15b"
end
depends_on "go" => :build
# Builds via the project Makefile (PROJECT_VER stamps the version into the
# binary), installs from the per-OS output directory, then generates
# bash/zsh/fish completion scripts from the built binary.
def install
ENV["PROJECT_VER"] = version
system "make", "compile-only"
bin.install "bin/#{OS.kernel_name.downcase}/newrelic"
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "bash")
(bash_completion/"newrelic").write output
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "zsh")
(zsh_completion/"_newrelic").write output
output = Utils.safe_popen_read(bin/"newrelic", "completion", "--shell", "fish")
(fish_completion/"newrelic.fish").write output
end
# Smoke test: config listing mentions known settings; version output matches.
test do
output = shell_output("#{bin}/newrelic config list")
assert_match "loglevel", output
assert_match "plugindir", output
assert_match version.to_s, shell_output("#{bin}/newrelic version 2>&1")
end
end
|
# Homebrew formula for the legacy Open MPI 1.6 series. Versioned and
# keg-only so it can coexist with the current open-mpi formula.
class OpenMpiAT16 < Formula
desc "High performance message passing library"
homepage "https://www.open-mpi.org/"
url "https://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2"
sha256 "fe37bab89b5ef234e0ac82dc798282c2ab08900bf564a1ec27239d3f1ad1fc85"
revision 1
bottle do
sha256 "ae8d527e2b9802f27001c5b1a4a64e215208b9fc6af7b6a118f9fb7b042d0f05" => :sierra
sha256 "77ebcca1ed3f06a7a8d3db0b9b4dc45e79eed9c3e3dd02712d39a3e1f057ad27" => :el_capitan
sha256 "592617bbe7ed6037d66c7548008b1eff9b29293541515ec32a8b01af334ac150" => :yosemite
end
keg_only :versioned_formula
option "without-fortran", "Do not build the Fortran bindings"
option "with-mpi-thread-multiple", "Enable MPI_THREAD_MULTIPLE"
deprecated_option "disable-fortran" => "without-fortran"
deprecated_option "enable-mpi-thread-multiple" => "with-mpi-thread-multiple"
depends_on :fortran => :recommended
# Fixes error in tests, which makes them fail on clang.
# Upstream ticket: https://svn.open-mpi.org/trac/ompi/ticket/4255
patch :DATA
def install
args = %W[
--prefix=#{prefix}
--disable-dependency-tracking
--disable-silent-rules
--enable-ipv6
]
args << "--disable-mpi-f77" << "--disable-mpi-f90" if build.without? "fortran"
args << "--enable-mpi-thread-multiple" if build.with? "mpi-thread-multiple"
system "./configure", *args
system "make", "all"
system "make", "check"
system "make", "install"
# If Fortran bindings were built, there will be a stray `.mod` file
# (Fortran header) in `lib` that needs to be moved to `include`.
include.install lib/"mpi.mod" if File.exist? "#{lib}/mpi.mod"
# Not sure why the wrapped script has a jar extension - adamv
libexec.install bin/"vtsetup.jar"
bin.write_jar_script libexec/"vtsetup.jar", "vtsetup.jar"
end
test do
# Build and run a minimal MPI "hello world" to verify the compiler
# wrapper and the mpirun launcher. Squiggly heredoc (<<~) replaces the
# deprecated Homebrew String#undent extension; the written content is
# identical.
(testpath/"hello.c").write <<~EOS
#include <mpi.h>
#include <stdio.h>
int main()
{
int size, rank, nameLen;
char name[MPI_MAX_PROCESSOR_NAME];
MPI_Init(NULL, NULL);
MPI_Comm_size(MPI_COMM_WORLD, &size);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Get_processor_name(name, &nameLen);
printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
MPI_Finalize();
return 0;
}
EOS
system "#{bin}/mpicc", "hello.c", "-o", "hello"
system "./hello"
system "#{bin}/mpirun", "-np", "4", "./hello"
end
end
__END__
diff --git a/test/datatype/ddt_lib.c b/test/datatype/ddt_lib.c
index 015419d..c349384 100644
--- a/test/datatype/ddt_lib.c
+++ b/test/datatype/ddt_lib.c
@@ -209,7 +209,7 @@ int mpich_typeub2( void )
int mpich_typeub3( void )
{
- int blocklen[2], err = 0, idisp[3];
+ int blocklen[3], err = 0, idisp[3];
size_t sz;
MPI_Aint disp[3], lb, ub, ex;
ompi_datatype_t *types[3], *dt1, *dt2, *dt3, *dt4, *dt5;
diff --git a/test/datatype/opal_ddt_lib.c b/test/datatype/opal_ddt_lib.c
index 4491dcc..b58136d 100644
--- a/test/datatype/opal_ddt_lib.c
+++ b/test/datatype/opal_ddt_lib.c
@@ -761,7 +761,7 @@ int mpich_typeub2( void )
int mpich_typeub3( void )
{
- int blocklen[2], err = 0, idisp[3];
+ int blocklen[3], err = 0, idisp[3];
size_t sz;
OPAL_PTRDIFF_TYPE disp[3], lb, ub, ex;
opal_datatype_t *types[3], *dt1, *dt2, *dt3, *dt4, *dt5;
open-mpi@1.6: update 1.6.5_1 bottle.
# Homebrew formula for the legacy Open MPI 1.6 series (bottle-rebuild copy).
# Versioned and keg-only so it can coexist with the current open-mpi formula.
class OpenMpiAT16 < Formula
desc "High performance message passing library"
homepage "https://www.open-mpi.org/"
url "https://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2"
sha256 "fe37bab89b5ef234e0ac82dc798282c2ab08900bf564a1ec27239d3f1ad1fc85"
revision 1
bottle do
sha256 "1544bc7afa0770d1b16d68834bebc6a6df3ec5786cddc208c2ab98bf3830302b" => :sierra
sha256 "f2e4d5ebbec3cf80800d23b1280347eda1af9199f11ff3e911561ae68da6b590" => :el_capitan
sha256 "14b7c80bb0b0db1bb77ccb36f4ee149ab1007b014706752efe19819d59f18772" => :yosemite
end
keg_only :versioned_formula
option "without-fortran", "Do not build the Fortran bindings"
option "with-mpi-thread-multiple", "Enable MPI_THREAD_MULTIPLE"
deprecated_option "disable-fortran" => "without-fortran"
deprecated_option "enable-mpi-thread-multiple" => "with-mpi-thread-multiple"
depends_on :fortran => :recommended
# Fixes error in tests, which makes them fail on clang.
# Upstream ticket: https://svn.open-mpi.org/trac/ompi/ticket/4255
patch :DATA
def install
args = %W[
--prefix=#{prefix}
--disable-dependency-tracking
--disable-silent-rules
--enable-ipv6
]
args << "--disable-mpi-f77" << "--disable-mpi-f90" if build.without? "fortran"
args << "--enable-mpi-thread-multiple" if build.with? "mpi-thread-multiple"
system "./configure", *args
system "make", "all"
system "make", "check"
system "make", "install"
# If Fortran bindings were built, there will be a stray `.mod` file
# (Fortran header) in `lib` that needs to be moved to `include`.
include.install lib/"mpi.mod" if File.exist? "#{lib}/mpi.mod"
# Not sure why the wrapped script has a jar extension - adamv
libexec.install bin/"vtsetup.jar"
bin.write_jar_script libexec/"vtsetup.jar", "vtsetup.jar"
end
test do
# Build and run a minimal MPI "hello world" to verify the compiler
# wrapper and the mpirun launcher. Squiggly heredoc (<<~) replaces the
# deprecated Homebrew String#undent extension; the written content is
# identical.
(testpath/"hello.c").write <<~EOS
#include <mpi.h>
#include <stdio.h>
int main()
{
int size, rank, nameLen;
char name[MPI_MAX_PROCESSOR_NAME];
MPI_Init(NULL, NULL);
MPI_Comm_size(MPI_COMM_WORLD, &size);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Get_processor_name(name, &nameLen);
printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
MPI_Finalize();
return 0;
}
EOS
system "#{bin}/mpicc", "hello.c", "-o", "hello"
system "./hello"
system "#{bin}/mpirun", "-np", "4", "./hello"
end
end
__END__
diff --git a/test/datatype/ddt_lib.c b/test/datatype/ddt_lib.c
index 015419d..c349384 100644
--- a/test/datatype/ddt_lib.c
+++ b/test/datatype/ddt_lib.c
@@ -209,7 +209,7 @@ int mpich_typeub2( void )
int mpich_typeub3( void )
{
- int blocklen[2], err = 0, idisp[3];
+ int blocklen[3], err = 0, idisp[3];
size_t sz;
MPI_Aint disp[3], lb, ub, ex;
ompi_datatype_t *types[3], *dt1, *dt2, *dt3, *dt4, *dt5;
diff --git a/test/datatype/opal_ddt_lib.c b/test/datatype/opal_ddt_lib.c
index 4491dcc..b58136d 100644
--- a/test/datatype/opal_ddt_lib.c
+++ b/test/datatype/opal_ddt_lib.c
@@ -761,7 +761,7 @@ int mpich_typeub2( void )
int mpich_typeub3( void )
{
- int blocklen[2], err = 0, idisp[3];
+ int blocklen[3], err = 0, idisp[3];
size_t sz;
OPAL_PTRDIFF_TYPE disp[3], lb, ub, ex;
opal_datatype_t *types[3], *dt1, *dt2, *dt3, *dt4, *dt5;
|
# Homebrew formula for the Operator SDK (Kubernetes operator tooling),
# built from a pinned git tag so the revision can be asserted in the test.
class OperatorSdk < Formula
desc "SDK for building Kubernetes applications"
homepage "https://coreos.com/operators/"
url "https://github.com/operator-framework/operator-sdk.git",
tag: "v1.0.1",
revision: "4169b318b578156ed56530f373d328276d040a1b"
license "Apache-2.0"
head "https://github.com/operator-framework/operator-sdk.git"
bottle do
sha256 "a052c0cdcce583e75ac76ae4401cb13ba8e9c6d26e1f1c26c9b1ce285fe97a19" => :catalina
sha256 "3a611f3c02f063cfdfb93efab0d80a3720ec08fd39d9a844b4651ce6df367561" => :mojave
sha256 "fe693789e064921691cfef0fe4f57c0000186f80476de0f84b804235b2293a91" => :high_sierra
end
depends_on "go"
def install
# Point the Go toolchain's install target directly at the formula's bin.
ENV["GOBIN"] = bin
system "make", "install"
# Install bash completion
output = Utils.safe_popen_read("#{bin}/operator-sdk", "completion", "bash")
(bash_completion/"operator-sdk").write output
# Install zsh completion
output = Utils.safe_popen_read("#{bin}/operator-sdk", "completion", "zsh")
(zsh_completion/"_operator-sdk").write output
end
test do
# Version/revision assertions only make sense for the pinned stable build.
if build.stable?
version_output = shell_output("#{bin}/operator-sdk version")
assert_match "version: \"v#{version}\"", version_output
assert_match stable.specs[:revision], version_output
end
# Scaffolding a project should produce a built manager binary.
system bin/"operator-sdk", "init", "--domain=example.com", "--repo=example.com/example/example"
assert_predicate testpath/"bin/manager", :exist?
end
end
operator-sdk: update 1.0.1 bottle.
# Homebrew formula for the Operator SDK (bottle-update copy adding a
# Linux bottle). Built from a pinned git tag so the revision can be
# asserted in the test.
class OperatorSdk < Formula
desc "SDK for building Kubernetes applications"
homepage "https://coreos.com/operators/"
url "https://github.com/operator-framework/operator-sdk.git",
tag: "v1.0.1",
revision: "4169b318b578156ed56530f373d328276d040a1b"
license "Apache-2.0"
head "https://github.com/operator-framework/operator-sdk.git"
bottle do
sha256 "a052c0cdcce583e75ac76ae4401cb13ba8e9c6d26e1f1c26c9b1ce285fe97a19" => :catalina
sha256 "3a611f3c02f063cfdfb93efab0d80a3720ec08fd39d9a844b4651ce6df367561" => :mojave
sha256 "fe693789e064921691cfef0fe4f57c0000186f80476de0f84b804235b2293a91" => :high_sierra
sha256 "0bf3d49a6d3fab7df9a225b68d45db3c2ca210f622cc5b8b84f01f1485c4316a" => :x86_64_linux
end
depends_on "go"
def install
# Point the Go toolchain's install target directly at the formula's bin.
ENV["GOBIN"] = bin
system "make", "install"
# Install bash completion
output = Utils.safe_popen_read("#{bin}/operator-sdk", "completion", "bash")
(bash_completion/"operator-sdk").write output
# Install zsh completion
output = Utils.safe_popen_read("#{bin}/operator-sdk", "completion", "zsh")
(zsh_completion/"_operator-sdk").write output
end
test do
# Version/revision assertions only make sense for the pinned stable build.
if build.stable?
version_output = shell_output("#{bin}/operator-sdk version")
assert_match "version: \"v#{version}\"", version_output
assert_match stable.specs[:revision], version_output
end
# Scaffolding a project should produce a built manager binary.
system bin/"operator-sdk", "init", "--domain=example.com", "--repo=example.com/example/example"
assert_predicate testpath/"bin/manager", :exist?
end
end
|
# Homebrew formula for OSMCoastline, a CMake-built tool that extracts
# coastline geometry from OpenStreetMap data.
class Osmcoastline < Formula
desc "Extracts coastline data from OpenStreetMap planet file"
homepage "https://osmcode.org/osmcoastline/"
url "https://github.com/osmcode/osmcoastline/archive/v2.3.1.tar.gz"
sha256 "ab4a94b9bc5a5ab37b14ac4e9cbdf113d5fcf2d5a040a4eed958ffbc6cc1aa63"
license "GPL-3.0-or-later"
revision 3
bottle do
sha256 cellar: :any, arm64_monterey: "a4a1308b8b9800389d18c4bee83847da6b96175148f3e24d4a147349216da5da"
sha256 cellar: :any, arm64_big_sur: "d4ea887639727da1386fe1cf96aa457b5763a0d7efdeea8f92687e737a6dccc5"
sha256 cellar: :any, monterey: "39616d8539d2852880da3337a58608bb18d36d4146106790ec19c7a666ece4f3"
sha256 cellar: :any, big_sur: "2ff36b87287f9d07f05f930b09311c7885bed51d9389173e72273b51f7a636d5"
sha256 cellar: :any, catalina: "b64b4333a29538181379adcf123a948d4aa8643a14a4afbeda3475c1a95c175c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "93c920545c2abb9745b211f14188a6b75a1245780b5b75d8754b032a170accb0"
end
depends_on "cmake" => :build
depends_on "libosmium" => :build
depends_on "gdal"
depends_on "geos"
depends_on "libspatialite"
depends_on "lz4"
uses_from_macos "sqlite"
uses_from_macos "zlib"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
# libosmium vendors the protozero headers under its libexec.
protozero = Formula["libosmium"].opt_libexec/"include"
system "cmake", ".", "-DPROTOZERO_INCLUDE_DIR=#{protozero}", *std_cmake_args
system "make", "install"
end
test do
# Minimal OPL input: four nodes and one closed way tagged as coastline.
(testpath/"input.opl").write <<~EOS
n100 v1 x1.01 y1.01
n101 v1 x1.04 y1.01
n102 v1 x1.04 y1.04
n103 v1 x1.01 y1.04
w200 v1 Tnatural=coastline Nn100,n101,n102,n103,n100
EOS
system "#{bin}/osmcoastline", "-v", "-o", "output.db", "input.opl"
end
end
osmcoastline: revision bump (gdal 3.5.0)
Signed-off-by: Rui Chen <907c7afd57be493757f13ccd1dd45dddf02db069@chenrui.dev>
# Homebrew formula for OSMCoastline (revision-bump copy for a gdal
# upgrade: revision 3 -> 4).
class Osmcoastline < Formula
desc "Extracts coastline data from OpenStreetMap planet file"
homepage "https://osmcode.org/osmcoastline/"
url "https://github.com/osmcode/osmcoastline/archive/v2.3.1.tar.gz"
sha256 "ab4a94b9bc5a5ab37b14ac4e9cbdf113d5fcf2d5a040a4eed958ffbc6cc1aa63"
license "GPL-3.0-or-later"
revision 4
bottle do
sha256 cellar: :any, arm64_monterey: "a4a1308b8b9800389d18c4bee83847da6b96175148f3e24d4a147349216da5da"
sha256 cellar: :any, arm64_big_sur: "d4ea887639727da1386fe1cf96aa457b5763a0d7efdeea8f92687e737a6dccc5"
sha256 cellar: :any, monterey: "39616d8539d2852880da3337a58608bb18d36d4146106790ec19c7a666ece4f3"
sha256 cellar: :any, big_sur: "2ff36b87287f9d07f05f930b09311c7885bed51d9389173e72273b51f7a636d5"
sha256 cellar: :any, catalina: "b64b4333a29538181379adcf123a948d4aa8643a14a4afbeda3475c1a95c175c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "93c920545c2abb9745b211f14188a6b75a1245780b5b75d8754b032a170accb0"
end
depends_on "cmake" => :build
depends_on "libosmium" => :build
depends_on "gdal"
depends_on "geos"
depends_on "libspatialite"
depends_on "lz4"
uses_from_macos "sqlite"
uses_from_macos "zlib"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
# libosmium vendors the protozero headers under its libexec.
protozero = Formula["libosmium"].opt_libexec/"include"
system "cmake", ".", "-DPROTOZERO_INCLUDE_DIR=#{protozero}", *std_cmake_args
system "make", "install"
end
test do
# Minimal OPL input: four nodes and one closed way tagged as coastline.
(testpath/"input.opl").write <<~EOS
n100 v1 x1.01 y1.01
n101 v1 x1.04 y1.01
n102 v1 x1.04 y1.04
n103 v1 x1.01 y1.04
w200 v1 Tnatural=coastline Nn100,n101,n102,n103,n100
EOS
system "#{bin}/osmcoastline", "-v", "-o", "output.db", "input.opl"
end
end
|
# Homebrew formula for PcapPlusPlus 19.04.
class Pcapplusplus < Formula
desc "C++ network sniffing, packet parsing and crafting framework"
homepage "https://seladb.github.io/PcapPlusPlus-Doc"
url "https://github.com/seladb/PcapPlusPlus/archive/v19.04.tar.gz"
sha256 "0b44074ebbaaa8666e16471311b6b99b0a5bf52d16bbe1452d26bacecfd90add"
bottle do
cellar :any_skip_relocation
sha256 "a862a2c39d37c54d2dd719fc4874f8eb21f49477400f9537523c49a18c5cf7bb" => :catalina
sha256 "66e87be04a8af4d24911300dc912481258533644dedbd1d8541368b8cf750be1" => :mojave
sha256 "8309ef07fefb2edaf0eb7f8697a56d85faaad8f034fbb6ad5d2b526da89b3e5d" => :high_sierra
sha256 "a856979800a5007e3f686f3d39a323bb25702457745929b34448c94df1b442b3" => :sierra
end
def install
# Prefix include paths with the SDK path so system headers resolve.
# NOTE(review): this rewrites every "-I" occurrence in the makefile —
# presumably all of them are include flags; verify against upstream mk file.
inreplace "mk/PcapPlusPlus.mk.macosx", "-I", "-I#{MacOS.sdk_path}"
system "./configure-mac_os_x.sh", "--install-dir", prefix
# The upstream build requires 'make all' and 'make install' to be run
# as two separate commands.
system "make", "all"
system "make", "install"
end
test do
# Compile and run a small program against the installed library using
# the flags exported by the installed PcapPlusPlus.mk.
(testpath/"test.cpp").write <<~EOS
#include "stdlib.h"
#include "PcapLiveDeviceList.h"
int main() {
const std::vector<pcpp::PcapLiveDevice*>& devList =
pcpp::PcapLiveDeviceList::getInstance().getPcapLiveDevicesList();
if (devList.size() > 0) {
if (devList[0]->getName() == NULL)
return 1;
return 0;
}
return 0;
}
EOS
(testpath/"Makefile").write <<~EOS
include #{etc}/PcapPlusPlus.mk
all:
\tg++ $(PCAPPP_BUILD_FLAGS) $(PCAPPP_INCLUDES) -c -o test.o test.cpp
\tg++ -L#{lib} -o test test.o $(PCAPPP_LIBS)
EOS
system "make", "all"
system "./test"
end
end
pcapplusplus 19.12
# Homebrew formula for PcapPlusPlus 19.12 (version-bump copy; the SDK-path
# inreplace workaround from 19.04 is no longer needed).
class Pcapplusplus < Formula
desc "C++ network sniffing, packet parsing and crafting framework"
homepage "https://seladb.github.io/PcapPlusPlus-Doc"
url "https://github.com/seladb/PcapPlusPlus/archive/v19.12.tar.gz"
sha256 "9bebe2972a6678b8fb80f93b92a3caf9babae346137f2171e6941f35b56f88bb"
bottle do
cellar :any_skip_relocation
sha256 "a862a2c39d37c54d2dd719fc4874f8eb21f49477400f9537523c49a18c5cf7bb" => :catalina
sha256 "66e87be04a8af4d24911300dc912481258533644dedbd1d8541368b8cf750be1" => :mojave
sha256 "8309ef07fefb2edaf0eb7f8697a56d85faaad8f034fbb6ad5d2b526da89b3e5d" => :high_sierra
sha256 "a856979800a5007e3f686f3d39a323bb25702457745929b34448c94df1b442b3" => :sierra
end
def install
system "./configure-mac_os_x.sh", "--install-dir", prefix
# The upstream build requires 'make all' and 'make install' to be run
# as two separate commands.
system "make", "all"
system "make", "install"
end
test do
# Compile and run a small program against the installed library using
# the flags exported by the installed PcapPlusPlus.mk.
(testpath/"test.cpp").write <<~EOS
#include "stdlib.h"
#include "PcapLiveDeviceList.h"
int main() {
const std::vector<pcpp::PcapLiveDevice*>& devList =
pcpp::PcapLiveDeviceList::getInstance().getPcapLiveDevicesList();
if (devList.size() > 0) {
if (devList[0]->getName() == NULL)
return 1;
return 0;
}
return 0;
}
EOS
(testpath/"Makefile").write <<~EOS
include #{etc}/PcapPlusPlus.mk
all:
\tg++ $(PCAPPP_BUILD_FLAGS) $(PCAPPP_INCLUDES) -c -o test.o test.cpp
\tg++ -L#{lib} -o test test.o $(PCAPPP_LIBS)
EOS
system "make", "all"
system "./test"
end
end
|
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# PHP 5.4 PECL extension formula for Hprose, built on the tap's shared
# AbstractPhp54Extension helper (provides init, safe_phpize, phpconfig, etc.).
class Php54Hprose < AbstractPhp54Extension
init
desc "Hprose for PHP."
homepage "https://pecl.php.net/package/hprose"
url "https://pecl.php.net/get/hprose-1.5.4.tgz"
sha256 "28733983073d5272b7096129dc23717c42485f905556082352bb0954b64bb996"
head "https://github.com/hprose/hprose-pecl.git"
bottle do
sha256 "5c68c512d6a3c3562ab9208d7c52d121e1e05f51c640e23d0e5e2e478ac00d14" => :yosemite
sha256 "bbc66e713fe54c643760192ddd5b0ee84dbb0a5790d588ae3dc6305067776933" => :mavericks
sha256 "b0c926da285b8e91b0f9aec59beb1274e14f352eacf0c5bd3119e1a8880fd7c0" => :mountain_lion
end
def install
# The PECL tarball unpacks into a versioned subdirectory; HEAD checkouts don't.
Dir.chdir "hprose-#{version}" unless build.head?
ENV.universal_binary if build.universal?
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install "modules/hprose.so"
# Optionally write the extension's ini snippet (helper from the abstract class).
write_config_file if build.with? "config-file"
end
end
php54-hprose: update 1.5.4 bottle.
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# PHP 5.4 PECL extension formula for Hprose (bottle-update copy with
# revised bottle block), built on the tap's shared AbstractPhp54Extension.
class Php54Hprose < AbstractPhp54Extension
init
desc "Hprose for PHP."
homepage "https://pecl.php.net/package/hprose"
url "https://pecl.php.net/get/hprose-1.5.4.tgz"
sha256 "28733983073d5272b7096129dc23717c42485f905556082352bb0954b64bb996"
head "https://github.com/hprose/hprose-pecl.git"
bottle do
cellar :any_skip_relocation
revision 1
sha256 "48c1c44d03641b19e1f9671a07d7a39f7772b20109f9b6fb88e5e9f90c7e08ea" => :el_capitan
sha256 "35e5bd2c794d11fd89bb13920c9a3e3e9b5526d7a2eddcc3ca1f1d218aa0c725" => :yosemite
sha256 "1aebbd52100d96c0cc79006e36df71ad250846f4639441e4210e1141387c1b80" => :mavericks
end
def install
# The PECL tarball unpacks into a versioned subdirectory; HEAD checkouts don't.
Dir.chdir "hprose-#{version}" unless build.head?
ENV.universal_binary if build.universal?
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install "modules/hprose.so"
# Optionally write the extension's ini snippet (helper from the abstract class).
write_config_file if build.with? "config-file"
end
end
|
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# PHP 5.4 extension formula for Pimple's C extension, built on the tap's
# shared AbstractPhp54Extension helper.
class Php54Pimple < AbstractPhp54Extension
init
desc "Pimple is a simple PHP Dependency Injection Container."
homepage "http://pimple.sensiolabs.org/"
url "https://github.com/silexphp/Pimple/archive/v3.0.0.tar.gz"
sha256 "591e706f5cdce06bdd17d306db3fe9df521bee0ef4fcb0ee56524ff258ef66ba"
head "https://github.com/silexphp/Pimple.git"
bottle do
cellar :any_skip_relocation
revision 1
sha256 "da768507b34c5a70d96be8a9a4a603ad8aaff193bb7bb781bb12a62b5f7f5402" => :el_capitan
sha256 "f40bec3c42a6639c5c299f17e2e023378a8f20dd53d6becee44e8f14cc8eee5d" => :yosemite
sha256 "85205555101fae5f85528206afd2c39ca68cfc54167a02cb75865a72de182f99" => :mavericks
end
def install
ENV.universal_binary if build.universal?
# The C extension lives in ext/pimple inside the repository.
Dir.chdir "ext/pimple" do
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install %w[modules/pimple.so]
end
# Optionally write the extension's ini snippet (helper from the abstract class).
write_config_file if build.with? "config-file"
end
end
php54-pimple: bottle revision to rebuild.
require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
# PHP 5.4 extension formula for Pimple's C extension (copy where the
# deprecated bottle `revision 1` was renamed to `rebuild 1`).
class Php54Pimple < AbstractPhp54Extension
init
desc "Pimple is a simple PHP Dependency Injection Container."
homepage "http://pimple.sensiolabs.org/"
url "https://github.com/silexphp/Pimple/archive/v3.0.0.tar.gz"
sha256 "591e706f5cdce06bdd17d306db3fe9df521bee0ef4fcb0ee56524ff258ef66ba"
head "https://github.com/silexphp/Pimple.git"
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "da768507b34c5a70d96be8a9a4a603ad8aaff193bb7bb781bb12a62b5f7f5402" => :el_capitan
sha256 "f40bec3c42a6639c5c299f17e2e023378a8f20dd53d6becee44e8f14cc8eee5d" => :yosemite
sha256 "85205555101fae5f85528206afd2c39ca68cfc54167a02cb75865a72de182f99" => :mavericks
end
def install
ENV.universal_binary if build.universal?
# The C extension lives in ext/pimple inside the repository.
Dir.chdir "ext/pimple" do
safe_phpize
system "./configure", "--prefix=#{prefix}",
phpconfig
system "make"
prefix.install %w[modules/pimple.so]
end
# Optionally write the extension's ini snippet (helper from the abstract class).
write_config_file if build.with? "config-file"
end
end
|
# Homebrew formula for Proteinortho 6.0.22, an orthology detection tool.
class Proteinortho < Formula
desc "Detecting orthologous genes within different species"
homepage "https://gitlab.com/paulklemm_PHD/proteinortho"
url "https://gitlab.com/paulklemm_PHD/proteinortho/-/archive/v6.0.22/proteinortho-v6.0.22.tar.gz"
sha256 "9a30ae580a360f2898019d9651ab8d6522bfc1abeceb32d17ed1454ba807193d"
license "GPL-3.0-or-later"
bottle do
cellar :any
sha256 "49dbf56ff37813fba6bbb4f82d79fd098240c4d4a794656e60acc7697e5d9d70" => :catalina
sha256 "f6852c5015483ee946d04d9ead6ba577695a33b3cbc5fc9d2028bf7a37b77eb2" => :mojave
sha256 "1a3d9722dc5396df00c218e3aed145d16fc000f5fe897897eac4b36d2cd1405e" => :high_sierra
end
depends_on "diamond"
depends_on "openblas"
def install
# Upstream's install target copies straight into PREFIX, so it must exist
# and is pointed at the formula's bin directory.
bin.mkpath
system "make", "install", "PREFIX=#{bin}"
doc.install "manual.html"
end
test do
# Both binaries ship a built-in self-test mode.
system "#{bin}/proteinortho", "-test"
system "#{bin}/proteinortho_clustering", "-test"
end
end
proteinortho 6.0.23
use c++11
Closes #61067.
Signed-off-by: Claudia Pellegrino <e1eca4697f2d36f13cfcfc5dac03666161d5ce1a@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for Proteinortho 6.0.23 (version-bump copy that also
# forces a C++11 compiler mode).
class Proteinortho < Formula
desc "Detecting orthologous genes within different species"
homepage "https://gitlab.com/paulklemm_PHD/proteinortho"
url "https://gitlab.com/paulklemm_PHD/proteinortho/-/archive/v6.0.23/proteinortho-v6.0.23.tar.gz"
sha256 "e2a93fee6bcbc422e0bbeb98150bf3287c536007397d2bdc4859fc955abb3dc2"
license "GPL-3.0-or-later"
bottle do
cellar :any
sha256 "49dbf56ff37813fba6bbb4f82d79fd098240c4d4a794656e60acc7697e5d9d70" => :catalina
sha256 "f6852c5015483ee946d04d9ead6ba577695a33b3cbc5fc9d2028bf7a37b77eb2" => :mojave
sha256 "1a3d9722dc5396df00c218e3aed145d16fc000f5fe897897eac4b36d2cd1405e" => :high_sierra
end
depends_on "diamond"
depends_on "openblas"
def install
# This release requires C++11; ENV.cxx11 sets the appropriate -std flag.
ENV.cxx11
# Upstream's install target copies straight into PREFIX, so it must exist
# and is pointed at the formula's bin directory.
bin.mkpath
system "make", "install", "PREFIX=#{bin}"
doc.install "manual.html"
end
test do
# Both binaries ship a built-in self-test mode.
system "#{bin}/proteinortho", "-test"
system "#{bin}/proteinortho_clustering", "-test"
end
end
|
# Tap formula (osgeo4mac) building only the QtWebKit/QtWebKitWidgets
# PyQt5 modules against an external qt5-webkit, for Python 2 and/or 3.
class Pyqt5Webkit < Formula
desc "Python bindings for v5 of Qt's Webkit"
homepage "https://www.riverbankcomputing.com/software/pyqt/download5"
url "https://downloads.sourceforge.net/project/pyqt/PyQt5/PyQt-5.9/PyQt5_gpl-5.9.tar.gz"
sha256 "ab0e7999cf202cc72962c78aefe461d16497b3c1a8282ab966ad90b6cb271096"
bottle do
root_url "https://osgeo4mac.s3.amazonaws.com/bottles"
sha256 "b969d8feec95070c8ef5f1bf4be73c49b7728afbf86398f8d38f171bf31edf4d" => :sierra
sha256 "b969d8feec95070c8ef5f1bf4be73c49b7728afbf86398f8d38f171bf31edf4d" => :high_sierra
end
option "with-debug", "Build with debug symbols"
depends_on "qt"
depends_on "osgeo/osgeo4mac/qt5-webkit"
depends_on "sip"
depends_on "pyqt"
depends_on "python@2" => :recommended
depends_on "python" => :recommended
def install
if build.without?("python3") && build.without?("python")
odie "pyqt: --with-python3 must be specified when using --without-python"
end
# sneak the WebKit modules into the Qt.modules setup before referencing in .pro files
wk_mods = Formula["qt5-webkit"].opt_prefix/"mkspecs/modules"
inreplace "configure.py" do |s|
# NOTE(review): regex argument to sub! is unparenthesized — Ruby parses
# this correctly but warns about ambiguity; consider s.sub!(/.../, ...).
s.sub! /('TEMPLATE = lib'\])/,
"\\1\n" + <<-EOS
pro_lines.append('include(#{wk_mods}/qt_lib_webkit.pri)')
pro_lines.append('include(#{wk_mods}/qt_lib_webkitwidgets.pri)')
EOS
end
# Build the modules once per requested Python interpreter.
Language::Python.each_python(build) do |python, version|
# check if the module already exists in pyqt prefix
if (Formula["pyqt"].lib/"python#{version}/site-packages/PyQt5/QtWebKit.so").exist?
opoo "PyQt5 formula already has a Python #{version} PyQt5.QtWebKit module (i.e. `qt` probably built `--with-webkit`)"
next
end
args = ["--confirm-license",
"--bindir=#{bin}",
"--destdir=#{lib}/python#{version}/site-packages",
"--stubsdir=#{lib}/python#{version}/site-packages/PyQt5",
"--sipdir=#{share}/sip/Qt5",
# sip.h could not be found automatically
"--sip-incdir=#{Formula["sip"].opt_include}",
"--qmake=#{Formula["qt"].bin}/qmake",
# Force deployment target to avoid libc++ issues
"QMAKE_MACOSX_DEPLOYMENT_TARGET=#{MacOS.version}",
"--enable=QtWebKit",
"--enable=QtWebKitWidgets",
"--no-designer-plugin",
"--no-python-dbus",
"--no-qml-plugin",
"--no-qsci-api",
"--no-sip-files",
"--no-tools",
"--verbose"]
args << "--debug" if build.with? "debug"
system python, "configure.py", *args
system "make"
system "make", "install"
system "make", "clean"
# clean out non-WebKit artifacts (already in pyqt5 formula prefix)
rm_r prefix/"share"
cd "#{lib}/python#{version}/site-packages/PyQt5" do
rm "__init__.py"
rm "Qt.so"
rm_r "uic"
end
end
end
test do
# Import each built module under every installed Python version.
Language::Python.each_python(build) do |python, python_version|
next unless (HOMEBREW_PREFIX/"lib/python#{python_version}/site-packages").exist?
ENV["PYTHONPATH"] = HOMEBREW_PREFIX/"lib/python#{python_version}/site-packages"
%w[
WebKit
WebKitWidgets
].each { |mod| system python, "-c", "import PyQt5.Qt#{mod}" }
end
end
end
Upgrade pyqt5-webkit to 5.11
# Tap formula (osgeo4mac) building only the QtWebKit/QtWebKitWidgets
# PyQt5 modules (version-bump copy: PyQt 5.9 -> 5.11.1).
class Pyqt5Webkit < Formula
desc "Python bindings for v5 of Qt's Webkit"
homepage "https://www.riverbankcomputing.com/software/pyqt/download5"
url "https://sourceforge.net/projects/pyqt/files/PyQt5/PyQt-5.11.1/PyQt5_gpl-5.11.1.tar.gz"
sha256 "265fe677d89ecbe34f52fc4f9d2d4f424638fc64c28534be1906da54855e3b48"
bottle do
root_url "https://osgeo4mac.s3.amazonaws.com/bottles"
sha256 "b969d8feec95070c8ef5f1bf4be73c49b7728afbf86398f8d38f171bf31edf4d" => :sierra
sha256 "b969d8feec95070c8ef5f1bf4be73c49b7728afbf86398f8d38f171bf31edf4d" => :high_sierra
end
option "with-debug", "Build with debug symbols"
depends_on "qt"
depends_on "osgeo/osgeo4mac/qt5-webkit"
depends_on "sip"
depends_on "pyqt"
depends_on "python@2" => :recommended
depends_on "python" => :recommended
def install
if build.without?("python3") && build.without?("python")
odie "pyqt: --with-python3 must be specified when using --without-python"
end
# sneak the WebKit modules into the Qt.modules setup before referencing in .pro files
wk_mods = Formula["qt5-webkit"].opt_prefix/"mkspecs/modules"
inreplace "configure.py" do |s|
# NOTE(review): regex argument to sub! is unparenthesized — Ruby parses
# this correctly but warns about ambiguity; consider s.sub!(/.../, ...).
s.sub! /('TEMPLATE = lib'\])/,
"\\1\n" + <<-EOS
pro_lines.append('include(#{wk_mods}/qt_lib_webkit.pri)')
pro_lines.append('include(#{wk_mods}/qt_lib_webkitwidgets.pri)')
EOS
end
# Build the modules once per requested Python interpreter.
Language::Python.each_python(build) do |python, version|
# check if the module already exists in pyqt prefix
if (Formula["pyqt"].lib/"python#{version}/site-packages/PyQt5/QtWebKit.so").exist?
opoo "PyQt5 formula already has a Python #{version} PyQt5.QtWebKit module (i.e. `qt` probably built `--with-webkit`)"
next
end
args = ["--confirm-license",
"--bindir=#{bin}",
"--destdir=#{lib}/python#{version}/site-packages",
"--stubsdir=#{lib}/python#{version}/site-packages/PyQt5",
"--sipdir=#{share}/sip/Qt5",
# sip.h could not be found automatically
"--sip-incdir=#{Formula["sip"].opt_include}",
"--qmake=#{Formula["qt"].bin}/qmake",
# Force deployment target to avoid libc++ issues
"QMAKE_MACOSX_DEPLOYMENT_TARGET=#{MacOS.version}",
"--enable=QtWebKit",
"--enable=QtWebKitWidgets",
"--no-designer-plugin",
"--no-python-dbus",
"--no-qml-plugin",
"--no-qsci-api",
"--no-sip-files",
"--no-tools",
"--verbose"]
args << "--debug" if build.with? "debug"
system python, "configure.py", *args
system "make"
system "make", "install"
system "make", "clean"
# clean out non-WebKit artifacts (already in pyqt5 formula prefix)
rm_r prefix/"share"
cd "#{lib}/python#{version}/site-packages/PyQt5" do
rm "__init__.py"
rm "Qt.so"
rm_r "uic"
end
end
end
test do
# Import each built module under every installed Python version.
Language::Python.each_python(build) do |python, python_version|
next unless (HOMEBREW_PREFIX/"lib/python#{python_version}/site-packages").exist?
ENV["PYTHONPATH"] = HOMEBREW_PREFIX/"lib/python#{python_version}/site-packages"
%w[
WebKit
WebKitWidgets
].each { |mod| system python, "-c", "import PyQt5.Qt#{mod}" }
end
end
end
|
# Homebrew formula for rdiff-backup, installed into libexec with
# symlinked launchers to avoid polluting site-packages.
class RdiffBackup < Formula
desc "Reverse differential backup tool, over a network or locally"
homepage "https://rdiff-backup.net/"
url "https://github.com/rdiff-backup/rdiff-backup/releases/download/v2.0.5/rdiff-backup-2.0.5.tar.gz"
sha256 "2bb7837b4a9712b6efaebfa7da8ed6348ffcb02fcecff0e19d8fff732e933b87"
license "GPL-2.0-or-later"
revision 2
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "9841037fad91c2e5567e03916d7413fd1e74d6858dff760b2a3728c53e1baf80"
sha256 cellar: :any, arm64_big_sur: "66247de6c20d7350372ecb4efb63b3f5bec4c7f2fe29c4ed80723ebdcd0018fa"
sha256 cellar: :any, monterey: "c64863e034cc7deb4de5574243baac6b0c180ab556ccea2b8fde137cd1910d74"
sha256 cellar: :any, big_sur: "3aaeb0620c7dd027efea476c6b3af79425a7baf2056abc29ed88e405bf2f107a"
sha256 cellar: :any, catalina: "e53a41d9556104c8b72a6b876969b2634d48a1153552af42af86456b5c1add67"
sha256 cellar: :any, mojave: "f3d24f92212373f45e8323a8d054cef1b1ee0b392c96034cbf461bb60b0effd6"
end
depends_on "librsync"
depends_on "python@3.10"
def install
# distutils names the build dir by platform (build/lib.macosx-... or
# build/lib.linux-x86_64-...); pick the matching prefix.
os = OS.mac? ? "macosx" : "linux-x86_64"
system "python3", "setup.py", "build", "--librsync-dir=#{prefix}"
libexec.install Dir["build/lib.#{os}*/rdiff_backup"]
libexec.install Dir["build/scripts-*/*"]
man1.install Dir["docs/*.1"]
bin.install_symlink Dir["#{libexec}/rdiff-backup*"]
end
test do
system "#{bin}/rdiff-backup", "--version"
end
end
rdiff-backup: update 2.0.5_2 bottle.
# Homebrew formula for rdiff-backup (bottle-update copy adding a Linux
# bottle), installed into libexec with symlinked launchers.
class RdiffBackup < Formula
desc "Reverse differential backup tool, over a network or locally"
homepage "https://rdiff-backup.net/"
url "https://github.com/rdiff-backup/rdiff-backup/releases/download/v2.0.5/rdiff-backup-2.0.5.tar.gz"
sha256 "2bb7837b4a9712b6efaebfa7da8ed6348ffcb02fcecff0e19d8fff732e933b87"
license "GPL-2.0-or-later"
revision 2
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "9841037fad91c2e5567e03916d7413fd1e74d6858dff760b2a3728c53e1baf80"
sha256 cellar: :any, arm64_big_sur: "66247de6c20d7350372ecb4efb63b3f5bec4c7f2fe29c4ed80723ebdcd0018fa"
sha256 cellar: :any, monterey: "c64863e034cc7deb4de5574243baac6b0c180ab556ccea2b8fde137cd1910d74"
sha256 cellar: :any, big_sur: "3aaeb0620c7dd027efea476c6b3af79425a7baf2056abc29ed88e405bf2f107a"
sha256 cellar: :any, catalina: "e53a41d9556104c8b72a6b876969b2634d48a1153552af42af86456b5c1add67"
sha256 cellar: :any, mojave: "f3d24f92212373f45e8323a8d054cef1b1ee0b392c96034cbf461bb60b0effd6"
sha256 cellar: :any_skip_relocation, x86_64_linux: "dedf7b7d0f5341a6159e46485c358502f3e50682db4f33f6ac69877830d0c99e"
end
depends_on "librsync"
depends_on "python@3.10"
def install
# distutils names the build dir by platform (build/lib.macosx-... or
# build/lib.linux-x86_64-...); pick the matching prefix.
os = OS.mac? ? "macosx" : "linux-x86_64"
system "python3", "setup.py", "build", "--librsync-dir=#{prefix}"
libexec.install Dir["build/lib.#{os}*/rdiff_backup"]
libexec.install Dir["build/scripts-*/*"]
man1.install Dir["docs/*.1"]
bin.install_symlink Dir["#{libexec}/rdiff-backup*"]
end
test do
system "#{bin}/rdiff-backup", "--version"
end
end
|
# Homebrew formula for stellar-core, built from a pinned git tag with
# the standard autotools flow.
class StellarCore < Formula
desc "The backbone of the Stellar (XLM) network"
homepage "https://www.stellar.org/"
url "https://github.com/stellar/stellar-core.git",
tag: "v14.0.0",
revision: "eb0153c118c2c4e7913d4e681beadf5cab194b35"
license "Apache-2.0"
head "https://github.com/stellar/stellar-core.git"
bottle do
cellar :any
sha256 "36cbcda3bb3064cdbedf6a37925757227087d50d513b65ed6fd83e731b9496ab" => :catalina
sha256 "c789eb0cedc23e5878e702119b9a6c6f1ef853b512ac926d8a9ef13146231649" => :mojave
sha256 "76d0a3743be0948ccb584677a98a1994711ff8d74750f8c5149ff3f858bd6b33" => :high_sierra
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pandoc" => :build
depends_on "pkg-config" => :build
depends_on "parallel" => :test
depends_on "libpq"
depends_on "libpqxx"
depends_on "libsodium"
uses_from_macos "bison" => :build
uses_from_macos "flex" => :build
def install
system "./autogen.sh"
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--enable-postgres"
system "make", "install"
end
test do
# Run a tagged subset of the upstream test suite.
system "#{bin}/stellar-core", "test",
"'[bucket],[crypto],[herder],[upgrades],[accountsubentriescount]," \
"[bucketlistconsistent],[cacheisconsistent],[fs]'"
end
end
stellar-core: update 14.0.0 bottle.
# Homebrew formula for stellar-core, built from a pinned upstream git tag.
class StellarCore < Formula
  desc "The backbone of the Stellar (XLM) network"
  homepage "https://www.stellar.org/"
  # The revision pins the exact commit behind the tag so the build stays
  # reproducible even if the tag were ever moved upstream.
  url "https://github.com/stellar/stellar-core.git",
      tag: "v14.0.0",
      revision: "eb0153c118c2c4e7913d4e681beadf5cab194b35"
  license "Apache-2.0"
  head "https://github.com/stellar/stellar-core.git"

  bottle do
    cellar :any
    sha256 "90e3a2a7ee27cf6bf0fbf73326f31efd5211704a94d6c266dd1860f02734c841" => :catalina
    sha256 "b9a0e969d218908ca36ba35cc71bfd866f8d8c2858d0635f3d63c92ac5d6f91c" => :mojave
    sha256 "4aec90c47396ee3c63b3fc6961e30ce8f7feda4f7b396dbe26aaa483e0b5aac9" => :high_sierra
  end

  # Autotools toolchain is required because we build from a git checkout
  # (no pre-generated configure script in a release tarball).
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pandoc" => :build
  depends_on "pkg-config" => :build
  depends_on "parallel" => :test
  depends_on "libpq"
  depends_on "libpqxx"
  depends_on "libsodium"

  uses_from_macos "bison" => :build
  uses_from_macos "flex" => :build

  def install
    # Generate ./configure from the git checkout.
    system "./autogen.sh"
    # --enable-postgres builds the PostgreSQL support against Homebrew's
    # libpq/libpqxx.
    system "./configure", "--disable-debug",
           "--disable-dependency-tracking",
           "--disable-silent-rules",
           "--prefix=#{prefix}",
           "--enable-postgres"
    system "make", "install"
  end

  test do
    # Run a subset of the upstream unit-test tags.
    # NOTE(review): `system` with an argument list bypasses the shell, so the
    # literal single quotes in the tag-list argument reach stellar-core
    # verbatim — confirm the test binary tolerates them.
    system "#{bin}/stellar-core", "test",
           "'[bucket],[crypto],[herder],[upgrades],[accountsubentriescount]," \
           "[bucketlistconsistent],[cacheisconsistent],[fs]'"
  end
end
|
# Homebrew formula for the Tekton Pipelines command-line client (tkn).
class TektoncdCli < Formula
  desc "CLI for interacting with TektonCD"
  homepage "https://github.com/tektoncd/cli"
  url "https://github.com/tektoncd/cli/archive/v0.10.0.tar.gz"
  sha256 "d85663f160c2902789194502c036953a856f57d7c0481c23ab12e719a7a21d42"

  bottle do
    cellar :any_skip_relocation
    sha256 "535ec002c95d1fd0c0ee84828d3844ea9845493e55e099941366d004bdfa6b55" => :catalina
    sha256 "c56a08ee17e686bc22a17b9982d5879848660e9472464dfdae3bb7dd0d5b55f3" => :mojave
    sha256 "f19731f06d92e88fe464b72b14457b85230c926e20e5a428e8fcf2500e357fd5" => :high_sierra
  end

  depends_on "go" => :build

  def install
    # Upstream Makefile target produces the binary at bin/tkn.
    system "make", "bin/tkn"
    bin.install "bin/tkn" => "tkn"

    # Generate shell completions using the freshly built binary.
    # NOTE(review): SHELL=... is embedded in the command string, so this
    # relies on popen_read going through a shell for the assignment — confirm.
    output = Utils.popen_read("SHELL=bash #{bin}/tkn completion bash")
    (bash_completion/"tkn").write output
    output = Utils.popen_read("SHELL=zsh #{bin}/tkn completion zsh")
    (zsh_completion/"_tkn").write output

    prefix.install_metafiles
  end

  test do
    # With no kubeconfig available the command must fail; merge stderr into
    # stdout and assert on the expected error text.
    cmd = "#{bin}/tkn pipelinerun describe homebrew-formula"
    io = IO.popen(cmd, :err => [:child, :out])

    assert_match "Error: Couldn't get kubeConfiguration namespace", io.read
  end
end
tektoncd-cli: update 0.10.0 bottle.
# Homebrew formula for the Tekton Pipelines command-line client (tkn).
class TektoncdCli < Formula
  desc "CLI for interacting with TektonCD"
  homepage "https://github.com/tektoncd/cli"
  url "https://github.com/tektoncd/cli/archive/v0.10.0.tar.gz"
  sha256 "d85663f160c2902789194502c036953a856f57d7c0481c23ab12e719a7a21d42"

  bottle do
    cellar :any_skip_relocation
    sha256 "535ec002c95d1fd0c0ee84828d3844ea9845493e55e099941366d004bdfa6b55" => :catalina
    sha256 "c56a08ee17e686bc22a17b9982d5879848660e9472464dfdae3bb7dd0d5b55f3" => :mojave
    sha256 "f19731f06d92e88fe464b72b14457b85230c926e20e5a428e8fcf2500e357fd5" => :high_sierra
    sha256 "14f5cb61d2629917db9f7fb24b50bd35d840a23362ab9b6b9cef632e5a2a8f37" => :x86_64_linux
  end

  depends_on "go" => :build

  def install
    # Upstream Makefile target produces the binary at bin/tkn.
    system "make", "bin/tkn"
    bin.install "bin/tkn" => "tkn"

    # Generate shell completions using the freshly built binary.
    # NOTE(review): SHELL=... is embedded in the command string, so this
    # relies on popen_read going through a shell for the assignment — confirm.
    output = Utils.popen_read("SHELL=bash #{bin}/tkn completion bash")
    (bash_completion/"tkn").write output
    output = Utils.popen_read("SHELL=zsh #{bin}/tkn completion zsh")
    (zsh_completion/"_tkn").write output

    prefix.install_metafiles
  end

  test do
    # With no kubeconfig available the command must fail; merge stderr into
    # stdout and assert on the expected error text.
    cmd = "#{bin}/tkn pipelinerun describe homebrew-formula"
    io = IO.popen(cmd, :err => [:child, :out])

    assert_match "Error: Couldn't get kubeConfiguration namespace", io.read
  end
end
|
# Homebrew formula for terraforming, a Ruby CLI that exports existing AWS
# resources as Terraform configuration/state files.
class Terraforming < Formula
  desc "Export existing AWS resources to Terraform style (tf, tfstate)"
  homepage "https://terraforming.dtan4.net/"
  url "https://github.com/dtan4/terraforming.git",
      tag: "v0.18.0",
      revision: "67cb9299f283bc16bd70c197f25edc419bee280f"
  license "MIT"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "ae14e8533d790a2d2a7e937f0d36c2bac7a62087375b80e27f2dd2f2171981d6"
    sha256 cellar: :any_skip_relocation, big_sur: "72f190c258f2ab9a73d635ff533f9814219a79a7261dd0d0b4e1b5cb6eddcb8a"
    sha256 cellar: :any_skip_relocation, catalina: "5b1a20c820982585fdad1e588ab6ac171e8d3f963da62b50a598e3002635331b"
    sha256 cellar: :any_skip_relocation, mojave: "e4997ba46e6e796833c2f881f68b20cd52006510371ede211d422190a5223454"
    sha256 cellar: :any_skip_relocation, high_sierra: "59001edf7447dbab2dd760fcec4fc0a77d711ec43e7d95658aa9c663f7baf44d"
    sha256 cellar: :any_skip_relocation, sierra: "f1d900508e9b2a38a1e417ee9f0faa050c89332cf9eff1a3de83c96eebead164"
  end

  uses_from_macos "ruby"

  # Vendored gem dependencies, pinned by exact version and checksum.
  # They are installed into libexec so the user's gem environment is untouched.
  resource "aws-sdk-autoscaling" do
    url "https://rubygems.org/gems/aws-sdk-autoscaling-1.20.0.gem"
    sha256 "85525581b3084d1ce04d468961bdde2397ab340914579bf0515c45a706cd8815"
  end

  resource "aws-sdk-cloudwatch" do
    url "https://rubygems.org/gems/aws-sdk-cloudwatch-1.20.0.gem"
    sha256 "d904807e172a5cf88b1f13f1944a4595fa51a842c10d1a0ac5065fdf874ac6af"
  end

  resource "aws-sdk-dynamodb" do
    url "https://rubygems.org/gems/aws-sdk-dynamodb-1.25.0.gem"
    sha256 "529c3b1b46c997b5db79274747922669ff9f52caefcf3ee40454bf0c3e3424c8"
  end

  resource "aws-sdk-ec2" do
    url "https://rubygems.org/gems/aws-sdk-ec2-1.80.0.gem"
    sha256 "bb73cefdf95ad413ae7b0fe6fcc2ead6e66f8980ed87bd96a1a7c43fb589551e"
  end

  resource "aws-sdk-efs" do
    url "https://rubygems.org/gems/aws-sdk-efs-1.13.0.gem"
    sha256 "c322bd04fed83efa1d5a4b276cab788b39258c4ecab362a789cc16cc61be05e4"
  end

  resource "aws-sdk-elasticache" do
    url "https://rubygems.org/gems/aws-sdk-elasticache-1.14.0.gem"
    sha256 "a78ae9d6c927f6b5c2b9af40c5bc03453b39d9693dcb05df2730293a52186844"
  end

  resource "aws-sdk-elasticloadbalancing" do
    url "https://rubygems.org/gems/aws-sdk-elasticloadbalancing-1.12.0.gem"
    sha256 "39c04663c91b1a467dd5d9b541d4792be4e5b9e25ee2ffb52e473aeb97d44301"
  end

  resource "aws-sdk-elasticloadbalancingv2" do
    url "https://rubygems.org/gems/aws-sdk-elasticloadbalancingv2-1.26.0.gem"
    sha256 "1dc95fc21b1b1ffeb15801084affc5d915d3c386f6f052f55c760a773424dd6d"
  end

  resource "aws-sdk-iam" do
    url "https://rubygems.org/gems/aws-sdk-iam-1.18.0.gem"
    sha256 "0efba7b586c81d7b17cb3086bf5cb287e68db5487d344877a444c107ee3b2130"
  end

  resource "aws-sdk-kms" do
    url "https://rubygems.org/gems/aws-sdk-kms-1.17.0.gem"
    sha256 "f6e6500300ede3e31edaf14aea9ad05a60aba4402c11946fe147f9d03abc584e"
  end

  resource "aws-sdk-rds" do
    url "https://rubygems.org/gems/aws-sdk-rds-1.50.0.gem"
    sha256 "f62b6f0c87cf358a59b440a40ebbb79d6be6eeb5c2f4e5f159fc8ee3d1cf7a1b"
  end

  resource "aws-sdk-redshift" do
    url "https://rubygems.org/gems/aws-sdk-redshift-1.23.0.gem"
    sha256 "99ecbd9f050e4dd80c80f1119a273c75abdb5a5abf02b37c61f39234ee762678"
  end

  resource "aws-sdk-route53" do
    url "https://rubygems.org/gems/aws-sdk-route53-1.22.0.gem"
    sha256 "1b7aaabc67e4133a34c07c5fa979b00374866026d3f3bd130b992fa163f6b211"
  end

  resource "aws-sdk-s3" do
    url "https://rubygems.org/gems/aws-sdk-s3-1.36.1.gem"
    sha256 "b5baf7c91119791354a14424ef7af8917b6806a2b33878bf80f22b256104d0bd"
  end

  resource "aws-sdk-sns" do
    url "https://rubygems.org/gems/aws-sdk-sns-1.12.0.gem"
    sha256 "ac98e9dd72a8ecfe18f0e6482c02563050f0638f179725872bd414791a856138"
  end

  resource "aws-sdk-sqs" do
    url "https://rubygems.org/gems/aws-sdk-sqs-1.13.0.gem"
    sha256 "a0bb59cefb6a3a152192303236d0e3a0c0dabd27b7ab6ac3c6993b69598df5b2"
  end

  resource "multi_json" do
    url "https://rubygems.org/gems/multi_json-1.12.2.gem"
    sha256 "5dcc0b569969f3d1658c68b5d597fcdc1fc3a34d4ae92b4615c740d95aaa51e5"
  end

  resource "thor" do
    url "https://rubygems.org/gems/thor-0.20.3.gem"
    sha256 "49bc217fe28f6af34c6e60b003e3405c27595a55689077d82e9e61d4d3b519fa"
  end

  def install
    # Install every vendored gem into an isolated GEM_HOME under libexec.
    ENV["GEM_HOME"] = libexec
    resources.each do |r|
      r.fetch
      system "gem", "install", r.cached_download, "--no-document",
             "--install-dir", libexec
    end
    # Build and install terraforming itself; its dependencies were installed
    # above, so dependency resolution is skipped.
    system "gem", "build", "terraforming.gemspec"
    system "gem", "install", "--ignore-dependencies",
           "terraforming-#{version}.gem"
    bin.install libexec/"bin/terraforming"
    # Wrap the executables so GEM_HOME points at the vendored gems at runtime.
    bin.env_script_all_files(libexec/"bin", GEM_HOME: ENV["GEM_HOME"])
  end

  test do
    # `help` needs no AWS credentials; check the usage banner mentions the
    # requested subcommand.
    output = shell_output("#{bin}/terraforming help ec2")
    assert_match "Usage:", output
    assert_match "terraforming ec2", output
  end
end
terraforming: update 0.18.0 bottle.
# Homebrew formula for terraforming, a Ruby CLI that exports existing AWS
# resources as Terraform configuration/state files.
class Terraforming < Formula
  desc "Export existing AWS resources to Terraform style (tf, tfstate)"
  homepage "https://terraforming.dtan4.net/"
  url "https://github.com/dtan4/terraforming.git",
      tag: "v0.18.0",
      revision: "67cb9299f283bc16bd70c197f25edc419bee280f"
  license "MIT"

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "ae14e8533d790a2d2a7e937f0d36c2bac7a62087375b80e27f2dd2f2171981d6"
    sha256 cellar: :any_skip_relocation, big_sur: "72f190c258f2ab9a73d635ff533f9814219a79a7261dd0d0b4e1b5cb6eddcb8a"
    sha256 cellar: :any_skip_relocation, catalina: "5b1a20c820982585fdad1e588ab6ac171e8d3f963da62b50a598e3002635331b"
    sha256 cellar: :any_skip_relocation, mojave: "e4997ba46e6e796833c2f881f68b20cd52006510371ede211d422190a5223454"
    sha256 cellar: :any_skip_relocation, high_sierra: "59001edf7447dbab2dd760fcec4fc0a77d711ec43e7d95658aa9c663f7baf44d"
    sha256 cellar: :any_skip_relocation, sierra: "f1d900508e9b2a38a1e417ee9f0faa050c89332cf9eff1a3de83c96eebead164"
    sha256 cellar: :any_skip_relocation, all: "9fec9a488184066c7ef10f728be33fcc30afe004d9d5bc6dbe7c187a768b8165"
  end

  uses_from_macos "ruby"

  # Vendored gem dependencies, pinned by exact version and checksum.
  # They are installed into libexec so the user's gem environment is untouched.
  resource "aws-sdk-autoscaling" do
    url "https://rubygems.org/gems/aws-sdk-autoscaling-1.20.0.gem"
    sha256 "85525581b3084d1ce04d468961bdde2397ab340914579bf0515c45a706cd8815"
  end

  resource "aws-sdk-cloudwatch" do
    url "https://rubygems.org/gems/aws-sdk-cloudwatch-1.20.0.gem"
    sha256 "d904807e172a5cf88b1f13f1944a4595fa51a842c10d1a0ac5065fdf874ac6af"
  end

  resource "aws-sdk-dynamodb" do
    url "https://rubygems.org/gems/aws-sdk-dynamodb-1.25.0.gem"
    sha256 "529c3b1b46c997b5db79274747922669ff9f52caefcf3ee40454bf0c3e3424c8"
  end

  resource "aws-sdk-ec2" do
    url "https://rubygems.org/gems/aws-sdk-ec2-1.80.0.gem"
    sha256 "bb73cefdf95ad413ae7b0fe6fcc2ead6e66f8980ed87bd96a1a7c43fb589551e"
  end

  resource "aws-sdk-efs" do
    url "https://rubygems.org/gems/aws-sdk-efs-1.13.0.gem"
    sha256 "c322bd04fed83efa1d5a4b276cab788b39258c4ecab362a789cc16cc61be05e4"
  end

  resource "aws-sdk-elasticache" do
    url "https://rubygems.org/gems/aws-sdk-elasticache-1.14.0.gem"
    sha256 "a78ae9d6c927f6b5c2b9af40c5bc03453b39d9693dcb05df2730293a52186844"
  end

  resource "aws-sdk-elasticloadbalancing" do
    url "https://rubygems.org/gems/aws-sdk-elasticloadbalancing-1.12.0.gem"
    sha256 "39c04663c91b1a467dd5d9b541d4792be4e5b9e25ee2ffb52e473aeb97d44301"
  end

  resource "aws-sdk-elasticloadbalancingv2" do
    url "https://rubygems.org/gems/aws-sdk-elasticloadbalancingv2-1.26.0.gem"
    sha256 "1dc95fc21b1b1ffeb15801084affc5d915d3c386f6f052f55c760a773424dd6d"
  end

  resource "aws-sdk-iam" do
    url "https://rubygems.org/gems/aws-sdk-iam-1.18.0.gem"
    sha256 "0efba7b586c81d7b17cb3086bf5cb287e68db5487d344877a444c107ee3b2130"
  end

  resource "aws-sdk-kms" do
    url "https://rubygems.org/gems/aws-sdk-kms-1.17.0.gem"
    sha256 "f6e6500300ede3e31edaf14aea9ad05a60aba4402c11946fe147f9d03abc584e"
  end

  resource "aws-sdk-rds" do
    url "https://rubygems.org/gems/aws-sdk-rds-1.50.0.gem"
    sha256 "f62b6f0c87cf358a59b440a40ebbb79d6be6eeb5c2f4e5f159fc8ee3d1cf7a1b"
  end

  resource "aws-sdk-redshift" do
    url "https://rubygems.org/gems/aws-sdk-redshift-1.23.0.gem"
    sha256 "99ecbd9f050e4dd80c80f1119a273c75abdb5a5abf02b37c61f39234ee762678"
  end

  resource "aws-sdk-route53" do
    url "https://rubygems.org/gems/aws-sdk-route53-1.22.0.gem"
    sha256 "1b7aaabc67e4133a34c07c5fa979b00374866026d3f3bd130b992fa163f6b211"
  end

  resource "aws-sdk-s3" do
    url "https://rubygems.org/gems/aws-sdk-s3-1.36.1.gem"
    sha256 "b5baf7c91119791354a14424ef7af8917b6806a2b33878bf80f22b256104d0bd"
  end

  resource "aws-sdk-sns" do
    url "https://rubygems.org/gems/aws-sdk-sns-1.12.0.gem"
    sha256 "ac98e9dd72a8ecfe18f0e6482c02563050f0638f179725872bd414791a856138"
  end

  resource "aws-sdk-sqs" do
    url "https://rubygems.org/gems/aws-sdk-sqs-1.13.0.gem"
    sha256 "a0bb59cefb6a3a152192303236d0e3a0c0dabd27b7ab6ac3c6993b69598df5b2"
  end

  resource "multi_json" do
    url "https://rubygems.org/gems/multi_json-1.12.2.gem"
    sha256 "5dcc0b569969f3d1658c68b5d597fcdc1fc3a34d4ae92b4615c740d95aaa51e5"
  end

  resource "thor" do
    url "https://rubygems.org/gems/thor-0.20.3.gem"
    sha256 "49bc217fe28f6af34c6e60b003e3405c27595a55689077d82e9e61d4d3b519fa"
  end

  def install
    # Install every vendored gem into an isolated GEM_HOME under libexec.
    ENV["GEM_HOME"] = libexec
    resources.each do |r|
      r.fetch
      system "gem", "install", r.cached_download, "--no-document",
             "--install-dir", libexec
    end
    # Build and install terraforming itself; its dependencies were installed
    # above, so dependency resolution is skipped.
    system "gem", "build", "terraforming.gemspec"
    system "gem", "install", "--ignore-dependencies",
           "terraforming-#{version}.gem"
    bin.install libexec/"bin/terraforming"
    # Wrap the executables so GEM_HOME points at the vendored gems at runtime.
    bin.env_script_all_files(libexec/"bin", GEM_HOME: ENV["GEM_HOME"])
  end

  test do
    # `help` needs no AWS credentials; check the usage banner mentions the
    # requested subcommand.
    output = shell_output("#{bin}/terraforming help ec2")
    assert_match "Usage:", output
    assert_match "terraforming ec2", output
  end
end
|
# Homebrew formula for KDE Frameworks' ThreadWeaver library.
class Threadweaver < Formula
  desc "Helper for multithreaded programming"
  homepage "https://api.kde.org/frameworks/threadweaver/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.97/threadweaver-5.97.0.tar.xz"
  sha256 "46975d03feea09c41ac369fd076d7b2c92ad4468a81f48c2eeff622eabfc408f"
  license "LGPL-2.0-or-later"
  head "https://invent.kde.org/frameworks/threadweaver.git", branch: "master"

  # We check the tags from the `head` repository because the latest stable
  # version doesn't seem to be easily available elsewhere.
  livecheck do
    url :head
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any, arm64_monterey: "d90943a6e120701ced520f7a8b1a55dbd7c35845daf0b5b363c0329fdc191c0c"
    sha256 cellar: :any, arm64_big_sur: "65e2d18ba6a0f5849354e89741f7e0fd8a8a1e3105ca01fae294318b2fceeacf"
    sha256 cellar: :any, monterey: "6ca0edb6df9b965d4d830f296b932f1d7e3995fe9f038206a4c2f4de5db65133"
    sha256 cellar: :any, big_sur: "9af28a8355af43b464d44d4a3020979cba04f6d22b4c8da74ab73fa07d89f3f3"
    sha256 cellar: :any, catalina: "4d74cca53a87ba0cc790ed58768cf4373cf38d768314470eeb9fdf072deaf794"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "2d041b5632e0134c82f4e29a662681b745a3a9b1199f25faee92a02c69bba0cc"
  end

  depends_on "cmake" => [:build, :test]
  # doxygen/graphviz are needed to generate the QCH API docs (BUILD_QCH=ON).
  depends_on "doxygen" => :build
  depends_on "extra-cmake-modules" => [:build, :test]
  depends_on "graphviz" => :build
  depends_on "qt@5"

  on_linux do
    depends_on "gcc"
  end

  fails_with gcc: "5"

  def install
    args = std_cmake_args + %w[
      -S .
      -B build
      -DBUILD_QCH=ON
    ]

    system "cmake", *args
    system "cmake", "--build", "build"
    system "cmake", "--install", "build"

    # Ship the upstream examples; the test block builds one of them.
    pkgshare.install "examples"
  end

  test do
    # Drop Homebrew's CPATH so the example builds only with the include paths
    # CMake computes.
    ENV.delete "CPATH"

    # qt@5 is keg-only: point CMake at its Qt5Core package config, and on
    # Linux also embed an rpath to its libraries.
    qt5_args = ["-DQt5Core_DIR=#{Formula["qt@5"].opt_lib}/cmake/Qt5Core"]
    qt5_args << "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath,#{Formula["qt@5"].opt_lib}" unless OS.mac?
    system "cmake", (pkgshare/"examples/HelloWorld"), *std_cmake_args, *qt5_args
    system "cmake", "--build", "."
    assert_equal "Hello World!", shell_output("./ThreadWeaver_HelloWorld 2>&1").strip
  end
end
threadweaver: update 5.97.0 bottle.
# Homebrew formula for KDE Frameworks' ThreadWeaver library.
class Threadweaver < Formula
  desc "Helper for multithreaded programming"
  homepage "https://api.kde.org/frameworks/threadweaver/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.97/threadweaver-5.97.0.tar.xz"
  sha256 "46975d03feea09c41ac369fd076d7b2c92ad4468a81f48c2eeff622eabfc408f"
  license "LGPL-2.0-or-later"
  head "https://invent.kde.org/frameworks/threadweaver.git", branch: "master"

  # We check the tags from the `head` repository because the latest stable
  # version doesn't seem to be easily available elsewhere.
  livecheck do
    url :head
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any, arm64_monterey: "3cd1b966274b17803d823e41130c0cedcf366842d62cdb78f07cde40f46e1afc"
    sha256 cellar: :any, arm64_big_sur: "9871ed44a0a184f2222d41b837ed7de8e8dae92415089a81d0241370695b2b48"
    sha256 cellar: :any, monterey: "f8c5e6f74232be9f45762fa1056df7cd3f5723378a0d47f7c2bf3fba1a84595f"
    sha256 cellar: :any, big_sur: "cc1f2b1e4ca40ec8cc752beded38bc50e2c34d4a28f484ecb32f86a8de376590"
    sha256 cellar: :any, catalina: "88e57fb7b220fa0e533dcc283ade1aca7e5a85688a5d82a873b0046240095c24"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "55e02a0cbb63d8fcab891d5c5514feac33b51a53ed562000a0c47e0030b0dd8b"
  end

  depends_on "cmake" => [:build, :test]
  # doxygen/graphviz are needed to generate the QCH API docs (BUILD_QCH=ON).
  depends_on "doxygen" => :build
  depends_on "extra-cmake-modules" => [:build, :test]
  depends_on "graphviz" => :build
  depends_on "qt@5"

  on_linux do
    depends_on "gcc"
  end

  fails_with gcc: "5"

  def install
    args = std_cmake_args + %w[
      -S .
      -B build
      -DBUILD_QCH=ON
    ]

    system "cmake", *args
    system "cmake", "--build", "build"
    system "cmake", "--install", "build"

    # Ship the upstream examples; the test block builds one of them.
    pkgshare.install "examples"
  end

  test do
    # Drop Homebrew's CPATH so the example builds only with the include paths
    # CMake computes.
    ENV.delete "CPATH"

    # qt@5 is keg-only: point CMake at its Qt5Core package config, and on
    # Linux also embed an rpath to its libraries.
    qt5_args = ["-DQt5Core_DIR=#{Formula["qt@5"].opt_lib}/cmake/Qt5Core"]
    qt5_args << "-DCMAKE_EXE_LINKER_FLAGS=-Wl,-rpath,#{Formula["qt@5"].opt_lib}" unless OS.mac?
    system "cmake", (pkgshare/"examples/HelloWorld"), *std_cmake_args, *qt5_args
    system "cmake", "--build", "."
    assert_equal "Hello World!", shell_output("./ThreadWeaver_HelloWorld 2>&1").strip
  end
end
|
# Homebrew formula for KDE Frameworks' ThreadWeaver library (5.87 snapshot).
class Threadweaver < Formula
  desc "Helper for multithreaded programming"
  homepage "https://api.kde.org/frameworks/threadweaver/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.87/threadweaver-5.87.0.tar.xz"
  sha256 "904db85af3f4cf5a7b0125264926d83405489feec66cacf675c67019c5fe17bf"
  license "LGPL-2.0-or-later"
  head "https://invent.kde.org/frameworks/threadweaver.git", branch: "master"

  # We check the tags from the `head` repository because the latest stable
  # version doesn't seem to be easily available elsewhere.
  livecheck do
    url :head
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any, arm64_big_sur: "1393ab0dc4b73feda73ec7492cf573755fb2f046c77f7ca14cea2b0d4b517901"
    sha256 cellar: :any, big_sur: "a068b36279c0b3b55fc77d136e42712d8f37f5efa370f819f51ff65d39358535"
    sha256 cellar: :any, catalina: "14fab5d7bfac23a31ec7c4ea8940cbe47097728f4f2642a648c41aa9aa7c4f49"
    sha256 cellar: :any, mojave: "6edccbaece80ca49186ac727770008f817d2744f1206760f01e609d0e5408b76"
  end

  depends_on "cmake" => [:build, :test]
  # doxygen/graphviz are needed to generate the QCH API docs (BUILD_QCH=ON).
  depends_on "doxygen" => :build
  depends_on "extra-cmake-modules" => [:build, :test]
  depends_on "graphviz" => :build
  depends_on "qt@5"

  def install
    args = std_cmake_args
    args << "-DBUILD_TESTING=OFF"
    args << "-DBUILD_QCH=ON"

    system "cmake", "-S", ".", "-B", "build", *args
    system "cmake", "--build", "build"
    system "cmake", "--install", "build"

    # Ship the upstream examples; the test block builds one of them.
    pkgshare.install "examples"
  end

  test do
    # Drop Homebrew's CPATH so the example builds only with the include paths
    # CMake computes.
    ENV.delete "CPATH"
    # qt@5 is keg-only: point CMake at its Qt5Core package config.
    qt5_arg = "-DQt5Core_DIR=#{Formula["qt@5"].opt_prefix/"lib/cmake/Qt5Core"}"
    system "cmake", (pkgshare/"examples/HelloWorld"), *std_cmake_args, qt5_arg
    system "make"
    assert_equal "Hello World!", shell_output("./ThreadWeaver_HelloWorld 2>&1").strip
  end
end
threadweaver 5.88.0
Closes #89398.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for KDE Frameworks' ThreadWeaver library (5.88 snapshot).
class Threadweaver < Formula
  desc "Helper for multithreaded programming"
  homepage "https://api.kde.org/frameworks/threadweaver/html/index.html"
  url "https://download.kde.org/stable/frameworks/5.88/threadweaver-5.88.0.tar.xz"
  sha256 "b2f3079158a52c8e49ea0989e18a509dc9693f6a26f0739b133bee0f4b02df1f"
  license "LGPL-2.0-or-later"
  head "https://invent.kde.org/frameworks/threadweaver.git", branch: "master"

  # We check the tags from the `head` repository because the latest stable
  # version doesn't seem to be easily available elsewhere.
  livecheck do
    url :head
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  # NOTE(review): these bottle checksums match the previous (5.87) release —
  # presumably awaiting a rebuild for 5.88; confirm against the bottle CI.
  bottle do
    sha256 cellar: :any, arm64_big_sur: "1393ab0dc4b73feda73ec7492cf573755fb2f046c77f7ca14cea2b0d4b517901"
    sha256 cellar: :any, big_sur: "a068b36279c0b3b55fc77d136e42712d8f37f5efa370f819f51ff65d39358535"
    sha256 cellar: :any, catalina: "14fab5d7bfac23a31ec7c4ea8940cbe47097728f4f2642a648c41aa9aa7c4f49"
    sha256 cellar: :any, mojave: "6edccbaece80ca49186ac727770008f817d2744f1206760f01e609d0e5408b76"
  end

  depends_on "cmake" => [:build, :test]
  # doxygen/graphviz are needed to generate the QCH API docs (BUILD_QCH=ON).
  depends_on "doxygen" => :build
  depends_on "extra-cmake-modules" => [:build, :test]
  depends_on "graphviz" => :build
  depends_on "qt@5"

  def install
    args = std_cmake_args
    args << "-DBUILD_TESTING=OFF"
    args << "-DBUILD_QCH=ON"

    system "cmake", "-S", ".", "-B", "build", *args
    system "cmake", "--build", "build"
    system "cmake", "--install", "build"

    # Ship the upstream examples; the test block builds one of them.
    pkgshare.install "examples"
  end

  test do
    # Drop Homebrew's CPATH so the example builds only with the include paths
    # CMake computes.
    ENV.delete "CPATH"
    # qt@5 is keg-only: point CMake at its Qt5Core package config.
    qt5_arg = "-DQt5Core_DIR=#{Formula["qt@5"].opt_prefix/"lib/cmake/Qt5Core"}"
    system "cmake", (pkgshare/"examples/HelloWorld"), *std_cmake_args, qt5_arg
    system "make"
    assert_equal "Hello World!", shell_output("./ThreadWeaver_HelloWorld 2>&1").strip
  end
end
|
# Homebrew formula for the U-Boot host tools (mkimage/dumpimage).
class UBootTools < Formula
  desc "Universal boot loader"
  homepage "https://www.denx.de/wiki/U-Boot/"
  url "https://ftp.denx.de/pub/u-boot/u-boot-2020.10.tar.bz2"
  sha256 "0d481bbdc05c0ee74908ec2f56a6daa53166cc6a78a0e4fac2ac5d025770a622"

  livecheck do
    url "https://ftp.denx.de/pub/u-boot/"
    regex(/href=.*?u-boot[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    cellar :any
    sha256 "c4e1c77a34e57576f9ab599781be090820a4d5911f4147e10d0e99114cd3c8c6" => :catalina
    sha256 "44d21cc3ac974b0538d24d4e5a74f25e3df764c8b5fc3458214890bacfa138ac" => :mojave
    sha256 "afb5dea722a9ae646809a3c8b59dbbd80b55042e3c3de8f45741e6ebb460df6a" => :high_sierra
  end

  depends_on "openssl@1.1"

  uses_from_macos "bison" => :build
  uses_from_macos "flex" => :build

  def install
    # Replace keyword not present in make 3.81
    inreplace "Makefile", "undefine MK_ARCH", "unexport MK_ARCH"

    # Configure for the sandbox (host) target, then build only the host
    # tools; NO_SDL=1 skips the optional SDL dependency.
    system "make", "sandbox_defconfig"
    system "make", "tools", "NO_SDL=1"
    bin.install "tools/mkimage"
    bin.install "tools/dumpimage"
    man1.install "doc/mkimage.1"
  end

  test do
    # Both tools print their version and exit successfully.
    system bin/"mkimage", "-V"
    system bin/"dumpimage", "-V"
  end
end
u-boot-tools: update 2020.10 bottle.
# Homebrew formula for the U-Boot host tools (mkimage/dumpimage).
class UBootTools < Formula
  desc "Universal boot loader"
  homepage "https://www.denx.de/wiki/U-Boot/"
  url "https://ftp.denx.de/pub/u-boot/u-boot-2020.10.tar.bz2"
  sha256 "0d481bbdc05c0ee74908ec2f56a6daa53166cc6a78a0e4fac2ac5d025770a622"

  livecheck do
    url "https://ftp.denx.de/pub/u-boot/"
    regex(/href=.*?u-boot[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    cellar :any
    sha256 "c4e1c77a34e57576f9ab599781be090820a4d5911f4147e10d0e99114cd3c8c6" => :catalina
    sha256 "44d21cc3ac974b0538d24d4e5a74f25e3df764c8b5fc3458214890bacfa138ac" => :mojave
    sha256 "afb5dea722a9ae646809a3c8b59dbbd80b55042e3c3de8f45741e6ebb460df6a" => :high_sierra
    sha256 "790a16df06987ebba7a1f0b6120098f5808aa29af313f7fc966a0aa877746d9c" => :x86_64_linux
  end

  depends_on "openssl@1.1"

  uses_from_macos "bison" => :build
  uses_from_macos "flex" => :build

  def install
    # Replace keyword not present in make 3.81
    inreplace "Makefile", "undefine MK_ARCH", "unexport MK_ARCH"

    # Configure for the sandbox (host) target, then build only the host
    # tools; NO_SDL=1 skips the optional SDL dependency.
    system "make", "sandbox_defconfig"
    system "make", "tools", "NO_SDL=1"
    bin.install "tools/mkimage"
    bin.install "tools/dumpimage"
    man1.install "doc/mkimage.1"
  end

  test do
    # Both tools print their version and exit successfully.
    system bin/"mkimage", "-V"
    system bin/"dumpimage", "-V"
  end
end
|
# CocoaPods specification for JSONParserSwift: parse JSON data directly
# into Swift objects.
Pod::Spec.new do |s|
  s.name     = 'JSONParserSwift'
  s.version  = '0.1.3'
  s.summary  = 'Parse your JSON data directly to Swift object.'
  s.homepage = 'https://github.com/mukeshydv/JSONParserSwift'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'mukeshydv' => 'mails4ymukesh@gmail.com' }
  # The git tag is derived from the spec version so the two can never drift.
  s.source       = { :git => 'https://github.com/mukeshydv/JSONParserSwift.git', :tag => s.version.to_s }
  s.requires_arc = true

  # Minimum deployment targets for every supported Apple platform.
  s.osx.deployment_target     = "10.9"
  s.ios.deployment_target     = "8.0"
  s.watchos.deployment_target = "2.0"
  s.tvos.deployment_target    = "9.0"

  s.source_files = 'JSONParserSwift/Classes/**/*'

  s.pod_target_xcconfig = {
    'SWIFT_VERSION' => '3.0',
  }
end
version 0.2.4
# CocoaPods specification for JSONParserSwift: parse JSON data directly
# into Swift objects.
Pod::Spec.new do |s|
  s.name     = 'JSONParserSwift'
  s.version  = '0.2.4'
  s.summary  = 'Parse your JSON data directly to Swift object.'
  s.homepage = 'https://github.com/mukeshydv/JSONParserSwift'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'mukeshydv' => 'mails4ymukesh@gmail.com' }
  # The git tag is derived from the spec version so the two can never drift.
  s.source       = { :git => 'https://github.com/mukeshydv/JSONParserSwift.git', :tag => s.version.to_s }
  s.requires_arc = true

  # Minimum deployment targets for every supported Apple platform.
  s.osx.deployment_target     = "10.9"
  s.ios.deployment_target     = "8.0"
  s.watchos.deployment_target = "2.0"
  s.tvos.deployment_target    = "9.0"

  s.source_files = 'JSONParserSwift/Classes/**/*'

  s.pod_target_xcconfig = {
    'SWIFT_VERSION' => '3.0',
  }
end
|
# Homebrew formula for PROJ.4, including optional vertical datum grids.
class Proj < Formula
  desc "PROJ.4, a Cartographic Projections Library"
  homepage "http://trac.osgeo.org/proj/"
  url "http://download.osgeo.org/proj/proj-4.9.1.tar.gz"
  sha256 "fca0388f3f8bc5a1a803d2f6ff30017532367992b30cf144f2d39be88f36c319"
  head "http://svn.osgeo.org/metacrs/proj/trunk/proj"

  option "with-vdatum", "Install vertical datum files (~380 MB)"

  bottle do
    sha256 "6485ac1d1b0413371b244d38553b527a81b001aa92b0ef547ee5b9f7c9672dc8" => :yosemite
    sha256 "17ccc289bc788e8823a1fa3285a4ae926feafb9a4cd1a534e56c19b343c6c2fd" => :mavericks
    sha256 "6e7a4cd42928b468bf304eb656d94fcf57a9a4647e5a28d7d9a0eb215891b128" => :mountain_lion
  end

  # The datum grid files are required to support datum shifting
  resource "datumgrid" do
    url "http://download.osgeo.org/proj/proj-datumgrid-1.5.zip"
    sha256 "723c4017d95d7a8abdf3bda4e18d3c15d79b00f9326d453da5fdf13f96c287db"
  end

  # Vertical datum files
  resource "usa_geoid2012" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2012.zip"
    sha256 "afe49dc2c405d19a467ec756483944a3c9148e8c1460cb7e82dc8d4a64c4c472"
  end

  resource "usa_geoid2009" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2009.zip"
    sha256 "1a232fb7fe34d2dad2d48872025597ac7696882755ded1493118a573f60008b1"
  end

  resource "usa_geoid2003" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2003.zip"
    sha256 "1d15950f46e96e422ebc9202c24aadec221774587b7a4cd963c63f8837421351"
  end

  resource "usa_geoid1999" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid1999.zip"
    sha256 "665cd4dfc991f2517752f9db84d632b56bba31a1ed6a5f0dc397e4b0b3311f36"
  end

  resource "vertconc" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertconc.gtx"
    sha256 "ecf7bce7bf9e56f6f79a2356d8d6b20b9cb49743701f81db802d979b5a01fcff"
  end

  resource "vertcone" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertcone.gtx"
    sha256 "f6da1c615c2682ecb7adcfdf22b1d37aba2771c2ea00abe8907acea07413903b"
  end

  resource "vertconw" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertconw.gtx"
    sha256 "de648c0f6e8b5ebfc4b2d82f056c7b993ca3c37373a7f6b7844fe9bd4871821b"
  end

  resource "egm96_15" do
    url "http://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx"
    sha256 "c02a6eb70a7a78efebe5adf3ade626eb75390e170bb8b3f36136a2c28f5326a0"
  end

  resource "egm08_25" do
    url "http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx"
    sha256 "c18f20d1fe88616e3497a3eff993227371e1d9acc76f96253e8d84b475bbe6bf"
  end

  # Preserve libtool .la archives instead of letting Homebrew clean them.
  skip_clean :la

  # Historical workaround: this llvm-gcc build cannot compile proj.
  fails_with :llvm do
    build 2334
  end

  def install
    # datumgrid must be unpacked into the source tree's nad/ directory so it
    # is compiled into the installation; the optional vertical datum grids
    # are installed as plain data files under share/proj.
    resources.each do |r|
      if r.name == "datumgrid"
        (buildpath/"nad").install r
      elsif build.with? "vdatum"
        (share/"proj").install r
      end
    end

    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    (testpath/"test").write <<-EOS.undent
      45d15n 71d07w Boston, United States
      40d40n 73d58w New York, United States
      48d51n 2d20e Paris, France
      51d30n 7'w London, England
    EOS
    match = <<-EOS.undent
      -4887590.49\t7317961.48 Boston, United States
      -5542524.55\t6982689.05 New York, United States
      171224.94\t5415352.81 Paris, France
      -8101.66\t5707500.23 London, England
    EOS

    # NOTE(review): backticks ignore the exit status — only stdout is compared.
    assert_equal match,
                 `#{bin}/proj +proj=poly +ellps=clrk66 -r #{testpath}/test`
  end
end
proj: update 4.9.1 bottle.
# Homebrew formula for PROJ.4, including optional vertical datum grids.
class Proj < Formula
  desc "PROJ.4, a Cartographic Projections Library"
  homepage "http://trac.osgeo.org/proj/"
  url "http://download.osgeo.org/proj/proj-4.9.1.tar.gz"
  sha256 "fca0388f3f8bc5a1a803d2f6ff30017532367992b30cf144f2d39be88f36c319"
  head "http://svn.osgeo.org/metacrs/proj/trunk/proj"

  option "with-vdatum", "Install vertical datum files (~380 MB)"

  bottle do
    sha256 "a25e829f6fd646cc0476b9519ad5e7dd47ad0671b6ecf0d48aa4240ef172db20" => :el_capitan
    sha256 "6485ac1d1b0413371b244d38553b527a81b001aa92b0ef547ee5b9f7c9672dc8" => :yosemite
    sha256 "17ccc289bc788e8823a1fa3285a4ae926feafb9a4cd1a534e56c19b343c6c2fd" => :mavericks
    sha256 "6e7a4cd42928b468bf304eb656d94fcf57a9a4647e5a28d7d9a0eb215891b128" => :mountain_lion
  end

  # The datum grid files are required to support datum shifting
  resource "datumgrid" do
    url "http://download.osgeo.org/proj/proj-datumgrid-1.5.zip"
    sha256 "723c4017d95d7a8abdf3bda4e18d3c15d79b00f9326d453da5fdf13f96c287db"
  end

  # Vertical datum files
  resource "usa_geoid2012" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2012.zip"
    sha256 "afe49dc2c405d19a467ec756483944a3c9148e8c1460cb7e82dc8d4a64c4c472"
  end

  resource "usa_geoid2009" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2009.zip"
    sha256 "1a232fb7fe34d2dad2d48872025597ac7696882755ded1493118a573f60008b1"
  end

  resource "usa_geoid2003" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid2003.zip"
    sha256 "1d15950f46e96e422ebc9202c24aadec221774587b7a4cd963c63f8837421351"
  end

  resource "usa_geoid1999" do
    url "http://download.osgeo.org/proj/vdatum/usa_geoid1999.zip"
    sha256 "665cd4dfc991f2517752f9db84d632b56bba31a1ed6a5f0dc397e4b0b3311f36"
  end

  resource "vertconc" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertconc.gtx"
    sha256 "ecf7bce7bf9e56f6f79a2356d8d6b20b9cb49743701f81db802d979b5a01fcff"
  end

  resource "vertcone" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertcone.gtx"
    sha256 "f6da1c615c2682ecb7adcfdf22b1d37aba2771c2ea00abe8907acea07413903b"
  end

  resource "vertconw" do
    url "http://download.osgeo.org/proj/vdatum/vertcon/vertconw.gtx"
    sha256 "de648c0f6e8b5ebfc4b2d82f056c7b993ca3c37373a7f6b7844fe9bd4871821b"
  end

  resource "egm96_15" do
    url "http://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx"
    sha256 "c02a6eb70a7a78efebe5adf3ade626eb75390e170bb8b3f36136a2c28f5326a0"
  end

  resource "egm08_25" do
    url "http://download.osgeo.org/proj/vdatum/egm08_25/egm08_25.gtx"
    sha256 "c18f20d1fe88616e3497a3eff993227371e1d9acc76f96253e8d84b475bbe6bf"
  end

  # Preserve libtool .la archives instead of letting Homebrew clean them.
  skip_clean :la

  # Historical workaround: this llvm-gcc build cannot compile proj.
  fails_with :llvm do
    build 2334
  end

  def install
    # datumgrid must be unpacked into the source tree's nad/ directory so it
    # is compiled into the installation; the optional vertical datum grids
    # are installed as plain data files under share/proj.
    resources.each do |r|
      if r.name == "datumgrid"
        (buildpath/"nad").install r
      elsif build.with? "vdatum"
        (share/"proj").install r
      end
    end

    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    (testpath/"test").write <<-EOS.undent
      45d15n 71d07w Boston, United States
      40d40n 73d58w New York, United States
      48d51n 2d20e Paris, France
      51d30n 7'w London, England
    EOS
    match = <<-EOS.undent
      -4887590.49\t7317961.48 Boston, United States
      -5542524.55\t6982689.05 New York, United States
      171224.94\t5415352.81 Paris, France
      -8101.66\t5707500.23 London, England
    EOS

    # NOTE(review): backticks ignore the exit status — only stdout is compared.
    assert_equal match,
                 `#{bin}/proj +proj=poly +ellps=clrk66 -r #{testpath}/test`
  end
end
|
require 'formula'

# Homebrew formula for qpdf, a PDF transformation toolkit.
# NOTE(review): `require 'formula'`, `sha1` and `Formula.factory` are legacy
# Homebrew APIs — kept as-is for this historical formula snapshot.
class Qpdf < Formula
  homepage 'http://qpdf.sourceforge.net/'
  url 'http://downloads.sourceforge.net/project/qpdf/qpdf/5.1.0/qpdf-5.1.0.tar.gz'
  sha1 'a2aafad5c49efb62e98e6895bb96ca423179bf43'

  depends_on 'pcre'

  def install
    # find Homebrew's libpcre
    ENV.append 'LDFLAGS', "-L#{Formula.factory('pcre').opt_prefix}/lib"
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make install"
  end

  test do
    # Smoke test: the binary runs and reports its version.
    system "#{bin}/qpdf", "--version"
  end
end
qpdf: Depend on newer 'make' on Tiger
Also, delete patches that are not relevant to Tiger or Leopard.
require 'formula'

# Homebrew formula for qpdf, a PDF transformation toolkit.
# NOTE(review): `require 'formula'`, `sha1` and `Formula.factory` are legacy
# Homebrew APIs — kept as-is for this historical formula snapshot.
class Qpdf < Formula
  homepage 'http://qpdf.sourceforge.net/'
  url 'http://downloads.sourceforge.net/project/qpdf/qpdf/5.1.0/qpdf-5.1.0.tar.gz'
  sha1 'a2aafad5c49efb62e98e6895bb96ca423179bf43'

  depends_on 'pcre'
  # Tiger's bundled make is too old for qpdf's build; pull in a newer GNU make.
  depends_on 'homebrew/dupes/make' if MacOS.version == :tiger

  def install
    # find Homebrew's libpcre
    ENV.append 'LDFLAGS', "-L#{Formula.factory('pcre').opt_prefix}/lib"
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make install"
  end

  test do
    # Smoke test: the binary runs and reports its version.
    system "#{bin}/qpdf", "--version"
  end
end
|
# typed: false
# frozen_string_literal: true
require "keg_relocate"
require "language/python"
require "lock_file"
require "ostruct"
require "extend/cachable"
# Installation prefix of a formula.
#
# @api private
class Keg
extend T::Sig
extend Cachable
# Error for when a keg is already linked.
class AlreadyLinkedError < RuntimeError
  # keg: the keg that could not be linked because another version of the
  # same formula is currently linked into the prefix.
  def initialize(keg)
    super <<~EOS
      Cannot link #{keg.name}
      Another version is already linked: #{keg.linked_keg_record.resolved_path}
    EOS
  end
end
# Error for when a keg cannot be linked.
class LinkError < RuntimeError
attr_reader :keg, :src, :dst
def initialize(keg, src, dst, cause)
@src = src
@dst = dst
@keg = keg
@cause = cause
super(cause.message)
set_backtrace(cause.backtrace)
end
end
# Error for when a file already exists or belongs to another keg.
class ConflictError < LinkError
extend T::Sig
sig { returns(String) }
def suggestion
conflict = Keg.for(dst)
rescue NotAKegError, Errno::ENOENT
"already exists. You may want to remove it:\n rm '#{dst}'\n"
else
<<~EOS
is a symlink belonging to #{conflict.name}. You can unlink it:
brew unlink #{conflict.name}
EOS
end
sig { returns(String) }
def to_s
s = []
s << "Could not symlink #{src}"
s << "Target #{dst}" << suggestion
s << <<~EOS
To force the link and overwrite all conflicting files:
brew link --overwrite #{keg.name}
To list all files that would be deleted:
brew link --overwrite --dry-run #{keg.name}
EOS
s.join("\n")
end
end
# Error for when a directory is not writable.
class DirectoryNotWritableError < LinkError
extend T::Sig
sig { returns(String) }
def to_s
<<~EOS
Could not symlink #{src}
#{dst.dirname} is not writable.
EOS
end
end
# Locale-specific directories have the form `language[_territory][.codeset][@modifier]`
LOCALEDIR_RX = %r{(locale|man)/([a-z]{2}|C|POSIX)(_[A-Z]{2})?(\.[a-zA-Z\-0-9]+(@.+)?)?}.freeze
INFOFILE_RX = %r{info/([^.].*?\.info|dir)$}.freeze
KEG_LINK_DIRECTORIES = %w[
bin etc include lib sbin share var
].freeze
MUST_EXIST_SUBDIRECTORIES = (
KEG_LINK_DIRECTORIES - %w[var] + %w[
opt
var/homebrew/linked
]
).map { |dir| HOMEBREW_PREFIX/dir }.sort.uniq.freeze
# Keep relatively in sync with
# {https://github.com/Homebrew/install/blob/HEAD/install.sh}
MUST_EXIST_DIRECTORIES = (MUST_EXIST_SUBDIRECTORIES + [
HOMEBREW_CELLAR,
].sort.uniq).freeze
MUST_BE_WRITABLE_DIRECTORIES = (
%w[
etc/bash_completion.d lib/pkgconfig
share/aclocal share/doc share/info share/locale share/man
share/man/man1 share/man/man2 share/man/man3 share/man/man4
share/man/man5 share/man/man6 share/man/man7 share/man/man8
share/zsh share/zsh/site-functions
var/log
].map { |dir| HOMEBREW_PREFIX/dir } + MUST_EXIST_SUBDIRECTORIES + [
HOMEBREW_CACHE,
HOMEBREW_CELLAR,
HOMEBREW_LOCKS,
HOMEBREW_LOGS,
HOMEBREW_REPOSITORY,
Language::Python.homebrew_site_packages,
]
).sort.uniq.freeze
# These paths relative to the keg's share directory should always be real
# directories in the prefix, never symlinks.
SHARE_PATHS = %w[
aclocal doc info java locale man
man/man1 man/man2 man/man3 man/man4
man/man5 man/man6 man/man7 man/man8
man/cat1 man/cat2 man/cat3 man/cat4
man/cat5 man/cat6 man/cat7 man/cat8
applications gnome gnome/help icons
mime-info pixmaps sounds postgresql
].freeze
ELISP_EXTENSIONS = %w[.el .elc].freeze
PYC_EXTENSIONS = %w[.pyc .pyo].freeze
LIBTOOL_EXTENSIONS = %w[.la .lai].freeze
# Given an array of kegs, this method will try to find some other kegs
# that depend on them. If it does, it returns:
#
# - some kegs in the passed array that have installed dependents
# - some installed dependents of those kegs.
#
# If it doesn't, it returns nil.
#
# Note that nil will be returned if the only installed dependents
# in the passed kegs are other kegs in the array.
#
# For efficiency, we don't bother trying to get complete data.
def self.find_some_installed_dependents(kegs)
keg_names = kegs.select(&:optlinked?).map(&:name)
keg_formulae = []
kegs_by_source = kegs.group_by do |keg|
# First, attempt to resolve the keg to a formula
# to get up-to-date name and tap information.
f = keg.to_formula
keg_formulae << f
[f.name, f.tap]
rescue
# If the formula for the keg can't be found,
# fall back to the information in the tab.
[keg.name, keg.tab.tap]
end
all_required_kegs = Set.new
all_dependents = []
# Don't include dependencies of kegs that were in the given array.
formulae_to_check = Formula.installed - keg_formulae
formulae_to_check.each do |dependent|
required = dependent.missing_dependencies(hide: keg_names)
required_kegs = required.map do |f|
f_kegs = kegs_by_source[[f.name, f.tap]]
next unless f_kegs
f_kegs.max_by(&:version)
end.compact
next if required_kegs.empty?
all_required_kegs += required_kegs
all_dependents << dependent.to_s
end
return if all_required_kegs.empty?
return if all_dependents.empty?
[all_required_kegs.to_a, all_dependents.sort]
end
# @param path if this is a file in a keg, returns the containing {Keg} object.
def self.for(path)
original_path = path
raise Errno::ENOENT, original_path.to_s unless original_path.exist?
if (path = original_path.realpath)
until path.root?
return Keg.new(path) if path.parent.parent == HOMEBREW_CELLAR.realpath
path = path.parent.realpath # realpath() prevents root? failing
end
end
raise NotAKegError, "#{original_path} is not inside a keg"
end
def self.all
Formula.racks.flat_map(&:subdirs).map { |d| new(d) }
end
attr_reader :path, :name, :linked_keg_record, :opt_record
protected :path
extend Forwardable
def_delegators :path,
:to_s, :hash, :abv, :disk_usage, :file_count, :directory?, :exist?, :/,
:join, :rename, :find
def initialize(path)
path = path.resolved_path if path.to_s.start_with?("#{HOMEBREW_PREFIX}/opt/")
raise "#{path} is not a valid keg" if path.parent.parent.realpath != HOMEBREW_CELLAR.realpath
raise "#{path} is not a directory" unless path.directory?
@path = path
@name = path.parent.basename.to_s
@linked_keg_record = HOMEBREW_LINKED_KEGS/name
@opt_record = HOMEBREW_PREFIX/"opt/#{name}"
@require_relocation = false
end
def rack
path.parent
end
alias to_path to_s
sig { returns(String) }
def inspect
"#<#{self.class.name}:#{path}>"
end
def ==(other)
instance_of?(other.class) && path == other.path
end
alias eql? ==
sig { returns(T::Boolean) }
def empty_installation?
Pathname.glob("#{path}/*") do |file|
return false if file.directory? && !file.children.reject(&:ds_store?).empty?
basename = file.basename.to_s
next if Metafiles.copy?(basename)
next if %w[.DS_Store INSTALL_RECEIPT.json].include?(basename)
return false
end
true
end
def require_relocation?
@require_relocation
end
def linked?
linked_keg_record.symlink? &&
linked_keg_record.directory? &&
path == linked_keg_record.resolved_path
end
def remove_linked_keg_record
linked_keg_record.unlink
linked_keg_record.parent.rmdir_if_possible
end
def optlinked?
opt_record.symlink? && path == opt_record.resolved_path
end
def remove_old_aliases
opt = opt_record.parent
linkedkegs = linked_keg_record.parent
tap = begin
to_formula.tap
rescue
# If the formula can't be found, just ignore aliases for now.
nil
end
if tap
bad_tap_opt = opt/tap.user
FileUtils.rm_rf bad_tap_opt if !bad_tap_opt.symlink? && bad_tap_opt.directory?
end
aliases.each do |a|
# versioned aliases are handled below
next if a.match?(/.+@./)
alias_opt_symlink = opt/a
if alias_opt_symlink.symlink? && alias_opt_symlink.exist?
alias_opt_symlink.delete if alias_opt_symlink.realpath == opt_record.realpath
elsif alias_opt_symlink.symlink? || alias_opt_symlink.exist?
alias_opt_symlink.delete
end
alias_linkedkegs_symlink = linkedkegs/a
alias_linkedkegs_symlink.delete if alias_linkedkegs_symlink.symlink? || alias_linkedkegs_symlink.exist?
end
Pathname.glob("#{opt_record}@*").each do |a|
a = a.basename.to_s
next if aliases.include?(a)
alias_opt_symlink = opt/a
if alias_opt_symlink.symlink? && alias_opt_symlink.exist? && rack == alias_opt_symlink.realpath.parent
alias_opt_symlink.delete
end
alias_linkedkegs_symlink = linkedkegs/a
alias_linkedkegs_symlink.delete if alias_linkedkegs_symlink.symlink? || alias_linkedkegs_symlink.exist?
end
end
def remove_opt_record
opt_record.unlink
opt_record.parent.rmdir_if_possible
end
def uninstall
CacheStoreDatabase.use(:linkage) do |db|
break unless db.created?
LinkageCacheStore.new(path, db).delete!
end
path.rmtree
path.parent.rmdir_if_possible
remove_opt_record if optlinked?
remove_old_aliases
remove_oldname_opt_record
rescue Errno::EACCES, Errno::ENOTEMPTY
odie <<~EOS
Could not remove #{name} keg! Do so manually:
sudo rm -rf #{path}
EOS
end
# TODO: refactor to use keyword arguments.
def unlink(**options)
ObserverPathnameExtension.reset_counts!
dirs = []
keg_directories = KEG_LINK_DIRECTORIES.map { |d| path/d }
.select(&:exist?)
keg_directories.each do |dir|
dir.find do |src|
dst = HOMEBREW_PREFIX + src.relative_path_from(path)
dst.extend(ObserverPathnameExtension)
dirs << dst if dst.directory? && !dst.symlink?
# check whether the file to be unlinked is from the current keg first
next unless dst.symlink?
next if src != dst.resolved_path
if options[:dry_run]
puts dst
Find.prune if src.directory?
next
end
dst.uninstall_info if dst.to_s.match?(INFOFILE_RX)
dst.unlink
remove_old_aliases
Find.prune if src.directory?
end
end
unless options[:dry_run]
remove_linked_keg_record if linked?
dirs.reverse_each(&:rmdir_if_possible)
end
ObserverPathnameExtension.n
end
def lock(&block)
FormulaLock.new(name).with_lock do
if oldname_opt_record
FormulaLock.new(oldname_opt_record.basename.to_s).with_lock(&block)
else
yield
end
end
end
def completion_installed?(shell)
dir = case shell
when :bash then path/"etc/bash_completion.d"
when :zsh
dir = path/"share/zsh/site-functions"
dir if dir.directory? && dir.children.any? { |f| f.basename.to_s.start_with?("_") }
when :fish then path/"share/fish/vendor_completions.d"
end
dir&.directory? && !dir.children.empty?
end
def functions_installed?(shell)
case shell
when :fish
dir = path/"share/fish/vendor_functions.d"
dir.directory? && !dir.children.empty?
when :zsh
# Check for non completion functions (i.e. files not started with an underscore),
# since those can be checked separately
dir = path/"share/zsh/site-functions"
dir.directory? && dir.children.any? { |f| !f.basename.to_s.start_with?("_") }
end
end
sig { returns(T::Boolean) }
def plist_installed?
!Dir["#{path}/*.plist"].empty?
end
def python_site_packages_installed?
(path/"lib/python2.7/site-packages").directory?
end
sig { returns(T::Boolean) }
def python_pth_files_installed?
!Dir["#{path}/lib/python2.7/site-packages/*.pth"].empty?
end
sig { returns(T::Array[Pathname]) }
def apps
app_prefix = optlinked? ? opt_record : path
Pathname.glob("#{app_prefix}/{,libexec/}*.app")
end
def elisp_installed?
return false unless (path/"share/emacs/site-lisp"/name).exist?
(path/"share/emacs/site-lisp"/name).children.any? { |f| ELISP_EXTENSIONS.include? f.extname }
end
def version
require "pkg_version"
PkgVersion.parse(path.basename.to_s)
end
def to_formula
Formulary.from_keg(self)
end
def oldname_opt_record
@oldname_opt_record ||= if (opt_dir = HOMEBREW_PREFIX/"opt").directory?
opt_dir.subdirs.find do |dir|
dir.symlink? && dir != opt_record && path.parent == dir.resolved_path.parent
end
end
end
# TODO: refactor to use keyword arguments.
def link(**options)
raise AlreadyLinkedError, self if linked_keg_record.directory?
ObserverPathnameExtension.reset_counts!
optlink(**options) unless options[:dry_run]
# yeah indeed, you have to force anything you need in the main tree into
# these dirs REMEMBER that *NOT* everything needs to be in the main tree
link_dir("etc", **options) { :mkpath }
link_dir("bin", **options) { :skip_dir }
link_dir("sbin", **options) { :skip_dir }
link_dir("include", **options) { :link }
link_dir("share", **options) do |relative_path|
case relative_path.to_s
when INFOFILE_RX then :info
when "locale/locale.alias",
%r{^icons/.*/icon-theme\.cache$}
:skip_file
when LOCALEDIR_RX,
%r{^icons/}, # all icons subfolders should also mkpath
/^zsh/,
/^fish/,
%r{^lua/}, # Lua, Lua51, Lua53 all need the same handling.
%r{^guile/},
*SHARE_PATHS
:mkpath
else
:link
end
end
link_dir("lib", **options) do |relative_path|
case relative_path.to_s
when "charset.alias"
:skip_file
when "pkgconfig", # pkg-config database gets explicitly created
"cmake", # cmake database gets explicitly created
"dtrace", # lib/language folders also get explicitly created
/^gdk-pixbuf/,
"ghc",
/^gio/,
"lua",
/^mecab/,
/^node/,
/^ocaml/,
/^perl5/,
"php",
/^python[23]\.\d/,
/^R/,
/^ruby/
:mkpath
else
# Everything else is symlinked to the cellar
:link
end
end
link_dir("Frameworks", **options) do |relative_path|
# Frameworks contain symlinks pointing into a subdir, so we have to use
# the :link strategy. However, for Foo.framework and
# Foo.framework/Versions we have to use :mkpath so that multiple formulae
# can link their versions into it and `brew [un]link` works.
if relative_path.to_s.match?(%r{[^/]*\.framework(/Versions)?$})
:mkpath
else
:link
end
end
make_relative_symlink(linked_keg_record, path, **options) unless options[:dry_run]
rescue LinkError
unlink(verbose: options[:verbose])
raise
else
ObserverPathnameExtension.n
end
def remove_oldname_opt_record
return unless oldname_opt_record
return if oldname_opt_record.resolved_path != path
@oldname_opt_record.unlink
@oldname_opt_record.parent.rmdir_if_possible
@oldname_opt_record = nil
end
def tab
Tab.for_keg(self)
end
def runtime_dependencies
Keg.cache[:runtime_dependencies] ||= {}
Keg.cache[:runtime_dependencies][path] ||= tab.runtime_dependencies
end
def aliases
tab.aliases || []
end
def optlink(**options)
opt_record.delete if opt_record.symlink? || opt_record.exist?
make_relative_symlink(opt_record, path, **options)
aliases.each do |a|
alias_opt_record = opt_record.parent/a
alias_opt_record.delete if alias_opt_record.symlink? || alias_opt_record.exist?
make_relative_symlink(alias_opt_record, path, **options)
end
return unless oldname_opt_record
oldname_opt_record.delete
make_relative_symlink(oldname_opt_record, path, **options)
end
def delete_pyc_files!
find { |pn| pn.delete if PYC_EXTENSIONS.include?(pn.extname) }
find { |pn| FileUtils.rm_rf pn if pn.basename.to_s == "__pycache__" }
end
private
def resolve_any_conflicts(dst, **options)
return unless dst.symlink?
src = dst.resolved_path
# src itself may be a symlink, so check lstat to ensure we are dealing with
# a directory, and not a symlink pointing at a directory (which needs to be
# treated as a file). In other words, we only want to resolve one symlink.
begin
stat = src.lstat
rescue Errno::ENOENT
# dst is a broken symlink, so remove it.
dst.unlink unless options[:dry_run]
return
end
return unless stat.directory?
begin
keg = Keg.for(src)
rescue NotAKegError
puts "Won't resolve conflicts for symlink #{dst} as it doesn't resolve into the Cellar" if options[:verbose]
return
end
dst.unlink unless options[:dry_run]
keg.link_dir(src, **options) { :mkpath }
true
end
def make_relative_symlink(dst, src, **options)
if dst.symlink? && src == dst.resolved_path
puts "Skipping; link already exists: #{dst}" if options[:verbose]
return
end
# cf. git-clean -n: list files to delete, don't really link or delete
if options[:dry_run] && options[:overwrite]
if dst.symlink?
puts "#{dst} -> #{dst.resolved_path}"
elsif dst.exist?
puts dst
end
return
end
# list all link targets
if options[:dry_run]
puts dst
return
end
dst.delete if options[:overwrite] && (dst.exist? || dst.symlink?)
dst.make_relative_symlink(src)
rescue Errno::EEXIST => e
# Retry if we're linking a different version of the same
# formula. The `AlreadyLinkedError` above won't catch
# this if a formula is missing an optlink. In that case,
# delete the symlink and retry.
if dst.symlink? && Keg.for(dst).name == name
dst.unlink
retry
end
raise ConflictError.new(self, src.relative_path_from(path), dst, e) if dst.exist?
if dst.symlink?
dst.unlink
retry
end
rescue Errno::EACCES => e
raise DirectoryNotWritableError.new(self, src.relative_path_from(path), dst, e)
rescue SystemCallError => e
raise LinkError.new(self, src.relative_path_from(path), dst, e)
end
protected
# symlinks the contents of path+relative_dir recursively into #{HOMEBREW_PREFIX}/relative_dir
def link_dir(relative_dir, **options)
root = path/relative_dir
return unless root.exist?
root.find do |src|
next if src == root
dst = HOMEBREW_PREFIX + src.relative_path_from(path)
dst.extend ObserverPathnameExtension
if src.symlink? || src.file?
Find.prune if File.basename(src) == ".DS_Store"
Find.prune if src.resolved_path == dst
# Don't link pyc or pyo files because Python overwrites these
# cached object files and next time brew wants to link, the
# file is in the way.
Find.prune if PYC_EXTENSIONS.include?(src.extname) && src.to_s.include?("/site-packages/")
case yield src.relative_path_from(root)
when :skip_file, nil
Find.prune
when :info
next if File.basename(src) == "dir" # skip historical local 'dir' files
make_relative_symlink dst, src, **options
dst.install_info
else
make_relative_symlink dst, src, **options
end
elsif src.directory?
# if the dst dir already exists, then great! walk the rest of the tree tho
next if dst.directory? && !dst.symlink?
# no need to put .app bundles in the path, the user can just use
# spotlight, or the open command and actual mac apps use an equivalent
Find.prune if src.extname == ".app"
case yield src.relative_path_from(root)
when :skip_dir
Find.prune
when :mkpath
dst.mkpath unless resolve_any_conflicts(dst, **options)
else
unless resolve_any_conflicts(dst, **options)
make_relative_symlink dst, src, **options
Find.prune
end
end
end
end
end
end
require "extend/os/keg"
keg: remove trailing whitespace.
# typed: false
# frozen_string_literal: true
require "keg_relocate"
require "language/python"
require "lock_file"
require "ostruct"
require "extend/cachable"
# Installation prefix of a formula.
#
# @api private
class Keg
extend T::Sig
extend Cachable
# Error for when a keg is already linked.
class AlreadyLinkedError < RuntimeError
def initialize(keg)
super <<~EOS
Cannot link #{keg.name}
Another version is already linked: #{keg.linked_keg_record.resolved_path}
EOS
end
end
# Error for when a keg cannot be linked.
class LinkError < RuntimeError
attr_reader :keg, :src, :dst
def initialize(keg, src, dst, cause)
@src = src
@dst = dst
@keg = keg
@cause = cause
super(cause.message)
set_backtrace(cause.backtrace)
end
end
# Error for when a file already exists or belongs to another keg.
class ConflictError < LinkError
extend T::Sig
sig { returns(String) }
def suggestion
conflict = Keg.for(dst)
rescue NotAKegError, Errno::ENOENT
"already exists. You may want to remove it:\n rm '#{dst}'\n"
else
<<~EOS
is a symlink belonging to #{conflict.name}. You can unlink it:
brew unlink #{conflict.name}
EOS
end
sig { returns(String) }
def to_s
s = []
s << "Could not symlink #{src}"
s << "Target #{dst}" << suggestion
s << <<~EOS
To force the link and overwrite all conflicting files:
brew link --overwrite #{keg.name}
To list all files that would be deleted:
brew link --overwrite --dry-run #{keg.name}
EOS
s.join("\n")
end
end
# Error for when a directory is not writable.
class DirectoryNotWritableError < LinkError
extend T::Sig
sig { returns(String) }
def to_s
<<~EOS
Could not symlink #{src}
#{dst.dirname} is not writable.
EOS
end
end
# Locale-specific directories have the form `language[_territory][.codeset][@modifier]`
LOCALEDIR_RX = %r{(locale|man)/([a-z]{2}|C|POSIX)(_[A-Z]{2})?(\.[a-zA-Z\-0-9]+(@.+)?)?}.freeze
INFOFILE_RX = %r{info/([^.].*?\.info|dir)$}.freeze
KEG_LINK_DIRECTORIES = %w[
bin etc include lib sbin share var
].freeze
MUST_EXIST_SUBDIRECTORIES = (
KEG_LINK_DIRECTORIES - %w[var] + %w[
opt
var/homebrew/linked
]
).map { |dir| HOMEBREW_PREFIX/dir }.sort.uniq.freeze
# Keep relatively in sync with
# {https://github.com/Homebrew/install/blob/HEAD/install.sh}
MUST_EXIST_DIRECTORIES = (MUST_EXIST_SUBDIRECTORIES + [
HOMEBREW_CELLAR,
].sort.uniq).freeze
MUST_BE_WRITABLE_DIRECTORIES = (
%w[
etc/bash_completion.d lib/pkgconfig
share/aclocal share/doc share/info share/locale share/man
share/man/man1 share/man/man2 share/man/man3 share/man/man4
share/man/man5 share/man/man6 share/man/man7 share/man/man8
share/zsh share/zsh/site-functions
var/log
].map { |dir| HOMEBREW_PREFIX/dir } + MUST_EXIST_SUBDIRECTORIES + [
HOMEBREW_CACHE,
HOMEBREW_CELLAR,
HOMEBREW_LOCKS,
HOMEBREW_LOGS,
HOMEBREW_REPOSITORY,
Language::Python.homebrew_site_packages,
]
).sort.uniq.freeze
# These paths relative to the keg's share directory should always be real
# directories in the prefix, never symlinks.
SHARE_PATHS = %w[
aclocal doc info java locale man
man/man1 man/man2 man/man3 man/man4
man/man5 man/man6 man/man7 man/man8
man/cat1 man/cat2 man/cat3 man/cat4
man/cat5 man/cat6 man/cat7 man/cat8
applications gnome gnome/help icons
mime-info pixmaps sounds postgresql
].freeze
ELISP_EXTENSIONS = %w[.el .elc].freeze
PYC_EXTENSIONS = %w[.pyc .pyo].freeze
LIBTOOL_EXTENSIONS = %w[.la .lai].freeze
# Given an array of kegs, this method will try to find some other kegs
# that depend on them. If it does, it returns:
#
# - some kegs in the passed array that have installed dependents
# - some installed dependents of those kegs.
#
# If it doesn't, it returns nil.
#
# Note that nil will be returned if the only installed dependents
# in the passed kegs are other kegs in the array.
#
# For efficiency, we don't bother trying to get complete data.
def self.find_some_installed_dependents(kegs)
keg_names = kegs.select(&:optlinked?).map(&:name)
keg_formulae = []
kegs_by_source = kegs.group_by do |keg|
# First, attempt to resolve the keg to a formula
# to get up-to-date name and tap information.
f = keg.to_formula
keg_formulae << f
[f.name, f.tap]
rescue
# If the formula for the keg can't be found,
# fall back to the information in the tab.
[keg.name, keg.tab.tap]
end
all_required_kegs = Set.new
all_dependents = []
# Don't include dependencies of kegs that were in the given array.
formulae_to_check = Formula.installed - keg_formulae
formulae_to_check.each do |dependent|
required = dependent.missing_dependencies(hide: keg_names)
required_kegs = required.map do |f|
f_kegs = kegs_by_source[[f.name, f.tap]]
next unless f_kegs
f_kegs.max_by(&:version)
end.compact
next if required_kegs.empty?
all_required_kegs += required_kegs
all_dependents << dependent.to_s
end
return if all_required_kegs.empty?
return if all_dependents.empty?
[all_required_kegs.to_a, all_dependents.sort]
end
# @param path if this is a file in a keg, returns the containing {Keg} object.
def self.for(path)
original_path = path
raise Errno::ENOENT, original_path.to_s unless original_path.exist?
if (path = original_path.realpath)
until path.root?
return Keg.new(path) if path.parent.parent == HOMEBREW_CELLAR.realpath
path = path.parent.realpath # realpath() prevents root? failing
end
end
raise NotAKegError, "#{original_path} is not inside a keg"
end
def self.all
Formula.racks.flat_map(&:subdirs).map { |d| new(d) }
end
attr_reader :path, :name, :linked_keg_record, :opt_record
protected :path
extend Forwardable
def_delegators :path,
:to_s, :hash, :abv, :disk_usage, :file_count, :directory?, :exist?, :/,
:join, :rename, :find
def initialize(path)
path = path.resolved_path if path.to_s.start_with?("#{HOMEBREW_PREFIX}/opt/")
raise "#{path} is not a valid keg" if path.parent.parent.realpath != HOMEBREW_CELLAR.realpath
raise "#{path} is not a directory" unless path.directory?
@path = path
@name = path.parent.basename.to_s
@linked_keg_record = HOMEBREW_LINKED_KEGS/name
@opt_record = HOMEBREW_PREFIX/"opt/#{name}"
@require_relocation = false
end
def rack
path.parent
end
alias to_path to_s
sig { returns(String) }
def inspect
"#<#{self.class.name}:#{path}>"
end
def ==(other)
instance_of?(other.class) && path == other.path
end
alias eql? ==
sig { returns(T::Boolean) }
def empty_installation?
Pathname.glob("#{path}/*") do |file|
return false if file.directory? && !file.children.reject(&:ds_store?).empty?
basename = file.basename.to_s
next if Metafiles.copy?(basename)
next if %w[.DS_Store INSTALL_RECEIPT.json].include?(basename)
return false
end
true
end
def require_relocation?
@require_relocation
end
def linked?
linked_keg_record.symlink? &&
linked_keg_record.directory? &&
path == linked_keg_record.resolved_path
end
def remove_linked_keg_record
linked_keg_record.unlink
linked_keg_record.parent.rmdir_if_possible
end
def optlinked?
opt_record.symlink? && path == opt_record.resolved_path
end
def remove_old_aliases
opt = opt_record.parent
linkedkegs = linked_keg_record.parent
tap = begin
to_formula.tap
rescue
# If the formula can't be found, just ignore aliases for now.
nil
end
if tap
bad_tap_opt = opt/tap.user
FileUtils.rm_rf bad_tap_opt if !bad_tap_opt.symlink? && bad_tap_opt.directory?
end
aliases.each do |a|
# versioned aliases are handled below
next if a.match?(/.+@./)
alias_opt_symlink = opt/a
if alias_opt_symlink.symlink? && alias_opt_symlink.exist?
alias_opt_symlink.delete if alias_opt_symlink.realpath == opt_record.realpath
elsif alias_opt_symlink.symlink? || alias_opt_symlink.exist?
alias_opt_symlink.delete
end
alias_linkedkegs_symlink = linkedkegs/a
alias_linkedkegs_symlink.delete if alias_linkedkegs_symlink.symlink? || alias_linkedkegs_symlink.exist?
end
Pathname.glob("#{opt_record}@*").each do |a|
a = a.basename.to_s
next if aliases.include?(a)
alias_opt_symlink = opt/a
if alias_opt_symlink.symlink? && alias_opt_symlink.exist? && rack == alias_opt_symlink.realpath.parent
alias_opt_symlink.delete
end
alias_linkedkegs_symlink = linkedkegs/a
alias_linkedkegs_symlink.delete if alias_linkedkegs_symlink.symlink? || alias_linkedkegs_symlink.exist?
end
end
def remove_opt_record
opt_record.unlink
opt_record.parent.rmdir_if_possible
end
def uninstall
CacheStoreDatabase.use(:linkage) do |db|
break unless db.created?
LinkageCacheStore.new(path, db).delete!
end
path.rmtree
path.parent.rmdir_if_possible
remove_opt_record if optlinked?
remove_old_aliases
remove_oldname_opt_record
rescue Errno::EACCES, Errno::ENOTEMPTY
odie <<~EOS
Could not remove #{name} keg! Do so manually:
sudo rm -rf #{path}
EOS
end
# TODO: refactor to use keyword arguments.
def unlink(**options)
ObserverPathnameExtension.reset_counts!
dirs = []
keg_directories = KEG_LINK_DIRECTORIES.map { |d| path/d }
.select(&:exist?)
keg_directories.each do |dir|
dir.find do |src|
dst = HOMEBREW_PREFIX + src.relative_path_from(path)
dst.extend(ObserverPathnameExtension)
dirs << dst if dst.directory? && !dst.symlink?
# check whether the file to be unlinked is from the current keg first
next unless dst.symlink?
next if src != dst.resolved_path
if options[:dry_run]
puts dst
Find.prune if src.directory?
next
end
dst.uninstall_info if dst.to_s.match?(INFOFILE_RX)
dst.unlink
remove_old_aliases
Find.prune if src.directory?
end
end
unless options[:dry_run]
remove_linked_keg_record if linked?
dirs.reverse_each(&:rmdir_if_possible)
end
ObserverPathnameExtension.n
end
def lock(&block)
FormulaLock.new(name).with_lock do
if oldname_opt_record
FormulaLock.new(oldname_opt_record.basename.to_s).with_lock(&block)
else
yield
end
end
end
def completion_installed?(shell)
dir = case shell
when :bash then path/"etc/bash_completion.d"
when :zsh
dir = path/"share/zsh/site-functions"
dir if dir.directory? && dir.children.any? { |f| f.basename.to_s.start_with?("_") }
when :fish then path/"share/fish/vendor_completions.d"
end
dir&.directory? && !dir.children.empty?
end
def functions_installed?(shell)
case shell
when :fish
dir = path/"share/fish/vendor_functions.d"
dir.directory? && !dir.children.empty?
when :zsh
# Check for non completion functions (i.e. files not started with an underscore),
# since those can be checked separately
dir = path/"share/zsh/site-functions"
dir.directory? && dir.children.any? { |f| !f.basename.to_s.start_with?("_") }
end
end
sig { returns(T::Boolean) }
def plist_installed?
!Dir["#{path}/*.plist"].empty?
end
def python_site_packages_installed?
(path/"lib/python2.7/site-packages").directory?
end
sig { returns(T::Boolean) }
def python_pth_files_installed?
!Dir["#{path}/lib/python2.7/site-packages/*.pth"].empty?
end
sig { returns(T::Array[Pathname]) }
def apps
app_prefix = optlinked? ? opt_record : path
Pathname.glob("#{app_prefix}/{,libexec/}*.app")
end
def elisp_installed?
return false unless (path/"share/emacs/site-lisp"/name).exist?
(path/"share/emacs/site-lisp"/name).children.any? { |f| ELISP_EXTENSIONS.include? f.extname }
end
def version
require "pkg_version"
PkgVersion.parse(path.basename.to_s)
end
def to_formula
Formulary.from_keg(self)
end
def oldname_opt_record
@oldname_opt_record ||= if (opt_dir = HOMEBREW_PREFIX/"opt").directory?
opt_dir.subdirs.find do |dir|
dir.symlink? && dir != opt_record && path.parent == dir.resolved_path.parent
end
end
end
# TODO: refactor to use keyword arguments.
def link(**options)
raise AlreadyLinkedError, self if linked_keg_record.directory?
ObserverPathnameExtension.reset_counts!
optlink(**options) unless options[:dry_run]
# yeah indeed, you have to force anything you need in the main tree into
# these dirs REMEMBER that *NOT* everything needs to be in the main tree
link_dir("etc", **options) { :mkpath }
link_dir("bin", **options) { :skip_dir }
link_dir("sbin", **options) { :skip_dir }
link_dir("include", **options) { :link }
link_dir("share", **options) do |relative_path|
case relative_path.to_s
when INFOFILE_RX then :info
when "locale/locale.alias",
%r{^icons/.*/icon-theme\.cache$}
:skip_file
when LOCALEDIR_RX,
%r{^icons/}, # all icons subfolders should also mkpath
/^zsh/,
/^fish/,
%r{^lua/}, # Lua, Lua51, Lua53 all need the same handling.
%r{^guile/},
*SHARE_PATHS
:mkpath
else
:link
end
end
link_dir("lib", **options) do |relative_path|
case relative_path.to_s
when "charset.alias"
:skip_file
when "pkgconfig", # pkg-config database gets explicitly created
"cmake", # cmake database gets explicitly created
"dtrace", # lib/language folders also get explicitly created
/^gdk-pixbuf/,
"ghc",
/^gio/,
"lua",
/^mecab/,
/^node/,
/^ocaml/,
/^perl5/,
"php",
/^python[23]\.\d/,
/^R/,
/^ruby/
:mkpath
else
# Everything else is symlinked to the cellar
:link
end
end
link_dir("Frameworks", **options) do |relative_path|
# Frameworks contain symlinks pointing into a subdir, so we have to use
# the :link strategy. However, for Foo.framework and
# Foo.framework/Versions we have to use :mkpath so that multiple formulae
# can link their versions into it and `brew [un]link` works.
if relative_path.to_s.match?(%r{[^/]*\.framework(/Versions)?$})
:mkpath
else
:link
end
end
make_relative_symlink(linked_keg_record, path, **options) unless options[:dry_run]
rescue LinkError
unlink(verbose: options[:verbose])
raise
else
ObserverPathnameExtension.n
end
def remove_oldname_opt_record
return unless oldname_opt_record
return if oldname_opt_record.resolved_path != path
@oldname_opt_record.unlink
@oldname_opt_record.parent.rmdir_if_possible
@oldname_opt_record = nil
end
def tab
Tab.for_keg(self)
end
def runtime_dependencies
Keg.cache[:runtime_dependencies] ||= {}
Keg.cache[:runtime_dependencies][path] ||= tab.runtime_dependencies
end
def aliases
tab.aliases || []
end
def optlink(**options)
opt_record.delete if opt_record.symlink? || opt_record.exist?
make_relative_symlink(opt_record, path, **options)
aliases.each do |a|
alias_opt_record = opt_record.parent/a
alias_opt_record.delete if alias_opt_record.symlink? || alias_opt_record.exist?
make_relative_symlink(alias_opt_record, path, **options)
end
return unless oldname_opt_record
oldname_opt_record.delete
make_relative_symlink(oldname_opt_record, path, **options)
end
def delete_pyc_files!
find { |pn| pn.delete if PYC_EXTENSIONS.include?(pn.extname) }
find { |pn| FileUtils.rm_rf pn if pn.basename.to_s == "__pycache__" }
end
private
def resolve_any_conflicts(dst, **options)
return unless dst.symlink?
src = dst.resolved_path
# src itself may be a symlink, so check lstat to ensure we are dealing with
# a directory, and not a symlink pointing at a directory (which needs to be
# treated as a file). In other words, we only want to resolve one symlink.
begin
stat = src.lstat
rescue Errno::ENOENT
# dst is a broken symlink, so remove it.
dst.unlink unless options[:dry_run]
return
end
return unless stat.directory?
begin
keg = Keg.for(src)
rescue NotAKegError
puts "Won't resolve conflicts for symlink #{dst} as it doesn't resolve into the Cellar" if options[:verbose]
return
end
dst.unlink unless options[:dry_run]
keg.link_dir(src, **options) { :mkpath }
true
end
# Creates a relative symlink at dst pointing to src, honoring :verbose,
# :dry_run and :overwrite options. Raises ConflictError when a real file is
# in the way, DirectoryNotWritableError on permission failure, and LinkError
# for any other system call failure.
def make_relative_symlink(dst, src, **options)
  if dst.symlink? && src == dst.resolved_path
    puts "Skipping; link already exists: #{dst}" if options[:verbose]
    return
  end

  # cf. git-clean -n: list files to delete, don't really link or delete
  if options[:dry_run] && options[:overwrite]
    if dst.symlink?
      puts "#{dst} -> #{dst.resolved_path}"
    elsif dst.exist?
      puts dst
    end
    return
  end

  # list all link targets
  if options[:dry_run]
    puts dst
    return
  end

  dst.delete if options[:overwrite] && (dst.exist? || dst.symlink?)
  dst.make_relative_symlink(src)
rescue Errno::EEXIST => e
  # Retry if we're linking a different version of the same
  # formula. The `AlreadyLinkedError` above won't catch
  # this if a formula is missing an optlink. In that case,
  # delete the symlink and retry.
  if dst.symlink? && Keg.for(dst).name == name
    dst.unlink
    retry
  end
  raise ConflictError.new(self, src.relative_path_from(path), dst, e) if dst.exist?

  if dst.symlink?
    # A symlink owned by something else (not a real file): replace it.
    dst.unlink
    retry
  end
  # NOTE(review): if dst vanished between the failed link and here, we fall
  # through without creating the link — presumably a benign race; confirm.
rescue Errno::EACCES => e
  raise DirectoryNotWritableError.new(self, src.relative_path_from(path), dst, e)
rescue SystemCallError => e
  raise LinkError.new(self, src.relative_path_from(path), dst, e)
end
protected
# symlinks the contents of path+relative_dir recursively into #{HOMEBREW_PREFIX}/relative_dir
#
# The block receives each entry's path relative to relative_dir and decides
# how it is handled: :skip_file / nil and :skip_dir prune the entry, :info
# links the file and installs its info entry, :mkpath creates the directory
# instead of linking it, and any other value links the entry directly.
def link_dir(relative_dir, **options)
  root = path/relative_dir
  return unless root.exist?

  root.find do |src|
    next if src == root

    dst = HOMEBREW_PREFIX + src.relative_path_from(path)
    dst.extend ObserverPathnameExtension

    if src.symlink? || src.file?
      Find.prune if File.basename(src) == ".DS_Store"
      # Never link a file onto itself.
      Find.prune if src.resolved_path == dst
      # Don't link pyc or pyo files because Python overwrites these
      # cached object files and next time brew wants to link, the
      # file is in the way.
      Find.prune if PYC_EXTENSIONS.include?(src.extname) && src.to_s.include?("/site-packages/")

      case yield src.relative_path_from(root)
      when :skip_file, nil
        Find.prune
      when :info
        next if File.basename(src) == "dir" # skip historical local 'dir' files

        make_relative_symlink dst, src, **options
        dst.install_info
      else
        make_relative_symlink dst, src, **options
      end
    elsif src.directory?
      # if the dst dir already exists, then great! walk the rest of the tree tho
      next if dst.directory? && !dst.symlink?

      # no need to put .app bundles in the path, the user can just use
      # spotlight, or the open command and actual mac apps use an equivalent
      Find.prune if src.extname == ".app"

      case yield src.relative_path_from(root)
      when :skip_dir
        Find.prune
      when :mkpath
        dst.mkpath unless resolve_any_conflicts(dst, **options)
      else
        unless resolve_any_conflicts(dst, **options)
          make_relative_symlink dst, src, **options
          # The whole directory was linked as one symlink; don't descend.
          Find.prune
        end
      end
    end
  end
end
end
require "extend/os/keg"
|
require "extend/cachable"
require "readall"
# a {Tap} is used to extend the formulae provided by Homebrew core.
# Usually, it's synced with a remote git repository. And it's likely
# a GitHub repository with the name of `user/homebrew-repo`. In such
# case, `user/repo` will be used as the {#name} of this {Tap}, where
# {#user} represents GitHub username and {#repo} represents repository
# name without leading `homebrew-`.
class Tap
  extend Cachable

  TAP_DIRECTORY = HOMEBREW_LIBRARY/"Taps"

  # Fetch a {Tap} by name: either a single "user/repo" string or separate
  # user and repo arguments. Instances are memoized per lowercased name;
  # Homebrew/Linuxbrew core names resolve to the {CoreTap} singleton.
  def self.fetch(*args)
    case args.length
    when 1
      user, repo = args.first.split("/", 2)
    when 2
      user = args.first
      repo = args.second
    end

    if [user, repo].any? { |part| part.nil? || part.include?("/") }
      raise "Invalid tap name '#{args.join("/")}'"
    end

    # We special case homebrew and linuxbrew so that users don't have to shift in a terminal.
    user = user.capitalize if ["homebrew", "linuxbrew"].include? user
    repo = repo.delete_prefix "homebrew-"

    if ["Homebrew", "Linuxbrew"].include?(user) && ["core", "homebrew"].include?(repo)
      return CoreTap.instance
    end

    cache_key = "#{user}/#{repo}".downcase
    cache.fetch(cache_key) { |key| cache[key] = Tap.new(user, repo) }
  end

  # Resolve a filesystem path under the tap directory to its {Tap};
  # returns nil when the path does not belong to a tap.
  def self.from_path(path)
    match = File.expand_path(path).match(HOMEBREW_TAP_PATH_REGEX)
    raise "Invalid tap path '#{path}'" unless match

    fetch(match[:user], match[:repo])
  rescue
    # No need to error as a nil tap is sufficient to show failure.
    nil
  end

  # The default {Tap} for Homebrew Cask.
  def self.default_cask_tap
    @default_cask_tap ||= fetch("Homebrew", "cask")
  end

  extend Enumerable

  # The user name of this {Tap}. Usually, it's the GitHub username of
  # this #{Tap}'s remote repository.
  attr_reader :user

  # The repository name of this {Tap} without leading `homebrew-`.
  attr_reader :repo

  # The name of this {Tap}. It combines {#user} and {#repo} with a slash.
  # {#name} is always in lowercase.
  # e.g. `user/repo`
  attr_reader :name

  # The full name of this {Tap}, including the `homebrew-` prefix.
  # It combines {#user} and 'homebrew-'-prefixed {#repo} with a slash.
  # e.g. `user/homebrew-repo`
  attr_reader :full_name

  # The local path to this {Tap}.
  # e.g. `/usr/local/Library/Taps/user/homebrew-repo`
  attr_reader :path

  # @private
  def initialize(user, repo)
    @user = user
    @repo = repo
    @name = "#{@user}/#{@repo}".downcase
    @full_name = "#{@user}/homebrew-#{@repo}"
    @path = TAP_DIRECTORY/@full_name.downcase
    @path.extend(GitRepositoryExtension)
    @alias_table = nil
    @alias_reverse_table = nil
  end

  # clear internal cache
  def clear_cache
    @remote = nil
    @repo_var = nil
    @formula_dir = nil
    @cask_dir = nil
    @command_dir = nil
    @formula_files = nil
    @alias_dir = nil
    @alias_files = nil
    @aliases = nil
    @alias_table = nil
    @alias_reverse_table = nil
    @command_files = nil
    @formula_renames = nil
    @tap_migrations = nil
    @config = nil
    remove_instance_variable(:@private) if instance_variable_defined?(:@private)
  end

  # The remote path to this {Tap}.
  # e.g. `https://github.com/user/homebrew-repo`
  def remote
    raise TapUnavailableError, name unless installed?

    @remote ||= path.git_origin
  end

  # The default remote path to this {Tap}.
  def default_remote
    "https://github.com/#{full_name}"
  end

  # An environment-variable-friendly identifier for this tap: its path
  # below TAP_DIRECTORY, uppercased, non-alphanumerics as underscores.
  def repo_var
    @repo_var ||= path.to_s
                      .delete_prefix(TAP_DIRECTORY.to_s)
                      .tr("^A-Za-z0-9", "_")
                      .upcase
  end

  # True if this {Tap} is a git repository.
  def git?
    path.git?
  end

  # git branch for this {Tap}.
  def git_branch
    raise TapUnavailableError, name unless installed?

    path.git_branch
  end

  # git HEAD for this {Tap}.
  def git_head
    raise TapUnavailableError, name unless installed?

    path.git_head
  end

  # git HEAD in short format for this {Tap}.
  def git_short_head
    raise TapUnavailableError, name unless installed?

    path.git_short_head
  end

  # time since git last commit for this {Tap}.
  def git_last_commit
    raise TapUnavailableError, name unless installed?

    path.git_last_commit
  end

  # git last commit date for this {Tap}.
  def git_last_commit_date
    raise TapUnavailableError, name unless installed?

    path.git_last_commit_date
  end

  # The issues URL of this {Tap}.
  # e.g. `https://github.com/user/homebrew-repo/issues`
  # Returns nil for an unofficial tap with a custom remote, since the
  # issue tracker location cannot be derived from the tap name.
  def issues_url
    return unless official? || !custom_remote?

    "#{default_remote}/issues"
  end

  def to_s
    name
  end

  # Human-readable git revision / last-commit description, or a
  # placeholder when the tap is not installed or has no git repository.
  def version_string
    return "N/A" unless installed?

    pretty_revision = git_short_head
    return "(no git repository)" unless pretty_revision

    "(git revision #{pretty_revision}; last commit #{git_last_commit_date})"
  end

  # True if this {Tap} is an official Homebrew tap.
  def official?
    user == "Homebrew"
  end

  # True if the remote of this {Tap} is a private repository.
  def private?
    return @private if instance_variable_defined?(:@private)

    @private = read_or_set_private_config
  end

  # {TapConfig} of this {Tap}
  def config
    @config ||= begin
      raise TapUnavailableError, name unless installed?

      TapConfig.new(self)
    end
  end

  # True if this {Tap} has been installed.
  def installed?
    path.directory?
  end

  # True if this {Tap} is not a full clone.
  def shallow?
    (path/".git/shallow").exist?
  end

  # @private
  def core_tap?
    false
  end

  # install this {Tap}.
  #
  # @param [Hash] options
  # @option options [String] :clone_target If passed, it will be used as the clone remote.
  # @option options [Boolean, nil] :force_auto_update If present, whether to override the
  #   logic that skips non-GitHub repositories during auto-updates.
  # @option options [Boolean] :full_clone If set as true, full clone will be used.
  # @option options [Boolean] :quiet If set, suppress all output.
  def install(options = {})
    require "descriptions"

    full_clone = options.fetch(:full_clone, false)
    quiet = options.fetch(:quiet, false)
    requested_remote = options[:clone_target] || default_remote
    # if :force_auto_update is unset, use nil, meaning "no change"
    force_auto_update = options.fetch(:force_auto_update, nil)

    if official? && DEPRECATED_OFFICIAL_TAPS.include?(repo)
      odie "#{name} was deprecated. This tap is now empty as all its formulae were migrated."
    end

    if installed? && force_auto_update.nil?
      raise TapAlreadyTappedError, name unless full_clone
      raise TapAlreadyUnshallowError, name unless shallow?
    end

    # ensure git is installed
    Utils.ensure_git_installed!

    # Already installed: just record the auto-update override and/or
    # unshallow the existing clone, then stop.
    if installed?
      unless force_auto_update.nil?
        config["forceautoupdate"] = force_auto_update
        return if !full_clone || !shallow?
      end

      if options[:clone_target] && requested_remote != remote
        raise TapRemoteMismatchError.new(name, @remote, requested_remote)
      end

      ohai "Unshallowing #{name}" unless quiet
      args = %w[fetch --unshallow]
      args << "-q" if quiet
      path.cd { safe_system "git", *args }
      return
    end

    clear_cache

    ohai "Tapping #{name}" unless quiet
    args = %W[clone #{requested_remote} #{path}]
    args << "--depth=1" unless full_clone
    args << "-q" if quiet

    begin
      safe_system "git", *args
      unless Readall.valid_tap?(self, aliases: true)
        unless ARGV.homebrew_developer?
          raise "Cannot tap #{name}: invalid syntax in tap!"
        end
      end
    rescue Interrupt, RuntimeError
      ignore_interrupts do
        # wait for git to possibly cleanup the top directory when interrupt happens.
        sleep 0.1
        FileUtils.rm_rf path
        path.parent.rmdir_if_possible
      end
      raise
    end

    config["forceautoupdate"] = force_auto_update unless force_auto_update.nil?

    link_completions_and_manpages

    formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")
    puts "Tapped#{formatted_contents} (#{path.abv})." unless quiet
    Descriptions.cache_formulae(formula_names)

    return if options[:clone_target]
    return unless private?
    return if quiet

    puts <<~EOS
      It looks like you tapped a private repository. To avoid entering your
      credentials each time you update, you can use git HTTP credential
      caching or issue the following command:
        cd #{path}
        git remote set-url origin git@github.com:#{full_name}.git
    EOS
  end

  # Symlink this tap's manpages and shell completions into the prefix.
  def link_completions_and_manpages
    command = "brew tap --repair"
    Utils::Link.link_manpages(path, command)
    Utils::Link.link_completions(path, command)
  end

  # uninstall this {Tap}.
  def uninstall
    require "descriptions"
    raise TapUnavailableError, name unless installed?

    puts "Untapping #{name}..."

    abv = path.abv
    formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")

    unpin if pinned?
    Descriptions.uncache_formulae(formula_names)
    Utils::Link.unlink_manpages(path)
    Utils::Link.unlink_completions(path)
    path.rmtree
    path.parent.rmdir_if_possible
    puts "Untapped#{formatted_contents} (#{abv})."
    clear_cache
  end

  # True if the {#remote} of {Tap} is customized.
  # A tap with no remote at all also counts as customized.
  def custom_remote?
    return true unless remote

    remote.casecmp(default_remote).nonzero?
  end

  # path to the directory of all {Formula} files for this {Tap}.
  def formula_dir
    @formula_dir ||= potential_formula_dirs.find(&:directory?) || path/"Formula"
  end

  # Candidate directories formulae may live in, in priority order.
  def potential_formula_dirs
    @potential_formula_dirs ||= [path/"Formula", path/"HomebrewFormula", path].freeze
  end

  # path to the directory of all {Cask} files for this {Tap}.
  def cask_dir
    @cask_dir ||= path/"Casks"
  end

  # A summary of this tap's contents as pluralized count strings,
  # e.g. ["2 commands", "3 formulae"].
  def contents
    contents = []

    if (command_count = command_files.count).positive?
      contents << "#{command_count} #{"command".pluralize(command_count)}"
    end

    if (cask_count = cask_files.count).positive?
      contents << "#{cask_count} #{"cask".pluralize(cask_count)}"
    end

    if (formula_count = formula_files.count).positive?
      contents << "#{formula_count} #{"formula".pluralize(formula_count)}"
    end

    contents
  end

  # an array of all {Formula} files of this {Tap}.
  def formula_files
    @formula_files ||= if formula_dir.directory?
      formula_dir.children.select(&method(:formula_file?))
    else
      []
    end
  end

  # an array of all {Cask} files of this {Tap}.
  def cask_files
    @cask_files ||= if cask_dir.directory?
      cask_dir.children.select(&method(:cask_file?))
    else
      []
    end
  end

  # return true if given path would present a {Formula} file in this {Tap}.
  # accepts both absolute path and relative path (relative to this {Tap}'s path)
  # @private
  def formula_file?(file)
    file = Pathname.new(file) unless file.is_a? Pathname
    file = file.expand_path(path)
    file.extname == ".rb" && file.parent == formula_dir
  end

  # return true if given path would present a {Cask} file in this {Tap}.
  # accepts both absolute path and relative path (relative to this {Tap}'s path)
  # @private
  def cask_file?(file)
    file = Pathname.new(file) unless file.is_a? Pathname
    file = file.expand_path(path)
    file.extname == ".rb" && file.parent == cask_dir
  end

  # an array of all {Formula} names of this {Tap}.
  def formula_names
    @formula_names ||= formula_files.map { |f| formula_file_to_name(f) }
  end

  # path to the directory of all alias files for this {Tap}.
  # @private
  def alias_dir
    @alias_dir ||= path/"Aliases"
  end

  # an array of all alias files of this {Tap}.
  # @private
  def alias_files
    @alias_files ||= Pathname.glob("#{alias_dir}/*").select(&:file?)
  end

  # an array of all aliases of this {Tap}.
  # @private
  def aliases
    @aliases ||= alias_files.map { |f| alias_file_to_name(f) }
  end

  # a table mapping alias to formula name
  # @private
  def alias_table
    return @alias_table if @alias_table

    @alias_table = {}
    alias_files.each do |alias_file|
      # Alias files resolve (as symlinks) to the formula file they alias.
      @alias_table[alias_file_to_name(alias_file)] = formula_file_to_name(alias_file.resolved_path)
    end
    @alias_table
  end

  # a table mapping formula name to aliases
  # @private
  def alias_reverse_table
    return @alias_reverse_table if @alias_reverse_table

    @alias_reverse_table = {}
    alias_table.each do |alias_name, formula_name|
      @alias_reverse_table[formula_name] ||= []
      @alias_reverse_table[formula_name] << alias_name
    end
    @alias_reverse_table
  end

  # path to the directory of this {Tap}'s external commands.
  def command_dir
    @command_dir ||= path/"cmd"
  end

  # return true if given path would present an external command in this
  # {Tap}: inside {#command_dir}, named `brew-*` or `brewcask-*`, and
  # either executable or a `.rb` file.
  def command_file?(file)
    file = Pathname.new(file) unless file.is_a? Pathname
    file = file.expand_path(path)
    file.parent == command_dir && file.basename.to_s.match?(/^brew(cask)?-/) &&
      (file.executable? || file.extname == ".rb")
  end

  # an array of all commands files of this {Tap}.
  def command_files
    @command_files ||= if command_dir.directory?
      command_dir.children.select(&method(:command_file?))
    else
      []
    end
  end

  # path to the pin record for this {Tap}.
  # @private
  def pinned_symlink_path
    HOMEBREW_LIBRARY/"PinnedTaps/#{name}"
  end

  # True if this {Tap} has been pinned.
  def pinned?
    return @pinned if instance_variable_defined?(:@pinned)

    @pinned = pinned_symlink_path.directory?
  end

  # pin this {Tap}.
  def pin
    raise TapUnavailableError, name unless installed?
    raise TapPinStatusError.new(name, true) if pinned?

    pinned_symlink_path.make_relative_symlink(path)
    @pinned = true
  end

  # unpin this {Tap}.
  def unpin
    raise TapUnavailableError, name unless installed?
    raise TapPinStatusError.new(name, false) unless pinned?

    pinned_symlink_path.delete
    pinned_symlink_path.parent.rmdir_if_possible
    pinned_symlink_path.parent.parent.rmdir_if_possible
    @pinned = false
  end

  # A hash summarizing this {Tap}'s state; remote details are only
  # included when the tap is installed.
  def to_hash
    hash = {
      "name" => name,
      "user" => user,
      "repo" => repo,
      "path" => path.to_s,
      "installed" => installed?,
      "official" => official?,
      "formula_names" => formula_names,
      "formula_files" => formula_files.map(&:to_s),
      "command_files" => command_files.map(&:to_s),
      "pinned" => pinned?,
    }

    if installed?
      hash["remote"] = remote
      hash["custom_remote"] = custom_remote?
      hash["private"] = private?
    end

    hash
  end

  # Hash with tap formula renames
  def formula_renames
    require "json"

    @formula_renames ||= if (rename_file = path/"formula_renames.json").file?
      JSON.parse(rename_file.read)
    else
      {}
    end
  end

  # Hash with tap migrations
  def tap_migrations
    require "json"

    @tap_migrations ||= if (migration_file = path/"tap_migrations.json").file?
      JSON.parse(migration_file.read)
    else
      {}
    end
  end

  # Taps are equal when their class and (lowercased) name match; a String
  # other is first resolved via {.fetch}.
  def ==(other)
    other = Tap.fetch(other) if other.is_a?(String)
    self.class == other.class && name == other.name
  end

  # Enumerate every installed {Tap} found under TAP_DIRECTORY.
  def self.each
    return unless TAP_DIRECTORY.directory?
    return to_enum unless block_given?

    TAP_DIRECTORY.subdirs.each do |user|
      user.subdirs.each do |repo|
        yield fetch(user.basename.to_s, repo.basename.to_s)
      end
    end
  end

  # an array of all installed {Tap} names.
  def self.names
    map(&:name).sort
  end

  # an array of all tap cmd directory {Pathname}s
  def self.cmd_directories
    Pathname.glob TAP_DIRECTORY/"*/*/cmd"
  end

  # @private
  # Map a formula file to its fully-qualified name, e.g. "user/repo/foo".
  def formula_file_to_name(file)
    "#{name}/#{file.basename(".rb")}"
  end

  # @private
  # Map an alias file to its fully-qualified alias name.
  def alias_file_to_name(file)
    "#{name}/#{file.basename}"
  end

  private

  # Determine whether the tap's repository is private, consulting (and
  # persisting to) the tap's git config. A custom remote is assumed
  # private; a repo the GitHub API cannot find is assumed private; other
  # API errors assume public.
  def read_or_set_private_config
    case config["private"]
    when "true" then true
    when "false" then false
    else
      config["private"] = begin
        if custom_remote?
          true
        else
          GitHub.private_repo?(full_name)
        end
      rescue GitHub::HTTPNotFoundError
        true
      rescue GitHub::Error
        false
      end
    end
  end
end
# A specialized {Tap} class for the core formulae
class CoreTap < Tap
  # Core lives at a fixed remote regardless of the tap's name derivation.
  def default_remote
    "https://github.com/Homebrew/homebrew-core".freeze
  end

  # @private
  def initialize
    super "Homebrew", "core"
  end

  # The singleton {CoreTap} instance.
  def self.instance
    @instance ||= new
  end

  # Tap homebrew/core via `brew tap` if it is not already installed.
  def self.ensure_installed!
    return if instance.installed?

    safe_system HOMEBREW_BREW_FILE, "tap", instance.name
  end

  # @private
  # Core cannot be untapped.
  def uninstall
    raise "Tap#uninstall is not available for CoreTap"
  end

  # @private
  # Core cannot be pinned.
  def pin
    raise "Tap#pin is not available for CoreTap"
  end

  # @private
  def unpin
    raise "Tap#unpin is not available for CoreTap"
  end

  # @private
  def pinned?
    false
  end

  # @private
  def core_tap?
    true
  end

  # @private
  # The following lookups auto-install the tap before delegating to {Tap}.
  def formula_dir
    @formula_dir ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def alias_dir
    @alias_dir ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def formula_renames
    @formula_renames ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  def tap_migrations
    @tap_migrations ||= begin
      self.class.ensure_installed!
      super
    end
  end

  # @private
  # Core formula names are not prefixed with the tap name.
  def formula_file_to_name(file)
    file.basename(".rb").to_s
  end

  # @private
  def alias_file_to_name(file)
    file.basename.to_s
  end
end
# Permanent per-{Tap} configuration, persisted via `git-config(1)` in the
# tap's own repository under the `homebrew.*` key namespace.
class TapConfig
  attr_reader :tap

  def initialize(tap)
    @tap = tap
  end

  # Reads `homebrew.<key>` from the tap's local git config.
  # Returns nil when git is unavailable, the tap is not a git repository,
  # or the key is unset/blank.
  def [](key)
    return unless Utils.git_available?
    return unless tap.git?

    qualified_key = "homebrew.#{key}"
    tap.path.cd do
      Utils.popen_read("git", "config", "--local", "--get", qualified_key).chomp.presence
    end
  end

  # Writes `homebrew.<key>` into the tap's local git config, replacing any
  # existing values. Silently does nothing when git or the repo is missing.
  def []=(key, value)
    return unless Utils.git_available?
    return unless tap.git?

    qualified_key = "homebrew.#{key}"
    tap.path.cd do
      safe_system "git", "config", "--local", "--replace-all", qualified_key, value.to_s
    end
  end
end
require "extend/os/tap"
Remove redundant check in formula_files and cask_files
Formula_files consists of every non-recursive child of formula_dir,
for which formula_file? evaluates to true. formula_file? checks if the file
is a child of formula_dir, which it is by definition. It turns out that by
removing the check, the time used for 'brew search' decreased from 800 ms to
700 ms, noticeably faster during tab completion. The same applies to
cask_files and cask_file?
require "extend/cachable"
require "readall"
# a {Tap} is used to extend the formulae provided by Homebrew core.
# Usually, it's synced with a remote git repository. And it's likely
# a GitHub repository with the name of `user/homebrew-repo`. In such
# case, `user/repo` will be used as the {#name} of this {Tap}, where
# {#user} represents GitHub username and {#repo} represents repository
# name without leading `homebrew-`.
class Tap
extend Cachable
TAP_DIRECTORY = HOMEBREW_LIBRARY/"Taps"
def self.fetch(*args)
case args.length
when 1
user, repo = args.first.split("/", 2)
when 2
user = args.first
repo = args.second
end
if [user, repo].any? { |part| part.nil? || part.include?("/") }
raise "Invalid tap name '#{args.join("/")}'"
end
# We special case homebrew and linuxbrew so that users don't have to shift in a terminal.
user = user.capitalize if ["homebrew", "linuxbrew"].include? user
repo = repo.delete_prefix "homebrew-"
if ["Homebrew", "Linuxbrew"].include?(user) && ["core", "homebrew"].include?(repo)
return CoreTap.instance
end
cache_key = "#{user}/#{repo}".downcase
cache.fetch(cache_key) { |key| cache[key] = Tap.new(user, repo) }
end
def self.from_path(path)
match = File.expand_path(path).match(HOMEBREW_TAP_PATH_REGEX)
raise "Invalid tap path '#{path}'" unless match
fetch(match[:user], match[:repo])
rescue
# No need to error as a nil tap is sufficient to show failure.
nil
end
def self.default_cask_tap
@default_cask_tap ||= fetch("Homebrew", "cask")
end
extend Enumerable
# The user name of this {Tap}. Usually, it's the GitHub username of
# this #{Tap}'s remote repository.
attr_reader :user
# The repository name of this {Tap} without leading `homebrew-`.
attr_reader :repo
# The name of this {Tap}. It combines {#user} and {#repo} with a slash.
# {#name} is always in lowercase.
# e.g. `user/repo`
attr_reader :name
# The full name of this {Tap}, including the `homebrew-` prefix.
# It combines {#user} and 'homebrew-'-prefixed {#repo} with a slash.
# e.g. `user/homebrew-repo`
attr_reader :full_name
# The local path to this {Tap}.
# e.g. `/usr/local/Library/Taps/user/homebrew-repo`
attr_reader :path
# @private
def initialize(user, repo)
@user = user
@repo = repo
@name = "#{@user}/#{@repo}".downcase
@full_name = "#{@user}/homebrew-#{@repo}"
@path = TAP_DIRECTORY/@full_name.downcase
@path.extend(GitRepositoryExtension)
@alias_table = nil
@alias_reverse_table = nil
end
# clear internal cache
def clear_cache
@remote = nil
@repo_var = nil
@formula_dir = nil
@cask_dir = nil
@command_dir = nil
@formula_files = nil
@alias_dir = nil
@alias_files = nil
@aliases = nil
@alias_table = nil
@alias_reverse_table = nil
@command_files = nil
@formula_renames = nil
@tap_migrations = nil
@config = nil
remove_instance_variable(:@private) if instance_variable_defined?(:@private)
end
# The remote path to this {Tap}.
# e.g. `https://github.com/user/homebrew-repo`
def remote
raise TapUnavailableError, name unless installed?
@remote ||= path.git_origin
end
# The default remote path to this {Tap}.
def default_remote
"https://github.com/#{full_name}"
end
def repo_var
@repo_var ||= path.to_s
.delete_prefix(TAP_DIRECTORY.to_s)
.tr("^A-Za-z0-9", "_")
.upcase
end
# True if this {Tap} is a git repository.
def git?
path.git?
end
# git branch for this {Tap}.
def git_branch
raise TapUnavailableError, name unless installed?
path.git_branch
end
# git HEAD for this {Tap}.
def git_head
raise TapUnavailableError, name unless installed?
path.git_head
end
# git HEAD in short format for this {Tap}.
def git_short_head
raise TapUnavailableError, name unless installed?
path.git_short_head
end
# time since git last commit for this {Tap}.
def git_last_commit
raise TapUnavailableError, name unless installed?
path.git_last_commit
end
# git last commit date for this {Tap}.
def git_last_commit_date
raise TapUnavailableError, name unless installed?
path.git_last_commit_date
end
# The issues URL of this {Tap}.
# e.g. `https://github.com/user/homebrew-repo/issues`
def issues_url
return unless official? || !custom_remote?
"#{default_remote}/issues"
end
def to_s
name
end
def version_string
return "N/A" unless installed?
pretty_revision = git_short_head
return "(no git repository)" unless pretty_revision
"(git revision #{pretty_revision}; last commit #{git_last_commit_date})"
end
# True if this {Tap} is an official Homebrew tap.
def official?
user == "Homebrew"
end
# True if the remote of this {Tap} is a private repository.
def private?
return @private if instance_variable_defined?(:@private)
@private = read_or_set_private_config
end
# {TapConfig} of this {Tap}
def config
@config ||= begin
raise TapUnavailableError, name unless installed?
TapConfig.new(self)
end
end
# True if this {Tap} has been installed.
def installed?
path.directory?
end
# True if this {Tap} is not a full clone.
def shallow?
(path/".git/shallow").exist?
end
# @private
def core_tap?
false
end
# install this {Tap}.
#
# @param [Hash] options
# @option options [String] :clone_target If passed, it will be used as the clone remote.
# @option options [Boolean, nil] :force_auto_update If present, whether to override the
# logic that skips non-GitHub repositories during auto-updates.
# @option options [Boolean] :full_clone If set as true, full clone will be used.
# @option options [Boolean] :quiet If set, suppress all output.
def install(options = {})
require "descriptions"
full_clone = options.fetch(:full_clone, false)
quiet = options.fetch(:quiet, false)
requested_remote = options[:clone_target] || default_remote
# if :force_auto_update is unset, use nil, meaning "no change"
force_auto_update = options.fetch(:force_auto_update, nil)
if official? && DEPRECATED_OFFICIAL_TAPS.include?(repo)
odie "#{name} was deprecated. This tap is now empty as all its formulae were migrated."
end
if installed? && force_auto_update.nil?
raise TapAlreadyTappedError, name unless full_clone
raise TapAlreadyUnshallowError, name unless shallow?
end
# ensure git is installed
Utils.ensure_git_installed!
if installed?
unless force_auto_update.nil?
config["forceautoupdate"] = force_auto_update
return if !full_clone || !shallow?
end
if options[:clone_target] && requested_remote != remote
raise TapRemoteMismatchError.new(name, @remote, requested_remote)
end
ohai "Unshallowing #{name}" unless quiet
args = %w[fetch --unshallow]
args << "-q" if quiet
path.cd { safe_system "git", *args }
return
end
clear_cache
ohai "Tapping #{name}" unless quiet
args = %W[clone #{requested_remote} #{path}]
args << "--depth=1" unless full_clone
args << "-q" if quiet
begin
safe_system "git", *args
unless Readall.valid_tap?(self, aliases: true)
unless ARGV.homebrew_developer?
raise "Cannot tap #{name}: invalid syntax in tap!"
end
end
rescue Interrupt, RuntimeError
ignore_interrupts do
# wait for git to possibly cleanup the top directory when interrupt happens.
sleep 0.1
FileUtils.rm_rf path
path.parent.rmdir_if_possible
end
raise
end
config["forceautoupdate"] = force_auto_update unless force_auto_update.nil?
link_completions_and_manpages
formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")
puts "Tapped#{formatted_contents} (#{path.abv})." unless quiet
Descriptions.cache_formulae(formula_names)
return if options[:clone_target]
return unless private?
return if quiet
puts <<~EOS
It looks like you tapped a private repository. To avoid entering your
credentials each time you update, you can use git HTTP credential
caching or issue the following command:
cd #{path}
git remote set-url origin git@github.com:#{full_name}.git
EOS
end
def link_completions_and_manpages
command = "brew tap --repair"
Utils::Link.link_manpages(path, command)
Utils::Link.link_completions(path, command)
end
# uninstall this {Tap}.
def uninstall
require "descriptions"
raise TapUnavailableError, name unless installed?
puts "Untapping #{name}..."
abv = path.abv
formatted_contents = contents.presence&.to_sentence&.dup&.prepend(" ")
unpin if pinned?
Descriptions.uncache_formulae(formula_names)
Utils::Link.unlink_manpages(path)
Utils::Link.unlink_completions(path)
path.rmtree
path.parent.rmdir_if_possible
puts "Untapped#{formatted_contents} (#{abv})."
clear_cache
end
# True if the {#remote} of {Tap} is customized.
def custom_remote?
return true unless remote
remote.casecmp(default_remote).nonzero?
end
# path to the directory of all {Formula} files for this {Tap}.
def formula_dir
@formula_dir ||= potential_formula_dirs.find(&:directory?) || path/"Formula"
end
def potential_formula_dirs
@potential_formula_dirs ||= [path/"Formula", path/"HomebrewFormula", path].freeze
end
# path to the directory of all {Cask} files for this {Tap}.
def cask_dir
@cask_dir ||= path/"Casks"
end
def contents
contents = []
if (command_count = command_files.count).positive?
contents << "#{command_count} #{"command".pluralize(command_count)}"
end
if (cask_count = cask_files.count).positive?
contents << "#{cask_count} #{"cask".pluralize(cask_count)}"
end
if (formula_count = formula_files.count).positive?
contents << "#{formula_count} #{"formula".pluralize(formula_count)}"
end
contents
end
# an array of all {Formula} files of this {Tap}.
def formula_files
@formula_files ||= if formula_dir.directory?
formula_dir.children.select { |file| file.extname == ".rb" }
else
[]
end
end
# an array of all {Cask} files of this {Tap}.
def cask_files
@cask_files ||= if cask_dir.directory?
cask_dir.children.select { |file| file.extname == ".rb" }
else
[]
end
end
# return true if given path would present a {Formula} file in this {Tap}.
# accepts both absolute path and relative path (relative to this {Tap}'s path)
# @private
def formula_file?(file)
file = Pathname.new(file) unless file.is_a? Pathname
file = file.expand_path(path)
file.extname == ".rb" && file.parent == formula_dir
end
# return true if given path would present a {Cask} file in this {Tap}.
# accepts both absolute path and relative path (relative to this {Tap}'s path)
# @private
def cask_file?(file)
file = Pathname.new(file) unless file.is_a? Pathname
file = file.expand_path(path)
file.extname == ".rb" && file.parent == cask_dir
end
# an array of all {Formula} names of this {Tap}.
def formula_names
@formula_names ||= formula_files.map { |f| formula_file_to_name(f) }
end
# path to the directory of all alias files for this {Tap}.
# @private
def alias_dir
@alias_dir ||= path/"Aliases"
end
# an array of all alias files of this {Tap}.
# @private
def alias_files
@alias_files ||= Pathname.glob("#{alias_dir}/*").select(&:file?)
end
# an array of all aliases of this {Tap}.
# @private
def aliases
@aliases ||= alias_files.map { |f| alias_file_to_name(f) }
end
# a table mapping alias to formula name
# @private
def alias_table
return @alias_table if @alias_table
@alias_table = {}
alias_files.each do |alias_file|
@alias_table[alias_file_to_name(alias_file)] = formula_file_to_name(alias_file.resolved_path)
end
@alias_table
end
# a table mapping formula name to aliases
# @private
def alias_reverse_table
return @alias_reverse_table if @alias_reverse_table
@alias_reverse_table = {}
alias_table.each do |alias_name, formula_name|
@alias_reverse_table[formula_name] ||= []
@alias_reverse_table[formula_name] << alias_name
end
@alias_reverse_table
end
def command_dir
@command_dir ||= path/"cmd"
end
def command_file?(file)
file = Pathname.new(file) unless file.is_a? Pathname
file = file.expand_path(path)
file.parent == command_dir && file.basename.to_s.match?(/^brew(cask)?-/) &&
(file.executable? || file.extname == ".rb")
end
# an array of all commands files of this {Tap}.
def command_files
@command_files ||= if command_dir.directory?
command_dir.children.select(&method(:command_file?))
else
[]
end
end
# path to the pin record for this {Tap}.
# @private
def pinned_symlink_path
HOMEBREW_LIBRARY/"PinnedTaps/#{name}"
end
# True if this {Tap} has been pinned.
def pinned?
return @pinned if instance_variable_defined?(:@pinned)
@pinned = pinned_symlink_path.directory?
end
# pin this {Tap}.
def pin
raise TapUnavailableError, name unless installed?
raise TapPinStatusError.new(name, true) if pinned?
pinned_symlink_path.make_relative_symlink(path)
@pinned = true
end
# unpin this {Tap}.
def unpin
raise TapUnavailableError, name unless installed?
raise TapPinStatusError.new(name, false) unless pinned?
pinned_symlink_path.delete
pinned_symlink_path.parent.rmdir_if_possible
pinned_symlink_path.parent.parent.rmdir_if_possible
@pinned = false
end
def to_hash
hash = {
"name" => name,
"user" => user,
"repo" => repo,
"path" => path.to_s,
"installed" => installed?,
"official" => official?,
"formula_names" => formula_names,
"formula_files" => formula_files.map(&:to_s),
"command_files" => command_files.map(&:to_s),
"pinned" => pinned?,
}
if installed?
hash["remote"] = remote
hash["custom_remote"] = custom_remote?
hash["private"] = private?
end
hash
end
# Hash with tap formula renames
def formula_renames
require "json"
@formula_renames ||= if (rename_file = path/"formula_renames.json").file?
JSON.parse(rename_file.read)
else
{}
end
end
# Hash of tap migrations declared by this tap in tap_migrations.json,
# or an empty Hash when the file is absent (memoized).
def tap_migrations
  require "json"
  @tap_migrations ||= begin
    migration_file = path/"tap_migrations.json"
    migration_file.file? ? JSON.parse(migration_file.read) : {}
  end
end
# Two taps are equal when they are the same class and share a name.
# A String argument is first resolved to a {Tap} via Tap.fetch.
def ==(other)
  other = Tap.fetch(other) if other.is_a?(String)
  other.class == self.class && other.name == name
end
# Enumerate every installed {Tap} (one per user/repo directory pair under
# TAP_DIRECTORY). Returns an Enumerator when no block is given; returns
# early with nil when the tap directory does not exist.
def self.each
  return unless TAP_DIRECTORY.directory?
  return to_enum unless block_given?
  TAP_DIRECTORY.subdirs.each do |user_dir|
    user_dir.subdirs.each do |repo_dir|
      yield fetch(user_dir.basename.to_s, repo_dir.basename.to_s)
    end
  end
end
# Sorted names of all installed {Tap}s.
def self.names
  map { |tap| tap.name }.sort
end
# {Pathname}s of every tap's `cmd` directory, installed or not present.
def self.cmd_directories
  Pathname.glob(TAP_DIRECTORY/"*/*/cmd")
end
# Convert a formula file path into a fully-qualified "tap/formula" name.
# @private
def formula_file_to_name(file)
  [name, file.basename(".rb")].join("/")
end
# Convert an alias file path into a fully-qualified "tap/alias" name.
# @private
def alias_file_to_name(file)
  [name, file.basename].join("/")
end
private
# Determines whether this tap's repository is private, caching the answer
# in the tap's git config under `homebrew.private`.
#
# If the config already holds "true"/"false" the cached value wins.
# Otherwise the value is computed and written back:
# * a custom (non-default) remote is treated as private;
# * otherwise GitHub is queried via the API;
# * a GitHub 404 is treated as private (the repo is invisible to us);
# * any other GitHub API error falls back to public.
def read_or_set_private_config
case config["private"]
when "true" then true
when "false" then false
else
config["private"] = begin
if custom_remote?
true
else
GitHub.private_repo?(full_name)
end
rescue GitHub::HTTPNotFoundError
true
rescue GitHub::Error
false
end
end
end
end
# A specialized {Tap} subclass representing Homebrew/homebrew-core, the
# tap that hosts the core formulae. It is a singleton, can never be
# pinned or uninstalled, and lazily taps itself on first data access.
class CoreTap < Tap
  def default_remote
    "https://github.com/Homebrew/homebrew-core".freeze
  end

  # @private
  def initialize
    super "Homebrew", "core"
  end

  # The shared singleton instance.
  def self.instance
    @instance ||= new
  end

  # Tap homebrew/core via `brew tap` unless it is already installed.
  def self.ensure_installed!
    return if instance.installed?
    safe_system HOMEBREW_BREW_FILE, "tap", instance.name
  end

  # @private
  def uninstall
    raise "Tap#uninstall is not available for CoreTap"
  end

  # @private
  def pin
    raise "Tap#pin is not available for CoreTap"
  end

  # @private
  def unpin
    raise "Tap#unpin is not available for CoreTap"
  end

  # The core tap is never pinned.
  # @private
  def pinned?
    false
  end

  # @private
  def core_tap?
    true
  end

  # Each data accessor below first ensures the tap is installed, then
  # memoizes the superclass result.

  # @private
  def formula_dir
    return @formula_dir if @formula_dir
    self.class.ensure_installed!
    @formula_dir = super
  end

  # @private
  def alias_dir
    return @alias_dir if @alias_dir
    self.class.ensure_installed!
    @alias_dir = super
  end

  # @private
  def formula_renames
    return @formula_renames if @formula_renames
    self.class.ensure_installed!
    @formula_renames = super
  end

  # @private
  def tap_migrations
    return @tap_migrations if @tap_migrations
    self.class.ensure_installed!
    @tap_migrations = super
  end

  # Core formula names carry no "user/repo/" prefix.
  # @private
  def formula_file_to_name(file)
    file.basename(".rb").to_s
  end

  # @private
  def alias_file_to_name(file)
    file.basename.to_s
  end
end
# Permanent per-{Tap} configuration persisted via `git-config(1)` under
# the `homebrew.*` key namespace of the tap's local repository.
class TapConfig
  attr_reader :tap

  def initialize(tap)
    @tap = tap
  end

  # Read `homebrew.<key>` from the tap's local git config.
  # Returns nil when the tap is not a git repo, git is unavailable,
  # or the key is unset/blank.
  def [](key)
    return unless tap.git? && Utils.git_available?
    tap.path.cd do
      Utils.popen_read("git", "config", "--local", "--get", "homebrew.#{key}").chomp.presence
    end
  end

  # Write `homebrew.<key>` into the tap's local git config, replacing any
  # existing values. Silently does nothing without a git repo or git.
  def []=(key, value)
    return unless tap.git? && Utils.git_available?
    tap.path.cd do
      safe_system "git", "config", "--local", "--replace-all", "homebrew.#{key}", value.to_s
    end
  end
end
require "extend/os/tap"
|
#
# Be sure to run `pod spec lint MSEmojiChecksum.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for MSEmojiChecksum 1.0.0 — human-readable emoji checksums.
Pod::Spec.new do |s|
  s.name     = "MSEmojiChecksum"
  s.version  = "1.0.0"
  s.summary  = "Calculate checksums that are human readable and fun! 🍌🙆🚽"
  s.description = <<-DESC
Checksums are a useful way to quickly check the contents of a bunch of data. The MD5 checksum spits out 16 bytes of random numbers. While this is great for computers, humans don't want to look at a huge hex string. The emoji checksum calculator spits out 4 emoji character hash.
The caclulator uses 845 different emoji characters for a total of 509,831,700,625 unique hash codes. Good enough.
  DESC
  s.homepage = "https://github.com/mindsnacks/MSEmojiChecksumCalculator"
  s.license  = 'MIT'
  s.author   = { "Jacob Eiting" => "jacob.eiting@gmail.com" }
  s.source   = { :git => "https://github.com/mindsnacks/MSEmojiChecksumCalculator.git", :tag => "1.0.0" }

  s.source_files        = '*.{h,m}'
  s.public_header_files = '*.h'
  # NOTE(review): CommonCrypto is a system library rather than a framework —
  # confirm this attribute behaves as intended for the targeted CocoaPods version.
  s.framework = 'CommonCrypto'
end
Move to 0.0.1
#
# Be sure to run `pod spec lint MSEmojiChecksum.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for MSEmojiChecksum 0.0.1 — human-readable emoji checksums.
Pod::Spec.new do |s|
  s.name     = "MSEmojiChecksum"
  s.version  = "0.0.1"
  s.summary  = "Calculate checksums that are human readable and fun! 🍌🙆🚽"
  s.description = <<-DESC
Checksums are a useful way to quickly check the contents of a bunch of data. The MD5 checksum spits out 16 bytes of random numbers. While this is great for computers, humans don't want to look at a huge hex string. The emoji checksum calculator spits out 4 emoji character hash.
The caclulator uses 845 different emoji characters for a total of 509,831,700,625 unique hash codes. Good enough.
  DESC
  s.homepage = "https://github.com/mindsnacks/MSEmojiChecksumCalculator"
  s.license  = 'MIT'
  s.author   = { "Jacob Eiting" => "jacob.eiting@gmail.com" }
  s.source   = { :git => "https://github.com/mindsnacks/MSEmojiChecksumCalculator.git", :tag => "0.0.1" }

  s.source_files        = '*.{h,m}'
  s.public_header_files = '*.h'
  # NOTE(review): CommonCrypto is a system library rather than a framework —
  # confirm this attribute behaves as intended for the targeted CocoaPods version.
  s.framework = 'CommonCrypto'
end
|
# CocoaPods spec for MZFastSortIndex 0.1.1.
Pod::Spec.new do |s|
  s.name     = "MZFastSortIndex"
  s.version  = "0.1.1"
  s.summary  = "Performant and powerful sort index building for Cocoa collections"
  s.description = <<-DESC
MZFastSortIndex provides performant and powerful sort index building for Cocoa collections.
  DESC
  s.homepage = "https://github.com/moshozen/MZFastSortIndex"
  s.license  = 'MIT'
  s.author   = { "Mat Trudel" => "mat@geeky.net" }
  # Tag is derived from the version so they cannot drift apart.
  s.source   = { :git => "https://github.com/moshozen/MZFastSortIndex.git", :tag => s.version.to_s }

  s.platform     = :ios, '8.0'
  s.requires_arc = true
  s.source_files = 'Pod/Classes/**/*'
  s.resource_bundles = { 'MZFastSortIndex' => ['Pod/Assets/*.png'] }
end
Version bump
# CocoaPods spec for MZFastSortIndex 0.2.0.
Pod::Spec.new do |s|
  s.name     = "MZFastSortIndex"
  s.version  = "0.2.0"
  s.summary  = "Performant and powerful sort index building for Cocoa collections"
  s.description = <<-DESC
MZFastSortIndex provides performant and powerful sort index building for Cocoa collections.
  DESC
  s.homepage = "https://github.com/moshozen/MZFastSortIndex"
  s.license  = 'MIT'
  s.author   = { "Mat Trudel" => "mat@geeky.net" }
  # Tag is derived from the version so they cannot drift apart.
  s.source   = { :git => "https://github.com/moshozen/MZFastSortIndex.git", :tag => s.version.to_s }

  s.platform     = :ios, '8.0'
  s.requires_arc = true
  s.source_files = 'Pod/Classes/**/*'
  s.resource_bundles = { 'MZFastSortIndex' => ['Pod/Assets/*.png'] }
end
|
#
# Be sure to run `pod lib lint MyFitnessPalSDK.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the MyFitnessPal iOS SDK 2.0.0.
Pod::Spec.new do |s|
  s.name     = "MyFitnessPalSDK"
  s.version  = "2.0.0"
  s.summary  = "The MyFitnessPal SDK for iOS."
  # NOTE(review): description is intentionally empty in the published spec;
  # `pod lib lint` will warn about this.
  s.description = <<-DESC
  DESC
  s.homepage = "https://github.com/myfitnesspal/ios-sdk-public"
  s.license  = 'Apache Version 2.0'
  s.author   = { "MyFitnessPal" => "api-group@myfitnesspal.com" }
  s.source   = { :git => "https://github.com/myfitnesspal/ios-sdk-public.git", :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'
  s.source_files          = 'MyFitnessPalSDK/**/*'
  s.resource_bundles      = { 'MyFitnessPalSDK' => ['MyFitnessPalSDK/Assets/*.png'] }
  # Only the umbrella header is public in this revision.
  s.public_header_files   = 'MyFitnessPalSDK/MyFitnessPalSDK.h'
end
Includes the rest of the header files
#
# Be sure to run `pod lib lint MyFitnessPalSDK.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for the MyFitnessPal iOS SDK 2.0.0 (all headers public).
Pod::Spec.new do |s|
  s.name     = "MyFitnessPalSDK"
  s.version  = "2.0.0"
  s.summary  = "The MyFitnessPal SDK for iOS."
  # NOTE(review): description is intentionally empty in the published spec;
  # `pod lib lint` will warn about this.
  s.description = <<-DESC
  DESC
  s.homepage = "https://github.com/myfitnesspal/ios-sdk-public"
  s.license  = 'Apache Version 2.0'
  s.author   = { "MyFitnessPal" => "api-group@myfitnesspal.com" }
  s.source   = { :git => "https://github.com/myfitnesspal/ios-sdk-public.git", :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'
  s.source_files          = 'MyFitnessPalSDK/**/*'
  s.resource_bundles      = { 'MyFitnessPalSDK' => ['MyFitnessPalSDK/Assets/*.png'] }
  # Every header under the SDK directory is public in this revision.
  s.public_header_files   = 'MyFitnessPalSDK/**/*.h'
end
|
#
# Be sure to run `pod lib lint NPTableAnimator.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for NPTableAnimator 2.1.1 — table/collection view animations.
Pod::Spec.new do |s|
  s.name     = 'NPTableAnimator'
  s.version  = '2.1.1'
  s.summary  = 'Animations for UITableView and UICollectionView.'
  s.description = <<-DESC
This pod was developed for building table animations
  DESC
  s.homepage = 'https://github.com/Nekitosss/tableAnimator.git'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'Nikita Patskov' => 'patskovn@yahoo.com' }
  s.source   = { :git => 'https://github.com/Nekitosss/tableAnimator.git', :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'
  s.source_files          = 'NPTableAnimator/Classes/**/*'
  s.frameworks            = 'Foundation'
end
change spec version
#
# Be sure to run `pod lib lint NPTableAnimator.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for NPTableAnimator 2.1.2 — table/collection view animations.
Pod::Spec.new do |s|
  s.name     = 'NPTableAnimator'
  s.version  = '2.1.2'
  s.summary  = 'Animations for UITableView and UICollectionView.'
  s.description = <<-DESC
This pod was developed for building table animations
  DESC
  s.homepage = 'https://github.com/Nekitosss/tableAnimator.git'
  s.license  = { :type => 'MIT', :file => 'LICENSE' }
  s.author   = { 'Nikita Patskov' => 'patskovn@yahoo.com' }
  s.source   = { :git => 'https://github.com/Nekitosss/tableAnimator.git', :tag => s.version.to_s }

  s.ios.deployment_target = '8.0'
  s.source_files          = 'NPTableAnimator/Classes/**/*'
  s.frameworks            = 'Foundation'
end
|
#
# Be sure to run `pod spec lint PLShortVideoKit.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for PLShortVideoKit 1.1.1 (binary framework distribution).
Pod::Spec.new do |s|
  s.name     = "PLShortVideoKit"
  s.version  = "1.1.1"
  s.summary  = "PILI iOS short video record SDK"
  s.homepage = "https://github.com/pili-engineering/PLShortVideoKit"
  s.license  = "Apache License 2.0"
  s.author   = { "pili" => "pili@qiniu.com" }
  # Upstream release tags carry a "v" prefix.
  s.source   = { :git => "https://github.com/pili-engineering/PLShortVideoKit.git", :tag => "v#{s.version}" }

  s.platform              = :ios
  s.requires_arc          = true
  s.ios.deployment_target = "8.0"
  s.vendored_libraries    = 'Pod/Library/*.a'
  s.vendored_framework    = "Pod/Library/PLShortVideoKit.framework"
  s.resources             = "Pod/Library/PLShortVideoKit.framework/PLShortVideoKit.bundle"
end
update podspec
#
# Be sure to run `pod spec lint PLShortVideoKit.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for PLShortVideoKit 1.1.1 with the Qiniu upload dependency.
Pod::Spec.new do |s|
  s.name     = "PLShortVideoKit"
  s.version  = "1.1.1"
  s.summary  = "PILI iOS short video record SDK"
  s.homepage = "https://github.com/pili-engineering/PLShortVideoKit"
  s.license  = "Apache License 2.0"
  s.author   = { "pili" => "pili@qiniu.com" }
  # Upstream release tags carry a "v" prefix.
  s.source   = { :git => "https://github.com/pili-engineering/PLShortVideoKit.git", :tag => "v#{s.version}" }

  s.platform              = :ios
  s.requires_arc          = true
  s.ios.deployment_target = "8.0"
  s.vendored_libraries    = 'Pod/Library/*.a'
  s.vendored_framework    = "Pod/Library/PLShortVideoKit.framework"
  s.resources             = "Pod/Library/PLShortVideoKit.framework/PLShortVideoKit.bundle"

  # Pinned exactly; newer Qiniu releases are not covered by this spec.
  s.dependency 'Qiniu', '7.1.5'
end
|
# CocoaPods spec for RxSocialConnect 0.0.7 — OAuth RxSwift extension for iOS.
Pod::Spec.new do |s|
  s.name     = "RxSocialConnect"
  s.version  = "0.0.7"
  s.summary  = "OAuth RxSwift extension for iOS."
  s.homepage = "https://github.com/FuckBoilerplate/RxSocialConnect-iOS"
  s.license  = { :type => "MIT", :file => "LICENSE" }
  s.author   = { "Roberto Frontado" => "robertofrontado@gmail.com" }
  s.source   = { :git => "https://github.com/FuckBoilerplate/RxSocialConnect-iOS.git", :tag => s.version.to_s }
  s.social_media_url = "https://github.com/FuckBoilerplate"

  s.ios.deployment_target = '8.0'
  s.requires_arc = true

  s.dependency 'RxSwift', '~> 2.0.0'
  s.dependency 'OAuthSwift', '~> 0.5.0'
  s.dependency 'RxBlocking', '~> 2.0.0'

  s.default_subspec = "Core"

  # Core: the OAuth/Rx glue itself.
  s.subspec "Core" do |ss|
    ss.source_files = 'Sources/Core/**/*'
  end

  # Moya: optional integration layer pulling in Moya + Alamofire.
  s.subspec "Moya" do |ss|
    ss.source_files = 'Sources/Moya/**/*'
    ss.dependency 'RxSocialConnect/Core'
    ss.dependency 'Alamofire', '~> 3.1.0'
    ss.dependency 'Moya', '~> 6.0.0'
    ss.dependency 'Moya/RxSwift', '~> 6.0.0'
  end
end
Updated podspec
# CocoaPods spec for RxSocialConnect 0.0.7 against the RxSwift 2.5 line.
Pod::Spec.new do |s|
  s.name     = "RxSocialConnect"
  s.version  = "0.0.7"
  s.summary  = "OAuth RxSwift extension for iOS."
  s.homepage = "https://github.com/FuckBoilerplate/RxSocialConnect-iOS"
  s.license  = { :type => "MIT", :file => "LICENSE" }
  s.author   = { "Roberto Frontado" => "robertofrontado@gmail.com" }
  s.source   = { :git => "https://github.com/FuckBoilerplate/RxSocialConnect-iOS.git", :tag => s.version.to_s }
  s.social_media_url = "https://github.com/FuckBoilerplate"

  s.ios.deployment_target = '8.0'
  s.requires_arc = true

  s.dependency 'RxSwift', '~> 2.5.0'
  s.dependency 'OAuthSwift', '~> 0.5.0'
  s.dependency 'RxBlocking', '~> 2.5.0'

  s.default_subspec = "Core"

  # Core: the OAuth/Rx glue itself.
  s.subspec "Core" do |ss|
    ss.source_files = 'Sources/Core/**/*'
  end

  # Moya: optional integration layer pulling in Moya + Alamofire.
  s.subspec "Moya" do |ss|
    ss.source_files = 'Sources/Moya/**/*'
    ss.dependency 'RxSocialConnect/Core'
    ss.dependency 'Alamofire', '~> 3.0'
    ss.dependency 'Moya', '~> 6.5.0'
    ss.dependency 'Moya/RxSwift', '~> 6.5.0'
  end
end
|
# CocoaPods spec for SHLineGraphView 1.0.12.
Pod::Spec.new do |s|
  s.name     = "SHLineGraphView"
  s.version  = "1.0.12"
  s.summary  = "`SHLineGraphView` is a simple and efficient reusable UIView subview which you can use to create line graphs."
  s.description = <<-DESC
`SHLineGraphView` is a simple and efficient reusable UIView subview which you can use to create line graphs. It uses Core Graphics and Core Animation to create the light weight graphs. it is easy to use/understand and completely customizable.
  DESC
  s.homepage = "https://github.com/grevolution/SHLineGraphView"
  s.license  = {:type => 'MIT'}
  s.author   = { "Shan Ul Haq" => "g@grevolution.me" }
  s.platform = :ios
  # NOTE(review): spec version is 1.0.12 but the git tag is hard-coded at
  # "1.0.6" — confirm this mismatch is intentional before republishing.
  s.source   = { :git => "https://github.com/grevolution/SHLineGraphView.git", :tag => "1.0.6" }

  s.requires_arc  = true
  s.source_files  = 'SHLineGraphView/SHLineGraphView/*.{h,m}'
  s.exclude_files = 'Classes/Exclude'
end
up version to 1.0.15
# CocoaPods spec for SHLineGraphView 1.0.15.
Pod::Spec.new do |s|
  s.name     = "SHLineGraphView"
  s.version  = "1.0.15"
  s.summary  = "`SHLineGraphView` is a simple and efficient reusable UIView subview which you can use to create line graphs."
  s.description = <<-DESC
`SHLineGraphView` is a simple and efficient reusable UIView subview which you can use to create line graphs. It uses Core Graphics and Core Animation to create the light weight graphs. it is easy to use/understand and completely customizable.
  DESC
  s.homepage = "https://github.com/grevolution/SHLineGraphView"
  s.license  = {:type => 'MIT'}
  s.author   = { "Shan Ul Haq" => "g@grevolution.me" }
  s.platform = :ios
  # Fix: the tag was left hard-coded at "1.0.6" when the version was bumped
  # to 1.0.15, so CocoaPods would fetch stale sources. Deriving the tag from
  # s.version keeps the two in lockstep (assumes a matching "1.0.15" tag
  # exists upstream — verify before publishing).
  s.source   = { :git => "https://github.com/grevolution/SHLineGraphView.git", :tag => s.version.to_s }

  s.requires_arc  = true
  s.source_files  = 'SHLineGraphView/SHLineGraphView/*.{h,m}'
  s.exclude_files = 'Classes/Exclude'
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dsl_companion/version'

# Gem specification for dsl_companion — a customizable interpreter for
# running your own internal DSLs.
Gem::Specification.new do |spec|
  spec.name          = 'dsl_companion'
  spec.version       = DSLCompanion::VERSION
  spec.authors       = ['Laurent B.']
  spec.email         = ['lbnetid+gh@gmail.com']
  spec.summary       = %q{Provides a customizable interpreter to run your own internal DSLs.}
  spec.description   = %q{The goal of this gem is to provide a versatile DSL interpreter.}
  # Fix: homepage was an empty string, which `gem build` warns about and
  # which leaves the project page blank on rubygems.org.
  spec.homepage      = 'https://github.com/lbriais/dsl_companion'
  spec.license       = 'MIT'

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '~> 1.5'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec'
end
Homepage
# coding: utf-8
# Gem specification for dsl_companion — a customizable interpreter for
# running your own internal DSLs.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dsl_companion/version'

Gem::Specification.new do |spec|
  spec.name    = 'dsl_companion'
  spec.version = DSLCompanion::VERSION
  spec.authors = ['Laurent B.']
  spec.email   = ['lbnetid+gh@gmail.com']

  spec.summary     = %q{Provides a customizable interpreter to run your own internal DSLs.}
  spec.description = %q{The goal of this gem is to provide a versatile DSL interpreter.}
  spec.homepage    = 'https://github.com/lbriais/dsl_companion'
  spec.license     = 'MIT'

  # Package everything tracked by git; expose bin/ entries as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '~> 1.5'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec'
end
|
require 'time'
require 'yaml'
require 'logger'
# Maps raw TMDb triples (graph 'raw') into a normalized LOD schema
# (graph 'mapped') in a Virtuoso triple store, then enqueues the mapped
# movie URI for downstream processing.
#
# Collaborators (all project-local; exact contracts not visible here):
# MsgPublisher/MsgConsumer (queueing), VirtuosoWriter/VirtuosoReader
# (triple store I/O), TMDbMapper::DBpediaReader (place lookups).
class TMDbMapper::Mapper
# TODO check all literals (especially dates and numbers) for logical correctness
# Wires up logging, queues, triple-store readers/writers and the DBpedia
# reader. Reads from graph 'raw', writes to graph 'mapped'.
def initialize
load_schemas()
@log = Logger.new('log', 'daily')
@publisher = MsgPublisher.new
@publisher.set_queue 'mapping'
@virtuoso_writer = VirtuosoWriter.new
@virtuoso_writer.set_graph 'mapped'
@virtuoso_reader = VirtuosoReader.new
@virtuoso_reader.set_graph 'raw'
@dbpedia_reader = TMDbMapper::DBpediaReader.new
end
# Subscribes to the 'raw_tmdb' queue; every incoming movie URI is mapped.
def register_receiver
@receiver = MsgConsumer.new
@receiver.set_queue 'raw_tmdb'
@receiver.subscribe(type: :movie_uri) { |movie_uri| map(movie_uri) }
end
# Maps one movie: clears previously mapped triples for the URI, re-maps
# id/titles/dates/companies/cast/director, stamps an update time and
# publishes the URI on the 'mapping' queue.
def map(raw_db_uri)
# try to delete existing triples for movie first
@virtuoso_writer.delete_triple(
subject: raw_db_uri)
# add new triples
@virtuoso_writer.new_triple raw_db_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Movie"
map_movie_id(raw_db_uri)
map_movie_titles(raw_db_uri)
map_movie_release_dates(raw_db_uri)
map_movie_production_companies(raw_db_uri)
map_cast(raw_db_uri)
map_director(raw_db_uri)
@virtuoso_writer.new_triple raw_db_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
@publisher.enqueue :movie_uri, raw_db_uri
end
# Copies TMDb and IMDb ids onto the movie. Values shorter than 2 chars
# are skipped (guards against empty/placeholder literals).
def map_movie_id(raw_db_uri)
ids = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/id"
)
ids.each do |id|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}id", id
) if id.to_s.length > 1
end if ids
imdb_ids = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/imdb_id"
)
imdb_ids.each do |imdb_id|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}imdb_id", imdb_id
) if imdb_id.to_s.length > 1
end if imdb_ids
end
# Maps movie titles to schema:name.
def map_movie_titles(raw_db_uri)
titles = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/title"
)
titles.each do |title|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}name", title
) if title.to_s.length > 1
# puts title.to_s
end if titles
end
# Maps release dates to schema:datePublished as xsd:date literals.
# Unparseable dates are reported to stdout and skipped.
def map_movie_release_dates(raw_db_uri)
dates = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/release_date"
)
dates.each do |release_date|
begin
date_string = nil
date_string = Date.parse(release_date.to_s).xmlschema if release_date.to_s.length > 1
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}datePublished", "#{date_string}^^#{@schemas['xsd']}date"
) if date_string
rescue ArgumentError
puts "Could not parse release date `#{release_date.to_s}' as date."
end
end if dates
end
# Builds a mapped URI per production company, re-creates its triples and
# links it to the movie via schema:productionCompany.
# NOTE(review): the mapped URI is built by appending ids inside the loop —
# with more than one id per company the URIs concatenate; confirm the raw
# data guarantees a single id per company node.
def map_movie_production_companies(raw_db_uri)
companies = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies"
)
companies.each do |production_company_raw_uri|
production_company_mapped_uri = "#{@schemas['base_tmdb']}/company/"
ids = @virtuoso_reader.get_objects_for(
subject: production_company_raw_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies/id"
)
ids.each do |production_company_id|
production_company_mapped_uri += "#{production_company_id}"
# try to delete existing triples for company first
@virtuoso_writer.delete_triple(
subject: production_company_mapped_uri)
# add new triples
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Organization", literal: false
) if production_company_id.to_s.length > 1
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['lom']}id", production_company_id
) if production_company_id.to_s.length > 1
end if ids
names = @virtuoso_reader.get_objects_for(
subject: production_company_raw_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies/name"
)
names.each do |production_company_name|
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['schema']}name", production_company_name
) if production_company_name.to_s.length > 1
end if names
@virtuoso_writer.new_triple production_company_mapped_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}productionCompany", production_company_mapped_uri, literal: false
)
end if companies
end
# Maps cast entries to Performance nodes (with actor and optional
# character), linking each to the movie via lom:performance.
# NOTE(review): like the companies mapping, the performance URI appends
# ids inside a loop — assumes one id per cast node.
def map_cast(raw_db_uri)
casts = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/credits/cast"
)
casts.each do |cast_raw_uri|
cast_mapped_uri = "#{@schemas['base_tmdb']}/performance/"
ids = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/id"
)
ids.each do |cast_id|
cast_mapped_uri += "#{cast_id}"
# try to delete existing triples for cast first
@virtuoso_writer.delete_triple(
subject: cast_mapped_uri)
# add new triples
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Performance", literal: false
) if cast_id.to_s.length > 1
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}id", cast_id
) if cast_id.to_s.length > 1
end if ids
persons = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/person"
)
persons.each do |person_uri|
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}actor", person_uri
) if person_uri.to_s.length > 1
# try to delete existing triples for person first
@virtuoso_writer.delete_triple(
subject: person_uri)
# add new triples
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['dbpedia']}Actor"
) if person_uri.to_s.length > 1
map_person person_uri
end if persons
characters = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/character"
)
characters.each do |performance_character|
character_uri = nil
character_uri = "#{cast_mapped_uri}/character" if performance_character.to_s.length > 1
if character_uri
# try to delete existing triples for character first
@virtuoso_writer.delete_triple(
subject: character_uri)
# add new triples
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}character", character_uri, literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['rdf']}type", "#{@schemas['dbpedia']}FictionalCharacter", literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Person", literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['schema']}name", performance_character
)
@virtuoso_writer.new_triple character_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
end
end if characters
@virtuoso_writer.new_triple cast_mapped_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}performance", cast_mapped_uri, literal: false
)
end if casts
end
# Scans crew entries for directors and links them to the movie via
# schema:director.
# NOTE(review): `job` is the collection returned by get_objects_for; the
# regex is matched against its string representation, not individual
# values — confirm that is the intended behavior.
def map_director(raw_db_uri)
crews = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/credits/crew"
)
crews.each do |crew_raw_uri|
job = @virtuoso_reader.get_objects_for(
subject: crew_raw_uri,
predicate: "#{@schemas['tmdb']}crew/job"
)
# check whether job is 'director'
if job.to_s =~ /director/i
persons = @virtuoso_reader.get_objects_for(
subject: crew_raw_uri,
predicate: "#{@schemas['tmdb']}crew/person"
)
persons.each do |person_uri|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}director", person_uri, literal: false
)
# try to delete existing triples for crew first
@virtuoso_writer.delete_triple(
subject: person_uri)
# add new triples
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Director"
)
map_person person_uri
end if persons
end
end if crews
end
# Maps person-level data: id, full/given/family name (split on spaces),
# birth date (xsd:date) and birth place (resolved to a DBpedia URI).
def map_person(person_uri)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Person", literal:false
)
ids = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/id"
)
ids.each do |person_id|
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['lom']}id", person_id
)
end if ids
names = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/name"
)
names.each do |person_name|
# naive split: first token = given name, last token = family name
person_names = person_name.to_s.split(' ')
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}name", person_name
)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}givenName", person_names.first
)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}familyName", person_names.last
)
end if names
birthdates = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/birthday"
)
birthdates.each do |date|
begin
date_string = Date.parse(date.to_s).xmlschema
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}birthDate", "#{date_string}^^#{@schemas['xsd']}date"
)
# puts date_string
rescue ArgumentError
@log.error "Could not parse release date `#{date.to_s}' as date."
end
end if birthdates
birthplaces = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/place_of_birth"
)
birthplaces.each do |place|
place_uri = nil
place_uri = @dbpedia_reader.get_place_uri place if place.to_s.length > 1
if place_uri
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['dbpedia']}birthPlace", place_uri, literal:false
)
end
end if birthplaces
@virtuoso_writer.new_triple person_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
end
# Renders a literal with an xsd datatype suffix, e.g. "...^^xsd:dateTime".
def set_xsd_type(literal, type)
"#{literal}^^#{@schemas['xsd']}#{type}"
end
private
# Lazily loads credentials from ../config/secrets.yml.
def secrets
@secrets ||= YAML.load_file '../config/secrets.yml'
end
private
# Loads the schema/namespace prefix table from ../config/namespaces.yml
# into @schemas.
def load_schemas
file ||= YAML.load_file '../config/namespaces.yml'
@schemas = file['schemas']
end
end
added schema:alternateName to TMDb-Mapper
require 'time'
require 'yaml'
require 'logger'
class TMDbMapper::Mapper
# TODO check all literals (especially dates and numbers) for logical correctness
def initialize
load_schemas()
@log = Logger.new('log', 'daily')
@publisher = MsgPublisher.new
@publisher.set_queue 'mapping'
@virtuoso_writer = VirtuosoWriter.new
@virtuoso_writer.set_graph 'mapped'
@virtuoso_reader = VirtuosoReader.new
@virtuoso_reader.set_graph 'raw'
@dbpedia_reader = TMDbMapper::DBpediaReader.new
end
def register_receiver
@receiver = MsgConsumer.new
@receiver.set_queue 'raw_tmdb'
@receiver.subscribe(type: :movie_uri) { |movie_uri| map(movie_uri) }
end
def map(raw_db_uri)
# try to delete existing triples for movie first
@virtuoso_writer.delete_triple(
subject: raw_db_uri)
# add new triples
@virtuoso_writer.new_triple raw_db_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Movie"
map_movie_id(raw_db_uri)
map_movie_titles(raw_db_uri)
map_movie_release_dates(raw_db_uri)
map_movie_production_companies(raw_db_uri)
map_cast(raw_db_uri)
map_director(raw_db_uri)
@virtuoso_writer.new_triple raw_db_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
@publisher.enqueue :movie_uri, raw_db_uri
end
def map_movie_id(raw_db_uri)
ids = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/id"
)
ids.each do |id|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}id", id
) if id.to_s.length > 1
end if ids
imdb_ids = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/imdb_id"
)
imdb_ids.each do |imdb_id|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}imdb_id", imdb_id
) if imdb_id.to_s.length > 1
end if imdb_ids
end
def map_movie_titles(raw_db_uri)
titles = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/title"
)
titles.each do |title|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}name", title
) if title.to_s.length > 1
# puts title.to_s
end if titles
end
def map_movie_release_dates(raw_db_uri)
dates = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/release_date"
)
dates.each do |release_date|
begin
date_string = nil
date_string = Date.parse(release_date.to_s).xmlschema if release_date.to_s.length > 1
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}datePublished", "#{date_string}^^#{@schemas['xsd']}date"
) if date_string
rescue ArgumentError
puts "Could not parse release date `#{release_date.to_s}' as date."
end
end if dates
end
# Maps each production company of the movie to its own resource under
# base_tmdb/company/<id>, typed as schema:Organization, and links it to
# the movie via schema:productionCompany.
# NOTE(review): the mapped URI is built with `+=` inside `ids.each`; if a
# company ever has more than one id the ids concatenate into one URI, and
# if `ids` is empty/nil the URI stays the bare ".../company/" prefix —
# confirm ids are always exactly one per company.
def map_movie_production_companies(raw_db_uri)
companies = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies"
)
companies.each do |production_company_raw_uri|
production_company_mapped_uri = "#{@schemas['base_tmdb']}/company/"
ids = @virtuoso_reader.get_objects_for(
subject: production_company_raw_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies/id"
)
ids.each do |production_company_id|
production_company_mapped_uri += "#{production_company_id}"
# try to delete existing triples for company first
@virtuoso_writer.delete_triple(
subject: production_company_mapped_uri)
# add new triples
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Organization", literal: false
) if production_company_id.to_s.length > 1
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['lom']}id", production_company_id
) if production_company_id.to_s.length > 1
end if ids
names = @virtuoso_reader.get_objects_for(
subject: production_company_raw_uri,
predicate: "#{@schemas['tmdb']}movie/production_companies/name"
)
names.each do |production_company_name|
@virtuoso_writer.new_triple(
production_company_mapped_uri, "#{@schemas['schema']}name", production_company_name
) if production_company_name.to_s.length > 1
end if names
# provenance timestamp for the company resource
@virtuoso_writer.new_triple production_company_mapped_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
# link movie -> company
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}productionCompany", production_company_mapped_uri, literal: false
)
end if companies
end
# Maps each TMDb cast entry of the movie to a lom:Performance resource
# under base_tmdb/performance/<id>, attaches the actor (a person resource,
# further mapped by map_person), an optional fictional-character resource,
# and links the movie to the performance via lom:performance.
# NOTE(review): like map_movie_production_companies, the performance URI
# is built with `+=` inside `ids.each` — multiple ids would concatenate;
# confirm exactly one id per cast entry.
def map_cast(raw_db_uri)
casts = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/credits/cast"
)
casts.each do |cast_raw_uri|
cast_mapped_uri = "#{@schemas['base_tmdb']}/performance/"
ids = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/id"
)
ids.each do |cast_id|
cast_mapped_uri += "#{cast_id}"
# try to delete existing triples for cast first
@virtuoso_writer.delete_triple(
subject: cast_mapped_uri)
# add new triples
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Performance", literal: false
) if cast_id.to_s.length > 1
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}id", cast_id
) if cast_id.to_s.length > 1
end if ids
persons = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/person"
)
persons.each do |person_uri|
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}actor", person_uri
) if person_uri.to_s.length > 1
# try to delete existing triples for person first
@virtuoso_writer.delete_triple(
subject: person_uri)
# add new triples
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['dbpedia']}Actor"
) if person_uri.to_s.length > 1
map_person person_uri
end if persons
characters = @virtuoso_reader.get_objects_for(
subject: cast_raw_uri,
predicate: "#{@schemas['tmdb']}cast/character"
)
characters.each do |performance_character|
character_uri = nil
# character resource lives under the performance URI
character_uri = "#{cast_mapped_uri}/character" if performance_character.to_s.length > 1
if character_uri
# try to delete existing triples for character first
@virtuoso_writer.delete_triple(
subject: character_uri)
# add new triples
@virtuoso_writer.new_triple(
cast_mapped_uri, "#{@schemas['lom']}character", character_uri, literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['rdf']}type", "#{@schemas['dbpedia']}FictionalCharacter", literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Person", literal:false
)
@virtuoso_writer.new_triple(
character_uri, "#{@schemas['schema']}name", performance_character
)
@virtuoso_writer.new_triple character_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
end
end if characters
# provenance timestamp for the performance resource
@virtuoso_writer.new_triple cast_mapped_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
# link movie -> performance
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['lom']}performance", cast_mapped_uri, literal: false
)
end if casts
end
# Scans the movie's crew entries for directors and writes a
# schema:director link plus a lom:Director type triple for each such
# person, then delegates full person mapping to map_person.
def map_director(raw_db_uri)
crews = @virtuoso_reader.get_objects_for(
subject: raw_db_uri,
predicate: "#{@schemas['tmdb']}movie/credits/crew"
)
crews.each do |crew_raw_uri|
job = @virtuoso_reader.get_objects_for(
subject: crew_raw_uri,
predicate: "#{@schemas['tmdb']}crew/job"
)
# check whether job is 'director'
# NOTE(review): `job` is the whole result collection; `job.to_s` matches
# if the collection's string form contains "director" anywhere — confirm
# this is the intended matching granularity.
if job.to_s =~ /director/i
persons = @virtuoso_reader.get_objects_for(
subject: crew_raw_uri,
predicate: "#{@schemas['tmdb']}crew/person"
)
persons.each do |person_uri|
@virtuoso_writer.new_triple(
raw_db_uri, "#{@schemas['schema']}director", person_uri, literal: false
)
# try to delete existing triples for crew first
@virtuoso_writer.delete_triple(
subject: person_uri)
# add new triples
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['lom']}Director"
)
map_person person_uri
end if persons
end
end if crews
end
# Enriches a person resource with schema.org properties taken from its
# TMDb triples: id, full/given/family name, aliases, birth date and
# (via a DBpedia lookup) birth place, then stamps an update time.
def map_person(person_uri)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['rdf']}type", "#{@schemas['schema']}Person", literal:false
)
ids = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/id"
)
ids.each do |person_id|
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['lom']}id", person_id
)
end if ids
names = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/name"
)
names.each do |person_name|
# naive split on spaces; for single-token names givenName == familyName,
# and middle names are dropped from both
person_names = person_name.to_s.split(' ')
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}name", person_name
)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}givenName", person_names.first
)
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}familyName", person_names.last
)
end if names
aliases = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/also_known_as"
)
aliases.each do |alias_name|
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}alternateName", alias_name
)
end if aliases
birthdates = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/birthday"
)
birthdates.each do |date|
begin
date_string = Date.parse(date.to_s).xmlschema
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['schema']}birthDate", "#{date_string}^^#{@schemas['xsd']}date"
)
rescue ArgumentError
# unparseable birthdays are logged and skipped
@log.error "Could not parse release date `#{date.to_s}' as date."
end
end if birthdates
birthplaces = @virtuoso_reader.get_objects_for(
subject: person_uri,
predicate: "#{@schemas['tmdb']}person/place_of_birth"
)
birthplaces.each do |place|
place_uri = nil
# resolve the free-text place against DBpedia; nil when not found
place_uri = @dbpedia_reader.get_place_uri place if place.to_s.length > 1
if place_uri
@virtuoso_writer.new_triple(
person_uri, "#{@schemas['dbpedia']}birthPlace", place_uri, literal:false
)
end
end if birthplaces
# provenance timestamp for the person resource
@virtuoso_writer.new_triple person_uri, @schemas['pav_lastupdateon'], (set_xsd_type DateTime.now, 'dateTime')
end
# Renders a literal with an XSD datatype suffix, e.g.
# "2015-01-01T00:00:00+00:00^^<xsd-namespace>dateTime".
def set_xsd_type(literal, type)
  [literal, '^^', @schemas['xsd'], type].join
end
private
# Lazily loads and memoizes credentials from the sibling config
# directory (path is relative to the process working directory).
def secrets
@secrets ||= YAML.load_file '../config/secrets.yml'
end
private

# Loads the RDF namespace prefixes from config into @schemas.
# Path is relative to the process working directory.
def load_schemas
  # plain assignment: `file` is always a fresh local here, so the
  # original `file ||=` was a misleading no-op
  file = YAML.load_file '../config/namespaces.yml'
  @schemas = file['schemas']
end
end
|
require 'date'

# Computes the date of a recurring meetup ("first Monday", "teenth
# Friday", "last Wednesday", ...) within a given month and year.
class Meetup
  attr_reader :month, :year

  def initialize(month, year)
    @month = month
    @year = year
  end

  # weekday: a symbol such as :monday; schedule: one of :first, :second,
  # :third, :fourth, :teenth, :last. Returns a Date.
  def day(weekday, schedule)
    send(schedule, weekday)
  end

  private

  def teenth(weekday)
    search(weekday) { Date.new(year, month, 13) }
  end

  def first(weekday)
    search(weekday)
  end

  def second(weekday)
    search(weekday) { Date.new(year, month, starter(2)) }
  end

  def third(weekday)
    search(weekday) { Date.new(year, month, starter(3)) }
  end

  def fourth(weekday)
    search(weekday) { Date.new(year, month, starter(4)) }
  end

  # Walks backwards from the last day of the month to the last
  # occurrence of the weekday.
  def last(weekday)
    starter_date = Date.new(year, month, 1).next_month - 1
    # public_send of Date's weekday predicate (e.g. monday?) replaces the
    # original Kernel.eval on an interpolated string
    starter_date -= 1 until starter_date.public_send("#{weekday}?")
    starter_date
  end

  # Walks forward from the block-supplied start date (default: the 1st)
  # to the first occurrence of the weekday.
  def search(weekday)
    starter_date = block_given? ? yield : Date.new(year, month, 1)
    starter_date += 1 until starter_date.public_send("#{weekday}?")
    starter_date
  end

  # Earliest possible day-of-month for the num'th week (1, 8, 15, 22).
  def starter(num)
    7 * (num - 1) + 1
  end
end
Removed the use of blocks, as they were unnecessary
require 'date'

# Computes the date of a recurring meetup ("first Monday", "teenth
# Friday", "last Wednesday", ...) within a given month and year.
class Meetup
  attr_reader :month, :year

  def initialize(month, year)
    @month = month
    @year = year
  end

  # weekday: a symbol such as :monday; schedule: one of :first, :second,
  # :third, :fourth, :teenth, :last. Returns a Date.
  def day(weekday, schedule)
    send(schedule, weekday)
  end

  private

  def teenth(weekday)
    search(weekday, 13)
  end

  def first(weekday)
    search(weekday)
  end

  def second(weekday)
    search(weekday, start_day(2))
  end

  def third(weekday)
    search(weekday, start_day(3))
  end

  def fourth(weekday)
    search(weekday, start_day(4))
  end

  # Walks backwards from the last day of the month to the last
  # occurrence of the weekday.
  def last(weekday)
    starter_date = Date.new(year, month, 1).next_month - 1
    # public_send of Date's weekday predicate (e.g. monday?) replaces the
    # original Kernel.eval on an interpolated string
    starter_date -= 1 until starter_date.public_send("#{weekday}?")
    starter_date
  end

  # Walks forward from start_day to the first occurrence of the weekday.
  def search(weekday, start_day = 1)
    starter_date = Date.new(year, month, start_day)
    starter_date += 1 until starter_date.public_send("#{weekday}?")
    starter_date
  end

  # Earliest possible day-of-month for the week_num'th week (1, 8, 15, 22).
  def start_day(week_num)
    7 * (week_num - 1) + 1
  end
end
# CocoaPods spec for Asterism 1.0.0-RC1.
Pod::Spec.new do |s|
s.name = "Asterism"
s.version = "1.0.0-RC1"
s.summary = "Functional helpers for Objective-C."
s.homepage = "https://github.com/robb/Asterism"
s.license = { :type => 'MIT' }
s.author = { "Robert Böhnke" => "robb@robb.is" }
s.source = { :git => "https://github.com/robb/Asterism.git", :tag => "1.0.0-RC1" }
# NOTE(review): iOS 0.5 never existed as a deployment target —
# presumably '5.0' was intended; verify before publishing.
s.ios.deployment_target = '0.5'
s.osx.deployment_target = '10.8'
s.source_files = 'Asterism', 'Asterism/**/*.{h,m}'
s.exclude_files = 'Specs'
s.requires_arc = true
end
Second 1.0 Release Candidate
# CocoaPods spec for Asterism 1.0.0-RC2.
Pod::Spec.new do |s|
s.name = "Asterism"
s.version = "1.0.0-RC2"
s.summary = "Functional helpers for Objective-C."
s.homepage = "https://github.com/robb/Asterism"
s.license = { :type => 'MIT' }
s.author = { "Robert Böhnke" => "robb@robb.is" }
s.source = { :git => "https://github.com/robb/Asterism.git", :tag => "1.0.0-RC2" }
# NOTE(review): iOS 0.5 never existed as a deployment target —
# presumably '5.0' was intended; verify before publishing.
s.ios.deployment_target = '0.5'
s.osx.deployment_target = '10.8'
s.source_files = 'Asterism', 'Asterism/**/*.{h,m}'
s.exclude_files = 'Specs'
s.requires_arc = true
end
|
#
# Be sure to run `pod lib lint filename.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for Bluepeer 1.4.2: adhoc Bluetooth/wifi P2P networking.
Pod::Spec.new do |s|
s.name = "Bluepeer"
s.version = "1.4.2"
s.summary = "Provides adhoc Bluetooth and wifi networking at high-level"
s.description = <<-DESC
Provides P2P (adhoc) Bluetooth and wifi networking at high-level. Uses low-level frameworks like HHServices to have more control than Multipeer and NSNetService.
DESC
s.homepage = "https://github.com/xaphod/Bluepeer"
s.license = 'MIT'
s.author = { "Tim Carr" => "xaphod@gmail.com" }
s.source = { :git => "https://github.com/xaphod/Bluepeer.git", :tag => s.version.to_s }
s.platform = :ios, '9.0'
s.requires_arc = true
s.swift_version = '5.0'
# Base networking layer.
s.subspec 'Core' do |core|
core.source_files = 'Core/*.{swift,m,h}'
core.resource_bundles = {
'Bluepeer' => ['Assets/*.{lproj,storyboard}']
}
core.dependency 'CocoaAsyncSocket', '>= 7.4.0'
core.dependency 'HHServices', '>= 2.0'
core.dependency 'xaphodObjCUtils', '>= 0.0.6'
core.dependency 'DataCompression', '< 4.0.0'
end
# Optional higher-level protocol built on Core.
s.subspec 'HotPotatoNetwork' do |hpn|
hpn.source_files = 'HotPotato/*.{swift,m,h}'
hpn.dependency 'Bluepeer/Core'
hpn.dependency 'ObjectMapper', '~> 3.1'
end
#s.public_header_files = 'Pod/Classes/*.h'
#s.xcconfig = {'OTHER_LDFLAGS' => '-ObjC -all_load'}
#s.prefix_header_file = 'Pod/Classes/EOSFTPServer-Prefix.pch'
#s.pod_target_xcconfig = {'SWIFT_INCLUDE_PATHS' => '$(SRCROOT)/Bluepeer/Pod/**'}
#s.preserve_paths = 'Pod/Classes/module.modulemap'
end
Pod to 1.4.3
#
# Be sure to run `pod lib lint filename.podspec' to ensure this is a
# valid spec and remove all comments before submitting the spec.
#
# Any lines starting with a # are optional, but encouraged
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for Bluepeer 1.4.3: adhoc Bluetooth/wifi P2P networking.
Pod::Spec.new do |s|
s.name = "Bluepeer"
s.version = "1.4.3"
s.summary = "Provides adhoc Bluetooth and wifi networking at high-level"
s.description = <<-DESC
Provides P2P (adhoc) Bluetooth and wifi networking at high-level. Uses low-level frameworks like HHServices to have more control than Multipeer and NSNetService.
DESC
s.homepage = "https://github.com/xaphod/Bluepeer"
s.license = 'MIT'
s.author = { "Tim Carr" => "xaphod@gmail.com" }
s.source = { :git => "https://github.com/xaphod/Bluepeer.git", :tag => s.version.to_s }
s.platform = :ios, '9.0'
s.requires_arc = true
s.swift_version = '5.0'
# Base networking layer.
s.subspec 'Core' do |core|
core.source_files = 'Core/*.{swift,m,h}'
core.resource_bundles = {
'Bluepeer' => ['Assets/*.{lproj,storyboard}']
}
core.dependency 'CocoaAsyncSocket', '>= 7.4.0'
core.dependency 'HHServices', '>= 2.0'
core.dependency 'xaphodObjCUtils', '>= 0.0.6'
core.dependency 'DataCompression', '< 4.0.0'
end
# Optional higher-level protocol built on Core.
s.subspec 'HotPotatoNetwork' do |hpn|
hpn.source_files = 'HotPotato/*.{swift,m,h}'
hpn.dependency 'Bluepeer/Core'
hpn.dependency 'ObjectMapper', '~> 3.1'
end
#s.public_header_files = 'Pod/Classes/*.h'
#s.xcconfig = {'OTHER_LDFLAGS' => '-ObjC -all_load'}
#s.prefix_header_file = 'Pod/Classes/EOSFTPServer-Prefix.pch'
#s.pod_target_xcconfig = {'SWIFT_INCLUDE_PATHS' => '$(SRCROOT)/Bluepeer/Pod/**'}
#s.preserve_paths = 'Pod/Classes/module.modulemap'
end
|
# IMPORTANT: This file is generated by cucumber-rails - edit at your own peril.
# It is recommended to regenerate this file in the future when you upgrade to a
# newer version of cucumber-rails. Consider adding your own code to a new file
# instead of editing this one. Cucumber will automatically load all features/**/*.rb
# files.
# Boots the cucumber-rails integration (Capybara drivers, Rails world,
# DatabaseCleaner hooks).
require 'cucumber/rails'
# Capybara defaults to XPath selectors rather than Webrat's default of CSS3. In
# order to ease the transition to Capybara we set the default here. If you'd
# prefer to use XPath just remove this line and adjust any selectors in your
# steps to use the XPath syntax.
Capybara.default_selector = :css
# By default, any exception happening in your Rails application will bubble up
# to Cucumber so that your scenario will fail. This is a different from how
# your application behaves in the production environment, where an error page will
# be rendered instead.
#
# Sometimes we want to override this default behaviour and allow Rails to rescue
# exceptions and display an error page (just like when the app is running in production).
# Typical scenarios where you want to do this is when you test your error pages.
# There are two ways to allow Rails to rescue exceptions:
#
# 1) Tag your scenario (or feature) with @allow-rescue
#
# 2) Set the value below to true. Beware that doing this globally is not
#    recommended as it will mask a lot of errors for you!
#
ActionController::Base.allow_rescue = false
# Remove/comment out the lines below if your app doesn't have a database.
# For some databases (like MongoDB and CouchDB) you may need to use :truncation instead.
begin
  DatabaseCleaner.strategy = :transaction
rescue NameError
  raise "You need to add database_cleaner to your Gemfile (in the :test group) if you wish to use it."
end
# You may also want to configure DatabaseCleaner to use different strategies for certain features and scenarios.
# See the DatabaseCleaner documentation for details. Example:
#
#   Before('@no-txn,@selenium,@culerity,@celerity,@javascript') do
#     # { :except => [:widgets] } may not do what you expect here
#     # as tCucumber::Rails::Database.javascript_strategy overrides
#     # this setting.
#     DatabaseCleaner.strategy = :truncation
#   end
#
#   Before('~@no-txn', '~@selenium', '~@culerity', '~@celerity', '~@javascript') do
#     DatabaseCleaner.strategy = :transaction
#   end
#
# Possible values are :truncation and :transaction
# The :transaction strategy is faster, but might give you threading problems.
# See https://github.com/cucumber/cucumber-rails/blob/master/features/choose_javascript_database_strategy.feature
Cucumber::Rails::Database.javascript_strategy = :truncation
Cucumber configuration added for the teams
|
#
## Copyright:: Copyright (c) 2015 GitLab B.V.
## License:: Apache License, Version 2.0
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#
# Omnibus software definition: fetches the Mattermost Team Edition
# release tarball and installs its binary, assets and compiled-license
# file into the GitLab omnibus tree.
name 'mattermost'
default_version '3.7.5'
source url: "https://releases.mattermost.com/#{version}/mattermost-team-#{version}-linux-amd64.tar.gz",
       md5: '94fb39daca94a506cd188afba2102146'
relative_path 'mattermost'
# Locals consumed by the license DSL calls and the build block below.
license_name = 'GITLAB-MATTERMOST-COMPILED-LICENSE.txt'
license_path = File.join(install_dir, 'embedded/service/mattermost', license_name)
license 'MIT with Trademark Protection'
license_file license_path
build do
  move 'bin/platform', "#{install_dir}/embedded/bin/mattermost"
  command "mkdir -p #{install_dir}/embedded/service/mattermost"
  copy 'templates', "#{install_dir}/embedded/service/mattermost/templates"
  copy 'i18n', "#{install_dir}/embedded/service/mattermost/i18n"
  copy 'fonts', "#{install_dir}/embedded/service/mattermost/fonts"
  copy 'webapp', "#{install_dir}/embedded/service/mattermost/webapp"
  # Deferred block: runs at build time, by which point the constant
  # defined below this build block has been evaluated.
  block do
    File.open(license_path, 'w') { |f| f.write(GITLAB_MATTERMOST_COMPILED_LICENSE) }
  end
end
GITLAB_MATTERMOST_COMPILED_LICENSE = <<-EOH.freeze
GitLab Mattermost Compiled License
(MIT with Trademark Protection)
**Note: this license does not cover source code, for information on source code licensing see http://www.mattermost.org/license/
Copyright (c) 2015 Mattermost, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software;
The receiver of the Software will not remove or alter any product identification, trademark, copyright or other notices embedded within or appearing within or on the Software;
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
EOH
Update mattermost.rb
#
## Copyright:: Copyright (c) 2015 GitLab B.V.
## License:: Apache License, Version 2.0
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#
# Omnibus software definition: fetches the Mattermost Team Edition
# release tarball and installs its binary, assets and compiled-license
# file into the GitLab omnibus tree.
name 'mattermost'
default_version '3.9.0'
# NOTE(review): the version was bumped to 3.9.0 but this md5 is identical
# to the previous 3.7.5 release — almost certainly a stale checksum;
# verify it against the 3.9.0 tarball before merging.
source url: "https://releases.mattermost.com/#{version}/mattermost-team-#{version}-linux-amd64.tar.gz",
       md5: '94fb39daca94a506cd188afba2102146'
relative_path 'mattermost'
# Locals consumed by the license DSL calls and the build block below.
license_name = 'GITLAB-MATTERMOST-COMPILED-LICENSE.txt'
license_path = File.join(install_dir, 'embedded/service/mattermost', license_name)
license 'MIT with Trademark Protection'
license_file license_path
build do
  move 'bin/platform', "#{install_dir}/embedded/bin/mattermost"
  command "mkdir -p #{install_dir}/embedded/service/mattermost"
  copy 'templates', "#{install_dir}/embedded/service/mattermost/templates"
  copy 'i18n', "#{install_dir}/embedded/service/mattermost/i18n"
  copy 'fonts', "#{install_dir}/embedded/service/mattermost/fonts"
  copy 'webapp', "#{install_dir}/embedded/service/mattermost/webapp"
  # Deferred block: runs at build time, by which point the constant
  # defined below this build block has been evaluated.
  block do
    File.open(license_path, 'w') { |f| f.write(GITLAB_MATTERMOST_COMPILED_LICENSE) }
  end
end
GITLAB_MATTERMOST_COMPILED_LICENSE = <<-EOH.freeze
GitLab Mattermost Compiled License
(MIT with Trademark Protection)
**Note: this license does not cover source code, for information on source code licensing see http://www.mattermost.org/license/
Copyright (c) 2015 Mattermost, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software;
The receiver of the Software will not remove or alter any product identification, trademark, copyright or other notices embedded within or appearing within or on the Software;
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
EOH
|
# frozen_string_literal: true
require 'capybara_feature_test'
# End-to-end feature test of the GitHub OAuth login flow, project
# submission and profile pages. HTTP traffic is replayed from a VCR
# cassette unless GITHUB_PASSWORD is set, in which case the cassette is
# re-recorded against the live GitHub test account.
class GithubLoginTest < CapybaraFeatureTest
# rubocop:disable Metrics/BlockLength
scenario 'Has link to GitHub Login', js: true do
# Clean up database here and restart DatabaseCleaner.
# This solves a transient issue if test restarts without running
# teardown meaning the database is dirty after restart.
DatabaseCleaner.clean
DatabaseCleaner.start
configure_omniauth_mock unless ENV['GITHUB_PASSWORD']
VCR.use_cassette('github_login', allow_playback_repeats: true) do
visit '/'
assert has_content? 'CII Best Practices Badge Program'
click_on 'Get Your Badge Now!'
assert has_content? 'Log in with GitHub'
# Baseline mail count; the flow below is expected to send two mails.
num = ActionMailer::Base.deliveries.size
click_link 'Log in with GitHub'
if ENV['GITHUB_PASSWORD'] # for re-recording cassettes
fill_in 'login_field', with: 'ciitest'
fill_in 'password', with: ENV['GITHUB_PASSWORD']
click_on 'Sign in'
assert has_content? 'Test BadgeApp (not for production use)'
click_on 'Authorize application'
end
assert_equal num + 1, ActionMailer::Base.deliveries.size
assert has_content? 'Signed in!'
click_on 'Get Your Badge Now!'
wait_for_url '/projects/new?'
assert find(
"option[value='https://github.com/ciitest/test-repo']"
)
assert find(
"option[value='https://github.com/ciitest/cii-best-practices-badge']"
)
select 'ciitest/cii-best-practices-badge',
from: 'project[repo_url]'
click_on 'Submit GitHub Repository'
assert has_content? 'Thanks for adding the Project! Please fill out ' \
'the rest of the information to get the Badge.'
assert_equal num + 2, ActionMailer::Base.deliveries.size
click_on 'Account'
assert has_content? 'Profile'
click_on 'Profile'
assert has_content? 'Core Infrastructure Initiative Best Practices Badge'
if ENV['GITHUB_PASSWORD'] # revoke OAuth authorization
visit 'https://github.com/settings/applications'
click_on 'Revoke'
assert has_content? 'Are you sure you want to revoke authorization?'
click_on 'I understand, revoke access'
sleep 1
page.evaluate_script 'window.location.reload()'
assert has_content? 'No authorized applications'
end
end
end
# rubocop:enable Metrics/BlockLength
end
Increase session_helper coverage (#599)
* Increase session_helper coverage
Signed-off-by: Jason Dossett <e6f895edd39634df23774f0615e338f58a73f02c@utdallas.edu>
* Add assertion to github_login_test.rb to check for correct behavior.
Signed-off-by: Jason Dossett <e6f895edd39634df23774f0615e338f58a73f02c@utdallas.edu>
# frozen_string_literal: true
require 'capybara_feature_test'
# End-to-end feature test of the GitHub OAuth login flow, project
# submission and profile pages. HTTP traffic is replayed from a VCR
# cassette unless GITHUB_PASSWORD is set, in which case the cassette is
# re-recorded against the live GitHub test account.
class GithubLoginTest < CapybaraFeatureTest
# rubocop:disable Metrics/BlockLength
scenario 'Has link to GitHub Login', js: true do
# Clean up database here and restart DatabaseCleaner.
# This solves a transient issue if test restarts without running
# teardown meaning the database is dirty after restart.
DatabaseCleaner.clean
DatabaseCleaner.start
configure_omniauth_mock unless ENV['GITHUB_PASSWORD']
VCR.use_cassette('github_login', allow_playback_repeats: true) do
visit '/'
assert has_content? 'CII Best Practices Badge Program'
click_on 'Get Your Badge Now!'
assert has_content? 'Log in with GitHub'
# Baseline mail count; the flow below is expected to send two mails.
num = ActionMailer::Base.deliveries.size
click_link 'Log in with GitHub'
if ENV['GITHUB_PASSWORD'] # for re-recording cassettes
fill_in 'login_field', with: 'ciitest'
fill_in 'password', with: ENV['GITHUB_PASSWORD']
click_on 'Sign in'
assert has_content? 'Test BadgeApp (not for production use)'
click_on 'Authorize application'
end
assert_equal num + 1, ActionMailer::Base.deliveries.size
assert has_content? 'Signed in!'
click_on 'Get Your Badge Now!'
wait_for_url '/projects/new?'
assert find(
"option[value='https://github.com/ciitest/test-repo']"
)
assert find(
"option[value='https://github.com/ciitest/cii-best-practices-badge']"
)
select 'ciitest/cii-best-practices-badge',
from: 'project[repo_url]'
click_on 'Submit GitHub Repository'
assert has_content? 'Thanks for adding the Project! Please fill out ' \
'the rest of the information to get the Badge.'
assert_equal num + 2, ActionMailer::Base.deliveries.size
click_on 'Account'
assert has_content? 'Profile'
click_on 'Profile'
assert has_content? 'Core Infrastructure Initiative Best Practices Badge'
# Next two lines give a quick coverage increase in session_helper.rb
click_on 'Projects'
click_on 'Pathfinder OS'
# A signed-in non-owner must not see an Edit control on this project.
refute has_content? 'Edit'
if ENV['GITHUB_PASSWORD'] # revoke OAuth authorization
visit 'https://github.com/settings/applications'
click_on 'Revoke'
assert has_content? 'Are you sure you want to revoke authorization?'
click_on 'I understand, revoke access'
sleep 1
page.evaluate_script 'window.location.reload()'
assert has_content? 'No authorized applications'
end
end
end
# rubocop:enable Metrics/BlockLength
end
|
# encoding: utf-8
require 'helper'
# Smoke test for the Nanoc MultiMarkdown filter.
class Nanoc::Multimarkdown::FilterTest < Minitest::Test
# NOTE(review): expects plain input to come back upcased with no HTML
# wrapping — confirm this really matches Filter#run's contract, since a
# Markdown renderer would normally emit "<p>Hello World!</p>".
def test_filter
filter = ::Nanoc::Multimarkdown::Filter.new({})
assert_equal("HELLO WORLD!", filter.run("Hello World!"))
end
end
Add a simple test.
# encoding: utf-8
require 'helper'
# Renders a small MultiMarkdown document through the filter and checks
# the produced HTML.
class Nanoc::Multimarkdown::FilterTest < Minitest::Test
  def test_filter
    filter = ::Nanoc::Multimarkdown::Filter.new({})
    # Fixed: the original `src <<-MMD` / `out <<-HTML` were parsed as
    # calls to undefined `src`/`out` methods with heredoc arguments
    # (NoMethodError at runtime); they must be assignments.
    src = <<-MMD
# Header
Paragraph
    MMD
    out = <<-HTML
<h1 id="header">Header</h1>
<p>Paragraph</p>
    HTML
    assert_equal(out, filter.run(src))
  end
end
|
# Homebrew cask for the DCP-o-matic batch converter, v2.14.51.
cask "dcp-o-matic-batch-converter" do
  version "2.14.51"
  sha256 "62c4f131986a0be0875b7bdd0bec187b34bd7139e03f44bb634189045d292435"
  url "https://dcpomatic.com/dl.php?id=osx-10.9-batch&version=#{version}"
  name "DCP-o-matic Batch converter"
  # NOTE(review): "Digital Cinema Democratized" — the standard expansion
  # of DCP is "Digital Cinema Package"; confirm the intended wording.
  desc "Convert video, audio and subtitles into DCP (Digital Cinema Democratized)"
  homepage "https://dcpomatic.com/"
  livecheck do
    url "https://dcpomatic.com/download"
    strategy :page_match
    regex(/stable\s*release:\s*(\d+(?:\.\d+)*)/i)
  end
  app "DCP-o-matic #{version.major} Batch converter.app"
end
Update dcp-o-matic-batch-converter from 2.14.51 to 2.14.54 (#108598)
# Homebrew cask for the DCP-o-matic batch converter, v2.14.54.
cask "dcp-o-matic-batch-converter" do
  version "2.14.54"
  sha256 "a447ca91ce1de85f161eb33666f7699f31be5ad9bace6db1df8f7afa0dbd16b3"
  url "https://dcpomatic.com/dl.php?id=osx-10.9-batch&version=#{version}"
  name "DCP-o-matic Batch converter"
  # NOTE(review): "Digital Cinema Democratized" — the standard expansion
  # of DCP is "Digital Cinema Package"; confirm the intended wording.
  desc "Convert video, audio and subtitles into DCP (Digital Cinema Democratized)"
  homepage "https://dcpomatic.com/"
  livecheck do
    url "https://dcpomatic.com/download"
    strategy :page_match
    regex(/stable\s*release:\s*(\d+(?:\.\d+)*)/i)
  end
  app "DCP-o-matic #{version.major} Batch converter.app"
end
|
# Homebrew cask for the Adobe Flash Player NPAPI content debugger.
cask 'flash-player-debugger-npapi' do
  version '32.0.0.314'
  sha256 'b00214d60e67cce1045ce8e7eaeebe67271fb102d6c5a9ccf0294dadfdf56083'
  url "https://fpdownload.adobe.com/pub/flashplayer/updaters/#{version.major}/flashplayer_#{version.major}_plugin_debug.dmg"
  # Adobe's update feed encodes versions with commas, hence tr('.', ',').
  appcast 'https://fpdownload.adobe.com/pub/flashplayer/update/current/xml/version_en_mac_pl.xml',
          configuration: version.tr('.', ',')
  name 'Adobe Flash Player NPAPI (plugin for Safari and Firefox) content debugger'
  homepage 'https://www.adobe.com/support/flashplayer/debug_downloads.html'
  pkg 'Install Adobe Flash Player Debugger.app/Contents/Resources/Adobe Flash Player Debugger.pkg'
  uninstall pkgutil: 'com.adobe.pkg.FlashPlayer',
            launchctl: 'com.adobe.fpsaud',
            delete: [
              '/Library/Application Support/Adobe/Flash Player Install Manager',
              '/Library/Internet Plug-Ins/Flash Player.plugin',
            ]
  zap trash: [
        '/Library/Internet Plug-Ins/flashplayer.xpt',
        '~/Library/Caches/Adobe/Flash Player',
        '~/Library/Logs/FlashPlayerInstallManager.log',
        '~/Library/Preferences/Macromedia/Flash Player',
        '~/Library/Saved Application State/com.adobe.flashplayer.installmanager.savedState',
      ]
end
Update flash-player-debugger-npapi from 32.0.0.314 to 32.0.0.321 (#75961)
# Homebrew cask for the Adobe Flash Player NPAPI content debugger.
cask 'flash-player-debugger-npapi' do
  version '32.0.0.321'
  sha256 'c7bb0dc6c0051098cf89f512c7339b2a744e33c34fd89a32c7483b1f3f5a537a'
  url "https://fpdownload.adobe.com/pub/flashplayer/updaters/#{version.major}/flashplayer_#{version.major}_plugin_debug.dmg"
  # Adobe's update feed encodes versions with commas, hence tr('.', ',').
  appcast 'https://fpdownload.adobe.com/pub/flashplayer/update/current/xml/version_en_mac_pl.xml',
          configuration: version.tr('.', ',')
  name 'Adobe Flash Player NPAPI (plugin for Safari and Firefox) content debugger'
  homepage 'https://www.adobe.com/support/flashplayer/debug_downloads.html'
  pkg 'Install Adobe Flash Player Debugger.app/Contents/Resources/Adobe Flash Player Debugger.pkg'
  uninstall pkgutil: 'com.adobe.pkg.FlashPlayer',
            launchctl: 'com.adobe.fpsaud',
            delete: [
              '/Library/Application Support/Adobe/Flash Player Install Manager',
              '/Library/Internet Plug-Ins/Flash Player.plugin',
            ]
  zap trash: [
        '/Library/Internet Plug-Ins/flashplayer.xpt',
        '~/Library/Caches/Adobe/Flash Player',
        '~/Library/Logs/FlashPlayerInstallManager.log',
        '~/Library/Preferences/Macromedia/Flash Player',
        '~/Library/Saved Application State/com.adobe.flashplayer.installmanager.savedState',
      ]
end
|
# Homebrew cask installing the Glow Sans TC Condensed font family.
cask 'font-glow-sans-tc-condensed' do
  version '0.9'
  sha256 '3abb8c20dc2e8a531b02b01dd02fb3e6654f0f677dc529974d78d73f4a160f43'
  url "https://github.com/welai/glow-sans/releases/download/v#{version}/GlowSansTC-Condensed-v#{version}.zip"
  appcast 'https://github.com/welai/glow-sans/releases.atom'
  name 'Glow Sans TC Condensed'
  homepage 'https://github.com/welai/glow-sans'
  font 'GlowSansTC-Condensed-Bold.otf'
  font 'GlowSansTC-Condensed-Book.otf'
  font 'GlowSansTC-Condensed-ExtraBold.otf'
  font 'GlowSansTC-Condensed-ExtraLight.otf'
  font 'GlowSansTC-Condensed-Heavy.otf'
  font 'GlowSansTC-Condensed-Light.otf'
  font 'GlowSansTC-Condensed-Medium.otf'
  font 'GlowSansTC-Condensed-Regular.otf'
  font 'GlowSansTC-Condensed-Thin.otf'
end
font-glow-sans-tc-condensed.rb: fix for new style
# Homebrew cask installing the Glow Sans TC Condensed font family.
cask "font-glow-sans-tc-condensed" do
  version "0.9"
  sha256 "3abb8c20dc2e8a531b02b01dd02fb3e6654f0f677dc529974d78d73f4a160f43"
  url "https://github.com/welai/glow-sans/releases/download/v#{version}/GlowSansTC-Condensed-v#{version}.zip"
  appcast "https://github.com/welai/glow-sans/releases.atom"
  name "Glow Sans TC Condensed"
  homepage "https://github.com/welai/glow-sans"
  font "GlowSansTC-Condensed-Bold.otf"
  font "GlowSansTC-Condensed-Book.otf"
  font "GlowSansTC-Condensed-ExtraBold.otf"
  font "GlowSansTC-Condensed-ExtraLight.otf"
  font "GlowSansTC-Condensed-Heavy.otf"
  font "GlowSansTC-Condensed-Light.otf"
  font "GlowSansTC-Condensed-Medium.otf"
  font "GlowSansTC-Condensed-Regular.otf"
  font "GlowSansTC-Condensed-Thin.otf"
end
|
adds meslo font for powerline
# Legacy class-based cask DSL: installs the Powerline-patched variants of
# the Meslo LG font family.
class FontMesloLgForPowerline < Cask
  # Fetched from the repository subdirectory via GitHub's Subversion
  # bridge; the certificate must be trusted explicitly for svn checkouts.
  url 'https://github.com/Lokaltog/powerline-fonts/trunk/Meslo',
      :using => :svn,
      :trust_cert => true
  homepage 'https://github.com/Lokaltog/powerline-fonts/tree/master/Meslo'
  # Tracks the repository head, so there is no fixed checksum to verify.
  version 'latest'
  sha256 :no_check
  font 'Meslo LG L DZ Regular for Powerline.otf'
  font 'Meslo LG L Regular for Powerline.otf'
  font 'Meslo LG M DZ Regular for Powerline.otf'
  font 'Meslo LG M Regular for Powerline.otf'
  font 'Meslo LG S DZ Regular for Powerline.otf'
  font 'Meslo LG S Regular for Powerline.otf'
end
|
# Daily (development) build of OpenShot. The version string has the form
# "<release>,<build-id>"; the two halves are split via version.csv below.
cask "openshot-video-editor-daily" do
  version "2.6.1,9975-b890417a-46255e46"
  sha256 "00cc2af5a32a725d1a7373ae3c026f5792f13b83fad13a0483128f309afc1cd3"

  url "https://github.com/OpenShot/openshot-qt/releases/download/daily/OpenShot-v#{version.csv.first}-dev-daily-#{version.csv.second}-x86_64.dmg",
      verified: "github.com/OpenShot/openshot-qt/"
  name "OpenShot Video Editor (Daily Build)"
  desc "Cross-platform video editor"
  homepage "https://openshot.org/"

  # Scrape the download page and rebuild the two-part version string
  # from the release number and the daily build identifier.
  livecheck do
    url "https://www.openshot.org/download/"
    regex(/OpenShot[._-]v?(\d+(?:\.\d+)+)[._-]dev[._-]daily[._-](.*)[._-]x86[._-]64\.dmg"/i)
    strategy :page_match do |page, regex|
      page.scan(regex).map { |match| "#{match[0]},#{match[1]}" }
    end
  end

  # The daily build cannot be installed alongside the stable cask.
  conflicts_with cask: "openshot-video-editor"
  depends_on macos: ">= :catalina"

  app "OpenShot Video Editor.app"

  zap trash: [
    "~/.openshot_qt",
    "~/Library/Application Support/openshot",
    "~/Library/Preferences/openshot-qt.plist",
  ]
end
openshot-video-editor-daily 2.6.1,10042-1186c24d-46255e46
Update openshot-video-editor-daily from 2.6.1,9975-b890417a-46255e46 to 2.6.1,10042-1186c24d-46255e46
Closes #14959.
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Daily (development) build of OpenShot. Version format is
# "<release>,<build-id>", split via version.csv in the url stanza.
cask "openshot-video-editor-daily" do
  version "2.6.1,10042-1186c24d-46255e46"
  sha256 "e658aef19574b1ac474aaf4eca4bdf3d7703afa2b35d513c996bc4d182ec9817"

  url "https://github.com/OpenShot/openshot-qt/releases/download/daily/OpenShot-v#{version.csv.first}-dev-daily-#{version.csv.second}-x86_64.dmg",
      verified: "github.com/OpenShot/openshot-qt/"
  name "OpenShot Video Editor (Daily Build)"
  desc "Cross-platform video editor"
  homepage "https://openshot.org/"

  # Rebuild the comma-joined version from the scraped download page.
  livecheck do
    url "https://www.openshot.org/download/"
    regex(/OpenShot[._-]v?(\d+(?:\.\d+)+)[._-]dev[._-]daily[._-](.*)[._-]x86[._-]64\.dmg"/i)
    strategy :page_match do |page, regex|
      page.scan(regex).map { |match| "#{match[0]},#{match[1]}" }
    end
  end

  # Mutually exclusive with the stable cask.
  conflicts_with cask: "openshot-video-editor"
  depends_on macos: ">= :catalina"

  app "OpenShot Video Editor.app"

  zap trash: [
    "~/.openshot_qt",
    "~/Library/Application Support/openshot",
    "~/Library/Preferences/openshot-qt.plist",
  ]
end
|
# Insiders (pre-release) channel of Visual Studio Code. Version format is
# "<semver>,<commit-hash>"; the hash selects the download artifact.
cask "visual-studio-code-insiders" do
  version "1.58.0,d54f7e1f6c1290fe27e2397eca509d0e0728272e"

  # Separate artifacts (and checksums) per CPU architecture.
  if Hardware::CPU.intel?
    sha256 "4212d2972c92ad6e3a17c1f6733c073992bf8b20672422b8d6176e95e8cd2312"

    url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin.zip",
        verified: "az764295.vo.msecnd.net/insider/"
  else
    sha256 "db786673955a27ec35911ff1c6b6e8b3b7c9393dbe89ed7b91261dd4ffee99c6"

    url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin-arm64.zip",
        verified: "az764295.vo.msecnd.net/insider/"
  end

  name "Microsoft Visual Studio Code"
  name "Visual Studio Code Insiders"
  desc "Code editor"
  homepage "https://code.visualstudio.com/insiders"

  # Query the update API and rejoin release number and commit hash into
  # the comma-separated cask version.
  livecheck do
    url "https://update.code.visualstudio.com/api/update/darwin-universal/insider/VERSION"
    strategy :page_match do |page|
      name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
      version = page[/"version":"(\w+)/i, 1]
      "#{name},#{version}"
    end
  end

  # The app updates itself in place.
  auto_updates true

  app "Visual Studio Code - Insiders.app"
  # Expose the bundled CLI launcher as `code-insiders`.
  binary "#{appdir}/Visual Studio Code - Insiders.app/Contents/Resources/app/bin/code", target: "code-insiders"

  zap trash: [
    "~/Library/Application Support/Code - Insiders",
    "~/Library/Caches/Code - Insiders",
    "~/Library/Caches/com.microsoft.VSCodeInsiders",
    "~/Library/Caches/com.microsoft.VSCodeInsiders.ShipIt",
    "~/Library/Preferences/com.microsoft.VSCodeInsiders.helper.plist",
    "~/Library/Preferences/com.microsoft.VSCodeInsiders.plist",
  ]
end
Update visual-studio-code-insiders (#11246)
from 1.58.0,d54f7e1f6c1290fe27e2397eca509d0e0728272e
to 1.58.0,9744231fc12f1aed21089180b3f0394d694bd2a2
# Insiders (pre-release) channel of Visual Studio Code. Version format is
# "<semver>,<commit-hash>"; the hash selects the download artifact.
cask "visual-studio-code-insiders" do
  version "1.58.0,9744231fc12f1aed21089180b3f0394d694bd2a2"

  # Separate artifacts (and checksums) per CPU architecture.
  if Hardware::CPU.intel?
    sha256 "684d5e9c7b6d01270a443417fcad08c662aca27eb51af515832587d0d2e20dc9"

    url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin.zip",
        verified: "az764295.vo.msecnd.net/insider/"
  else
    sha256 "0d0f16669a8fd6028654ee5c46513e64f48a5b5a603fc18eaace9248c9468423"

    url "https://az764295.vo.msecnd.net/insider/#{version.after_comma}/VSCode-darwin-arm64.zip",
        verified: "az764295.vo.msecnd.net/insider/"
  end

  name "Microsoft Visual Studio Code"
  name "Visual Studio Code Insiders"
  desc "Code editor"
  homepage "https://code.visualstudio.com/insiders"

  # Query the update API and rejoin release number and commit hash.
  livecheck do
    url "https://update.code.visualstudio.com/api/update/darwin-universal/insider/VERSION"
    strategy :page_match do |page|
      name = page[/"name":"(\d+(?:\.\d+)*)/i, 1]
      version = page[/"version":"(\w+)/i, 1]
      "#{name},#{version}"
    end
  end

  # The app updates itself in place.
  auto_updates true

  app "Visual Studio Code - Insiders.app"
  # Expose the bundled CLI launcher as `code-insiders`.
  binary "#{appdir}/Visual Studio Code - Insiders.app/Contents/Resources/app/bin/code", target: "code-insiders"

  zap trash: [
    "~/Library/Application Support/Code - Insiders",
    "~/Library/Caches/Code - Insiders",
    "~/Library/Caches/com.microsoft.VSCodeInsiders",
    "~/Library/Caches/com.microsoft.VSCodeInsiders.ShipIt",
    "~/Library/Preferences/com.microsoft.VSCodeInsiders.helper.plist",
    "~/Library/Preferences/com.microsoft.VSCodeInsiders.plist",
  ]
end
|
require 'test_helper'
# Integration test for bin/downgrade: generates silent fixture files,
# runs the downgrade script as an external process and verifies which
# audio files and database records remain.
class DowngradeTest < ActiveSupport::TestCase
  # The script runs in a separate process and must see committed data,
  # so the test cannot be wrapped in a rolled-back transaction.
  self.use_transactional_tests = false

  test 'downgrades and erases all pending files' do
    FileUtils.rm_rf('tmp/archive')
    AudioGenerator.new.create_silent_files
    assert_equal 3, Dir.glob("tmp/archive/2013/04/10/*.mp3").size
    assert_equal 4, Dir.glob("tmp/archive/2013/05/20/*.mp3").size
    # Running the script must remove exactly three AudioFile records.
    assert_difference('AudioFile.count', -3) do
      system Rails.root.join('bin', 'downgrade').to_s
    end
    assert_equal 3, Dir.glob("tmp/archive/2013/04/10/*.mp3").size
    assert_equal 1, Dir.glob("tmp/archive/2013/05/20/*.mp3").size
    info = broadcasts(:info_april)
    assert !info.audio_files.where(bitrate: 320).exists?
    file = info.audio_files.where(bitrate: 224).first
    assert file
    assert_equal 2, file.channels
    assert File.exist?(file.absolute_path)
    # BUG FIX: `assert 224, expr` always passed — 224 is truthy and the
    # second argument is only the failure message. Use assert_equal.
    assert_equal 224, AudioProcessor.new(file.absolute_path).bitrate
    g9s = broadcasts(:g9s_mai)
    assert !g9s.audio_files.where(bitrate: 192).exists?
    assert_equal 1, g9s.audio_files.where(bitrate: 128).count
    klangbecken = broadcasts(:klangbecken_mai1)
    assert_equal 0, klangbecken.audio_files.count
  end
end
Do not run tests that require ffmpeg on Travis
require 'test_helper'
# Integration test for bin/downgrade: generates silent fixture files,
# runs the downgrade script as an external process and verifies which
# audio files and database records remain.
class DowngradeTest < ActiveSupport::TestCase
  # The script runs in a separate process and must see committed data,
  # so the test cannot be wrapped in a rolled-back transaction.
  self.use_transactional_tests = false

  # Travis has ffmpeg 0.8.17, which reports "Unknown input format: 'lavfi'"
  unless ENV['TRAVIS']
    test 'downgrades and erases all pending files' do
      FileUtils.rm_rf('tmp/archive')
      AudioGenerator.new.create_silent_files
      assert_equal 3, Dir.glob("tmp/archive/2013/04/10/*.mp3").size
      assert_equal 4, Dir.glob("tmp/archive/2013/05/20/*.mp3").size
      # Running the script must remove exactly three AudioFile records.
      assert_difference('AudioFile.count', -3) do
        system Rails.root.join('bin', 'downgrade').to_s
      end
      assert_equal 3, Dir.glob("tmp/archive/2013/04/10/*.mp3").size
      assert_equal 1, Dir.glob("tmp/archive/2013/05/20/*.mp3").size
      info = broadcasts(:info_april)
      assert !info.audio_files.where(bitrate: 320).exists?
      file = info.audio_files.where(bitrate: 224).first
      assert file
      assert_equal 2, file.channels
      assert File.exist?(file.absolute_path)
      # BUG FIX: `assert 224, expr` always passed — 224 is truthy and the
      # second argument is only the failure message. Use assert_equal.
      assert_equal 224, AudioProcessor.new(file.absolute_path).bitrate
      g9s = broadcasts(:g9s_mai)
      assert !g9s.audio_files.where(bitrate: 192).exists?
      assert_equal 1, g9s.audio_files.where(bitrate: 128).count
      klangbecken = broadcasts(:klangbecken_mai1)
      assert_equal 0, klangbecken.audio_files.count
    end
  end
end
|
require "test_helper"
require "json_schema"
# Spec for JsonSchema::Validator: mutates a scaffolded schema/data pair
# per example and checks the produced validation error messages.
describe JsonSchema::Validator do
  it "can find data valid" do
    assert validate
  end

  it "validates type" do
    @data_sample = 4
    refute validate
    assert_includes error_messages,
      %{Expected data to be of type "object"; value was: 4.}
  end

  it "validates maxItems" do
    data_sample["flags"] = (0...11).to_a
    refute validate
    assert_includes error_messages,
      %{Expected array to have no more than 10 item(s), had 11 item(s).}
  end

  it "validates minItems" do
    data_sample["flags"] = []
    refute validate
    assert_includes error_messages,
      %{Expected array to have at least 1 item(s), had 0 item(s).}
  end

  it "validates uniqueItems" do
    data_sample["flags"] = [1, 1]
    refute validate
    assert_includes error_messages,
      %{Expected array items to be unique, but duplicate items were found.}
  end

  it "validates maximum for an integer with exclusiveMaximum false" do
    pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
      "exclusiveMaximum" => false,
      "maximum" => 10
    )
    data_sample["id"] = 11
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10 (exclusive: false), value was: 11.}
  end

  it "validates maximum for an integer with exclusiveMaximum true" do
    pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
      "exclusiveMaximum" => true,
      "maximum" => 10
    )
    data_sample["id"] = 10
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10 (exclusive: true), value was: 10.}
  end

  it "validates maximum for a number with exclusiveMaximum false" do
    pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
      "exclusiveMaximum" => false,
      "maximum" => 10.0
    )
    data_sample["cost"] = 10.1
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10.0 (exclusive: false), value was: 10.1.}
  end

  it "validates maximum for a number with exclusiveMaximum true" do
    pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
      "exclusiveMaximum" => true,
      "maximum" => 10.0
    )
    data_sample["cost"] = 10.0
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10.0 (exclusive: true), value was: 10.0.}
  end

  # NOTE(review): the four examples below are empty placeholders for the
  # minimum/exclusiveMinimum cases; their descriptions also say
  # "exclusiveMaximum" although they are the minimum counterparts.
  it "validates minimum for an integer with exclusiveMaximum false" do
  end

  it "validates minimum for an integer with exclusiveMaximum true" do
  end

  it "validates minimum for a number with exclusiveMaximum false" do
  end

  it "validates minimum for a number with exclusiveMaximum true" do
  end

  # Lazily builds the data payload under test (memoised per example).
  def data_sample
    @data_sample ||= DataScaffold.data_sample
  end

  # Flattens the validator's error objects into plain message strings.
  def error_messages
    @validator.errors.map { |e| e.message }
  end

  # Resolves a JSON Pointer path within the given document.
  def pointer(data, path)
    JsonPointer.evaluate(data, path)
  end

  def schema_sample
    @schema_sample ||= DataScaffold.schema_sample
  end

  # Parses the schema, builds a validator and runs it against data_sample.
  def validate
    @schema = JsonSchema.parse!(schema_sample).definitions["app"]
    @validator = JsonSchema::Validator.new(@schema)
    @validator.validate(data_sample)
  end
end
Test the rest of the min/max functions
require "test_helper"
require "json_schema"
# Spec for JsonSchema::Validator: mutates a scaffolded schema/data pair
# per example and checks the produced validation error messages.
describe JsonSchema::Validator do
  it "can find data valid" do
    assert validate
  end

  it "validates type" do
    @data_sample = 4
    refute validate
    assert_includes error_messages,
      %{Expected data to be of type "object"; value was: 4.}
  end

  it "validates maxItems" do
    data_sample["flags"] = (0...11).to_a
    refute validate
    assert_includes error_messages,
      %{Expected array to have no more than 10 item(s), had 11 item(s).}
  end

  it "validates minItems" do
    data_sample["flags"] = []
    refute validate
    assert_includes error_messages,
      %{Expected array to have at least 1 item(s), had 0 item(s).}
  end

  it "validates uniqueItems" do
    data_sample["flags"] = [1, 1]
    refute validate
    assert_includes error_messages,
      %{Expected array items to be unique, but duplicate items were found.}
  end

  # maximum/exclusiveMaximum: exclusive false rejects values above the
  # bound; exclusive true also rejects the bound itself.
  it "validates maximum for an integer with exclusiveMaximum false" do
    pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
      "exclusiveMaximum" => false,
      "maximum" => 10
    )
    data_sample["id"] = 11
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10 (exclusive: false), value was: 11.}
  end

  it "validates maximum for an integer with exclusiveMaximum true" do
    pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
      "exclusiveMaximum" => true,
      "maximum" => 10
    )
    data_sample["id"] = 10
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10 (exclusive: true), value was: 10.}
  end

  it "validates maximum for a number with exclusiveMaximum false" do
    pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
      "exclusiveMaximum" => false,
      "maximum" => 10.0
    )
    data_sample["cost"] = 10.1
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10.0 (exclusive: false), value was: 10.1.}
  end

  it "validates maximum for a number with exclusiveMaximum true" do
    pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
      "exclusiveMaximum" => true,
      "maximum" => 10.0
    )
    data_sample["cost"] = 10.0
    refute validate
    assert_includes error_messages,
      %{Expected data to be smaller than maximum 10.0 (exclusive: true), value was: 10.0.}
  end
it "validates minimum for an integer with exclusiveMaximum false" do
pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
"exclusiveMinimum" => false,
"minimum" => 1
)
data_sample["id"] = 0
refute validate
assert_includes error_messages,
%{Expected data to be larger than minimum 1 (exclusive: false), value was: 0.}
end
it "validates minimum for an integer with exclusiveMaximum true" do
pointer(schema_sample, "#/definitions/app/definitions/id").merge!(
"exclusiveMinimum" => true,
"minimum" => 1
)
data_sample["id"] = 1
refute validate
assert_includes error_messages,
%{Expected data to be larger than minimum 1 (exclusive: true), value was: 1.}
end
it "validates minimum for a number with exclusiveMaximum false" do
pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
"exclusiveMinimum" => false,
"minimum" => 0.0
)
data_sample["cost"] = -0.01
refute validate
assert_includes error_messages,
%{Expected data to be larger than minimum 0.0 (exclusive: false), value was: -0.01.}
end
it "validates minimum for a number with exclusiveMaximum true" do
pointer(schema_sample, "#/definitions/app/definitions/cost").merge!(
"exclusiveMinimum" => true,
"minimum" => 0.0
)
data_sample["cost"] = 0.0
refute validate
assert_includes error_messages,
%{Expected data to be larger than minimum 0.0 (exclusive: true), value was: 0.0.}
end
  # Lazily builds the data payload under test (memoised per example).
  def data_sample
    @data_sample ||= DataScaffold.data_sample
  end

  # Flattens the validator's error objects into plain message strings.
  def error_messages
    @validator.errors.map { |e| e.message }
  end

  # Resolves a JSON Pointer path within the given document.
  def pointer(data, path)
    JsonPointer.evaluate(data, path)
  end

  def schema_sample
    @schema_sample ||= DataScaffold.schema_sample
  end

  # Parses the schema, builds a validator and runs it against data_sample.
  def validate
    @schema = JsonSchema.parse!(schema_sample).definitions["app"]
    @validator = JsonSchema::Validator.new(@schema)
    @validator.validate(data_sample)
  end
end
|
Create test_front_matter_defaults.rb
require 'helper'
# Exercises Bunto's front-matter "defaults" configuration: each context
# builds a site with a different defaults scope (path and/or type) and
# asserts which pages/posts receive the default key.
class TestFrontMatterDefaults < BuntoUnitTest
  context "A site with full front matter defaults" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
            "path" => "contacts",
            "type" => "page"
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages.find { |page| page.relative_path == "/contacts/bar.html" }
      @not_affected = @site.pages.find { |page| page.relative_path == "about.html" }
    end

    should "affect only the specified path and type" do
      assert_equal @affected.data["key"], "val"
      assert_equal @not_affected.data["key"], nil
    end
  end

  context "A site with front matter type pages and an extension" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
            "path" => "index.html"
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages.find { |page| page.relative_path == "index.html" }
      @not_affected = @site.pages.find { |page| page.relative_path == "about.html" }
    end

    should "affect only the specified path" do
      assert_equal @affected.data["key"], "val"
      assert_equal @not_affected.data["key"], nil
    end
  end

  # A path-only scope applies to every document type under that path.
  context "A site with front matter defaults with no type" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
            "path" => "win"
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.posts.docs.find { |page| page.relative_path =~ /win\// }
      @not_affected = @site.pages.find { |page| page.relative_path == "about.html" }
    end

    should "affect only the specified path and all types" do
      assert_equal @affected.data["key"], "val"
      assert_equal @not_affected.data["key"], nil
    end
  end

  # "page" (singular) is the deprecated spelling of the "pages" type.
  context "A site with front matter defaults with no path and a deprecated type" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
            "type" => "page"
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages
      @not_affected = @site.posts.docs
    end

    should "affect only the specified type and all paths" do
      assert_equal @affected.reject { |page| page.data["key"] == "val" }, []
      assert_equal @not_affected.reject { |page| page.data["key"] == "val" }, @not_affected
    end
  end

  context "A site with front matter defaults with no path" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
            "type" => "pages"
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages
      @not_affected = @site.posts.docs
    end

    should "affect only the specified type and all paths" do
      assert_equal @affected.reject { |page| page.data["key"] == "val" }, []
      assert_equal @not_affected.reject { |page| page.data["key"] == "val" }, @not_affected
    end
  end

  # An empty scope matches everything.
  context "A site with front matter defaults with no path or type" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "scope"  => {
          },
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages
      @not_affected = @site.posts
    end

    should "affect all types and paths" do
      assert_equal @affected.reject { |page| page.data["key"] == "val" }, []
      assert_equal @not_affected.reject { |page| page.data["key"] == "val" }, []
    end
  end

  # Omitting "scope" entirely behaves like an empty scope.
  context "A site with front matter defaults with no scope" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "values" => {
            "key" => "val"
          }
        }]
      }))
      @site.process
      @affected = @site.pages
      @not_affected = @site.posts
    end

    should "affect all types and paths" do
      assert_equal @affected.reject { |page| page.data["key"] == "val" }, []
      assert_equal @not_affected.reject { |page| page.data["key"] == "val" }, []
    end
  end

  # Default values given as strings that look like dates must be parsed
  # into Time objects rather than raising.
  context "A site with front matter defaults with quoted date" do
    setup do
      @site = Site.new(Bunto.configuration({
        "source"      => source_dir,
        "destination" => dest_dir,
        "defaults"    => [{
          "values" => {
            "date" => "2015-01-01 00:00:01"
          }
        }]
      }))
    end

    should "not raise error" do
      @site.process
    end

    should "parse date" do
      @site.process
      date = Time.parse("2015-01-01 00:00:01")
      assert @site.pages.find { |page| page.data["date"] == date }
      assert @site.posts.find { |page| page.data["date"] == date }
    end
  end
end
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Kouhei Sutou <kou@clear-code.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
class HTTPSchemaTest < Test::Unit::TestCase
module Utils
include GroongaHTTPTestUtils
def setup
setup_server
end
def teardown
teardown_server
end
private
def create_bookmarks_table
response = get(command_path(:table_create,
:name => "bookmarks",
:flags => Table::PAT_KEY,
:key_type => "ShortText",
:value_type => "Object",
:default_tokenizer => ""))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
@bookmarks_table_id = object_registered
end
def create_bookmark_title_column
response = get(command_path(:column_create,
:table => "bookmarks",
:name => "title",
:flags => Column::SCALAR,
:type => "ShortText"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
@bookmarks_title_column_id = object_registered
end
def assert_table_list(expected)
response = get(command_path(:table_list))
expected = expected.collect do |values|
name, flags, domain = values
[nil, name, nil, flags, domain]
end
assert_response([
["id", "name", "path", "flags", "domain"],
*expected
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, flags, domain = values
[nil, name, nil, flags, domain]
end
end
end
end
include Utils
def test_table_list_empty
response = get(command_path(:table_list))
assert_response([["id", "name", "path", "flags", "domain"]],
response,
:content_type => "application/json")
end
def test_table_list_exist
create_bookmarks_table
response = get(command_path(:table_list))
normalized_path = "/path/to/table"
assert_response([
["id", "name", "path", "flags", "domain"],
[@bookmarks_table_id,
"bookmarks",
normalized_path,
Flag::PERSISTENT | Table::PAT_KEY | Key::VAR_SIZE,
Type::SHORT_TEXT],
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, flags, domain = values
path = normalized_path if path
[id, name, path, flags, domain]
end
end
end
def test_table_list_with_invalid_output_type
response = get(command_path(:table_list,
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_empty
create_bookmarks_table
response = get(command_path(:column_list,
:table => "bookmarks"))
assert_response([["id", "name", "path", "type", "flags", "domain"]],
response,
:content_type => "application/json")
end
def test_column_list_exist
create_bookmarks_table
create_bookmark_title_column
response = get(command_path(:column_list,
:table => "bookmarks"))
assert_response([
["id", "name", "path", "type", "flags", "domain"],
[@bookmarks_title_column_id,
"title",
nil,
"var",
Column::SCALAR | Flag::PERSISTENT | Key::VAR_SIZE,
@bookmarks_table_id]
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, type, flags, domain = values
[id, name, nil, type, flags, domain]
end
end
end
def test_column_list_nonexistent
response = get(command_path(:column_list,
:table => "nonexistent"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, :message]],
response,
:content_type => "application/json")
end
end
def test_column_list_without_table
response = get(command_path(:column_list))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_with_invalid_output_type
create_bookmarks_table
response = get(command_path(:column_list,
:table => "bookmarks",
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_with_invalid_output_type_without_table
response = get(command_path(:column_list,
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_table_create_without_name
response = get(command_path(:table_create))
assert_response([[Result::UNKNOWN_ERROR,
"should not create anonymous table"]],
response,
:content_type => "application/json")
end
def test_table_create_with_dot_name
response = get(command_path(:table_create, :name => "mori.daijiro"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
def test_table_create_with_under_score_started_name
response = get(command_path(:table_create, :name => "_mori"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
def test_table_create_with_under_score_name
response = get(command_path(:table_create, :name => "mori_daijiro"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
end
def test_table_create_with_colon_name
response = get(command_path(:table_create, :name => "daijiro:mori"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
def test_table_create_with_duplicated_name
response = get(command_path(:table_create, :name => "table_create"))
assert_response([[Result::INVALID_ARGUMENT,
"already used name was assigned"]],
response,
:content_type => "application/json")
end
def test_table_create_with_duplicated_name
response = get(command_path(:table_create, :name => "table_create"))
assert_response([[Result::INVALID_ARGUMENT,
"already used name was assigned"]],
response,
:content_type => "application/json")
end
def test_full_text_search
create_bookmarks_table
create_bookmark_title_column
response = get(command_path(:table_create,
:name => "terms",
:flags => Table::PAT_KEY | Key::NORMALIZE,
:key_type => "ShortText",
:default_tokenizer => "TokenBigram"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
response = get(command_path(:column_create,
:table => "terms",
:name => "bookmarks-title",
:flags => Column::INDEX | Flag::WITH_POSITION,
:type => "bookmarks",
:source => "title"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
groonga_title = "groonga - an open-source fulltext search engine " +
"and column store."
senna_title = "Senna: An Embeddable Fulltext Search Engine"
load("bookmarks",
[{"_key" => "groonga", "title" => groonga_title},
{"_key" => "senna", "title" => senna_title}])
assert_select(["_key", "title"],
[["groonga", groonga_title]],
:table => "bookmarks",
:output_columns => "_key title",
:query => "title:@column")
end
class HashCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create, :name => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Key::NORMALIZE))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_normalized_string_key
response = get(command_path(:table_create,
:name => "users",
:flags => Key::NORMALIZE,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY |
Key::NORMALIZE | Key::VAR_SIZE,
Type::SHORT_TEXT]])
end
def test_view_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:key_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::HASH_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_long_size_key
response = get(command_path(:table_create,
:name => "users",
:key_type => "Text"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Key::SIS,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "SIS is invalid flag for hash"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_nonexistent_key_type
response = get(command_path(:table_create,
:name => "users",
:key_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_invalid_key_type
response = get(command_path(:table_create,
:name => "users",
:key_type => "table_create"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class PatriciaTrieCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::NORMALIZE))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_normalized_string_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::NORMALIZE,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY |
Key::NORMALIZE | Key::VAR_SIZE,
Type::SHORT_TEXT]])
end
def test_view_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:flags => Table::PAT_KEY,
:key_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::PAT_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_long_size_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "Text"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::SIS,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY |
Key::VAR_SIZE | Key::SIS,
Type::SHORT_TEXT]])
end
def test_nonexistent_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_invalid_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "table_create"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class ArrayCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY | Key::NORMALIZE))
assert_response([[Result::UNKNOWN_ERROR,
"key normalization isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "key isn't supported"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY | Key::SIS))
assert_response([[Result::UNKNOWN_ERROR, "SIS key isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY,
Type::INT32]])
end
def test_view_value
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:flags => Table::NO_KEY,
:value_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::NO_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "value type doesn't exist"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
# Tests for table_create with TABLE_VIEW. A view carries neither key
# nor value, so every key/value-related option must be rejected and
# leave the schema unchanged.
class ViewCreateTest < Test::Unit::TestCase
include Utils
# Creating a bare view succeeds and shows up in table_list with a
# Void domain.
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
# KEY_NORMALIZE is rejected for views; no table is created.
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW | Key::NORMALIZE))
assert_response([[Result::UNKNOWN_ERROR,
"key normalization isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
# Specifying :key_type for a view is rejected.
def test_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "key isn't supported"]],
response,
:content_type => "application/json")
assert_table_list([])
end
# KEY_SIS is rejected for views as well.
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW | Key::SIS))
assert_response([[Result::UNKNOWN_ERROR, "SIS key isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
# Views carry no value either, so :value_type is rejected.
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW,
:value_type => "Int32"))
assert_response([[Result::UNKNOWN_ERROR, "value isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
# Tests for passing table_create :flags as symbolic names
# (e.g. "KEY_NORMALIZE") instead of numeric flag values.
class SymbolFlagsTest < Test::Unit::TestCase
  include Utils

  # A single symbolic flag is applied on top of the default table
  # type (hash).
  def test_single_symbol
    response = get(command_path(:table_create,
                                :name => "users",
                                :flags => "KEY_NORMALIZE"))
    assert_response("true",
                    response,
                    :content_type => "application/json")
    assert_table_list([["users",
                        Flag::PERSISTENT | Table::HASH_KEY | Key::NORMALIZE,
                        Type::VOID]])
  end

  # Multiple symbols can be combined with "|".
  def test_combined_symbols
    response = get(command_path(:table_create,
                                :name => "users",
                                :flags => "TABLE_NO_KEY|KEY_NORMALIZE"))
    assert_response("true",
                    response,
                    :content_type => "application/json")
    assert_table_list([["users",
                        Flag::PERSISTENT | Table::NO_KEY | Key::NORMALIZE,
                        Type::VOID]])
  end

  # Whitespace around the symbols and the "|" separator is ignored.
  def test_combined_symbols_with_whitespaces
    response = get(command_path(:table_create,
                                :name => "users",
                                :flags => " TABLE_NO_KEY | KEY_NORMALIZE "))
    assert_response("true",
                    response,
                    :content_type => "application/json")
    assert_table_list([["users",
                        Flag::PERSISTENT | Table::NO_KEY | Key::NORMALIZE,
                        Type::VOID]])
  end

  private
  # Overrides Utils#assert_response: for symbolic flags the server may
  # answer with a bare "true"/"false" body, which JSON.parse rejects,
  # so those bodies are compared as raw strings instead.
  def assert_response(expected, response, options=nil)
    actual = nil
    options ||= {}
    if options[:content_type]
      assert_equal(options[:content_type], response.content_type)
    end
    case response.content_type
    when "application/json"
      begin
        actual = JSON.parse(response.body)
      rescue JSON::ParserError
        # Bare boolean bodies are expected; anything else is a real
        # parse failure and should surface.
        if response.body == "true" || response.body == "false"
          actual = response.body
        else
          raise
        end
      end
    when "text/html", "text/xml"
      actual = response.body
    else
      flunk("unknown content-type: #{response.content_type}")
    end
    actual = yield(actual) if block_given?
    assert_equal(expected, actual)
  end
end
end
Add test_invalid_symbol: creating a table with an unknown symbolic flag must fail and leave the schema unchanged.
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Kouhei Sutou <kou@clear-code.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 2.1 as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
class HTTPSchemaTest < Test::Unit::TestCase
module Utils
include GroongaHTTPTestUtils
def setup
setup_server
end
def teardown
teardown_server
end
private
def create_bookmarks_table
response = get(command_path(:table_create,
:name => "bookmarks",
:flags => Table::PAT_KEY,
:key_type => "ShortText",
:value_type => "Object",
:default_tokenizer => ""))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
@bookmarks_table_id = object_registered
end
def create_bookmark_title_column
response = get(command_path(:column_create,
:table => "bookmarks",
:name => "title",
:flags => Column::SCALAR,
:type => "ShortText"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
@bookmarks_title_column_id = object_registered
end
def assert_table_list(expected)
response = get(command_path(:table_list))
expected = expected.collect do |values|
name, flags, domain = values
[nil, name, nil, flags, domain]
end
assert_response([
["id", "name", "path", "flags", "domain"],
*expected
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, flags, domain = values
[nil, name, nil, flags, domain]
end
end
end
end
include Utils
def test_table_list_empty
response = get(command_path(:table_list))
assert_response([["id", "name", "path", "flags", "domain"]],
response,
:content_type => "application/json")
end
def test_table_list_exist
create_bookmarks_table
response = get(command_path(:table_list))
normalized_path = "/path/to/table"
assert_response([
["id", "name", "path", "flags", "domain"],
[@bookmarks_table_id,
"bookmarks",
normalized_path,
Flag::PERSISTENT | Table::PAT_KEY | Key::VAR_SIZE,
Type::SHORT_TEXT],
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, flags, domain = values
path = normalized_path if path
[id, name, path, flags, domain]
end
end
end
def test_table_list_with_invalid_output_type
response = get(command_path(:table_list,
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_empty
create_bookmarks_table
response = get(command_path(:column_list,
:table => "bookmarks"))
assert_response([["id", "name", "path", "type", "flags", "domain"]],
response,
:content_type => "application/json")
end
def test_column_list_exist
create_bookmarks_table
create_bookmark_title_column
response = get(command_path(:column_list,
:table => "bookmarks"))
assert_response([
["id", "name", "path", "type", "flags", "domain"],
[@bookmarks_title_column_id,
"title",
nil,
"var",
Column::SCALAR | Flag::PERSISTENT | Key::VAR_SIZE,
@bookmarks_table_id]
],
response,
:content_type => "application/json") do |actual|
actual[0, 1] + actual[1..-1].collect do |values|
id, name, path, type, flags, domain = values
[id, name, nil, type, flags, domain]
end
end
end
def test_column_list_nonexistent
response = get(command_path(:column_list,
:table => "nonexistent"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, :message]],
response,
:content_type => "application/json")
end
end
def test_column_list_without_table
response = get(command_path(:column_list))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_with_invalid_output_type
create_bookmarks_table
response = get(command_path(:column_list,
:table => "bookmarks",
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_column_list_with_invalid_output_type_without_table
response = get(command_path(:column_list,
:output_type => "unknown"))
pend("should implement error case") do
assert_response([[Result::UNKNOWN_ERROR, "should be implemented"]],
response,
:content_type => "application/json")
end
end
def test_table_create_without_name
response = get(command_path(:table_create))
assert_response([[Result::UNKNOWN_ERROR,
"should not create anonymous table"]],
response,
:content_type => "application/json")
end
def test_table_create_with_dot_name
response = get(command_path(:table_create, :name => "mori.daijiro"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
def test_table_create_with_under_score_started_name
response = get(command_path(:table_create, :name => "_mori"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
def test_table_create_with_under_score_name
response = get(command_path(:table_create, :name => "mori_daijiro"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
end
def test_table_create_with_colon_name
response = get(command_path(:table_create, :name => "daijiro:mori"))
assert_response([[Result::INVALID_ARGUMENT,
"name can't start with with '_' and contains '.' or ':'"]],
response,
:content_type => "application/json")
end
# table_create must reject a name that is already taken by another
# object (here the built-in "table_create" command itself).
#
# NOTE(review): this method used to be defined twice with identical
# bodies; in Ruby the second definition silently replaces the first,
# so the duplicate has been removed.
def test_table_create_with_duplicated_name
  response = get(command_path(:table_create, :name => "table_create"))
  assert_response([[Result::INVALID_ARGUMENT,
                    "already used name was assigned"]],
                  response,
                  :content_type => "application/json")
end
def test_full_text_search
create_bookmarks_table
create_bookmark_title_column
response = get(command_path(:table_create,
:name => "terms",
:flags => Table::PAT_KEY | Key::NORMALIZE,
:key_type => "ShortText",
:default_tokenizer => "TokenBigram"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
response = get(command_path(:column_create,
:table => "terms",
:name => "bookmarks-title",
:flags => Column::INDEX | Flag::WITH_POSITION,
:type => "bookmarks",
:source => "title"))
assert_response([[Result::SUCCESS]], response,
:content_type => "application/json")
groonga_title = "groonga - an open-source fulltext search engine " +
"and column store."
senna_title = "Senna: An Embeddable Fulltext Search Engine"
load("bookmarks",
[{"_key" => "groonga", "title" => groonga_title},
{"_key" => "senna", "title" => senna_title}])
assert_select(["_key", "title"],
[["groonga", groonga_title]],
:table => "bookmarks",
:output_columns => "_key title",
:query => "title:@column")
end
class HashCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create, :name => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Key::NORMALIZE))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_normalized_string_key
response = get(command_path(:table_create,
:name => "users",
:flags => Key::NORMALIZE,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY |
Key::NORMALIZE | Key::VAR_SIZE,
Type::SHORT_TEXT]])
end
def test_view_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:key_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::HASH_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_long_size_key
response = get(command_path(:table_create,
:name => "users",
:key_type => "Text"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Key::SIS,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "SIS is invalid flag for hash"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_nonexistent_key_type
response = get(command_path(:table_create,
:name => "users",
:key_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_invalid_key_type
response = get(command_path(:table_create,
:name => "users",
:key_type => "table_create"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class PatriciaTrieCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::NORMALIZE))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_normalized_string_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::NORMALIZE,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY |
Key::NORMALIZE | Key::VAR_SIZE,
Type::SHORT_TEXT]])
end
def test_view_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:flags => Table::PAT_KEY,
:key_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::PAT_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_long_size_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "Text"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY | Key::SIS,
:key_type => "ShortText"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY |
Key::VAR_SIZE | Key::SIS,
Type::SHORT_TEXT]])
end
def test_nonexistent_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_invalid_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:key_type => "table_create"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::PAT_KEY,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::PAT_KEY,
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "should implement error case"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class ArrayCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY | Key::NORMALIZE))
assert_response([[Result::UNKNOWN_ERROR,
"key normalization isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "key isn't supported"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY | Key::SIS))
assert_response([[Result::UNKNOWN_ERROR, "SIS key isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:value_type => "Int32"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY,
Type::INT32]])
end
def test_view_value
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
users_table_id = object_registered
response = get(command_path(:table_create,
:name => "sites",
:flags => Table::NO_KEY,
:value_type => "users"))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["sites",
Flag::PERSISTENT | Table::NO_KEY,
users_table_id],
["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_nonexistent_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::NO_KEY,
:value_type => "nonexistent"))
assert_response([[Result::UNKNOWN_ERROR, "value type doesn't exist"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class ViewCreateTest < Test::Unit::TestCase
include Utils
def test_simple
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW))
assert_response([[Result::SUCCESS]],
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::VIEW,
Type::VOID]])
end
def test_normalize_key
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW | Key::NORMALIZE))
assert_response([[Result::UNKNOWN_ERROR,
"key normalization isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_key_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW,
:key_type => "ShortText"))
assert_response([[Result::UNKNOWN_ERROR, "key isn't supported"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_sis
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW | Key::SIS))
assert_response([[Result::UNKNOWN_ERROR, "SIS key isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
def test_value_type
response = get(command_path(:table_create,
:name => "users",
:flags => Table::VIEW,
:value_type => "Int32"))
assert_response([[Result::UNKNOWN_ERROR, "value isn't available"]],
response,
:content_type => "application/json")
assert_table_list([])
end
end
class SymbolFlagsTest < Test::Unit::TestCase
include Utils
def test_single_symbol
response = get(command_path(:table_create,
:name => "users",
:flags => "KEY_NORMALIZE"))
assert_response("true",
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::HASH_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_combined_symbols
response = get(command_path(:table_create,
:name => "users",
:flags => "TABLE_NO_KEY|KEY_NORMALIZE"))
assert_response("true",
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_combined_symbols_with_whitespaces
response = get(command_path(:table_create,
:name => "users",
:flags => " TABLE_NO_KEY | KEY_NORMALIZE "))
assert_response("true",
response,
:content_type => "application/json")
assert_table_list([["users",
Flag::PERSISTENT | Table::NO_KEY | Key::NORMALIZE,
Type::VOID]])
end
def test_invalid_symbol
response = get(command_path(:table_create,
:name => "users",
:flags => "INVALID_SYMBOL"))
assert_response("false",
response,
:content_type => "application/json")
assert_table_list([])
end
private
# Overrides Utils#assert_response for the symbolic-flags tests: the
# server may answer a bare "true"/"false" body, which JSON.parse
# rejects, so such bodies are compared as raw strings instead.
def assert_response(expected, response, options=nil)
  actual = nil
  options ||= {}
  if options[:content_type]
    assert_equal(options[:content_type], response.content_type)
  end
  case response.content_type
  when "application/json"
    begin
      actual = JSON.parse(response.body)
    rescue JSON::ParserError
      # Bare boolean bodies are expected; re-raise anything else.
      if response.body == "true" || response.body == "false"
        actual = response.body
      else
        raise
      end
    end
  when "text/html", "text/xml"
    actual = response.body
  else
    flunk("unknown content-type: #{response.content_type}")
  end
  actual = yield(actual) if block_given?
  assert_equal(expected, actual)
end
end
end
|
require File.join(File.dirname(__FILE__), '..', '..', 'test_helper')
# Verifies that the deprecated `vagrant down` command no longer does
# any work: executing it must only report the deprecation and exit.
class CommandsDownTest < Test::Unit::TestCase
setup do
@klass = Vagrant::Commands::Down
@env = mock_environment
@instance = @klass.new(@env)
end
context "executing" do
should "just error and exit" do
# error_and_exit is stubbed (mocha), so execute must call it with
# the deprecation message key and do nothing else.
@instance.expects(:error_and_exit).with(:command_deprecation_down)
@instance.execute
end
end
end
Remove the test files for the deprecated `vagrant down` command.
|
require "rubygems"
require "optimist"
# Parse command-line options; Optimist also generates --help output
# from the banner and opt declarations below.
opts = Optimist::options do
banner <<-EOS
Import conservation statuses from CSV made using the template at
https://docs.google.com/spreadsheets/d/1OxlVBw4xifdFugMq42gbMS8IDFiliYcC5imc0sUSWn8
Usage:
rails runner tools/import_conservation_statuses.rb PATH_TO_CSV
where [options] are:
EOS
opt :debug, "Print debug statements", type: :boolean, short: "-d"
opt :dry, "Dry run, don't actually change anything", type: :boolean
# Optional overrides applied to every imported row.
opt :place_id, "Assign statuses to this place", type: :integer, short: "-p"
opt :user_id, "User ID of user who is adding these statuses", type: :integer, short: "-u"
end
# Column order expected in the CSV. These names are handed to
# CSV.foreach as explicit headers, so the file itself is read
# positionally. Frozen because they are read-only lookup tables.
HEADERS = %w(
  taxon_name
  status
  authority
  iucn
  description
  place_id
  url
  geoprivacy
  user
).freeze
# Columns that must be non-blank for a row to be imported.
REQUIRED = %w(
  taxon_name
  status
  authority
  iucn
).freeze
# Validate the CSV path and the optional place/user overrides.
csv_path = ARGV[0]
Optimist::die "You must specify a CSV file to import" if csv_path.blank?
# File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
Optimist::die "#{csv_path} does not exist" unless File.exist?( csv_path )
if !opts.place_id.blank?
  # NOTE(review): Place.find raises ActiveRecord::RecordNotFound rather
  # than returning nil, so the else branch is normally unreachable --
  # confirm whether a rescue was intended here.
  if @place = Place.find( opts.place_id )
    puts "Found place: #{@place}"
  else
    # Was "#{OPTS.place_id}": OPTS is undefined in this script and
    # would raise NameError; the parsed options live in opts.
    Optimist::die "Couldn't find place: #{opts.place_id}"
  end
end
if !opts.user_id.blank?
  # Accept either a numeric ID or a login for --user-id.
  @user = User.find_by_id( opts.user_id )
  @user ||= User.find_by_login( opts.user_id )
  if @user
    puts "Found user: #{@user}"
  else
    # Same OPTS -> opts fix as above.
    Optimist::die "Couldn't find user: #{opts.user_id}"
  end
end
# Walk the CSV row by row, creating a ConservationStatus for each row
# that passes validation; collect identifiers of created and skipped
# rows for the summary printed at the end.
start = Time.now
created = []
skipped = []
CSV.foreach( csv_path, headers: HEADERS ) do |row|
# Human-readable identifier used in all log lines for this row.
identifier = [row["taxon_name"], row["status"], row["place_id"]].join( " | " )
puts identifier
# catch/throw returns the first REQUIRED header that is blank, or nil.
blank_column = catch :required_missing do
REQUIRED.each {|h| throw :required_missing, h if row[h].blank? }
nil
end
if blank_column
puts "#{blank_column} cannot be blank, skipping..."
next
end
# Resolve the taxon by name; ambiguous or unknown names come back nil.
unless taxon = Taxon.single_taxon_for_name( row["taxon_name"] )
puts "\tCouldn't find taxon for '#{row["taxon_name"]}', skipping..."
skipped << identifier
next
end
# Per-row place_id is only consulted when no --place-id override was
# given; `rescue nil` swallows the RecordNotFound from Place.find.
place = @place
if place.blank? && !row["place_id"].blank?
place = Place.find( row["place_id"] ) rescue nil
if place.blank?
puts "\tPlace #{row["place_id"]} specified but not found, skipping..."
skipped << identifier
next
end
end
# Skip rows whose (taxon, place, status, authority) combination is
# already in the database.
existing = taxon.conservation_statuses.where(
place_id: place,
status: row["status"],
authority: row["authority"]
).first
if existing
puts "\tStatus for this taxon in this place for this authority already exists, skipping..."
skipped << identifier
next
end
# Accept the IUCN value either as a status name or as a code.
iucn = Taxon::IUCN_STATUS_VALUES[row["iucn"].to_s.strip.parameterize.underscore]
iucn ||= Taxon::IUCN_CODE_VALUES[row["iucn"].to_s.strip.upcase]
unless iucn
puts "\t#{row["iucn"]} is not a valid IUCN status, skipping..."
skipped << identifier
next
end
# Per-row user is only consulted when no --user-id override was given;
# accept either a login or a numeric ID.
user = @user
if user.blank? && !row["user"].blank?
user = User.find_by_login( row["user"] )
user ||= User.find_by_id( row["user"] )
if user.blank?
puts "\tUser #{row["user"]} specified but no matching user found, skipping..."
skipped << identifier
next
end
end
cs = ConservationStatus.new(
taxon: taxon,
place: place,
status: row["status"],
iucn: iucn,
authority: row["authority"],
description: row["description"],
url: row["url"],
geoprivacy: row["geoprivacy"],
user: user
)
# Validation always runs; the save is suppressed under --dry.
if cs.valid?
cs.save unless opts.dry
puts "\tCreated #{cs}"
created << identifier
else
puts "\tConservation status was not valid: #{cs.errors.full_messages.to_sentence}"
skipped << identifier
end
end
# Final report: counts, elapsed wall-clock time, and the identifiers
# of the statuses that were created.
puts
puts "#{created.size} created, #{skipped.size} skipped in #{Time.now - start}s"
puts
puts "Created:"
created.each { |c| puts c }
Support a taxon_id column in the conservation status import script, and map the "regionally extinct" IUCN status to "extinct" when a place is given.
require "rubygems"
require "optimist"
opts = Optimist::options do
banner <<-EOS
Import conservation statuses from CSV made using the template at
https://docs.google.com/spreadsheets/d/1OxlVBw4xifdFugMq42gbMS8IDFiliYcC5imc0sUSWn8
Usage:
rails runner tools/import_conservation_statuses.rb PATH_TO_CSV
where [options] are:
EOS
opt :debug, "Print debug statements", type: :boolean, short: "-d"
opt :dry, "Dry run, don't actually change anything", type: :boolean
opt :place_id, "Assign statuses to this place", type: :integer, short: "-p"
opt :user_id, "User ID of user who is adding these statuses", type: :integer, short: "-u"
end
# Column order expected in the CSV (taxon_id was appended so existing
# files keep working). Handed to CSV.foreach as explicit headers, so
# the file is read positionally. Frozen: read-only lookup tables.
HEADERS = %w(
  taxon_name
  status
  authority
  iucn
  description
  place_id
  url
  geoprivacy
  user
  taxon_id
).freeze
# Columns that must be non-blank for a row to be imported.
REQUIRED = %w(
  taxon_name
  status
  authority
  iucn
).freeze
# Validate the CSV path and the optional place/user overrides.
csv_path = ARGV[0]
Optimist::die "You must specify a CSV file to import" if csv_path.blank?
# File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
Optimist::die "#{csv_path} does not exist" unless File.exist?( csv_path )
if !opts.place_id.blank?
  # NOTE(review): Place.find raises ActiveRecord::RecordNotFound rather
  # than returning nil, so the else branch is normally unreachable --
  # confirm whether a rescue was intended here.
  if @place = Place.find( opts.place_id )
    puts "Found place: #{@place}"
  else
    # Was "#{OPTS.place_id}": OPTS is undefined in this script and
    # would raise NameError; the parsed options live in opts.
    Optimist::die "Couldn't find place: #{opts.place_id}"
  end
end
if !opts.user_id.blank?
  # Accept either a numeric ID or a login for --user-id.
  @user = User.find_by_id( opts.user_id )
  @user ||= User.find_by_login( opts.user_id )
  if @user
    puts "Found user: #{@user}"
  else
    # Same OPTS -> opts fix as above.
    Optimist::die "Couldn't find user: #{opts.user_id}"
  end
end
start = Time.now
created = []
skipped = []
CSV.foreach( csv_path, headers: HEADERS ) do |row|
identifier = [row["taxon_name"], row["status"], row["place_id"]].join( " | " )
puts identifier
blank_column = catch :required_missing do
REQUIRED.each {|h| throw :required_missing, h if row[h].blank? }
nil
end
if blank_column
puts "#{blank_column} cannot be blank, skipping..."
next
end
taxon = Taxon.find_by_id( row["taxon_id"] ) unless row["taxon_id"].blank?
taxon ||= Taxon.single_taxon_for_name( row["taxon_name"] )
unless taxon
puts "\tCouldn't find taxon for '#{row["taxon_name"]}', skipping..."
skipped << identifier
next
end
place = @place
if place.blank? && !row["place_id"].blank?
place = Place.find( row["place_id"] ) rescue nil
if place.blank?
puts "\tPlace #{row["place_id"]} specified but not found, skipping..."
skipped << identifier
next
end
end
existing = taxon.conservation_statuses.where(
place_id: place,
status: row["status"],
authority: row["authority"]
).first
if existing
puts "\tStatus for this taxon in this place for this authority already exists, skipping..."
skipped << identifier
next
end
iucn = if place && row["iucn"].to_s.strip.parameterize.underscore == "regionally_extinct"
Taxon::IUCN_STATUS_VALUES["extinct"]
else
Taxon::IUCN_STATUS_VALUES[row["iucn"].to_s.strip.parameterize.underscore]
end
iucn ||= Taxon::IUCN_CODE_VALUES[row["iucn"].to_s.strip.upcase]
unless iucn
puts "\t#{row["iucn"]} is not a valid IUCN status, skipping..."
skipped << identifier
next
end
user = @user
if user.blank? && !row["user"].blank?
user = User.find_by_login( row["user"] )
user ||= User.find_by_id( row["user"] )
if user.blank?
puts "\tUser #{row["user"]} specified but no matching user found, skipping..."
skipped << identifier
next
end
end
cs = ConservationStatus.new(
taxon: taxon,
place: place,
status: row["status"],
iucn: iucn,
authority: row["authority"],
description: row["description"],
url: row["url"],
geoprivacy: row["geoprivacy"],
user: user
)
if cs.valid?
cs.save unless opts.dry
puts "\tCreated #{cs}"
created << identifier
else
puts "\tConservation status was not valid: #{cs.errors.full_messages.to_sentence}"
skipped << identifier
end
end
puts
puts "#{created.size} created, #{skipped.size} skipped in #{Time.now - start}s"
puts
puts "Created:"
created.each do |c|
puts c
end
|
require 'rest-client'
require 'json'
# Manages Guild records; guild tags are looked up from the Guild Wars 2
# public API when a guild is created.
class GuildsController < ApplicationController
  # GET / -- list all guilds.
  def index
    @guilds = Guild.all
    render :index
  end

  # GET /guilds/new -- blank form for a new guild.
  def new
    @guild = Guild.new
    render :new
  end

  # POST /guilds -- create a guild, fetching its tag from the GW2 API.
  # NOTE(review): the guild name is interpolated into the URL unescaped, and
  # RestClient.get will raise for names the API rejects -- confirm whether a
  # rescue (as added in a later revision) and URL-escaping are wanted here.
  def create
    @guild = Guild.new(guild_params)
    url = "https://api.guildwars2.com/v1/guild_details.json?guild_name=#{@guild.name}"
    response = RestClient.get(url)
    api_response = JSON.parse(response)
    @guild.guild_tag = api_response["tag"]
    if @guild.save
      redirect_to root_path
    else
      # validation errors remain available on @guild for the form
      render :new
    end
  end

  # DELETE /guilds/:id
  def destroy
    @guild = Guild.find(params[:id])
    @guild.destroy
    redirect_to root_path
  end

  # GET /guilds/:id/edit
  def edit
    @guild = Guild.find(params[:id])
    render :edit
  end

  # PATCH/PUT /guilds/:id
  def update
    @guild = Guild.find(params[:id])
    # BUG FIX: previously called `save` without assigning any attributes, so
    # edits were silently discarded; `update` assigns guild_params and saves.
    if @guild.update(guild_params)
      render :show
    else
      render :edit
    end
  end

  # GET /guilds/:id
  def show
    @guild = Guild.find(params[:id])
    render :show
  end

  private

  # Strong parameters for create/update.
  def guild_params
    params.require(:guild).permit(:name, :server, :wvw_playstyle, :average_member_count, :guild_tag)
  end
end
adds a rescue for when the guild name isn't found
require 'rest-client'
require 'json'
# Manages Guild records; guild tags are looked up from the Guild Wars 2
# public API when a guild is created. Unknown guild names (API 400) are
# reported via flash instead of raising.
class GuildsController < ApplicationController
  # GET / -- list all guilds.
  def index
    @guilds = Guild.all
    render :index
  end

  # GET /guilds/new -- blank form for a new guild.
  def new
    @guild = Guild.new
    render :new
  end

  # POST /guilds -- create a guild, fetching its tag from the GW2 API.
  def create
    @guild = Guild.new(guild_params)
    url = "https://api.guildwars2.com/v1/guild_details.json?guild_name=#{@guild.name}"
    begin
      response = RestClient.get(url)
    rescue RestClient::BadRequest
      # The API returns 400 when the guild name doesn't exist
      flash[:notice] = "Guild Name Can't Be Found"
      response = nil
    end
    if response
      api_response = JSON.parse(response)
      @guild.guild_tag = api_response["tag"]
      # BUG FIX: the save result was previously ignored, so a guild that
      # failed validation still redirected as if created
      if @guild.save
        redirect_to root_path
      else
        render :new
      end
    else
      render :new
    end
  end

  # DELETE /guilds/:id
  def destroy
    @guild = Guild.find(params[:id])
    @guild.destroy
    redirect_to root_path
  end

  # GET /guilds/:id/edit
  def edit
    @guild = Guild.find(params[:id])
    render :edit
  end

  # PATCH/PUT /guilds/:id
  def update
    @guild = Guild.find(params[:id])
    # BUG FIX: previously called `save` without assigning any attributes, so
    # edits were silently discarded; `update` assigns guild_params and saves.
    if @guild.update(guild_params)
      render :show
    else
      render :edit
    end
  end

  # GET /guilds/:id
  def show
    @guild = Guild.find(params[:id])
    render :show
  end

  private

  # Strong parameters for create/update.
  def guild_params
    params.require(:guild).permit(:name, :server, :wvw_playstyle, :average_member_count, :guild_tag)
  end
end
|
#!/usr/bin/env ruby
# OpsWorks provisioner for Vagrant
# --------------------------------
# Copyright (c) 2015 PixelCog Inc.
# Licensed under MIT (see LICENSE)
require 'date'
require 'json'
require 'tmpdir'
require 'fileutils'
module OpsWorks
  # Default "DNA" attributes handed to the OpsWorks agent. User-supplied
  # JSON files are deep-merged on top of these defaults.
  DNA_BASE = {
    "ssh_users" => {
      "1000" => {
        "name" => "vagrant",
        "public_key" => nil,
        "sudoer" => true
      }
    },
    "dependencies" => {
      "gem_binary" => "/usr/local/bin/gem",
      "gems" => {},
      "debs" => {}
    },
    "ebs" => {
      "devices" => {},
      "raids" => {}
    },
    "opsworks" => {
      "activity" => "setup_and_deploy",
      "valid_client_activities" => ["setup_and_deploy"],
      "agent_version" => 0,
      "ruby_version" => "2.0.0",
      "ruby_stack" => "ruby",
      "rails_stack" => {
        "name" => nil
      },
      "stack" => {
        "name" => "Vagrant Stack",
        "elb-load-balancers" => [],
        "rds_instances" => []
      },
      "layers" => {},
      "instance" => {
        "ip" => "127.0.0.1",
        "private_ip" => "127.0.0.1",
        "layers" => []
      }
    },
    "deploy" => {},
    "opsworks_rubygems" => {
      "version" => "2.2.2"
    },
    "opsworks_bundler" => {
      "version" => "1.5.3",
      "manage_package" => nil
    },
    "opsworks_custom_cookbooks" => {
      "enabled" => false,
      "scm" => {
        "type" => "git",
        "repository" => nil,
        "user" => nil,
        "password" => nil,
        "revision" => nil,
        "ssh_key" => nil
      },
      "manage_berkshelf" => nil,
      "recipes" => []
    },
    "chef_environment" => "_default",
    "recipes" => [
      "opsworks_custom_cookbooks::load",
      "opsworks_custom_cookbooks::execute"
    ]
  }

  # Default per-application deploy attributes, merged under each entry of
  # the "deploy" hash.
  DNA_DEPLOY_BASE = {
    "deploy_to" => nil,
    "application" => nil,
    "deploying_user" => nil,
    "domains" => [],
    "application_type" => nil,
    "mounted_at" => nil,
    "rails_env" => nil,
    "ssl_support" => false,
    "ssl_certificate" => nil,
    "ssl_certificate_key" => nil,
    "ssl_certificate_ca" => nil,
    "document_root" => nil,
    "restart_command" => "echo 'restarting app'",
    "sleep_before_restart" => 0,
    "symlink_before_migrate" => {},
    "symlinks" => {},
    "database" => {},
    "migrate" => false,
    "auto_bundle_on_deploy" => true,
    "scm" => {
      "scm_type" => "git",
      "repository" => nil,
      "revision" => nil,
      "ssh_key" => nil,
      "user" => nil,
      "password" => nil
    }
  }

  # Entry point: compiles the given JSON files into a dna.json and hands it
  # to the locally installed opsworks agent. Expects to run as root on the
  # guest VM. Exits the process with a failure status on any error.
  def self.provision(*args)
    if agent_revision < Date.today.prev_month(4)
      warn "Warning: OpsWorks agent version #{agent_version} is over four months old, consider updating..."
    end

    log "Checking dependencies..."
    check_dependencies

    log "Reading input..."
    dna = compile_json expand_paths(args)

    log "Parsing deployments..."
    dna['deploy'].each do |name, app|
      # if repo points to a local path, trick opsworks into receiving it as a git repo
      if app['scm']['repository'] && app['scm']['repository'] !~ /^(?:[a-z]+:)?\/\//i
        if !Dir.exist?(app['scm']['repository'])
          raise "Local app '#{name}' could not be found at '#{app['scm']['repository']}'"
        end
        app['scm']['repository'] = prepare_deployment(app['scm']['repository'])
      end
    end

    log "Parsing custom cookbooks..."
    if dna['opsworks_custom_cookbooks']['enabled']
      cookbooks = dna['opsworks_custom_cookbooks']
      # if repo points to a local path, trick opsworks into receiving it as a git repo
      if cookbooks['scm']['repository'] && cookbooks['scm']['repository'] !~ /^(?:[a-z]+:)?\/\//i
        if !Dir.exist?(cookbooks['scm']['repository'])
          raise "Local custom cookbooks could not be found at '#{cookbooks['scm']['repository']}'"
        end
        cookbooks['scm']['repository'] = prepare_deployment(cookbooks['scm']['repository'])
        # autodetect berkshelf support
        if cookbooks['manage_berkshelf'].nil?
          berksfile = cookbooks['scm']['repository'].sub(/[\/\\]+$/,'') + '/Berksfile'
          cookbooks['manage_berkshelf'] = File.exist?(berksfile)
        end
      end
      # remove the local cache to force opsworks to update custom cookbooks
      log "Purging local cookbooks cache from '/opt/aws/opsworks/current/site-cookbooks'..."
      FileUtils.rm_rf('/opt/aws/opsworks/current/site-cookbooks/')
    end

    if dna['opsworks']['instance']['hostname']
      log "Setting instance hostname..."
      set_hostname dna['opsworks']['instance']['hostname']
    end

    # run some base recipes if none explicitly provided
    if dna['opsworks_custom_cookbooks']['recipes'].empty?
      dna['opsworks_custom_cookbooks']['recipes'] = %w(
        recipe[opsworks_initial_setup]
        recipe[ssh_host_keys]
        recipe[ssh_users]
        recipe[dependencies]
        recipe[ebs]
        recipe[agent_version]
        recipe[opsworks_stack_state_sync]
        recipe[opsworks_cleanup]
      )
    end

    # ensure we don't set the agent version to anything lower than the current version
    dna['opsworks']['agent_version'] = [agent_version, dna['opsworks']['agent_version']].max

    log "Generating dna.json..."
    dna_file = save_json_tempfile dna, 'dna.json'

    log "Running opsworks agent..."
    # AWS currently does not set UTF-8 as default encoding
    system({"LANG" => "POSIX"}, "opsworks-agent-cli run_command -f #{dna_file}")
  rescue StandardError => e
    warn "Error: #{e}"
    exit false
  end

  # Writes `data` as pretty-printed JSON into a world-readable tempfile and
  # returns its path (the agent runs as a different user and must read it).
  def self.save_json_tempfile(data, name)
    tmp_dir = Dir.mktmpdir('vagrant-opsworks')
    File.chmod(0755, tmp_dir)
    tmp_file = "#{tmp_dir}/#{name}"
    File.open(tmp_file, 'w') { |f| f.write JSON.pretty_generate(data) }
    File.chmod(0755, tmp_file)
    tmp_file
  end

  def self.log(msg)
    puts msg
  end

  # Best-effort install of git via apt when it is missing.
  def self.check_dependencies
    `apt-get -yq install git 2>&1` if `which git`.empty?
  end

  # Adds the hostname to /etc/hosts (if absent), persists it in
  # /etc/hostname and applies it immediately.
  def self.set_hostname(hostname)
    if !File.readlines('/etc/hosts').grep(/(?=[^\.\w-]|$)#{hostname}(?=[^\.\w-]|$)/).any?
      File.open('/etc/hosts', 'a') do |f|
        f.puts "\n127.0.0.1\t#{hostname}.localdomain #{hostname}\n"
      end
    end
    File.write('/etc/hostname', hostname)
    system('hostname', hostname)
  end

  # Expands literal paths and glob patterns into a flat list of files,
  # raising for any argument that matches nothing.
  def self.expand_paths(args)
    files = []
    args.each do |file|
      if File.exist?(file)
        files << file
      elsif file.include? '*'
        files += Dir.glob(file)
      else
        raise "The file '#{file}' does not appear to exist."
      end
    end
    files
  end

  # Combines all json files into one hash, starting from a deep copy of
  # DNA_BASE (so repeated calls cannot mutate the constant), then normalizes
  # layer and deploy entries.
  def self.compile_json(files)
    dna = files.reduce(deep_copy(DNA_BASE)) do |merged, file|
      log "Processing '#{file}'..."
      begin
        json = File.read(file).strip
        # an empty file contributes nothing rather than failing to parse
        json = '{}' if json.empty?
        json = JSON.parse(json)
      rescue JSON::ParserError => e
        # BUG FIX: this rescue must live inside the block -- `file` is
        # block-local and was not visible from the old method-level rescue,
        # which raised NameError instead of this message.
        raise "The file '#{file}' does not appear to be valid JSON. (error: #{e})"
      end
      deep_merge(merged, json)
    end

    # ensure each layer has some required fields including instances with both
    # private and public ip addresses
    dna['opsworks']['layers'].each do |name, layer|
      next unless Hash === layer
      layer['name'] ||= name
      layer['elb-load-balancers'] ||= []
      layer['instances'] ||= {}
      next unless Hash === layer['instances']
      layer['instances'].each do |name, instance|
        next unless Hash === instance
        instance['private_ip'] ||= instance['ip']
        instance['ip'] ||= instance['private_ip']
      end
    end

    # merge some default values into each app definition
    dna['deploy'].each do |name, app|
      # BUG FIX: deep-copy the deploy defaults so sibling apps do not share
      # (and mutate) the same nested arrays/hashes from DNA_DEPLOY_BASE --
      # previously `app['domains'] << name` appended to the constant's array,
      # leaking domains across apps and across runs.
      app.replace deep_merge(deep_copy(DNA_DEPLOY_BASE), app)
      app['application'] ||= name
      app['domains'] << name if app['domains'].empty?
    end

    dna
  end

  # Copies a local directory into a tempdir and turns it into a throwaway
  # git repository so the agent can "clone" it like a remote repo.
  def self.prepare_deployment(path)
    tmp_dir = Dir.mktmpdir('vagrant-opsworks')
    File.chmod(0755, tmp_dir)
    FileUtils.cp_r("#{path}/.", tmp_dir)
    Dir.chdir(tmp_dir) do
      `find . -name '.git*' -exec rm -rf {} \\; 2>&1; git init; git add .; git -c user.name='Vagrant' -c user.email=none commit -m 'Create temporary repository for deployment.'`
    end
    tmp_dir
  end

  # Recursively merges hash b into hash a; non-hash values in b win.
  def self.deep_merge(a, b)
    a.merge(b) { |_, left, right| Hash === left && Hash === right ? deep_merge(left, right) : right }
  end

  # Structural deep copy of plain data (hashes, arrays, strings, numbers,
  # booleans, nil) via Marshal round-trip.
  def self.deep_copy(obj)
    Marshal.load(Marshal.dump(obj))
  end

  # Numeric build id parsed from the agent's REVISION file.
  def self.agent_version
    File.read('/opt/aws/opsworks/current/REVISION')[/\d\d\d\d\-\d\d-\d\d-\d\d:\d\d:\d\d (\d+)/, 1].to_i
  end

  # Build timestamp parsed from the agent's REVISION file.
  def self.agent_revision
    date_string = File.read('/opt/aws/opsworks/current/REVISION')[/(\d\d\d\d\-\d\d-\d\d-\d\d:\d\d:\d\d)/, 1]
    raise 'Unable to parse agent revision' unless date_string
    DateTime.strptime date_string, '%Y-%m-%d-%H:%M:%S'
  end
end
# automatically run provisioner
# Only auto-run when executed directly (not when loaded as a library).
if $PROGRAM_NAME == __FILE__
  $stdout.sync = true
  OpsWorks.provision(*ARGV)
end
update opsworks provisioner to latest (opsworks-vm rev 2b458da)
#!/usr/bin/env ruby
# OpsWorks provisioner for Vagrant
# --------------------------------
# Copyright (c) 2015 PixelCog Inc.
# Licensed under MIT (see LICENSE)
require 'date'
require 'json'
require 'tmpdir'
require 'fileutils'
module OpsWorks
  # Raised for all expected provisioning failures; provision() reports these
  # cleanly instead of dumping a backtrace.
  class OpsWorksError < StandardError; end

  # Default "DNA" attributes handed to the OpsWorks agent. User-supplied
  # JSON files are deep-merged on top of these defaults.
  DNA_BASE = {
    "ssh_users" => {
      "1000" => {
        "name" => "vagrant",
        "public_key" => nil,
        "sudoer" => true
      }
    },
    "dependencies" => {
      "gem_binary" => "/usr/local/bin/gem",
      "gems" => {},
      "debs" => {}
    },
    "ec2" => {
      "instance_type" => "vm.vagrant"
    },
    "opsworks_initial_setup" => {
      "swapfile_instancetypes" => ["vm.vagrant"]
    },
    "ebs" => {
      "devices" => {},
      "raids" => {}
    },
    "opsworks" => {
      "activity" => "setup",
      "valid_client_activities" => ["setup"],
      "agent_version" => 0,
      "ruby_version" => "2.0.0",
      "ruby_stack" => "ruby",
      "rails_stack" => {
        "name" => nil
      },
      "stack" => {
        "name" => "Vagrant Stack",
        "elb-load-balancers" => [],
        "rds_instances" => []
      },
      "layers" => {},
      "instance" => {
        "ip" => "127.0.0.1",
        "private_ip" => "127.0.0.1",
        "layers" => []
      }
    },
    "deploy" => {},
    "opsworks_rubygems" => {
      "version" => "2.2.2"
    },
    "opsworks_bundler" => {
      "version" => "1.5.3",
      "manage_package" => nil
    },
    "opsworks_custom_cookbooks" => {
      "enabled" => false,
      "scm" => {
        "type" => "git",
        "repository" => nil,
        "user" => nil,
        "password" => nil,
        "revision" => nil,
        "ssh_key" => nil
      },
      "manage_berkshelf" => nil,
      "recipes" => []
    },
    "chef_environment" => "_default",
    "recipes" => [
      "opsworks_custom_cookbooks::load",
      "opsworks_custom_cookbooks::execute"
    ]
  }

  # Default per-application deploy attributes, merged under each entry of
  # the "deploy" hash.
  DNA_DEPLOY_BASE = {
    "deploy_to" => nil,
    "application" => nil,
    "deploying_user" => nil,
    "domains" => [],
    "application_type" => nil,
    "mounted_at" => nil,
    "rails_env" => nil,
    "ssl_support" => false,
    "ssl_certificate" => nil,
    "ssl_certificate_key" => nil,
    "ssl_certificate_ca" => nil,
    "document_root" => nil,
    "restart_command" => "echo 'restarting app'",
    "sleep_before_restart" => 0,
    "symlink_before_migrate" => {},
    "symlinks" => {},
    "database" => {},
    "migrate" => false,
    "auto_bundle_on_deploy" => true,
    "scm" => {
      "scm_type" => "git",
      "repository" => nil,
      "revision" => nil,
      "ssh_key" => nil,
      "user" => nil,
      "password" => nil
    }
  }

  # Entry point: compiles the given JSON files into a dna.json and hands it
  # to the locally installed opsworks agent. Expects to run as root on the
  # guest VM. Exits the process with a failure status on OpsWorksError.
  def self.provision(*args)
    if agent_revision < Date.today.prev_month(4)
      warn "Warning: OpsWorks agent version #{agent_version} is over four months old, consider updating..."
    end

    log "Checking dependencies..."
    check_dependencies

    log "Reading input..."
    dna = compile_json expand_paths(args)

    log "Parsing deployments..."
    dna['deploy'].each do |name, app|
      # if repo points to a local path, trick opsworks into receiving it as a git repo
      if app['scm']['repository'] && app['scm']['repository'] !~ /^(?:[a-z]+:)?\/\//i
        if !Dir.exist?(app['scm']['repository'])
          raise OpsWorksError, "Local app '#{name}' could not be found at '#{app['scm']['repository']}'"
        end
        app['scm']['repository'] = prepare_deployment(app['scm']['repository'])
      end
    end

    log "Parsing custom cookbooks..."
    if dna['opsworks_custom_cookbooks']['enabled']
      cookbooks = dna['opsworks_custom_cookbooks']
      # if repo points to a local path, trick opsworks into receiving it as a git repo
      if cookbooks['scm']['repository'] && cookbooks['scm']['repository'] !~ /^(?:[a-z]+:)?\/\//i
        if !Dir.exist?(cookbooks['scm']['repository'])
          raise OpsWorksError, "Local custom cookbooks could not be found at '#{cookbooks['scm']['repository']}'"
        end
        cookbooks['scm']['repository'] = prepare_deployment(cookbooks['scm']['repository'])
        # autodetect berkshelf support
        if cookbooks['manage_berkshelf'].nil?
          berksfile = cookbooks['scm']['repository'].sub(/[\/\\]+$/,'') + '/Berksfile'
          cookbooks['manage_berkshelf'] = File.exist?(berksfile)
        end
      end
      # remove the local cache to force opsworks to update custom cookbooks
      log "Purging local cookbooks cache from '/opt/aws/opsworks/current/site-cookbooks'..."
      FileUtils.rm_rf('/opt/aws/opsworks/current/site-cookbooks/')
    end

    if dna['opsworks']['instance']['hostname']
      log "Setting instance hostname..."
      set_hostname dna['opsworks']['instance']['hostname']
    end

    # run some base recipes if none explicitly provided
    if dna['opsworks_custom_cookbooks']['recipes'].empty?
      dna['opsworks_custom_cookbooks']['recipes'] = %w(
        recipe[opsworks_initial_setup]
        recipe[ssh_host_keys]
        recipe[ssh_users]
        recipe[dependencies]
        recipe[ebs]
        recipe[agent_version]
        recipe[opsworks_stack_state_sync]
        recipe[opsworks_cleanup]
      )
    end

    # ensure we don't set the agent version to anything lower than the current version
    dna['opsworks']['agent_version'] = [agent_version, dna['opsworks']['agent_version']].max

    log "Generating dna.json..."
    dna_file = save_json_tempfile dna, 'dna.json'

    log "Running opsworks agent..."
    # AWS currently does not set UTF-8 as default encoding
    system({"LANG" => "POSIX"}, "opsworks-agent-cli run_command -f #{dna_file}")
  rescue OpsWorksError => e
    warn "Error: #{e}"
    exit false
  end

  # Writes `data` as pretty-printed JSON into a world-readable tempfile and
  # returns its path (the agent runs as a different user and must read it).
  def self.save_json_tempfile(data, name)
    tmp_dir = Dir.mktmpdir('vagrant-opsworks')
    File.chmod(0755, tmp_dir)
    tmp_file = "#{tmp_dir}/#{name}"
    File.open(tmp_file, 'w') { |f| f.write JSON.pretty_generate(data) }
    File.chmod(0755, tmp_file)
    tmp_file
  end

  def self.log(msg)
    puts msg
  end

  # Best-effort install of git via apt when it is missing.
  def self.check_dependencies
    `apt-get -yq install git 2>&1` if `which git`.empty?
  end

  # Adds the hostname to /etc/hosts (if absent), persists it in
  # /etc/hostname and applies it immediately.
  def self.set_hostname(hostname)
    if !File.readlines('/etc/hosts').grep(/(?=[^\.\w-]|$)#{hostname}(?=[^\.\w-]|$)/).any?
      File.open('/etc/hosts', 'a') do |f|
        f.puts "\n127.0.0.1\t#{hostname}.localdomain #{hostname}\n"
      end
    end
    File.write('/etc/hostname', hostname)
    system('hostname', hostname)
  end

  # Expands literal paths and glob patterns into a flat list of files,
  # raising for any argument that matches nothing.
  def self.expand_paths(args)
    files = []
    args.each do |file|
      if File.exist?(file)
        files << file
      elsif file.include? '*'
        files += Dir.glob(file)
      else
        raise OpsWorksError, "The file '#{file}' does not appear to exist."
      end
    end
    files
  end

  # Combines all json files into one hash, starting from a deep copy of
  # DNA_BASE (so repeated calls cannot mutate the constant), then normalizes
  # layer and deploy entries.
  def self.compile_json(files)
    dna = files.reduce(deep_copy(DNA_BASE)) do |merged, file|
      log "Processing '#{file}'..."
      begin
        json = File.read(file).strip
        # an empty file contributes nothing rather than failing to parse
        json = '{}' if json.empty?
        json = JSON.parse(json)
      rescue JSON::ParserError => e
        raise OpsWorksError, "The file '#{file}' does not appear to be valid JSON. (error: #{e})"
      end
      deep_merge(merged, json)
    end

    # ensure each layer has some required fields including instances with both
    # private and public ip addresses
    dna['opsworks']['layers'].each do |name, layer|
      next unless Hash === layer
      layer['name'] ||= name
      layer['elb-load-balancers'] ||= []
      layer['instances'] ||= {}
      next unless Hash === layer['instances']
      layer['instances'].each do |name, instance|
        next unless Hash === instance
        instance['private_ip'] ||= instance['ip']
        instance['ip'] ||= instance['private_ip']
      end
    end

    # merge some default values into each app definition
    dna['deploy'].each do |name, app|
      # BUG FIX: deep-copy the deploy defaults so sibling apps do not share
      # (and mutate) the same nested arrays/hashes from DNA_DEPLOY_BASE --
      # previously `app['domains'] << name` appended to the constant's array,
      # leaking domains across apps and across runs.
      app.replace deep_merge(deep_copy(DNA_DEPLOY_BASE), app)
      app['application'] ||= name
      app['domains'] << name if app['domains'].empty?
    end

    dna
  end

  # Copies a local directory into a tempdir and turns it into a throwaway
  # git repository so the agent can "clone" it like a remote repo.
  def self.prepare_deployment(path)
    tmp_dir = Dir.mktmpdir('vagrant-opsworks')
    File.chmod(0755, tmp_dir)
    FileUtils.cp_r("#{path}/.", tmp_dir)
    Dir.chdir(tmp_dir) do
      `find . -name '.git*' -exec rm -rf {} \\; 2>&1; git init; git add .; git -c user.name='Vagrant' -c user.email=none commit -m 'Create temporary repository for deployment.'`
    end
    tmp_dir
  end

  # Recursively merges hash b into hash a; non-hash values in b win.
  def self.deep_merge(a, b)
    a.merge(b) { |_, left, right| Hash === left && Hash === right ? deep_merge(left, right) : right }
  end

  # Structural deep copy of plain data (hashes, arrays, strings, numbers,
  # booleans, nil) via Marshal round-trip.
  def self.deep_copy(obj)
    Marshal.load(Marshal.dump(obj))
  end

  # Numeric build id parsed from the agent's REVISION file.
  def self.agent_version
    File.read('/opt/aws/opsworks/current/REVISION')[/\d\d\d\d\-\d\d-\d\d-\d\d:\d\d:\d\d (\d+)/, 1].to_i
  end

  # Build timestamp parsed from the agent's REVISION file.
  def self.agent_revision
    date_string = File.read('/opt/aws/opsworks/current/REVISION')[/(\d\d\d\d\-\d\d-\d\d-\d\d:\d\d:\d\d)/, 1]
    raise OpsWorksError, 'Unable to parse agent revision' unless date_string
    DateTime.strptime date_string, '%Y-%m-%d-%H:%M:%S'
  end
end
# automatically run provisioner
# Only auto-run when executed directly (not when loaded as a library).
if $PROGRAM_NAME == __FILE__
  $stdout.sync = true
  OpsWorks.provision(*ARGV)
end
|
#!/usr/bin/env ruby19 -wKU
require "yaml"
require "pp"
require 'escape'
LASTFM_SUBMIT = '/usr/local/bin/lastfmsubmit'
METADATA_FILE = "/Users/bilalh/Movies/.Movie/OpeningP/_metadata.yaml"

output = $stderr

# Env toggles default to on; the string 'false' (or an empty value) turns
# them off.
scrobbler_echo = ENV['SCROBBLER_ECHO'] || true
use_taginfo = ENV['USE_TAGINFO'] || true
scrobbler_echo = false if !scrobbler_echo || scrobbler_echo == 'false'
use_taginfo = false if !use_taginfo || use_taginfo == 'false'
use_increment = ENV['USE_INCREMENT'] || false
use_increment = false if !use_increment || use_increment == 'false'
display = ENV['DISPLAY_TRACK_INFO'] || true
display = false if !display || display == 'false'
playcount_file = ENV['PLAYCOUNT_FILE'] || File.expand_path("~/Music/playcount.yaml")

# Ask the running mplayer instance for the current file path via its FIFO,
# then read the answer back from its log.
`echo "get_property path" >> ~/.mplayer/pipe`
sleep 0.1
filepath_with_name = `tail -n1 ~/.mplayer/output`
filepath = filepath_with_name[/.*?=(.*)/,1]

m = {}
if use_taginfo
  arr = `taginfo --short #{Escape.shell_command [filepath] } 2>/dev/null`.split(/\n/)
  (output.puts "# No Tag Info for #{filepath}"; exit ) if arr.length == 0
  m = {title:arr[0], album:arr[1], artist:arr[2], length:arr[3]}
  output.puts('# ' + `taginfo --info #{Escape.shell_command [filepath]} 2>/dev/null`) if display
  if use_increment
    counts =
      # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
      if File.exist? playcount_file
        YAML::load File.open playcount_file
      else
        {}
      end
    i = counts[filepath] || 0
    i += 1
    counts[filepath] = i
    File.open(playcount_file, 'w') do |f|
      f.write counts.to_yaml
    end
  end
else
  filename = File.basename filepath
  metadata = YAML::load( File.open(METADATA_FILE)) || (output.puts "no metadata file"; exit)
  # BUG FIX: the skip message used "#(unknown)", which is not string
  # interpolation and printed literally; report the actual filename.
  m = metadata[File.basename filename] || (puts "# no metadata for '#{filename}'"; exit)
  # BUG FIX: "\." in a double-quoted backtick string reaches sed as a bare
  # ".", which matches any character; escape it so only a literal dot
  # (the fractional seconds) is stripped.
  m[:length] = `mediaInfo --Inform='Video;%Duration/String3%' #{Escape.shell_command [filepath]} | sed "s/\\.[0-9][0-9]*$//"`.strip
  (m[:length] = "1:30") unless m[:length].length > 0
  output.puts "# #{m[:artist]} - #{m[:title]} - #{m[:album]}" if display
end

output.puts %{# #{LASTFM_SUBMIT} -e utf8 -a "#{m[:artist]}" -b "#{m[:album]}" --title "#{m[:title]}" -l "#{m[:length]}"} if scrobbler_echo

# scrobbles the track: restart lastfmsubmitd around the submission
artist, album, title = Escape.shell_single_word(m[:artist]), Escape.shell_single_word(m[:album]), (Escape.shell_single_word m[:title])
output.puts `kill $(ps aux | grep lastfmsubmitd | grep -v grep | awk '{print $2}') &>/dev/null;\
#{LASTFM_SUBMIT} -e utf8 -a #{artist} -b #{album} --title #{title} -l "#{m[:length]}"; lastfmsubmitd&`
bug fix
#!/usr/bin/env ruby19 -wKU
require "yaml"
require "pp"
require 'escape'
LASTFM_SUBMIT = '/usr/local/bin/lastfmsubmit'
METADATA_FILE = "/Users/bilalh/Movies/.Movie/OpeningP/_metadata.yaml"

output = $stderr

# Env toggles default to on; the string 'false' (or an empty value) turns
# them off.
scrobbler_echo = ENV['SCROBBLER_ECHO'] || true
use_taginfo = ENV['USE_TAGINFO'] || true
scrobbler_echo = false if !scrobbler_echo || scrobbler_echo == 'false'
use_taginfo = false if !use_taginfo || use_taginfo == 'false'
use_increment = ENV['USE_INCREMENT'] || false
use_increment = false if !use_increment || use_increment == 'false'
display = ENV['DISPLAY_TRACK_INFO'] || true
display = false if !display || display == 'false'
playcount_file = ENV['PLAYCOUNT_FILE'] || File.expand_path("~/Music/playcount.yaml")

# Ask the running mplayer instance for the current file path via its FIFO,
# then read the answer back from its log.
`echo "get_property path" >> ~/.mplayer/pipe`
sleep 0.1
filepath_with_name = `tail -n1 ~/.mplayer/output`
filepath = filepath_with_name[/.*?=(.*)/,1]

m = {}
if use_taginfo
  arr = `taginfo --short #{Escape.shell_command [filepath] } 2>/dev/null`.split(/\n/)
  (output.puts "# No Tag Info for #{filepath}"; exit ) if arr.length == 0
  m = {title:arr[0], album:arr[1], artist:arr[2], length:arr[3]}
  output.puts('# ' + `taginfo --info #{Escape.shell_command [filepath]} 2>/dev/null`) if display
else
  filename = File.basename filepath
  metadata = YAML::load( File.open(METADATA_FILE)) || (output.puts "no metadata file"; exit)
  # BUG FIX: the skip message used "#(unknown)", which is not string
  # interpolation and printed literally; report the actual filename.
  m = metadata[File.basename filename] || (puts "# no metadata for '#{filename}'"; exit)
  # BUG FIX: "\." in a double-quoted backtick string reaches sed as a bare
  # ".", which matches any character; escape it so only a literal dot
  # (the fractional seconds) is stripped.
  m[:length] = `mediaInfo --Inform='Video;%Duration/String3%' #{Escape.shell_command [filepath]} | sed "s/\\.[0-9][0-9]*$//"`.strip
  (m[:length] = "1:30") unless m[:length].length > 0
  output.puts "# #{m[:artist]} - #{m[:title]} - #{m[:album]}" if display
  # key the playcount on the bare filename for metadata-file tracks
  filepath = filename
end

if use_increment
  counts =
    # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
    if File.exist? playcount_file
      YAML::load File.open playcount_file
    else
      {}
    end
  i = counts[filepath] || 0
  i += 1
  counts[filepath] = i
  File.open(playcount_file, 'w') do |f|
    f.write counts.to_yaml
  end
end

output.puts %{# #{LASTFM_SUBMIT} -e utf8 -a "#{m[:artist]}" -b "#{m[:album]}" --title "#{m[:title]}" -l "#{m[:length]}"} if scrobbler_echo

# scrobbles the track: restart lastfmsubmitd around the submission
artist, album, title = Escape.shell_single_word(m[:artist]), Escape.shell_single_word(m[:album]), (Escape.shell_single_word m[:title])
output.puts `kill $(ps aux | grep lastfmsubmitd | grep -v grep | awk '{print $2}') &>/dev/null;\
#{LASTFM_SUBMIT} -e utf8 -a #{artist} -b #{album} --title #{title} -l "#{m[:length]}"; lastfmsubmitd&`
|
require_relative '../lib/remote_model'
require_relative '../lib/queryable'
require_relative '../lib/api_client'
require_relative '../search/event_animal_attendance_search'
module RescueGroups
  # Remote model for a record of an animal's attendance at an event,
  # queried through the RescueGroups API.
  class EventAnimalAttendance
    include RemoteModel
    include Queryable
    include ApiClient

    # API object type identifier used in requests for this model.
    def self.object_type
      :eventanimalattendance
    end

    # The class carrying the FIELDS mapping for this model (itself).
    def self.object_fields
      EventAnimalAttendance
    end

    # Search engine used to build queries for this model.
    def self.search_engine_class
      EventAnimalAttendanceSearch
    end

    # One accessor per known API field.
    attr_accessor(*object_fields::FIELDS.keys)
  end
end
Remove event animal attendance
|
# CocoaPods spec for SimpleAnimation, a UIView extension for simple
# one-line animations (fades, bounces, etc.).
Pod::Spec.new do |s|
  s.name             = "SimpleAnimation"
  s.version          = "0.3.0"
  s.summary          = "A UIView extension to make basic animations, like fades and bounces, simple."
  s.homepage         = "https://github.com/keithito/SimpleAnimation"
  s.license          = 'MIT'
  s.author           = { "Keith Ito" => "keeeto@gmail.com" }
  # Source is pinned to the git tag matching this spec version.
  s.source           = { :git => "https://github.com/keithito/SimpleAnimation.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/keeeto'

  s.ios.deployment_target = '8.0'

  s.source_files = 'Source/*'
  s.frameworks = 'UIKit'
end
add support to tvOS
# CocoaPods spec for SimpleAnimation, a UIView extension for simple
# one-line animations (fades, bounces, etc.). This revision adds tvOS.
Pod::Spec.new do |s|
  s.name             = "SimpleAnimation"
  s.version          = "0.3.0"
  s.summary          = "A UIView extension to make basic animations, like fades and bounces, simple."
  s.homepage         = "https://github.com/keithito/SimpleAnimation"
  s.license          = 'MIT'
  s.author           = { "Keith Ito" => "keeeto@gmail.com" }
  # Source is pinned to the git tag matching this spec version.
  s.source           = { :git => "https://github.com/keithito/SimpleAnimation.git", :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/keeeto'

  s.ios.deployment_target = '8.0'
  s.tvos.deployment_target = '9.2'

  s.source_files = 'Source/*'
  s.frameworks = 'UIKit'
end
|
Add plugin to enable _pages directory
https://github.com/bbakersmith/jekyll-pages-directory
# https://github.com/bbakersmith/jekyll-pages-directory/blob/master/_plugins/jekyll-pages-directory.rb
module Jekyll
  # Generates site pages from files kept under a dedicated pages directory
  # (site config key 'pages', default './_pages'), so standalone pages
  # don't have to clutter the project root.
  class PagesDirGenerator < Generator
    # Walks the pages directory and registers a PagesDirPage for each file.
    def generate(site)
      pages_dir = site.config['pages'] || './_pages'
      all_raw_paths = Dir["#{pages_dir}/**/*"]
      all_raw_paths.each do |f|
        # skip directories; only actual files become pages
        if File.file?(File.join(site.source, '/', f))
          filename = f.match(/[^\/]*$/)[0]
          # strip the pages-dir prefix so output URLs don't include it
          clean_filepath = f.gsub(/^#{pages_dir}\//, '')
          clean_dir = extract_directory(clean_filepath)
          site.pages << PagesDirPage.new(site,
                                         site.source,
                                         clean_dir,
                                         filename,
                                         pages_dir)
        end
      end
    end

    # Returns the directory portion of filepath (trailing slash included),
    # or '' when the path has no directory component.
    def extract_directory(filepath)
      dir_match = filepath.match(/(.*\/)[^\/]*$/)
      if dir_match
        return dir_match[1]
      else
        return ''
      end
    end
  end

  # A Page whose source file lives under the pages directory but which
  # renders as if it lived at the site root (dir excludes pagesdir).
  class PagesDirPage < Page
    def initialize(site, base, dir, name, pagesdir)
      @site = site
      @base = base
      @dir = dir
      @name = name

      process(name)
      # read front matter from the file's real location inside pagesdir
      read_yaml(File.join(base, pagesdir, dir), name)
      # fall back to site-wide front matter defaults for missing keys
      data.default_proc = proc do |hash, key|
        site.frontmatter_defaults.find(File.join(dir, name), type, key)
      end

      Jekyll::Hooks.trigger :pages, :post_init, self
    end
  end
end
|
# CocoaPods spec for SpringIndicator, a spring-style activity indicator
# with pull-to-refresh support.
Pod::Spec.new do |s|
  s.name         = "SpringIndicator"
  s.version      = "1.1.5"
  s.summary      = "SpringIndicator is a indicator such as a spring and PullToRefresh."
  s.homepage     = "https://github.com/KyoheiG3/SpringIndicator"
  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "Kyohei Ito" => "je.suis.kyohei@gmail.com" }
  s.platform     = :ios, "8.0"
  # Source is pinned to the git tag matching this spec version.
  s.source       = { :git => "https://github.com/KyoheiG3/SpringIndicator.git", :tag => s.version.to_s }
  s.source_files = "SpringIndicator/**/*.{h,swift}"
  s.requires_arc = true
  s.frameworks = "UIKit"
end
UPDATE podspec
# CocoaPods spec for SpringIndicator, a spring-style activity indicator
# with pull-to-refresh support. Version bumped to 1.2.0.
Pod::Spec.new do |s|
  s.name         = "SpringIndicator"
  s.version      = "1.2.0"
  s.summary      = "SpringIndicator is a indicator such as a spring and PullToRefresh."
  s.homepage     = "https://github.com/KyoheiG3/SpringIndicator"
  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "Kyohei Ito" => "je.suis.kyohei@gmail.com" }
  s.platform     = :ios, "8.0"
  # Source is pinned to the git tag matching this spec version.
  s.source       = { :git => "https://github.com/KyoheiG3/SpringIndicator.git", :tag => s.version.to_s }
  s.source_files = "SpringIndicator/**/*.{h,swift}"
  s.requires_arc = true
  s.frameworks = "UIKit"
end
|
# Homebrew formula for anyenv, a wrapper that manages the various **env
# version managers (rbenv, pyenv, etc.). Head-only: builds from git HEAD.
class Anyenv < Formula
  desc "all in one for **envs"
  homepage "https://github.com/riywo/anyenv"
  head "https://github.com/riywo/anyenv.git"

  def install
    # Point anyenv's root at the Homebrew prefix instead of ~/.anyenv
    inreplace "libexec/anyenv", %(ANYENV_ROOT="${HOME}/.anyenv"), %(ANYENV_ROOT="#{prefix}")
    prefix.install Dir["*"]
  end

  # Post-install message shown to the user (heredoc text is user-facing).
  def caveats; <<-EOS.undent
    To enable anyenv, add this to your profile follow:
      eval "$(anyenv init -)"
    EOS
  end

  test do
    # Listing installable envs exercises the CLI without network installs
    system bin/"anyenv", "install", "-l"
  end
end
add if statement
# Homebrew formula for anyenv, a wrapper that manages the various **env
# version managers (rbenv, pyenv, etc.). Head-only: builds from git HEAD.
class Anyenv < Formula
  desc "all in one for **envs"
  homepage "https://github.com/riywo/anyenv"
  head "https://github.com/riywo/anyenv.git"

  def install
    # Point anyenv's root at the Homebrew prefix instead of ~/.anyenv
    inreplace "libexec/anyenv", %(ANYENV_ROOT="${HOME}/.anyenv"), %(ANYENV_ROOT="#{prefix}")
    prefix.install Dir["*"]
  end

  # Post-install message shown to the user (heredoc text is user-facing);
  # the `which` guard keeps shells working when anyenv is uninstalled.
  def caveats; <<-EOS.undent
    To enable anyenv, add this to your profile follow:
      if which anyenv > /dev/null; then
        eval "$(anyenv init -)"
      fi
    EOS
  end

  test do
    # Listing installable envs exercises the CLI without network installs
    system bin/"anyenv", "install", "-l"
  end
end
|
[Update] KCConversationKit (0.1.103)
# CocoaPods spec for KCConversationKit, a private pod hosted on an
# internal AppUnite git server.
Pod::Spec.new do |s|
  s.name             = "KCConversationKit"
  s.version          = "0.1.103"
  s.summary          = "A short description of KCConversationKit."
  s.license          = 'MIT'
  s.author           = { "Emil Wojtaszek" => "emil@appunite.com" }
  # Private repo; source is pinned to the git tag matching this version.
  s.source           = { :git => "git@git.appunite.com:newmedia/kingschat-conversation-ios.git", :tag => s.version.to_s }

  s.platform     = :ios, '8.0'
  s.requires_arc = true

  s.source_files = 'KCConversationKit/Classes/*.{h,m}'
  s.resources = 'KCConversationKit/Resources/*.*'

  s.homepage = 'https://www.appunite.com'

  # third-party dependencies
  s.dependency 'AFNetworking', '~> 2.6'
  s.dependency 'SlackTextViewController'
  s.dependency 'MZTimerLabel'
  s.dependency 'MTAnimatedLabel'
  s.dependency 'ACPDownload', '~> 1.1'
  s.dependency 'EZAudio', '~> 1.1'
  s.dependency 'DFImageManager'

  # kingschat
  s.dependency 'KCMediaProcessingKit'
  s.dependency 'KCSharedAsstes'
end
|
# Replaces the seeded home page (id 1) and credits page (id 10) in
# `content_pages` with Expertiza-specific content.
class UpdateCustom < ActiveRecord::Migration
  def self.up
    # FIX: was "DROP FROM", which is not valid SQL and makes the migration
    # fail; rows must be removed with DELETE so the fixed-id INSERTs below
    # cannot collide on the primary key.
    execute "DELETE FROM `content_pages` where ID in (1,10)"
    execute "INSERT INTO `content_pages` VALUES (1,'Home Page','home',1,'<h1>Welcome to Expertiza</h1> <p> The Expertiza project is a system for using peer review to create reusable learning objects. Students do different assignments; then peer review selects the best work in each category, and assembles it to create a single unit.</p>',3,'2006-06-12 00:31:56','2007-02-23 10:17:45','<h1>Welcome to Expertiza</h1> <p> The Expertiza project is system for using peer review to create reusable learning objects. Students do different assignments; then peer review selects the best work in each category, and assembles it to create a single unit.</p>');"
    execute "INSERT INTO `content_pages` VALUES (10,'Credits and License','credits',1,'h1. Credits and License\n\nGoldberg contains original material and third-party material from various sources.\n\nAll original material is (p) Public Domain, No Rights Reserved. Goldberg comes with no warranty whatsoever.\n\nThe copyright for any third party material remains with the original author, and the material is distributed here under the original terms. \n\nMaterial has been selected from sources with licensing terms and conditions that allow use and redistribution for both personal and business purposes. These licences include public domain, BSD-style licences, and Creative Commons licences (but *not* Creative Commons Non-Commercial).\n\nIf you are an author and you believe your copyrighted material has been included in Goldberg in breach of your licensing terms and conditions, please contact Dave Nelson (urbanus at 240gl dot org).\n\n\nh2. Layouts\n\nGoldberg comes with a choice of layouts, adapted from various sources.\n\nh3. The Default\n\nThe default layout is a modified version of Andreas09 by Anreas Viklund. Andreas09 is distributed under free/unlicensed terms, with an informal request that credit be given to the original author. The original template can be obtained from \"Open Source Web Design\":http://www.oswd.org/design/preview/id/2493/.\n\nAuthor''s website: \"andreasviklund.com\":http://andreasviklund.com/.\n\n\nh3. \"Earth Wind and Fire\"\n\nOriginally designed by Brett Hillesheim (brett7481 at msn dot com) and distributed under free terms (from the MadSeason website: \"Every template we create is completely open source, meaning you can take it and do whatever you want with it\"). The original template can be obtained from \"Open Source Web Design\":http://www.oswd.org/design/preview/id/2453/.\n\nAuthor''s website: \"www.madseason.co.uk\":http://www.madseason.co.uk/.\n\n\nh3. \"Snooker\"\n\n\"Snooker\" is an original design and is therefore Public Domain. 
It incorporates dynamic two-column layout techniques described on the \"A List Apart\":http://alistapart.com/articles/negativemargins website.\n\n\nh3. \"Spoiled Brat\"\n\nOriginally designed by \"Rayk Web Design\":http://www.raykdesign.net/ and distributed under the terms of the \"Creative Commons Attribution Share Alike\":http://creativecommons.org/licenses/by-sa/2.5/legalcode licence. The original template can be obtained from \"Open Web Design\":http://www.openwebdesign.org/viewdesign.phtml?id=2894/.\n\nAuthor''s website: \"www.csstinderbox.com\":http://www.csstinderbox.com/.\n\n\nh2. Other Features\n\nGoldberg also contains some miscellaneous code and techniques from other sources.\n\nh3. Suckerfish Menus\n\nThe three templates \"Earth Wind and Fire\", \"Snooker\" and \"Spoiled Brat\" have all been configured to use Suckerfish menus. This technique of using a combination of CSS and Javascript to implement dynamic menus was first described by \"A List Apart\":http://www.alistapart.com/articles/dropdowns/. Goldberg''s implementation also incorporates techniques described by \"HTMLDog\":http://www.htmldog.com/articles/suckerfish/dropdowns/.\n\nh3. Tabbed Panels\n\nGoldberg''s implementation of tabbed panels was adapted from \n\"InternetConnection\":http://support.internetconnection.net/CODE_LIBRARY/Javascript_Show_Hide.shtml.\n',3,'2006-10-02 10:35:35','2007-02-23 10:17:46','<h1>Credits and Licence</h1>\n\n\n	<p>Goldberg contains original material and third party material from various sources.</p>\n\n\n	<p>All original material is (p) Public Domain, No Rights Reserved. Goldberg comes with no warranty whatsoever.</p>\n\n\n	<p>The copyright for any third party material remains with the original author, and the material is distributed here under the original terms.</p>\n\n\n	<p>Material has been selected from sources with licensing terms and conditions that allow use and redistribution for both personal and business purposes. 
These licences include public domain, <span class=\"caps\">BSD</span>-style licences, and Creative Commons licences (but <strong>not</strong> Creative Commons Non-Commercial).</p>\n\n\n	<p>If you are an author and you believe your copyrighted material has been included in Goldberg in breach of your licensing terms and conditions, please contact Dave Nelson (urbanus at 240gl dot org).</p>\n\n\n	<h2>Layouts</h2>\n\n\n	<p>Goldberg comes with a choice of layouts, adapted from various sources.</p>\n\n\n	<h3>The Default</h3>\n\n\n	<p>The default layout is a modified version of Andreas09 by Anreas Viklund. Andreas09 is distributed under free/unlicensed terms, with an informal request that credit be given to the original author. The original template can be obtained from <a href=\"http://www.oswd.org/design/preview/id/2493/\">Open Source Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://andreasviklund.com/\">andreasviklund.com</a>.</p>\n\n\n	<h3>“Earth Wind and Fire”</h3>\n\n\n	<p>Originally designed by Brett Hillesheim (brett7481 at msn dot com) and distributed under free terms (from the MadSeason website: “Every template we create is completely open source, meaning you can take it and do whatever you want with it”). The original template can be obtained from <a href=\"http://www.oswd.org/design/preview/id/2453/\">Open Source Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://www.madseason.co.uk/\">www.madseason.co.uk</a>.</p>\n\n\n	<h3>“Snooker”</h3>\n\n\n	<p>“Snooker” is an original design and is therefore Public Domain. 
It incorporates dynamic two-column layout techniques described on the <a href=\"http://alistapart.com/articles/negativemargins\">A List Apart</a> website.</p>\n\n\n	<h3>“Spoiled Brat”</h3>\n\n\n	<p>Originally designed by <a href=\"http://www.raykdesign.net/\">Rayk Web Design</a> and distributed under the terms of the <a href=\"http://creativecommons.org/licenses/by-sa/2.5/legalcode\">Creative Commons Attribution Share Alike</a> licence. The original template can be obtained from <a href=\"http://www.openwebdesign.org/viewdesign.phtml?id=2894/\">Open Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://www.csstinderbox.com/\">www.csstinderbox.com</a>.</p>\n\n\n	<h2>Other Features</h2>\n\n\n	<p>Goldberg also contains some miscellaneous code and techniques from other sources.</p>\n\n\n	<h3>Suckerfish Menus</h3>\n\n\n	<p>The three templates “Earth Wind and Fire”, “Snooker” and “Spoiled Brat” have all been configured to use Suckerfish menus. This technique of using a combination of <span class=\"caps\">CSS</span> and Javascript to implement dynamic menus was first described by <a href=\"http://www.alistapart.com/articles/dropdowns/\">A List Apart</a>. Goldberg’s implementation also incorporates techniques described by <a href=\"http://www.htmldog.com/articles/suckerfish/dropdowns/\">HTMLDog</a>.</p>\n\n\n	<h3>Tabbed Panels</h3>\n\n\n	<p>Goldberg’s implementation of tabbed panels was adapted from \n<a href=\"http://support.internetconnection.net/CODE_LIBRARY/Javascript_Show_Hide.shtml\">InternetConnection</a>.</p>');"
  end

  # Irreversible: the original page content is not preserved, so there is
  # nothing to restore on rollback.
  def self.down
  end
end
Correct SQL typo in the migration file: use DELETE FROM rather than the invalid DROP FROM.
git-svn-id: f67d969b640da65cb7bc1d229e09fd6d2db44ae1@517 392d7b7b-3f31-0410-9dc3-c95a9635ea79
# Replaces the seeded home page (id 1) and credits page (id 10) in
# `content_pages` with Expertiza-specific content.
class UpdateCustom < ActiveRecord::Migration
  def self.up
    # Clear any existing rows first so the fixed-id INSERTs below cannot
    # collide on the primary key.
    execute "DELETE FROM `content_pages` where ID in (1,10)"
    execute "INSERT INTO `content_pages` VALUES (1,'Home Page','home',1,'<h1>Welcome to Expertiza</h1> <p> The Expertiza project is a system for using peer review to create reusable learning objects. Students do different assignments; then peer review selects the best work in each category, and assembles it to create a single unit.</p>',3,'2006-06-12 00:31:56','2007-02-23 10:17:45','<h1>Welcome to Expertiza</h1> <p> The Expertiza project is system for using peer review to create reusable learning objects. Students do different assignments; then peer review selects the best work in each category, and assembles it to create a single unit.</p>');"
    execute "INSERT INTO `content_pages` VALUES (10,'Credits and License','credits',1,'h1. Credits and License\n\nGoldberg contains original material and third-party material from various sources.\n\nAll original material is (p) Public Domain, No Rights Reserved. Goldberg comes with no warranty whatsoever.\n\nThe copyright for any third party material remains with the original author, and the material is distributed here under the original terms. \n\nMaterial has been selected from sources with licensing terms and conditions that allow use and redistribution for both personal and business purposes. These licences include public domain, BSD-style licences, and Creative Commons licences (but *not* Creative Commons Non-Commercial).\n\nIf you are an author and you believe your copyrighted material has been included in Goldberg in breach of your licensing terms and conditions, please contact Dave Nelson (urbanus at 240gl dot org).\n\n\nh2. Layouts\n\nGoldberg comes with a choice of layouts, adapted from various sources.\n\nh3. The Default\n\nThe default layout is a modified version of Andreas09 by Anreas Viklund. Andreas09 is distributed under free/unlicensed terms, with an informal request that credit be given to the original author. The original template can be obtained from \"Open Source Web Design\":http://www.oswd.org/design/preview/id/2493/.\n\nAuthor''s website: \"andreasviklund.com\":http://andreasviklund.com/.\n\n\nh3. \"Earth Wind and Fire\"\n\nOriginally designed by Brett Hillesheim (brett7481 at msn dot com) and distributed under free terms (from the MadSeason website: \"Every template we create is completely open source, meaning you can take it and do whatever you want with it\"). The original template can be obtained from \"Open Source Web Design\":http://www.oswd.org/design/preview/id/2453/.\n\nAuthor''s website: \"www.madseason.co.uk\":http://www.madseason.co.uk/.\n\n\nh3. \"Snooker\"\n\n\"Snooker\" is an original design and is therefore Public Domain. 
It incorporates dynamic two-column layout techniques described on the \"A List Apart\":http://alistapart.com/articles/negativemargins website.\n\n\nh3. \"Spoiled Brat\"\n\nOriginally designed by \"Rayk Web Design\":http://www.raykdesign.net/ and distributed under the terms of the \"Creative Commons Attribution Share Alike\":http://creativecommons.org/licenses/by-sa/2.5/legalcode licence. The original template can be obtained from \"Open Web Design\":http://www.openwebdesign.org/viewdesign.phtml?id=2894/.\n\nAuthor''s website: \"www.csstinderbox.com\":http://www.csstinderbox.com/.\n\n\nh2. Other Features\n\nGoldberg also contains some miscellaneous code and techniques from other sources.\n\nh3. Suckerfish Menus\n\nThe three templates \"Earth Wind and Fire\", \"Snooker\" and \"Spoiled Brat\" have all been configured to use Suckerfish menus. This technique of using a combination of CSS and Javascript to implement dynamic menus was first described by \"A List Apart\":http://www.alistapart.com/articles/dropdowns/. Goldberg''s implementation also incorporates techniques described by \"HTMLDog\":http://www.htmldog.com/articles/suckerfish/dropdowns/.\n\nh3. Tabbed Panels\n\nGoldberg''s implementation of tabbed panels was adapted from \n\"InternetConnection\":http://support.internetconnection.net/CODE_LIBRARY/Javascript_Show_Hide.shtml.\n',3,'2006-10-02 10:35:35','2007-02-23 10:17:46','<h1>Credits and Licence</h1>\n\n\n	<p>Goldberg contains original material and third party material from various sources.</p>\n\n\n	<p>All original material is (p) Public Domain, No Rights Reserved. Goldberg comes with no warranty whatsoever.</p>\n\n\n	<p>The copyright for any third party material remains with the original author, and the material is distributed here under the original terms.</p>\n\n\n	<p>Material has been selected from sources with licensing terms and conditions that allow use and redistribution for both personal and business purposes. 
These licences include public domain, <span class=\"caps\">BSD</span>-style licences, and Creative Commons licences (but <strong>not</strong> Creative Commons Non-Commercial).</p>\n\n\n	<p>If you are an author and you believe your copyrighted material has been included in Goldberg in breach of your licensing terms and conditions, please contact Dave Nelson (urbanus at 240gl dot org).</p>\n\n\n	<h2>Layouts</h2>\n\n\n	<p>Goldberg comes with a choice of layouts, adapted from various sources.</p>\n\n\n	<h3>The Default</h3>\n\n\n	<p>The default layout is a modified version of Andreas09 by Anreas Viklund. Andreas09 is distributed under free/unlicensed terms, with an informal request that credit be given to the original author. The original template can be obtained from <a href=\"http://www.oswd.org/design/preview/id/2493/\">Open Source Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://andreasviklund.com/\">andreasviklund.com</a>.</p>\n\n\n	<h3>“Earth Wind and Fire”</h3>\n\n\n	<p>Originally designed by Brett Hillesheim (brett7481 at msn dot com) and distributed under free terms (from the MadSeason website: “Every template we create is completely open source, meaning you can take it and do whatever you want with it”). The original template can be obtained from <a href=\"http://www.oswd.org/design/preview/id/2453/\">Open Source Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://www.madseason.co.uk/\">www.madseason.co.uk</a>.</p>\n\n\n	<h3>“Snooker”</h3>\n\n\n	<p>“Snooker” is an original design and is therefore Public Domain. 
It incorporates dynamic two-column layout techniques described on the <a href=\"http://alistapart.com/articles/negativemargins\">A List Apart</a> website.</p>\n\n\n	<h3>“Spoiled Brat”</h3>\n\n\n	<p>Originally designed by <a href=\"http://www.raykdesign.net/\">Rayk Web Design</a> and distributed under the terms of the <a href=\"http://creativecommons.org/licenses/by-sa/2.5/legalcode\">Creative Commons Attribution Share Alike</a> licence. The original template can be obtained from <a href=\"http://www.openwebdesign.org/viewdesign.phtml?id=2894/\">Open Web Design</a>.</p>\n\n\n	<p>Author’s website: <a href=\"http://www.csstinderbox.com/\">www.csstinderbox.com</a>.</p>\n\n\n	<h2>Other Features</h2>\n\n\n	<p>Goldberg also contains some miscellaneous code and techniques from other sources.</p>\n\n\n	<h3>Suckerfish Menus</h3>\n\n\n	<p>The three templates “Earth Wind and Fire”, “Snooker” and “Spoiled Brat” have all been configured to use Suckerfish menus. This technique of using a combination of <span class=\"caps\">CSS</span> and Javascript to implement dynamic menus was first described by <a href=\"http://www.alistapart.com/articles/dropdowns/\">A List Apart</a>. Goldberg’s implementation also incorporates techniques described by <a href=\"http://www.htmldog.com/articles/suckerfish/dropdowns/\">HTMLDog</a>.</p>\n\n\n	<h3>Tabbed Panels</h3>\n\n\n	<p>Goldberg’s implementation of tabbed panels was adapted from \n<a href=\"http://support.internetconnection.net/CODE_LIBRARY/Javascript_Show_Hide.shtml\">InternetConnection</a>.</p>');"
  end

  # Intentionally empty: the replaced page content is not preserved, so
  # this migration cannot be rolled back.
  def self.down
  end
end
|
######################################
#
# This file is run on every deploy.
# Ensure any changes account for this.
#
######################################

# Fail fast when the environment is not set up: every backend URL below is
# derived from GOVUK_APP_DOMAIN.
unless ENV['GOVUK_APP_DOMAIN'].present?
  abort "GOVUK_APP_DOMAIN is not set. Maybe you need to run under govuk_setenv..."
end

# Backend applications routes may point at.
# FIX: 'spotlight' was missing even though several /performance/* routes
# below reference it, so the "Invalid backend" check aborted the run.
backends = [
  'canary-frontend',
  'datainsight-frontend',
  'frontend',
  'licensify',
  'limelight',
  'publicapi',
  'spotlight',
  'tariff',
  'transactions-explorer',
]

# Idempotently create or update a Backend record per backend id.
backends.each do |backend|
  url = "http://#{backend}.#{ENV['GOVUK_APP_DOMAIN']}/"
  puts "Backend #{backend} => #{url}"
  be = Backend.find_or_initialize_by(:backend_id => backend)
  be.backend_url = url
  be.save!
end

# Each entry: [incoming path, route type ("prefix"/"exact"), backend id].
routes = [
  %w(/apply-for-a-licence prefix licensify),
  %w(/trade-tariff prefix tariff),
  %w(/performance/deposit-foreign-marriage prefix spotlight),
  %w(/performance/pay-legalisation-post prefix spotlight),
  %w(/performance/pay-legalisation-drop-off prefix spotlight),
  %w(/performance/pay-foreign-marriage-certificates prefix spotlight),
  %w(/performance/pay-register-birth-abroad prefix spotlight),
  %w(/performance/pay-register-death-abroad prefix spotlight),
  %w(/performance/deposit-foreign-marriage/api prefix publicapi),
  %w(/performance/vehicle-licensing/api prefix publicapi),
  %w(/performance/government/api prefix publicapi),
  %w(/performance/hmrc_preview/api prefix publicapi),
  %w(/performance/licence_finder/api prefix publicapi),
  %w(/performance/licensing/api prefix publicapi),
  %w(/performance/land-registry/api prefix publicapi),
  %w(/performance/lasting-power-of-attorney/api prefix publicapi),
  %w(/performance/pay-foreign-marriage-certificates/api prefix publicapi),
  %w(/performance/pay-legalisation-drop-off/api prefix publicapi),
  %w(/performance/pay-legalisation-post/api prefix publicapi),
  %w(/performance/pay-register-birth-abroad/api prefix publicapi),
  %w(/performance/pay-register-death-abroad/api prefix publicapi),
  %w(/performance/sorn/api prefix publicapi),
  %w(/performance/tax-disc/api prefix publicapi),
  %w(/performance/test/api prefix publicapi),
  %w(/performance/dashboard prefix datainsight-frontend),
  %w(/performance/transactions-explorer prefix transactions-explorer),
  %w(/performance prefix limelight),
  %w(/__canary__ exact canary-frontend),
]

# Idempotently create or update each route; routes may only reference a
# backend that exists above.
routes.each do |path, type, backend|
  puts "Route #{path} (#{type}) => #{backend}"
  abort "Invalid backend #{backend}" unless Backend.where(:backend_id => backend).any?
  route = Route.find_or_initialize_by(:incoming_path => path, :route_type => type)
  route.handler = "backend"
  route.backend_id = backend
  route.save!
end

# Remove some previously seeded routes.
# This can be removed once it's run on prod.
[
  %w(/ prefix),
].each do |path, type|
  if route = Route.where(:incoming_path => path, :route_type => type).first
    puts "Removing route #{path} (#{type}) => #{route.backend_id}"
    route.destroy
  end
end
Define the spotlight backend so the /performance/* routes that reference it pass the backend-existence check during seeding.
######################################
#
# This file is run on every deploy.
# Ensure any changes account for this.
#
######################################
# Fail fast when the environment is not set up: every backend URL below is
# derived from GOVUK_APP_DOMAIN.
unless ENV['GOVUK_APP_DOMAIN'].present?
  abort "GOVUK_APP_DOMAIN is not set. Maybe you need to run under govuk_setenv..."
end
# Backend applications routes may point at; each id becomes a hostname
# under GOVUK_APP_DOMAIN.
backends = [
  'canary-frontend',
  'datainsight-frontend',
  'frontend',
  'licensify',
  'limelight',
  'publicapi',
  'spotlight',
  'tariff',
  'transactions-explorer',
]
# Idempotently create or update a Backend record per backend id.
backends.each do |backend|
  url = "http://#{backend}.#{ENV['GOVUK_APP_DOMAIN']}/"
  puts "Backend #{backend} => #{url}"
  be = Backend.find_or_initialize_by(:backend_id => backend)
  be.backend_url = url
  be.save!
end
# Each entry: [incoming path, route type ("prefix"/"exact"), backend id].
routes = [
  %w(/apply-for-a-licence prefix licensify),
  %w(/trade-tariff prefix tariff),
  %w(/performance/deposit-foreign-marriage prefix spotlight),
  %w(/performance/pay-legalisation-post prefix spotlight),
  %w(/performance/pay-legalisation-drop-off prefix spotlight),
  %w(/performance/pay-foreign-marriage-certificates prefix spotlight),
  %w(/performance/pay-register-birth-abroad prefix spotlight),
  %w(/performance/pay-register-death-abroad prefix spotlight),
  %w(/performance/deposit-foreign-marriage/api prefix publicapi),
  %w(/performance/vehicle-licensing/api prefix publicapi),
  %w(/performance/government/api prefix publicapi),
  %w(/performance/hmrc_preview/api prefix publicapi),
  %w(/performance/licence_finder/api prefix publicapi),
  %w(/performance/licensing/api prefix publicapi),
  %w(/performance/land-registry/api prefix publicapi),
  %w(/performance/lasting-power-of-attorney/api prefix publicapi),
  %w(/performance/pay-foreign-marriage-certificates/api prefix publicapi),
  %w(/performance/pay-legalisation-drop-off/api prefix publicapi),
  %w(/performance/pay-legalisation-post/api prefix publicapi),
  %w(/performance/pay-register-birth-abroad/api prefix publicapi),
  %w(/performance/pay-register-death-abroad/api prefix publicapi),
  %w(/performance/sorn/api prefix publicapi),
  %w(/performance/tax-disc/api prefix publicapi),
  %w(/performance/test/api prefix publicapi),
  %w(/performance/dashboard prefix datainsight-frontend),
  %w(/performance/transactions-explorer prefix transactions-explorer),
  %w(/performance prefix limelight),
  %w(/__canary__ exact canary-frontend),
]
# Idempotently create or update each route; a route may only reference a
# backend registered above, otherwise the deploy is aborted.
routes.each do |path, type, backend|
  puts "Route #{path} (#{type}) => #{backend}"
  abort "Invalid backend #{backend}" unless Backend.where(:backend_id => backend).any?
  route = Route.find_or_initialize_by(:incoming_path => path, :route_type => type)
  route.handler = "backend"
  route.backend_id = backend
  route.save!
end
# Remove some previously seeded routes.
# This can be removed once it's run on prod.
[
  %w(/ prefix),
].each do |path, type|
  if route = Route.where(:incoming_path => path, :route_type => type).first
    puts "Removing route #{path} (#{type}) => #{route.backend_id}"
    route.destroy
  end
end
|
#
# Cookbook Name:: ark
# Resource:: Ark
#
# Author:: Bryan W. Berry <bryan.berry@gmail.com>
# Copyright 2012, Bryan W. Berry
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Actions supported by the ark resource.
actions(
  :cherry_pick,
  :configure,
  :dump,
  :install,
  :install_with_make,
  :put,
  :setup_py,
  :setup_py_build,
  :setup_py_install,
  :unzip
)

default_action :install

# Plain accessors for values the provider computes at converge time.
# FIX: :owner was missing from this list even though an :owner attribute is
# declared below, so the provider could not assign it via the accessor.
attr_accessor :extension,
              :home_dir,
              :owner,
              :path,
              :prefix_bin,
              :prefix_root,
              :release_file,
              :version

attribute :owner, kind_of: String, default: nil
attribute :group, kind_of: [String, Integer], default: 0
attribute :url, kind_of: String, required: true
attribute :path, kind_of: String, default: nil
attribute :full_path, kind_of: String, default: nil
attribute :append_env_path, kind_of: [TrueClass, FalseClass], default: false
# SHA-256 checksums only (64 hex characters).
attribute :checksum, regex: /^[a-zA-Z0-9]{64}$/, default: nil
attribute :has_binaries, kind_of: Array, default: []
attribute :creates, kind_of: String, default: nil
attribute :release_file, kind_of: String, default: ''
attribute :strip_leading_dir, kind_of: [TrueClass, FalseClass, NilClass]
attribute :strip_components, kind_of: Integer, default: 1
attribute :mode, kind_of: Integer, default: 0755
attribute :prefix_root, kind_of: String, default: nil
attribute :prefix_home, kind_of: String, default: nil
attribute :prefix_bin, kind_of: String, default: nil
attribute :version, kind_of: String, default: nil
attribute :home_dir, kind_of: String, default: nil
attribute :win_install_dir, kind_of: String, default: nil
attribute :environment, kind_of: Hash, default: {}
attribute :autoconf_opts, kind_of: Array, default: []
attribute :make_opts, kind_of: Array, default: []
# FIX: removed duplicate declarations of :home_dir and :autoconf_opts that
# repeated the identical definitions above.
attribute :extension, kind_of: String
attribute :backup, kind_of: [FalseClass, Integer], default: 5
add accessor for owner
Signed-off-by: Derek Wood <e1c79a582b6629e6b39e9679f4bb964d25db4aa8@cask.co>
#
# Cookbook Name:: ark
# Resource:: Ark
#
# Author:: Bryan W. Berry <bryan.berry@gmail.com>
# Copyright 2012, Bryan W. Berry
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Actions supported by the ark resource.
actions(
  :cherry_pick,
  :configure,
  :dump,
  :install,
  :install_with_make,
  :put,
  :setup_py,
  :setup_py_build,
  :setup_py_install,
  :unzip
)

default_action :install

# Plain accessors for values the provider computes at converge time.
attr_accessor :extension,
              :home_dir,
              :owner,
              :path,
              :prefix_bin,
              :prefix_root,
              :release_file,
              :version

attribute :owner, kind_of: String, default: nil
attribute :group, kind_of: [String, Integer], default: 0
attribute :url, kind_of: String, required: true
attribute :path, kind_of: String, default: nil
attribute :full_path, kind_of: String, default: nil
attribute :append_env_path, kind_of: [TrueClass, FalseClass], default: false
# SHA-256 checksums only (64 hex characters).
attribute :checksum, regex: /^[a-zA-Z0-9]{64}$/, default: nil
attribute :has_binaries, kind_of: Array, default: []
attribute :creates, kind_of: String, default: nil
attribute :release_file, kind_of: String, default: ''
attribute :strip_leading_dir, kind_of: [TrueClass, FalseClass, NilClass]
attribute :strip_components, kind_of: Integer, default: 1
attribute :mode, kind_of: Integer, default: 0755
attribute :prefix_root, kind_of: String, default: nil
attribute :prefix_home, kind_of: String, default: nil
attribute :prefix_bin, kind_of: String, default: nil
attribute :version, kind_of: String, default: nil
attribute :home_dir, kind_of: String, default: nil
attribute :win_install_dir, kind_of: String, default: nil
attribute :environment, kind_of: Hash, default: {}
attribute :autoconf_opts, kind_of: Array, default: []
attribute :make_opts, kind_of: Array, default: []
# FIX: removed duplicate declarations of :home_dir and :autoconf_opts that
# repeated the identical definitions above.
attribute :extension, kind_of: String
attribute :backup, kind_of: [FalseClass, Integer], default: 5
|
# LWRP interface for managing DNS records via a provider API.
actions :create, :update, :delete
default_action :create

# Credentials for the DNS provider API.
attribute :username, kind_of: String, required: true
attribute :password, kind_of: String, required: true
attribute :zone, kind_of: String, required: true
attribute :record_name, kind_of: String, required: true
# Only A and CNAME record types are accepted (case-insensitive match).
attribute :record_type, kind_of: String, regex: /(A|CNAME)/i, required: true
attribute :record_value, kind_of: String
# FIX: give ttl a sane default (300 seconds) instead of leaving it nil when
# a recipe does not set one.
attribute :ttl, kind_of: Integer, default: 300
attribute :connection_options, default: {}
Give the ttl attribute a default of 300 seconds instead of leaving it unset.
# LWRP interface for managing DNS records via a provider API.
actions :create, :update, :delete
default_action :create
# Credentials for the DNS provider API.
attribute :username, kind_of: String, required: true
attribute :password, kind_of: String, required: true
attribute :zone, kind_of: String, required: true
attribute :record_name, kind_of: String, required: true
# Only A and CNAME record types are accepted (case-insensitive match).
attribute :record_type, kind_of: String, regex: /(A|CNAME)/i, required: true
attribute :record_value, kind_of: String
# TTL in seconds; defaults to 300 when a recipe does not set one.
attribute :ttl, kind_of: Integer, default: 300
attribute :connection_options, default: {}
# Mirrors a remote yum repository locally with reposync/createrepo and
# (optionally) registers the local copy as a yum repository.
property :name, [String, Symbol], required: true, name_property: true
property :local_path, String, required: false
property :repo_name, String, required: true
property :repo_description, String, required: true
property :repo_baseurl, String, required: true
property :use_repo, [TrueClass, FalseClass], required: true, default: true
property :repo_workers, Integer, required: false, default: node['cpu']['total']

# Directory the repository is mirrored into. Honours local_path when the
# caller supplies one, otherwise falls back to the default mirror root.
# FIX: the original tested `local_path == NilClass`, which is never true for
# a String or for nil, so local_path was silently ignored (and the branch it
# guarded would have produced "/<name>/" anyway).
def real_local_path
  if local_path.nil?
    "/var/lib/yum-repo/#{name}/"
  else
    "#{local_path}/#{name}/"
  end
end

# Public alias used by recipes that need the mirror directory.
def path
  real_local_path
end

action :create do
  # Source definition consumed by reposync.
  template "/etc/reposync.repos.d/#{repo_name}.repo" do
    cookbook 'yumserver'
    source 'repo.erb'
    owner 'root'
    group 'root'
    mode '0644'
    variables(
      repo_name: repo_name,
      repo_description: repo_description,
      repo_baseurl: repo_baseurl
    )
    action :create
  end

  directory real_local_path do
    owner 'root'
    group 'root'
    mode '0755'
    action :create
  end

  # Sync the remote repo into the local mirror directory.
  ruby_block 'reposync' do
    block do
      YumServer::Helper.reposync(repo_name, real_local_path)
    end
    action :run
    only_if { ::File.exist?("/etc/reposync.repos.d/#{repo_name}.repo") }
  end

  # (Re)build repodata for the mirrored packages.
  ruby_block 'createrepo' do
    block do
      YumServer::Helper.createrepo(real_local_path, repo_workers)
    end
    action :run
  end

  # Optionally point yum at the local mirror.
  if use_repo
    yum_repository repo_name do
      description repo_description
      baseurl "file://#{real_local_path}"
      gpgcheck false
      action :create
    end
  end
end

action :delete do
  # FIX: remove the ".repo" file that :create writes; the original deleted
  # a ".conf" path that never exists, leaving the reposync definition behind.
  file "/etc/reposync.repos.d/#{repo_name}.repo" do
    action :delete
  end
  directory real_local_path do
    recursive true
    action :delete
  end
  yum_repository repo_name do
    action :delete
  end
end
Fix the delete action on the mirror resource: remove the .repo file that the create action writes (it previously targeted a nonexistent .conf file).
property :name, [String, Symbol], required: true, name_property: true
property :local_path, String, required: false
property :repo_name, String, required: true
property :repo_description, String, required: true
property :repo_baseurl, String, required: true
property :use_repo, [TrueClass, FalseClass], required: true, default: true
property :repo_workers, Integer, required: false, default: node['cpu']['total']
def real_local_path
if local_path == NilClass
"#{local_path}/#{name}/"
else
"/var/lib/yum-repo/#{name}/"
end
end
def path
real_local_path
end
# Mirror the upstream repo: write the reposync config, create the local
# directory, run reposync + createrepo, and (optionally) register the
# result as a yum repository on this node.
action :create do
# reposync configuration consumed by YumServer::Helper.reposync below.
template "/etc/reposync.repos.d/#{repo_name}.repo" do
cookbook 'yumserver'
source 'repo.erb'
owner 'root'
group 'root'
mode '0644'
variables(
repo_name: repo_name,
repo_description: repo_description,
repo_baseurl: repo_baseurl
)
action :create
end
# Destination directory for the mirrored packages.
directory real_local_path do
owner 'root'
group 'root'
mode '0755'
action :create
end
# Pull packages from upstream; skipped if the config file was not written.
ruby_block 'reposync' do
block do
YumServer::Helper.reposync(repo_name, real_local_path)
end
action :run
only_if { ::File.exist?("/etc/reposync.repos.d/#{repo_name}.repo") }
end
# (Re)build the repodata metadata for the mirrored tree.
ruby_block 'createrepo' do
block do
YumServer::Helper.createrepo(real_local_path, repo_workers)
end
action :run
end
# Optionally point yum on this node at the local mirror via file://.
if use_repo
yum_repository repo_name do
description repo_description
baseurl "file://#{real_local_path}"
gpgcheck false
action :create
end
end
end
# Tear down everything :create set up: the reposync config (same .repo
# path that :create writes), the mirror directory and the yum repo entry.
action :delete do
file "/etc/reposync.repos.d/#{repo_name}.repo" do
action :delete
end
directory real_local_path do
recursive true
action :delete
end
yum_repository repo_name do
action :delete
end
end
|
# Gem manifest for rest-client 1.0: explicit file list, one executable.
# NOTE(review): `has_rdoc` and `rubyforge_project` are deprecated and
# ignored by modern RubyGems — harmless, but flagged for cleanup.
Gem::Specification.new do |s|
s.name = "rest-client"
s.version = "1.0"
s.summary = "Simple REST client for Ruby, inspired by microframework syntax for specifying actions."
s.description = "A simple REST client for Ruby, inspired by the Sinatra microframework style of specifying actions: get, put, post, delete."
s.author = "Adam Wiggins"
s.email = "adam@heroku.com"
s.rubyforge_project = "rest-client"
s.homepage = "http://rest-client.heroku.com/"
s.has_rdoc = true
s.platform = Gem::Platform::RUBY
# Files are listed explicitly; new lib/spec files must be added here too.
s.files = %w(Rakefile README.rdoc rest-client.gemspec
lib/rest_client.rb lib/restclient.rb
lib/restclient/request.rb lib/restclient/response.rb
lib/restclient/exceptions.rb lib/restclient/resource.rb
spec/base.rb spec/request_spec.rb spec/response_spec.rb
spec/exceptions_spec.rb spec/resource_spec.rb spec/restclient_spec.rb
bin/restclient)
s.executables = ['restclient']
s.require_path = "lib"
end
add mixin to gemspec
# Gem manifest for rest-client 1.0 (adds lib/restclient/mixin/response.rb).
# NOTE(review): `has_rdoc` and `rubyforge_project` are deprecated and
# ignored by modern RubyGems — harmless, but flagged for cleanup.
Gem::Specification.new do |s|
s.name = "rest-client"
s.version = "1.0"
s.summary = "Simple REST client for Ruby, inspired by microframework syntax for specifying actions."
s.description = "A simple REST client for Ruby, inspired by the Sinatra microframework style of specifying actions: get, put, post, delete."
s.author = "Adam Wiggins"
s.email = "adam@heroku.com"
s.rubyforge_project = "rest-client"
s.homepage = "http://rest-client.heroku.com/"
s.has_rdoc = true
s.platform = Gem::Platform::RUBY
# Files are listed explicitly; new lib/spec files must be added here too.
s.files = %w(Rakefile README.rdoc rest-client.gemspec
lib/rest_client.rb lib/restclient.rb
lib/restclient/request.rb lib/restclient/response.rb
lib/restclient/exceptions.rb lib/restclient/resource.rb
lib/restclient/mixin/response.rb
spec/base.rb spec/request_spec.rb spec/response_spec.rb
spec/exceptions_spec.rb spec/resource_spec.rb spec/restclient_spec.rb
bin/restclient)
s.executables = ['restclient']
s.require_path = "lib"
end
|
# encoding: UTF-8
# Gem manifest for restartable 1.0.0.
# File lists are derived from `git ls-files`, so the gem must be built
# from a git checkout.
Gem::Specification.new do |s|
s.name        = 'restartable'
s.version     = '1.0.0'
s.summary     = %q{Run code, Ctrl-C to restart, once more Ctrl-C to stop}
s.homepage    = "http://github.com/toy/#{s.name}"
s.authors     = ['Ivan Kuchin']
s.license     = 'MIT'
s.rubyforge_project = s.name
s.files         = `git ls-files`.split("\n")
s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = %w[lib]
s.add_dependency 'colored', '~> 1.2'
s.add_dependency 'sys-proctable', '~> 0.9.3'
s.add_development_dependency 'cucumber'
s.add_development_dependency 'rspec', '~> 3.0'
# rubocop dropped support for rubies older than 1.9.3, so gate it.
if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('1.9.3')
s.add_development_dependency 'rubocop', '~> 0.27'
end
end
v1.0.1
# encoding: UTF-8
# Gem manifest for restartable 1.0.1.
# File lists are derived from `git ls-files`, so the gem must be built
# from a git checkout.
Gem::Specification.new do |s|
s.name        = 'restartable'
s.version     = '1.0.1'
s.summary     = %q{Run code, Ctrl-C to restart, once more Ctrl-C to stop}
s.homepage    = "http://github.com/toy/#{s.name}"
s.authors     = ['Ivan Kuchin']
s.license     = 'MIT'
s.rubyforge_project = s.name
s.files         = `git ls-files`.split("\n")
s.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables   = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = %w[lib]
s.add_dependency 'colored', '~> 1.2'
s.add_dependency 'sys-proctable', '~> 0.9.3'
s.add_development_dependency 'cucumber'
s.add_development_dependency 'rspec', '~> 3.0'
# rubocop dropped support for rubies older than 1.9.3, so gate it.
if Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('1.9.3')
s.add_development_dependency 'rubocop', '~> 0.27'
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: riak-record 0.5.1 ruby lib
Gem::Specification.new do |s|
s.name = "riak-record"
s.version = "0.5.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Robert Graff"]
s.date = "2014-10-16"
s.description = "RiakRecord is a thin and immature wrapper around riak-ruby-client. It creates a bucket for\n each class, provides a simple finder, and creates attribute reader."
s.email = "robert_graff@yahoo.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
# NOTE(review): the file list ships a prior built gem
# ("riak-record-0.5.0.gem") inside this gem — regenerate via jeweler
# with that artifact removed from the working tree.
s.files = [
".document",
".rspec",
"Gemfile",
"Gemfile.lock",
"Guardfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"TODO.md",
"VERSION",
"lib/riak_record.rb",
"lib/riak_record/associations.rb",
"lib/riak_record/base.rb",
"lib/riak_record/callbacks.rb",
"lib/riak_record/finder.rb",
"riak-record-0.5.0.gem",
"riak-record.gemspec",
"spec/riak_record/associations_spec.rb",
"spec/riak_record/base_spec.rb",
"spec/riak_record/callbacks_spec.rb",
"spec/riak_record/finder_spec.rb",
"spec/riak_record_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = "http://github.com/rgraff/riak-record"
s.licenses = ["MIT"]
s.rubygems_version = "2.2.1"
s.summary = "A wrapper around ruby-client."
# Jeweler emits the dependency list three ways for compatibility with
# old RubyGems versions; only the first branch runs on modern RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_development_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.0"])
s.add_development_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
s.add_development_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_development_dependency(%q<guard-bundler>, [">= 0"])
s.add_development_dependency(%q<terminal-notifier-guard>, [">= 0"])
else
s.add_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_dependency(%q<guard-bundler>, [">= 0"])
s.add_dependency(%q<terminal-notifier-guard>, [">= 0"])
end
else
s.add_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_dependency(%q<guard-bundler>, [">= 0"])
s.add_dependency(%q<terminal-notifier-guard>, [">= 0"])
end
end
Regenerate gemspec for version 0.6.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: riak-record 0.6.0 ruby lib
Gem::Specification.new do |s|
s.name = "riak-record"
s.version = "0.6.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Robert Graff"]
s.date = "2014-10-22"
s.description = "RiakRecord is a thin and immature wrapper around riak-ruby-client. It creates a bucket for\n each class, provides a simple finder, and creates attribute reader."
s.email = "robert_graff@yahoo.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.md"
]
# NOTE(review): the file list ships a prior built gem
# ("riak-record-0.5.0.gem") inside this gem — regenerate via jeweler
# with that artifact removed from the working tree.
s.files = [
".document",
".rspec",
"Gemfile",
"Gemfile.lock",
"Guardfile",
"LICENSE.txt",
"README.md",
"Rakefile",
"TODO.md",
"VERSION",
"lib/riak_record.rb",
"lib/riak_record/associations.rb",
"lib/riak_record/base.rb",
"lib/riak_record/callbacks.rb",
"lib/riak_record/finder.rb",
"riak-record-0.5.0.gem",
"riak-record.gemspec",
"spec/riak_record/associations_spec.rb",
"spec/riak_record/base_spec.rb",
"spec/riak_record/callbacks_spec.rb",
"spec/riak_record/finder_spec.rb",
"spec/riak_record_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = "http://github.com/rgraff/riak-record"
s.licenses = ["MIT"]
s.rubygems_version = "2.2.1"
s.summary = "A wrapper around ruby-client."
# Jeweler emits the dependency list three ways for compatibility with
# old RubyGems versions; only the first branch runs on modern RubyGems.
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_development_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_development_dependency(%q<rdoc>, ["~> 3.12"])
s.add_development_dependency(%q<bundler>, ["~> 1.0"])
s.add_development_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_development_dependency(%q<simplecov>, [">= 0"])
s.add_development_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_development_dependency(%q<guard-bundler>, [">= 0"])
s.add_development_dependency(%q<terminal-notifier-guard>, [">= 0"])
else
s.add_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_dependency(%q<guard-bundler>, [">= 0"])
s.add_dependency(%q<terminal-notifier-guard>, [">= 0"])
end
else
s.add_dependency(%q<riak-client>, ["~> 2.0.0"])
s.add_dependency(%q<rspec>, ["~> 3.1.0"])
s.add_dependency(%q<rdoc>, ["~> 3.12"])
s.add_dependency(%q<bundler>, ["~> 1.0"])
s.add_dependency(%q<jeweler>, ["~> 2.0.1"])
s.add_dependency(%q<simplecov>, [">= 0"])
s.add_dependency(%q<guard-rspec>, ["~> 4.3.1"])
s.add_dependency(%q<guard-bundler>, [">= 0"])
s.add_dependency(%q<terminal-notifier-guard>, [">= 0"])
end
end
|
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
# Maintain your gem's version:
require 'publify_core/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name        = 'publify_core'
s.version     = PublifyCore::VERSION
s.authors     = ['Matijs van Zuijlen', 'Yannick François',
'Thomas Lecavellier', 'Frédéric de Villamil']
s.email       = ['matijs@matijs.net']
s.homepage    = 'https://publify.co'
s.summary     = 'Core engine for the Publify blogging system.'
s.description = 'Core engine for the Publify blogging system, formerly known as Typo.'
s.license     = 'MIT'
# Packaged file list is maintained in Manifest.txt, one path per line.
s.files = File.open('Manifest.txt').readlines.map(&:chomp)
s.required_ruby_version = '>= 2.2.0'
# Runtime dependencies (Rails engine plus rendering/auth/upload stack).
s.add_dependency 'rails', '~> 5.0.0'
s.add_dependency 'RedCloth', '~> 4.3.2'
s.add_dependency 'aasm', '~> 4.12.0'
s.add_dependency 'akismet', '~> 2.0'
s.add_dependency 'bluecloth', '~> 2.1'
s.add_dependency 'bootstrap-sass', '~> 3.3.6'
s.add_dependency 'cancancan', '~> 2.0'
s.add_dependency 'carrierwave', '~> 1.1.0'
s.add_dependency 'devise', '~> 4.3.0'
s.add_dependency 'devise-i18n', '~> 1.1.0'
s.add_dependency 'dynamic_form', '~> 1.1.4'
s.add_dependency 'feedjira', '~> 2.1.0'
s.add_dependency 'fog-aws', '~> 1.3.0'
s.add_dependency 'jquery-rails', '~> 4.3.1'
s.add_dependency 'jquery-ui-rails', '~> 6.0.1'
s.add_dependency 'kaminari', '~> 1.0.1'
s.add_dependency 'mini_magick', '~> 4.2'
s.add_dependency 'rails-timeago', '~> 2.0'
s.add_dependency 'rails_autolink', '~> 1.1.0'
s.add_dependency 'recaptcha', '~> 4.3.1'
s.add_dependency 'rubypants', '~> 0.6.0'
s.add_dependency 'mimemagic', '~> 0.3.2'
s.add_dependency 'sass-rails', '~> 5.0'
s.add_dependency 'twitter', '~> 6.1.0'
s.add_dependency 'uuidtools', '~> 2.1.1'
# Development/test-only dependencies.
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'rspec-rails', '~> 3.5'
s.add_development_dependency 'capybara', '~> 2.7'
s.add_development_dependency 'factory_girl_rails', '~> 4.8.0'
s.add_development_dependency 'i18n-tasks', '~> 0.9.1'
s.add_development_dependency 'rails-controller-testing', '~> 1.0.1'
s.add_development_dependency 'timecop', '~> 0.8.1'
s.add_development_dependency 'webmock', '~> 3.0.1'
s.add_development_dependency 'simplecov', '~> 0.14.0'
end
Update devise-i18n
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
# Maintain your gem's version:
require 'publify_core/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name        = 'publify_core'
s.version     = PublifyCore::VERSION
s.authors     = ['Matijs van Zuijlen', 'Yannick François',
'Thomas Lecavellier', 'Frédéric de Villamil']
s.email       = ['matijs@matijs.net']
s.homepage    = 'https://publify.co'
s.summary     = 'Core engine for the Publify blogging system.'
s.description = 'Core engine for the Publify blogging system, formerly known as Typo.'
s.license     = 'MIT'
# Packaged file list is maintained in Manifest.txt, one path per line.
s.files = File.open('Manifest.txt').readlines.map(&:chomp)
s.required_ruby_version = '>= 2.2.0'
# Runtime dependencies (Rails engine plus rendering/auth/upload stack).
s.add_dependency 'rails', '~> 5.0.0'
s.add_dependency 'RedCloth', '~> 4.3.2'
s.add_dependency 'aasm', '~> 4.12.0'
s.add_dependency 'akismet', '~> 2.0'
s.add_dependency 'bluecloth', '~> 2.1'
s.add_dependency 'bootstrap-sass', '~> 3.3.6'
s.add_dependency 'cancancan', '~> 2.0'
s.add_dependency 'carrierwave', '~> 1.1.0'
s.add_dependency 'devise', '~> 4.3.0'
s.add_dependency 'devise-i18n', '~> 1.2.0'
s.add_dependency 'dynamic_form', '~> 1.1.4'
s.add_dependency 'feedjira', '~> 2.1.0'
s.add_dependency 'fog-aws', '~> 1.3.0'
s.add_dependency 'jquery-rails', '~> 4.3.1'
s.add_dependency 'jquery-ui-rails', '~> 6.0.1'
s.add_dependency 'kaminari', '~> 1.0.1'
s.add_dependency 'mini_magick', '~> 4.2'
s.add_dependency 'rails-timeago', '~> 2.0'
s.add_dependency 'rails_autolink', '~> 1.1.0'
s.add_dependency 'recaptcha', '~> 4.3.1'
s.add_dependency 'rubypants', '~> 0.6.0'
s.add_dependency 'mimemagic', '~> 0.3.2'
s.add_dependency 'sass-rails', '~> 5.0'
s.add_dependency 'twitter', '~> 6.1.0'
s.add_dependency 'uuidtools', '~> 2.1.1'
# Development/test-only dependencies.
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'rspec-rails', '~> 3.5'
s.add_development_dependency 'capybara', '~> 2.7'
s.add_development_dependency 'factory_girl_rails', '~> 4.8.0'
s.add_development_dependency 'i18n-tasks', '~> 0.9.1'
s.add_development_dependency 'rails-controller-testing', '~> 1.0.1'
s.add_development_dependency 'timecop', '~> 0.8.1'
s.add_development_dependency 'webmock', '~> 3.0.1'
s.add_development_dependency 'simplecov', '~> 0.14.0'
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{event_horizon}
s.version = "0.0.5"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jiri Zajpt"]
s.date = %q{2010-10-17}
s.description = %q{Simple plugin for logging events in Rails applications using Mongoid ODM.}
s.email = %q{jzajpt@blueberryapps.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"app/models/event.rb",
"event_horizon.gemspec",
"lib/event_horizon.rb",
"lib/event_horizon/events.rb",
"lib/event_horizon/helpers.rb",
"lib/event_horizon/helpers/event_horizon_helper.rb",
"lib/event_horizon/mongoid_support/document.rb",
"lib/event_horizon/mongoid_support/event.rb",
"lib/event_horizon/orm_support.rb",
"spec/event_horizon_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/jzajpt/event_horizon}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Simple plugin for logging events in Rails applications.}
s.test_files = [
"spec/event_horizon_spec.rb",
"spec/spec_helper.rb"
]
# Jeweler compatibility shim: picks the dependency API matching the
# RubyGems version building the gem.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_runtime_dependency(%q<activesupport>, ["~> 3.0.0"])
else
s.add_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_dependency(%q<activesupport>, ["~> 3.0.0"])
end
else
s.add_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_dependency(%q<activesupport>, ["~> 3.0.0"])
end
end
Updated gemspec.
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{event_horizon}
s.version = "0.0.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Jiri Zajpt"]
s.date = %q{2010-10-17}
s.description = %q{Simple plugin for logging events in Rails applications using Mongoid ODM.}
s.email = %q{jzajpt@blueberryapps.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION",
"app/models/event.rb",
"event_horizon.gemspec",
"lib/event_horizon.rb",
"lib/event_horizon/events.rb",
"lib/event_horizon/helpers.rb",
"lib/event_horizon/helpers/event_horizon_helper.rb",
"lib/event_horizon/mongoid_support/document.rb",
"lib/event_horizon/mongoid_support/event.rb",
"lib/event_horizon/orm_support.rb",
"spec/event_horizon_spec.rb",
"spec/spec_helper.rb"
]
s.homepage = %q{http://github.com/jzajpt/event_horizon}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.7}
s.summary = %q{Simple plugin for logging events in Rails applications.}
s.test_files = [
"spec/event_horizon_spec.rb",
"spec/spec_helper.rb"
]
# Jeweler compatibility shim: picks the dependency API matching the
# RubyGems version building the gem.
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_runtime_dependency(%q<activesupport>, ["~> 3.0.0"])
else
s.add_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_dependency(%q<activesupport>, ["~> 3.0.0"])
end
else
s.add_dependency(%q<rspec>, ["~> 2.0.0"])
s.add_dependency(%q<activesupport>, ["~> 3.0.0"])
end
end
|
# CocoaPods spec for SwiftyUtilities 0.1.0, split into subspecs so
# consumers can pull in only the pieces (and dependencies) they need.
Pod::Spec.new do |s|
s.name             = 'SwiftyUtilities'
s.version          = '0.1.0'
s.summary          = 'A collection of reusable boilerplate code.'
s.homepage         = 'https://github.com/eastsss/SwiftyUtilities'
s.license          = { :type => 'MIT', :file => 'LICENSE' }
s.author           = { 'eastsss' => 'anatox91@yandex.ru' }
s.source           = { :git => 'https://github.com/eastsss/SwiftyUtilities.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
# Only Foundation is installed unless the consumer opts into more.
s.default_subspec = "Foundation"
s.subspec "Foundation" do |ss|
ss.source_files = 'SwiftyUtilities/Foundation/**/*'
ss.framework = 'Foundation'
end
s.subspec "UIKit" do |ss|
ss.source_files = 'SwiftyUtilities/UIKit/**/*'
ss.framework = 'UIKit'
end
# Networking stack: Moya/ReactiveSwift plus Argo/Curry/Ogra for JSON.
s.subspec "Networking" do |ss|
ss.source_files = 'SwiftyUtilities/Networking/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'Moya/ReactiveSwift', '~> 8.0.3'
ss.dependency 'Argo', '~> 4.1.2'
ss.dependency 'Curry', '~> 3.0.0'
ss.dependency 'Ogra', '~> 4.1.1'
end
s.subspec "Reactive" do |ss|
ss.source_files = 'SwiftyUtilities/Reactive/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'ReactiveCocoa', '~> 5.0.2'
end
s.subspec "ReactivePortionLoader" do |ss|
ss.source_files = 'SwiftyUtilities/ReactivePortionLoader/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'SwiftyUtilities/Networking'
end
end
release 0.2.0
# CocoaPods spec for SwiftyUtilities 0.2.0, split into subspecs so
# consumers can pull in only the pieces (and dependencies) they need.
Pod::Spec.new do |s|
s.name             = 'SwiftyUtilities'
s.version          = '0.2.0'
s.summary          = 'A collection of reusable boilerplate code.'
s.homepage         = 'https://github.com/eastsss/SwiftyUtilities'
s.license          = { :type => 'MIT', :file => 'LICENSE' }
s.author           = { 'eastsss' => 'anatox91@yandex.ru' }
s.source           = { :git => 'https://github.com/eastsss/SwiftyUtilities.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
# Only Foundation is installed unless the consumer opts into more.
s.default_subspec = "Foundation"
s.subspec "Foundation" do |ss|
ss.source_files = 'SwiftyUtilities/Foundation/**/*'
ss.framework = 'Foundation'
end
s.subspec "UIKit" do |ss|
ss.source_files = 'SwiftyUtilities/UIKit/**/*'
ss.framework = 'UIKit'
end
# Networking stack: Moya/ReactiveSwift plus Argo/Curry/Ogra for JSON.
s.subspec "Networking" do |ss|
ss.source_files = 'SwiftyUtilities/Networking/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'Moya/ReactiveSwift', '~> 8.0.3'
ss.dependency 'Argo', '~> 4.1.2'
ss.dependency 'Curry', '~> 3.0.0'
ss.dependency 'Ogra', '~> 4.1.1'
end
s.subspec "Reactive" do |ss|
ss.source_files = 'SwiftyUtilities/Reactive/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'ReactiveCocoa', '~> 5.0.2'
end
s.subspec "ReactivePortionLoader" do |ss|
ss.source_files = 'SwiftyUtilities/ReactivePortionLoader/**/*'
ss.dependency 'SwiftyUtilities/Foundation'
ss.dependency 'SwiftyUtilities/Networking'
end
end
|
# CocoaPods spec for SwiftyXMLParser 3.0.2.
# NOTE(review): the git tag is duplicated as a literal ("3.0.2") instead
# of reusing s.version.to_s — keep both in sync when bumping.
Pod::Spec.new do |s|
s.name         = "SwiftyXMLParser"
s.version      = "3.0.2"
s.summary      = "Simple XML Parser implemented by Swift"
s.description  = <<-DESC
This is a XML parser inspired by SwiftyJSON and SWXMLHash.
NSXMLParser in Foundation framework is a kind of "SAX" parser. It has a enough performance but is a little inconvenient.
So we have implemented "DOM" parser wrapping it.
DESC
s.homepage     = "https://github.com/yahoojapan/SwiftyXMLParser.git"
s.license      = "MIT"
s.author       = { "kahayash" => "kahayash@yahoo-corp.jp" }
s.ios.deployment_target = "8.0"
s.tvos.deployment_target = "9.0"
s.source_files = "SwiftyXMLParser/*.swift"
s.requires_arc = true
s.source       = { :git => "https://github.com/yahoojapan/SwiftyXMLParser.git", :tag => "3.0.2" }
end
Create SwiftyXMLParser.podspec
# CocoaPods spec for SwiftyXMLParser 3.0.3.
# NOTE(review): the git tag is duplicated as a literal ("3.0.3") instead
# of reusing s.version.to_s — keep both in sync when bumping.
Pod::Spec.new do |s|
s.name         = "SwiftyXMLParser"
s.version      = "3.0.3"
s.summary      = "Simple XML Parser implemented by Swift"
s.description  = <<-DESC
This is a XML parser inspired by SwiftyJSON and SWXMLHash.
NSXMLParser in Foundation framework is a kind of "SAX" parser. It has a enough performance but is a little inconvenient.
So we have implemented "DOM" parser wrapping it.
DESC
s.homepage     = "https://github.com/yahoojapan/SwiftyXMLParser.git"
s.license      = "MIT"
s.author       = { "kahayash" => "kahayash@yahoo-corp.jp" }
s.ios.deployment_target = "8.0"
s.tvos.deployment_target = "9.0"
s.source_files = "SwiftyXMLParser/*.swift"
s.requires_arc = true
s.source       = { :git => "https://github.com/yahoojapan/SwiftyXMLParser.git", :tag => "3.0.3" }
end
|
Archive unpublished MMO slugs (correctly).
# Data migration: flip the listed Marine Management Organisation artefacts
# to the 'archived' state (and back to 'live' on rollback).
# The slug list is fixed data — do not edit entries in place; any change
# alters which artefacts the migration touches.
class MarkUnpublishedMmoSlugsAsArchivedV2 < Mongoid::Migration
# Marine Management Organisation
# Sourced from https://gist.github.com/JordanHatch/a389a32abf0eaca89fec
# which was sourced from https://gist.github.com/elliotcm/1a6f9159448d66dc97bb
UNPUBLISHED_SLUGS = [
"apply-for-a-european-fisheries-fund-grant",
"applying-for-a-fishing-vessel-licence",
"buy-and-sell-first-sale-marine-fish",
"changes-to-your-fishing-vessel-licence",
"closed-fishing-areas",
"disposal-of-dredged-material-at-sea-regulations-and-controls",
"east-marine-plan-areas",
"electronic-recording-and-reporting-of-fishing-activity",
"european-fisheries-fund-fishing-industry-common-interests",
"european-fisheries-fund-organic-and-environmentally-friendly-measures-grant",
"european-fisheries-fund-processing-and-marketing-fisheries-and-aquaculture",
"european-fisheries-fund-projects",
"fisheries-catch-limits",
"fisheries-catch-limits-10-metres-and-under",
"fisheries-catch-limits-non-sector",
"fishing-industry-regulations",
"fishing-vessels-licence-upgrades-after-re-engining",
"gaining-consent-to-dredge-marine-minerals",
"get-a-fishing-vessel-licence-mussel-seed",
"get-an-oil-spill-treatment-product-approved",
"get-involved-in-marine-planning",
"government/collections/fishing-vessel-licences-10-metre-and-under-vessels",
"government/collections/fishing-vessel-licences-over-10-metre-vessels",
"government/collections/marine-conservation-byelaws",
"government/publications/approved-electronic-logbook-software-systems",
"government/publications/category-a-annexes",
"government/publications/category-a-conditions-and-schedule",
"government/publications/category-a-islands",
"government/publications/category-a-pelagic-annexes",
"government/publications/category-a-pelagic-conditions-and-schedule",
"government/publications/category-b-annexes",
"government/publications/category-b-conditions-and-schedule",
"government/publications/category-c-annexes",
"government/publications/category-c-conditions-and-schedule",
"government/publications/deep-sea-species-annexes",
"government/publications/deep-sea-species-conditions-and-schedule",
"government/publications/east-inshore-and-east-offshore-marine-plans",
"government/publications/handline-mackerel-conditions-and-schedule",
"government/publications/non-sector-capped-licences",
"government/publications/non-sector-uncapped-licences",
"government/publications/sector-annexes",
"government/publications/sector-conditions-and-schedule",
"government/publications/thames-and-blackwater-conditions-and-schedule",
"harbour-development-and-the-law",
"how-a-marine-plan-is-made",
"how-to-clean-an-oil-spill-at-sea",
"investing-in-aquaculture",
"investing-on-board-your-fishing-vessel",
"lease-extra-fishing-quota",
"licences-for-offshore-renewable-energy-installations",
"make-a-european-fisheries-fund-claim",
"make-changes-to-your-fishing-vessel-licence",
"make-changes-to-your-fishing-vessel-licence-combine-and-separate-licences",
"manage-fisheries-quota",
"manage-your-fishing-effort-cod-recovery-zone",
"manage-your-fishing-effort-sole-recovery-zone",
"manage-your-fishing-effort-western-waters-scallops",
"marine-construction-and-coastal-protection",
"marine-licensing-additional-information-for-dredging-applications",
"marine-licensing-aggregate-extraction",
"marine-licensing-assess-the-impact-on-the-environment",
"marine-licensing-disposing-waste-at-sea",
"marine-licensing-diving",
"marine-licensing-dredging",
"marine-licensing-emergency-application",
"marine-licensing-exemptions",
"marine-licensing-fast-track-application-process",
"marine-licensing-local-or-regional-dredging-conditions",
"marine-licensing-maintenance-activities",
"marine-licensing-marker-buoys-and-posts",
"marine-licensing-minor-removals",
"marine-licensing-sampling-and-sediment-analysis",
"marine-licensing-scaffolding-and-ladders",
"marine-licensing-scientific-sampling",
"marine-wildlife-licence",
"offshore-cables-and-pipelines",
"penalties-for-fishing-offences",
"record-and-report-your-fishing-activity",
"record-sales-and-submit-sales-notes",
"report-a-wildlife-incident",
"report-and-respond-to-a-marine-pollution-incident",
"south-marine-plan-areas",
"the-days-at-sea-scheme",
"the-days-at-sea-scheme-sole-recovery-zone",
"trace-fish-products",
"transport-fish",
"understand-your-fishing-vessel-licence",
"weigh-fish-products"
].freeze
# Archive every artefact whose slug appears in UNPUBLISHED_SLUGS.
def self.up
Artefact.where(:slug.in => UNPUBLISHED_SLUGS).each do |artefact|
puts "Archiving artefact #{artefact.slug}"
artefact.update_attribute(:state, 'archived')
end
end
# Rollback: set the same artefacts back to 'live'.
# NOTE(review): this assumes all listed artefacts were 'live' before
# the migration ran; any that were in another state are not restored
# to it.
def self.down
Artefact.where(:slug.in => UNPUBLISHED_SLUGS).each do |artefact|
puts "Setting artefact #{artefact.slug} to live"
artefact.update_attribute(:state, 'live')
end
end
end
|
# One-off data recovery: attach orphaned LandParcel records (those with a
# nil activity_production_id) to the ActivityProduction whose activity
# name and localized rank label both appear in the parcel's name.
class HistoricalRecoveryTargetsDistributions < ActiveRecord::Migration
  def up
    orphaned_parcels = LandParcel.where(activity_production_id: nil)
    ActivityProduction.all.each do |production|
      creator_language = production.creator.language
      rank_label = :rank.t(number: production.rank_number, locale: creator_language)
      candidates = orphaned_parcels
                   .where("name like ?", "%#{production.activity.name}%")
                   .where("name like ?", "%#{rank_label}%")
      # Only the first match is linked, as in the original recovery pass.
      candidates.first.update_attribute(:activity_production_id, production.id) if candidates.any?
    end
  end

  # Irreversible data fix: nothing to undo.
  def down
  end
end
Recover activity_production_id for plants
# Data recovery: re-attach orphaned land parcels and plants (those with a
# nil activity_production_id) to their ActivityProduction.
class HistoricalRecoveryTargetsDistributions < ActiveRecord::Migration
  def up
    add_activity_production_to_land_parcels
    add_activity_production_to_plants
  end

  # Irreversible data fix: nothing to undo.
  def down
  end

  # Match orphaned LandParcel records whose name contains both the
  # activity name and the creator-localized rank label.
  def add_activity_production_to_land_parcels
    products_without_production = LandParcel.where(activity_production_id: nil)
    ActivityProduction.all.each do |activity_production|
      language = activity_production.creator.language
      found_land_parcel = products_without_production
                          .where("name like ?", "%#{activity_production.activity.name}%")
                          .where("name like ?", "%#{:rank.t(number: activity_production.rank_number, locale: language)}%")
      if found_land_parcel.any?
        found_land_parcel.first.update_attribute(:activity_production_id, activity_production.id)
      end
    end
  end

  # Recover a plant's production through the intervention that created it:
  # plant -> InterventionOutput -> intervention -> land_parcel target ->
  # target product -> activity_production_id.
  def add_activity_production_to_plants
    Plant.where(activity_production_id: nil).each do |product|
      output = product.intervention_product_parameters
                      .find { |parameter| parameter.is_a?(InterventionOutput) }
      # Fix: the original chained `.select{}.first.intervention...`, which
      # raised NoMethodError on nil (aborting the migration) when a plant
      # had no InterventionOutput parameter or no land_parcel target.
      next if output.nil?
      target = output.intervention.targets
                     .find { |t| t.reference_name.to_sym == :land_parcel }
      next if target.nil?
      activity_production_id = target.product.activity_production_id
      product.update_attribute(:activity_production_id, activity_production_id) unless activity_production_id.nil?
    end
  end
end
|
# Copyright:: Copyright (c) 2017 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef custom resource managing a Habitat supervised service.
resource_name :hab_service
# Service identifier, e.g. "core/nginx"; defaults to the resource name.
property :service_name, String, name_property: true
# Current-state flags populated by load_current_value below.
property :loaded, [true, false], default: false, desired_state: true
property :running, [true, false], default: false, desired_state: true
# hab sup options which get included based on the action of the resource
property :permanent_peer, [true, false], default: false
property :listen_gossip, String
property :listen_http, String
property :org, String, default: 'default'
property :peer, String
property :ring, String
property :strategy, String
property :topology, String
property :depot_url, String
property :bind, String
property :service_group, String
property :config_from, String
property :override_name, String, default: 'default'
# Populate current loaded/running state from the local Habitat
# supervisor's HTTP API and the on-disk spec file.
load_current_value do
http_uri = listen_http ? listen_http : 'http://localhost:9631'
svcs = Chef::HTTP::SimpleJSON.new(http_uri).get('/services')
# Match on "origin/name" so either a bare name or a full ident works.
sup_for_service_name = svcs.find do |s|
[s['spec_ident']['origin'], s['spec_ident']['name']].join('/') =~ /#{service_name}/
end
# NOTE(review): assumes the /services payload nests state under
# ['supervisor']['state'] — confirm against the Habitat API version in
# use; the rescue also masks a nil match (service absent) as not running.
running begin
sup_for_service_name['supervisor']['state'] == 'Up'
rescue
false
end
loaded ::File.exist?("/hab/sup/#{override_name}/specs/#{service_name.split('/').last}.spec")
Chef::Log.debug("service #{service_name} running state: #{running}")
Chef::Log.debug("service #{service_name} loaded state: #{loaded}")
end
# Each action shells out to `hab sup <subcommand>`, guarded by the
# current loaded/running state so the resource stays idempotent.
action :load do
execute "hab sup load #{new_resource.service_name} #{sup_options.join(' ')}" unless current_resource.loaded
end
action :unload do
execute "hab sup unload #{new_resource.service_name} #{sup_options.join(' ')}" if current_resource.loaded
end
action :start do
execute "hab sup start #{new_resource.service_name} #{sup_options.join(' ')}" unless current_resource.running
end
action :stop do
execute "hab sup stop #{new_resource.service_name} #{sup_options.join(' ')}" if current_resource.running
end
# Composite actions: the sleep gives the supervisor a moment to settle
# between the stop/unload and the following start/load.
action :restart do
action_stop
sleep 1
action_start
end
action :reload do
action_unload
sleep 1
action_load
end
action_class do
# Build the `hab sup` CLI flags for the current action. Flags are
# filtered per subcommand because `hab sup load` and `hab sup start`
# accept different option sets; --override-name applies to both.
# Returns a flat token array suitable for join(' ').
def sup_options
opts = []
# certain options are only valid for specific `hab sup` subcommands.
case action
when :load
opts << "--bind #{new_resource.bind}" if new_resource.bind
opts << "--url #{new_resource.depot_url}" if new_resource.depot_url
opts << "--group #{new_resource.service_group}" if new_resource.service_group
opts << "--strategy #{new_resource.strategy}" if new_resource.strategy
opts << "--topology #{new_resource.topology}" if new_resource.topology
when :start
opts << '--permanent-peer' if new_resource.permanent_peer
opts << "--bind #{new_resource.bind}" if new_resource.bind
opts << "--config-from #{new_resource.config_from}" if new_resource.config_from
opts << "--url #{new_resource.depot_url}" if new_resource.depot_url
opts << "--group #{new_resource.service_group}" if new_resource.service_group
opts << "--listen-gossip #{new_resource.listen_gossip}" if new_resource.listen_gossip
opts << "--listen-http #{new_resource.listen_http}" if new_resource.listen_http
opts << "--org #{new_resource.org}" unless new_resource.org == 'default'
opts << "--peer #{new_resource.peer}" if new_resource.peer
opts << "--ring #{new_resource.ring}" if new_resource.ring
opts << "--strategy #{new_resource.strategy}" if new_resource.strategy
opts << "--topology #{new_resource.topology}" if new_resource.topology
end
opts << "--override-name #{new_resource.override_name}" unless new_resource.override_name == 'default'
opts.map(&:split).flatten.compact
end
end
Fix the data structure we use to determine if a service is running
Signed-off-by: Tim Smith <764ef62106582a09ed09dfa0b6bff7c05fd7d1e4@chef.io>
# Copyright:: Copyright (c) 2017 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Chef custom resource that manages a Habitat service through the `hab sup` CLI.
resource_name :hab_service
property :service_name, String, name_property: true
# desired_state properties are compared against load_current_value to decide
# whether an action actually has work to do.
property :loaded, [true, false], default: false, desired_state: true
property :running, [true, false], default: false, desired_state: true
# hab sup options which get included based on the action of the resource
property :permanent_peer, [true, false], default: false
property :listen_gossip, String
property :listen_http, String
property :org, String, default: 'default'
property :peer, String
property :ring, String
property :strategy, String
property :topology, String
property :depot_url, String
property :bind, String
property :service_group, String
property :config_from, String
property :override_name, String, default: 'default'
# Determine current state from the supervisor's HTTP API and the on-disk
# spec file.
load_current_value do
  http_uri = listen_http ? listen_http : 'http://localhost:9631'
  svcs = Chef::HTTP::SimpleJSON.new(http_uri).get('/services')
  # Match the requested service against "origin/name"; service_name is used
  # as a regex fragment.
  sup_for_service_name = svcs.find do |s|
    [s['spec_ident']['origin'], s['spec_ident']['name']].join('/') =~ /#{service_name}/
  end
  # The supervisor reports run state under the 'process' key; if the service
  # is absent the nil lookup raises and we fall back to false.
  running begin
    sup_for_service_name['process']['state'] == 'Up'
  rescue
    false
  end
  loaded ::File.exist?("/hab/sup/#{override_name}/specs/#{service_name.split('/').last}.spec")
  Chef::Log.debug("service #{service_name} running state: #{running}")
  Chef::Log.debug("service #{service_name} loaded state: #{loaded}")
end
action :load do
  execute "hab sup load #{new_resource.service_name} #{sup_options.join(' ')}" unless current_resource.loaded
end
action :unload do
  execute "hab sup unload #{new_resource.service_name} #{sup_options.join(' ')}" if current_resource.loaded
end
action :start do
  execute "hab sup start #{new_resource.service_name} #{sup_options.join(' ')}" unless current_resource.running
end
action :stop do
  execute "hab sup stop #{new_resource.service_name} #{sup_options.join(' ')}" if current_resource.running
end
# restart/reload are composed from the primitive actions; the sleep gives
# the supervisor a moment to settle between the two steps.
action :restart do
  action_stop
  sleep 1
  action_start
end
action :reload do
  action_unload
  sleep 1
  action_load
end
action_class do
  # Build the CLI flags for `hab sup`; flags valid only for a particular
  # subcommand are gated on the action being executed.
  def sup_options
    opts = []
    # certain options are only valid for specific `hab sup` subcommands.
    case action
    when :load
      opts << "--bind #{new_resource.bind}" if new_resource.bind
      opts << "--url #{new_resource.depot_url}" if new_resource.depot_url
      opts << "--group #{new_resource.service_group}" if new_resource.service_group
      opts << "--strategy #{new_resource.strategy}" if new_resource.strategy
      opts << "--topology #{new_resource.topology}" if new_resource.topology
    when :start
      opts << '--permanent-peer' if new_resource.permanent_peer
      opts << "--bind #{new_resource.bind}" if new_resource.bind
      opts << "--config-from #{new_resource.config_from}" if new_resource.config_from
      opts << "--url #{new_resource.depot_url}" if new_resource.depot_url
      opts << "--group #{new_resource.service_group}" if new_resource.service_group
      opts << "--listen-gossip #{new_resource.listen_gossip}" if new_resource.listen_gossip
      opts << "--listen-http #{new_resource.listen_http}" if new_resource.listen_http
      opts << "--org #{new_resource.org}" unless new_resource.org == 'default'
      opts << "--peer #{new_resource.peer}" if new_resource.peer
      opts << "--ring #{new_resource.ring}" if new_resource.ring
      opts << "--strategy #{new_resource.strategy}" if new_resource.strategy
      opts << "--topology #{new_resource.topology}" if new_resource.topology
    end
    opts << "--override-name #{new_resource.override_name}" unless new_resource.override_name == 'default'
    # Split "--flag value" strings into separate argv-style tokens.
    opts.map(&:split).flatten.compact
  end
end
|
# Gem specification for the rest_request gem.
Gem::Specification.new do |s|
  s.name        = 'rest_request'
  s.version     = '0.0.0'
  s.date        = '2012-07-09'
  s.summary     = "Simple tool to create Rest Request and print the JSON response"
  s.description = "Simple tool to create Rest Request and print the JSON response"
  s.authors     = ["Hector Reyes Aleman"]
  s.email       = 'birkoff.h@gmail.com'
  s.files       = ["lib/rest_request.rb"]
  # FIX: the homepage still pointed at the 'hola_birkoffh' tutorial gem this
  # spec was copied from; it must reference this gem's own page.
  s.homepage    = 'http://rubygems.org/gems/rest_request'
end
Fixing the URL of the gem
# Gem specification for the rest_request gem.
Gem::Specification.new do |spec|
  spec.name        = 'rest_request'
  spec.version     = '0.0.1'
  spec.date        = '2012-07-09'
  # Summary and description are intentionally identical.
  spec.summary     = "Simple tool to create Rest Request and print the JSON response"
  spec.description = "Simple tool to create Rest Request and print the JSON response"
  spec.authors     = ["Hector Reyes Aleman"]
  spec.email       = 'birkoff.h@gmail.com'
  spec.homepage    = 'http://rubygems.org/gems/rest_request'
  # Single-file gem.
  spec.files       = ["lib/rest_request.rb"]
end
|
module ActiveAdmin
  module Globalize3
    # Adds a `translated_inputs` form helper that renders one link per locale
    # plus one fieldset per locale for a Globalize-translated model.
    module FormBuilderExtension
      extend ActiveSupport::Concern

      # name is accepted for API compatibility but not rendered.
      # options:
      #   :switch_locale - evaluate labels/inputs under each target locale
      #                    instead of the current one (default false)
      #   :auto_sort     - sort locales alphabetically (default false)
      def translated_inputs(name = "Translations", options = {}, &block)
        options.symbolize_keys!
        switch_locale = options.fetch(:switch_locale, false)
        # FIX: the default used to be the Symbol :false, which is truthy, so
        # sorting was silently always enabled; it must default to boolean false.
        auto_sort = options.fetch(:auto_sort, false)
        # Compute the locale ordering once so tabs and fieldsets agree.
        locales = auto_sort ? I18n.available_locales.sort : I18n.available_locales
        form_buffers.last << template.content_tag(:div, class: "activeadmin-translations") do
          template.content_tag(:ul, class: "available-locales") do
            locales.map do |locale|
              template.content_tag(:li) do
                I18n.with_locale(switch_locale ? locale : I18n.locale) do
                  template.content_tag(:a, I18n.t(:"active_admin.globalize3.language.#{locale}"), href: ".locale-#{locale}")
                end
              end
            end.join.html_safe
          end <<
            # FIX: previously the fieldsets ignored :auto_sort, so the tab order
            # and the panel order could disagree when sorting was requested.
            locales.map do |locale|
              translation = object.translations.find { |t| t.locale.to_s == locale.to_s }
              translation ||= object.translations.build(locale: locale)
              fields = proc do |form|
                form.input(:locale, as: :hidden)
                form.input(:id, as: :hidden)
                I18n.with_locale(switch_locale ? locale : I18n.locale) do
                  block.call(form)
                end
              end
              inputs_for_nested_attributes(
                for: [:translations, translation],
                class: "inputs locale locale-#{translation.locale}",
                &fields
              )
            end.join.html_safe
        end
      end

      module ClassMethods
      end
    end
  end
end
Add auto sort condition also to the form (not just the tab)
module ActiveAdmin
  module Globalize3
    # Adds a `translated_inputs` form helper that renders one link per locale
    # plus one fieldset per locale for a Globalize-translated model.
    module FormBuilderExtension
      extend ActiveSupport::Concern

      # name is accepted for API compatibility but not rendered.
      # options:
      #   :switch_locale - evaluate labels/inputs under each target locale
      #                    instead of the current one (default false)
      #   :auto_sort     - sort locales alphabetically (default false)
      def translated_inputs(name = "Translations", options = {}, &block)
        options.symbolize_keys!
        switch_locale = options.fetch(:switch_locale, false)
        # FIX: the default used to be the Symbol :false, which is truthy, so
        # sorting was silently always enabled; it must default to boolean false.
        auto_sort = options.fetch(:auto_sort, false)
        # Compute the locale ordering once; tabs and fieldsets share it.
        locales = auto_sort ? I18n.available_locales.sort : I18n.available_locales
        form_buffers.last << template.content_tag(:div, class: "activeadmin-translations") do
          template.content_tag(:ul, class: "available-locales") do
            locales.map do |locale|
              template.content_tag(:li) do
                I18n.with_locale(switch_locale ? locale : I18n.locale) do
                  template.content_tag(:a, I18n.t(:"active_admin.globalize3.language.#{locale}"), href: ".locale-#{locale}")
                end
              end
            end.join.html_safe
          end <<
            locales.map do |locale|
              translation = object.translations.find { |t| t.locale.to_s == locale.to_s }
              translation ||= object.translations.build(locale: locale)
              fields = proc do |form|
                form.input(:locale, as: :hidden)
                form.input(:id, as: :hidden)
                I18n.with_locale(switch_locale ? locale : I18n.locale) do
                  block.call(form)
                end
              end
              inputs_for_nested_attributes(
                for: [:translations, translation],
                class: "inputs locale locale-#{translation.locale}",
                &fields
              )
            end.join.html_safe
        end
      end

      module ClassMethods
      end
    end
  end
end
|
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    # Bogus Gateway
    #
    # Fake PayPal-Express-style gateway for tests: every operation returns a
    # canned PaypalExpressResponse and nothing touches the network.
    class BogusExpressGateway < Gateway
      AUTHORIZATION = '53433'
      LIVE_REDIRECT_URL = 'https://www.paypal.com/cgibin/webscr?cmd=_express-checkout&token='
      TEST_REDIRECT_URL = 'https://www.sandbox.paypal.com/cgi-bin/webscr?cmd=_express-checkout&token='
      SUCCESS_MESSAGE = "Bogus Express Gateway: Forced success"
      FAILURE_MESSAGE = "Bogus Express Gateway: Forced failure"
      ERROR_MESSAGE = "Bogus Express Gateway: Use CreditCard number 1 for success, 2 for exception and anything else for error"
      CREDIT_ERROR_MESSAGE = "Bogus Express Gateway: Use trans_id 1 for success, 2 for exception and anything else for error"
      UNSTORE_ERROR_MESSAGE = "Bogus Express Gateway: Use trans_id 1 for success, 2 for exception and anything else for error"
      CAPTURE_ERROR_MESSAGE = "Bogus Express Gateway: Use authorization number 1 for exception, 2 for error and anything else for success"

      self.supported_countries = ['US']
      self.supported_cardtypes = [:bogus]
      self.homepage_url = 'http://example.com'
      self.display_name = 'Bogus Express'

      # Sandbox URL in test mode, live URL otherwise.
      def redirect_url
        test? ? TEST_REDIRECT_URL : LIVE_REDIRECT_URL
      end

      def redirect_url_for(token)
        "#{redirect_url}#{token}"
      end

      def setup_authorization(money, options={})
        requires!(options, :return_url, :cancel_return_url)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          self.response_hash(options),
          { :test => true, }.update(options)
        )
      end

      def setup_purchase(money, options = {})
        requires!(options, :return_url, :cancel_return_url)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end

      # FIX: this method previously referenced an undefined local `options`,
      # so every call raised NameError. It takes no options; return the
      # canned response hash with no overrides.
      def details_for(token)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash,
          { :test => true }
        )
      end

      def authorize(money, options = {})
        requires!(options, :token, :payer_id)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end

      def purchase(money, options = {})
        requires!(options, :token, :payer_id)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end

      def capture(money, authorization, options = {})
        requires!(options, :token, :payer_id)
        PaypalExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end

      # Canned PayPal-style payer/address details, overridable via options.
      def response_hash(options = {})
        {
          :payer => "john@doe.com",
          :token => "EC-11235829203",
          :first_name => "John",
          :middle_name => "Q.",
          :last_name => "Doe",
          :payer_id => "123",
          :payer_business => "John's Diner",
          :street1 => "123 Main Street",
          :street2 => "#100",
          :city_name => "Test City",
          :state_or_province => "Arizona",
          :country => "US",
          :postal_code => "85260",
        }.update(options)
      end
    end
  end
end
Update bogus_express: add a BogusExpressResponse class with canned payer details, build redirect URLs from ENV templates, and fix details_for's undefined `options` reference
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    # Canned response object returned by BogusExpressGateway; every accessor
    # returns a fixed test value.
    class BogusExpressResponse < Response
      def email
        "greg_fu@example.com"
      end
      def name
        "Greg Fu"
      end
      def token
        "1234567890"
      end
      def payer_id
        "123"
      end
      def payer_country
        "US"
      end
      # Fixed address hash (note: 'zip' is an Integer, 'phone' is nil).
      def address
        { 'name' => "Greg",
          'company' => "Fu",
          'address1' => "123 Test St",
          'address2' => "",
          'city' => "Test City",
          'state' => "AZ",
          'country' => "US",
          'zip' => 85001,
          'phone' => nil
        }
      end
    end
    # Bogus Gateway
    #
    # Fake express-checkout gateway for tests: all operations return canned
    # BogusExpressResponse objects and nothing touches the network.
    class BogusExpressGateway < Gateway
      AUTHORIZATION = '53433'
      LIVE_REDIRECT_URL = 'https://www.paypal.com/cgibin/webscr?cmd=_express-checkout&token='
      TEST_REDIRECT_URL = 'https://www.sandbox.paypal.com/cgi-bin/webscr?cmd=_express-checkout&token='
      CONFIRM_TOKEN = "CONFIRM"
      CANCEL_TOKEN = "CANCEL"
      SUCCESS_MESSAGE = "Bogus Express Gateway Success"
      FAILURE_MESSAGE = "Bogus Express Gateway: Forced failure"
      ERROR_MESSAGE = "Bogus Express Gateway: Error message"
      CREDIT_ERROR_MESSAGE = "Bogus Express Gateway: Use trans_id 1 for success, 2 for exception and anything else for error"
      UNSTORE_ERROR_MESSAGE = "Bogus Express Gateway: Use trans_id 1 for success, 2 for exception and anything else for error"
      CAPTURE_ERROR_MESSAGE = "Bogus Express Gateway: Use authorization number 1 for exception, 2 for error and anything else for success"
      self.supported_countries = ['US']
      self.supported_cardtypes = [:bogus]
      self.homepage_url = 'http://example.com'
      self.display_name = 'Bogus Express'
      # Deliberately returns nil here; redirect URLs are built by
      # redirect_url_for from ENV templates (assumption — confirm callers
      # never rely on this returning a URL).
      def redirect_url
      end
      # Formats the success/cancel URL template from ENV with the token.
      # NOTE(review): assumes ENV["success_url"]/ENV["cancel_url"] hold
      # format strings with two slots (token, step) — TODO confirm.
      def redirect_url_for(token, options = {})
        unless options[:cancel]
          ENV["success_url"] % [token, 1]
        else
          ENV["cancel_url"] % [token, 1]
        end
      end
      # Succeeds for any amount except 14000, which forces a failure response.
      def setup_authorization(money, options={})
        requires!(options, :return_url, :cancel_return_url)
        if money != 14000
          BogusExpressResponse.new(
            true,
            SUCCESS_MESSAGE,
            self.response_hash(options),
            { :test => true, }.update(options)
          )
        else
          BogusExpressResponse.new(
            false,
            FAILURE_MESSAGE,
            self.response_hash(options),
            { :test => true, }.update(options)
          )
        end
      end
      def setup_purchase(money, options = {})
        requires!(options, :return_url, :cancel_return_url)
        BogusExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end
      # Takes no options; returns the canned details unchanged.
      def details_for(token)
        BogusExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash,
          { :test => true, }
        )
      end
      def authorize(money, options = {})
        requires!(options, :token, :payer_id)
        BogusExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end
      def purchase(money, options = {})
        requires!(options, :token, :payer_id)
        BogusExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end
      def capture(money, authorization, options = {})
        requires!(options, :token, :payer_id)
        BogusExpressResponse.new(
          true,
          SUCCESS_MESSAGE,
          response_hash(options),
          { :test => true, }.update(options)
        )
      end
      # Canned PayPal-style payer/address details, overridable via options.
      def response_hash(options = {})
        {
          :payer => "john@doe.com",
          :token => "EC-11235829203",
          :first_name => "John",
          :middle_name => "Q.",
          :last_name => "Doe",
          :payer_id => "123",
          :payer_business => "John's Diner",
          :street1 => "123 Main Street",
          :street2 => "#100",
          :city_name => "Test City",
          :state_or_province => "Arizona",
          :country => "US",
          :postal_code => "85260",
        }.update(options)
      end
    end
  end
end
|
require 'active_record/connection_adapters/abstract_adapter'
require 'java'
require 'active_record/connection_adapters/jdbc_adapter_spec'
require 'jdbc_adapter_internal'
require 'bigdecimal'
module ActiveRecord
# Reopened ActiveRecord::Base: adds the JDBC connection entry point and a
# quoting tweak needed by Oracle/Mimer.
class Base
  # Called by ActiveRecord's connection handling for the "jdbc" adapter.
  def self.jdbc_connection(config)
    connection = ConnectionAdapters::JdbcConnection.new(config)
    ConnectionAdapters::JdbcAdapter.new(connection, logger, config)
  end
  alias :attributes_with_quotes_pre_oracle :attributes_with_quotes
  # For Oracle/Mimer over JDBC, substitute the "?" bind marker when the
  # primary key is nil (presumably so the generated id can be bound by the
  # driver — TODO confirm against JdbcSpec::Oracle/Mimer).
  def attributes_with_quotes(include_primary_key = true) #:nodoc:
    aq = attributes_with_quotes_pre_oracle(include_primary_key)
    if connection.class == ConnectionAdapters::JdbcAdapter && (connection.is_a?(JdbcSpec::Oracle) || connection.is_a?(JdbcSpec::Mimer))
      aq[self.class.primary_key] = "?" if include_primary_key && aq[self.class.primary_key].nil?
    end
    aq
  end
end
module ConnectionAdapters
# Shorthand constants for the java.lang/java.net classes used when loading
# JDBC driver classes under JRuby.
module Java
  Class = java.lang.Class
  URL = java.net.URL
  URLClassLoader = java.net.URLClassLoader
end
# Shorthand constants for the java.sql classes used by the adapter.
module Jdbc
  DriverManager = java.sql.DriverManager
  Statement = java.sql.Statement
  Types = java.sql.Types
  # some symbolic constants for the benefit of the JDBC-based
  # JdbcConnection#indexes method
  # (these are 1-based column positions into DatabaseMetaData result sets)
  module IndexMetaData
    INDEX_NAME = 6
    NON_UNIQUE = 4
    TABLE_NAME = 3
    COLUMN_NAME = 9
  end
  # Column positions for DatabaseMetaData#getTables result sets.
  module TableMetaData
    TABLE_CAT = 1
    TABLE_SCHEM = 2
    TABLE_NAME = 3
    TABLE_TYPE = 4
  end
  # Column position for DatabaseMetaData#getPrimaryKeys result sets.
  module PrimaryKeyMetaData
    COLUMN_NAME = 4
  end
end
# I want to use JDBC's DatabaseMetaData#getTypeInfo to choose the best native types to
# use for ActiveRecord's Adapter#native_database_types in a database-independent way,
# but apparently a database driver can return multiple types for a given
# java.sql.Types constant. So this type converter uses some heuristics to try to pick
# the best (most common) type to use. It's not great, it would be better to just
# delegate to each database's existing AR adapter's native_database_types method, but I
# wanted to try to do this in a way that didn't pull in all the other adapters as
# dependencies. Suggestions appreciated.
class JdbcTypeConverter
  # The basic ActiveRecord types, mapped to an array of procs that are used to #select
  # the best type. The procs are used as selectors in order until there is only one
  # type left. If all the selectors are applied and there is still more than one
  # type, an exception will be raised.
  # Each proc receives one getTypeInfo row (string-keyed hash).
  AR_TO_JDBC_TYPES = {
    :string => [ lambda {|r| Jdbc::Types::VARCHAR == r['data_type'].to_i},
                 lambda {|r| r['type_name'] =~ /^varchar/i},
                 lambda {|r| r['type_name'] =~ /^varchar$/i},
                 lambda {|r| r['type_name'] =~ /varying/i}],
    :text => [ lambda {|r| [Jdbc::Types::LONGVARCHAR, Jdbc::Types::CLOB].include?(r['data_type'].to_i)},
               lambda {|r| r['type_name'] =~ /^(text|clob)/i},
               lambda {|r| r['type_name'] =~ /^character large object$/i},
               lambda {|r| r['sql_data_type'] == 2005}],
    :integer => [ lambda {|r| Jdbc::Types::INTEGER == r['data_type'].to_i},
                  lambda {|r| r['type_name'] =~ /^integer$/i},
                  lambda {|r| r['type_name'] =~ /^int4$/i},
                  lambda {|r| r['type_name'] =~ /^int$/i}],
    :decimal => [ lambda {|r| Jdbc::Types::DECIMAL == r['data_type'].to_i},
                  lambda {|r| r['type_name'] =~ /^decimal$/i},
                  lambda {|r| r['type_name'] =~ /^numeric$/i},
                  lambda {|r| r['type_name'] =~ /^number$/i},
                  lambda {|r| r['precision'] == '38'},
                  lambda {|r| r['data_type'] == '2'}],
    :float => [ lambda {|r| [Jdbc::Types::FLOAT,Jdbc::Types::DOUBLE, Jdbc::Types::REAL].include?(r['data_type'].to_i)},
                lambda {|r| r['type_name'] =~ /^float/i},
                lambda {|r| r['type_name'] =~ /^double$/i},
                lambda {|r| r['type_name'] =~ /^real$/i},
                lambda {|r| r['precision'] == '15'}],
    :datetime => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
                   lambda {|r| r['type_name'] =~ /^datetime/i},
                   lambda {|r| r['type_name'] =~ /^timestamp$/i}],
    :timestamp => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
                    lambda {|r| r['type_name'] =~ /^timestamp$/i},
                    lambda {|r| r['type_name'] =~ /^datetime/i} ],
    :time => [ lambda {|r| Jdbc::Types::TIME == r['data_type'].to_i},
               lambda {|r| r['type_name'] =~ /^time$/i},
               lambda {|r| r['type_name'] =~ /^datetime$/i}],
    :date => [ lambda {|r| Jdbc::Types::DATE == r['data_type'].to_i},
               lambda {|r| r['type_name'] =~ /^date$/i}],
    :binary => [ lambda {|r| [Jdbc::Types::LONGVARBINARY,Jdbc::Types::BINARY,Jdbc::Types::BLOB].include?(r['data_type'].to_i)},
                 lambda {|r| r['type_name'] =~ /^blob/i},
                 lambda {|r| r['type_name'] =~ /sub_type 0$/i}, # For FireBird
                 lambda {|r| r['type_name'] =~ /^varbinary$/i}, # We want this sucker for Mimer
                 lambda {|r| r['type_name'] =~ /^binary$/i}, ],
    :boolean => [ lambda {|r| [Jdbc::Types::TINYINT].include?(r['data_type'].to_i)},
                  lambda {|r| r['type_name'] =~ /^bool/i},
                  lambda {|r| r['type_name'] =~ /^tinyint$/i},
                  lambda {|r| r['type_name'] =~ /^decimal$/i}],
  }
  # types: the rows returned by DatabaseMetaData#getTypeInfo.
  def initialize(types)
    @types = types
  end
  # Builds the native_database_types-style map {ar_type => {:name, :limit}}.
  def choose_best_types
    type_map = {}
    AR_TO_JDBC_TYPES.each_key do |k|
      typerow = choose_type(k)
      type_map[k] = { :name => typerow['type_name'].downcase }
      type_map[k][:limit] = typerow['precision'] && typerow['precision'].to_i if [:integer, :string, :decimal].include?(k)
      type_map[k][:limit] = 1 if k == :boolean
    end
    type_map
  end
  # Applies the selector procs in order, narrowing the candidate rows until
  # exactly one remains; raises if the heuristics cannot decide.
  def choose_type(ar_type)
    procs = AR_TO_JDBC_TYPES[ar_type]
    types = @types
    procs.each do |p|
      new_types = types.select(&p)
      return new_types.first if new_types.length == 1
      # Keep the narrowed set only if the selector matched something.
      types = new_types if new_types.length > 0
    end
    raise "unable to choose type from: #{types.collect{|t| [t['type_name'],t]}.inspect} for #{ar_type}"
  end
end
# Loads a JDBC driver class by its Java name and registers it with
# java.sql.DriverManager.
class JdbcDriver
  def self.load(driver)
    # Derive a valid Ruby constant from the Java class name, e.g.
    # "com.mysql.jdbc.Driver" => "Com_mysql_jdbc_Driver".
    driver_class_const = (driver[0...1].capitalize + driver[1..driver.length]).gsub(/\./, '_')
    unless Jdbc.const_defined?(driver_class_const)
      Jdbc.module_eval do
        include_class(driver) {|p,c| driver_class_const }
      end
      # Explicit registration — presumably classes loaded through JRuby are
      # not auto-registered with DriverManager (TODO confirm).
      Jdbc::DriverManager.registerDriver(Jdbc.const_get(driver_class_const).new)
    end
  end
end
# Column subclass that mixes in database-specific column behavior chosen by
# matching the configured JDBC driver class name.
class JdbcColumn < Column
  attr_writer :limit, :precision
  # Driver-name pattern => proc extending the column with the matching
  # JdbcSpec column module.
  COLUMN_TYPES = {
    /oracle/i => lambda {|cfg,col| col.extend(JdbcSpec::Oracle::Column)},
    /mysql/i => lambda {|cfg,col| col.extend(JdbcSpec::MySQL::Column)},
    /postgre/i => lambda {|cfg,col| col.extend(JdbcSpec::PostgreSQL::Column)},
    /sqlserver|tds/i => lambda {|cfg,col| col.extend(JdbcSpec::MsSQL::Column)},
    /hsqldb|\.h2\./i => lambda {|cfg,col| col.extend(JdbcSpec::HSQLDB::Column)},
    /derby/i => lambda {|cfg,col| col.extend(JdbcSpec::Derby::Column)},
    /db2/i => lambda {|cfg,col|
      # The DB2 driver also serves Derby network connections; distinguish
      # them by the URL scheme.
      if cfg[:url] =~ /^jdbc:derby:net:/
        col.extend(JdbcSpec::Derby::Column)
      else
        col.extend(JdbcSpec::DB2::Column)
      end }
  }
  def initialize(config, name, default, *args)
    ds = config[:driver].to_s
    # Idiom fix: use #each_pair instead of a `for ... in` loop, which leaks
    # its loop variables into the enclosing scope. Behavior is unchanged:
    # every matching spec module is still applied in hash order.
    COLUMN_TYPES.each_pair do |reg, func|
      func.call(config, self) if reg === ds
    end
    super(name, default_value(default), *args)
    init_column(name, default, *args)
  end
  # Hook overridden by driver-specific column modules; default is a no-op.
  def init_column(*args)
  end
  # Hook allowing driver-specific modules to massage the default value;
  # the base implementation passes it through unchanged.
  def default_value(val)
    val
  end
end
# Wraps the Java-side JDBC connection. Much of the query machinery
# (execute_query, columns, tables, set_native_database_types, set_connection,
# primary_keys, ...) is implemented natively in jdbc_adapter_internal.
class JdbcConnection
  attr_reader :adapter
  def initialize(config)
    @config = config.symbolize_keys!
    if @config[:jndi]
      configure_jndi
    else
      configure_jdbc
    end
    # Native method: caches the driver's type metadata on this connection.
    set_native_database_types
    @stmts = {}
  rescue Exception => e
    # NOTE(review): rescuing Exception swallows even SystemExit/Interrupt
    # and discards the original backtrace; kept as-is to preserve behavior.
    raise "The driver encountered an error: #{e}"
  end
  # Wires up the adapter and gives it a copy of the native type map to
  # modify without corrupting the cached original.
  def adapter=(adapt)
    @adapter = adapt
    @tps = {}
    @native_types.each_pair {|k,v| @tps[k] = v.inject({}) {|memo,kv| memo.merge({kv.first => (kv.last.dup rescue kv.last)})}}
    adapt.modify_types(@tps)
  end
  # Default JDBC introspection for index metadata on the JdbcConnection.
  # This is currently used for migrations by JdbcSpec::HSQLDB and JdbcSpec::Derby
  # indexes with a little filtering tacked on.
  #
  # JDBC index metadata is denormalized (multiple rows may be returned for
  # one index, one row per column in the index), so a simple block-based
  # filter like that used for tables doesn't really work here. Callers
  # should filter the return from this method instead.
  def indexes(table_name, name = nil)
    metadata = @connection.getMetaData
    unless String === table_name
      table_name = table_name.to_s
    else
      table_name = table_name.dup
    end
    # Match the database's identifier-case convention before the lookup.
    table_name.upcase! if metadata.storesUpperCaseIdentifiers
    table_name.downcase! if metadata.storesLowerCaseIdentifiers
    resultset = metadata.getIndexInfo(nil, nil, table_name, false, false)
    primary_keys = primary_keys(table_name)
    indexes = []
    current_index = nil
    while resultset.next
      index_name = resultset.get_string(Jdbc::IndexMetaData::INDEX_NAME).downcase
      column_name = resultset.get_string(Jdbc::IndexMetaData::COLUMN_NAME).downcase
      next if primary_keys.include? column_name
      # We are working on a new index
      if current_index != index_name
        current_index = index_name
        table_name = resultset.get_string(Jdbc::IndexMetaData::TABLE_NAME).downcase
        non_unique = resultset.get_boolean(Jdbc::IndexMetaData::NON_UNIQUE)
        # empty list for column names, we'll add to that in just a bit
        indexes << IndexDefinition.new(table_name, index_name, !non_unique, [])
      end
      # One or more columns can be associated with an index
      indexes.last.columns << column_name
    end
    resultset.close
    indexes
  rescue
    # Reconnect and retry when the connection has gone away; otherwise
    # re-raise. NOTE(review): the retry is unbounded if reconnects keep
    # producing closed connections — confirm this is acceptable.
    if @connection.is_closed
      reconnect!
      retry
    else
      raise
    end
  end
  private
  # Obtains the connection from a JNDI DataSource; infers the driver class
  # from connection metadata when none was configured.
  def configure_jndi
    jndi = @config[:jndi].to_s
    ctx = javax.naming.InitialContext.new
    ds = ctx.lookup(jndi)
    set_connection ds.connection
    unless @config[:driver]
      @config[:driver] = @connection.meta_data.connection.java_class.name
    end
  end
  # Loads the configured driver and opens the connection via DriverManager.
  def configure_jdbc
    driver = @config[:driver].to_s
    user = @config[:username].to_s
    pass = @config[:password].to_s
    url = @config[:url].to_s
    unless driver && url
      # NOTE(review): this guard can never fire — to_s always returns a
      # truthy (possibly empty) String; an empty? check would be needed.
      raise ArgumentError, "jdbc adapter requires driver class and url"
    end
    if driver =~ /mysql/i
      # Append MySQL connector options: NULL for zero dates and no
      # JDBC-compliant truncation (presumably to match ActiveRecord's
      # expectations — TODO confirm).
      div = url =~ /\?/ ? '&' : '?'
      url = "#{url}#{div}zeroDateTimeBehavior=convertToNull&jdbcCompliantTruncation=false"
      @config[:url] = url
    end
    JdbcDriver.load(driver)
    set_connection Jdbc::DriverManager.getConnection(url, user, pass)
  end
end
# The generic "JDBC" ActiveRecord adapter. Database-specific behavior is
# mixed in at initialize time from JdbcSpec modules keyed on the configured
# driver class name.
class JdbcAdapter < AbstractAdapter
  # Driver-name pattern => proc extending this adapter with the matching
  # JdbcSpec module.
  ADAPTER_TYPES = {
    /oracle/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Oracle)},
    /mimer/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Mimer)},
    /postgre/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::PostgreSQL)},
    /mysql/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::MySQL)},
    /sqlserver|tds/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::MsSQL)},
    /hsqldb|\.h2\./i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::HSQLDB)},
    /derby/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Derby)},
    /db2/i => lambda{|cfg,adapt|
      # DB2 driver also serves Derby network URLs; pick by URL scheme.
      if cfg[:url] =~ /^jdbc:derby:net:/
        adapt.extend(JdbcSpec::Derby)
      else
        adapt.extend(JdbcSpec::DB2)
      end},
    /firebird/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::FireBird)}
  }
  def initialize(connection, logger, config)
    super(connection, logger)
    @config = config
    ds = config[:driver].to_s
    for reg, func in ADAPTER_TYPES
      if reg === ds
        func.call(@config,self)
      end
    end
    # Hand ourselves back so the connection can let us modify its type map.
    connection.adapter = self
  end
  # Hook for JdbcSpec modules to tweak the native type map; base is identity.
  def modify_types(tp)
    tp
  end
  def adapter_name #:nodoc:
    'JDBC'
  end
  def supports_migrations?
    true
  end
  def native_database_types #:nodoc:
    @connection.native_database_types
  end
  def database_name #:nodoc:
    @connection.database_name
  end
  # Reverse-maps a native SQL type string (e.g. "varchar(255)") to the
  # [ar_type, limit] pair it corresponds to in native_database_types.
  def native_sql_to_type(tp)
    if /^(.*?)\(([0-9]+)\)/ =~ tp
      tname = $1
      limit = $2.to_i
      ntype = native_database_types
      if ntype[:primary_key] == tp
        return :primary_key,nil
      else
        ntype.each do |name,val|
          if name == :primary_key
            next
          end
          if val[:name].downcase == tname.downcase && (val[:limit].nil? || val[:limit].to_i == limit)
            return name,limit
          end
        end
      end
    elsif /^(.*?)/ =~ tp
      # NOTE(review): this non-greedy pattern matches the empty string for
      # any input, so $1 is always "" here and the final `else` branch below
      # is unreachable — looks unintentional; confirm before relying on it.
      tname = $1
      ntype = native_database_types
      if ntype[:primary_key] == tp
        return :primary_key,nil
      else
        ntype.each do |name,val|
          if val[:name].downcase == tname.downcase && val[:limit].nil?
            return name,nil
          end
        end
      end
    else
      return :string,255
    end
    return nil,nil
  end
  # Drops the current connection (best effort) and opens a fresh one.
  def reconnect!
    @connection.close rescue nil
    @connection = JdbcConnection.new(@config)
  end
  def select_all(sql, name = nil)
    select(sql, name)
  end
  def select_one(sql, name = nil)
    select(sql, name).first
  end
  def execute(sql, name = nil)
    _execute(sql,name)
  end
  # we need to do it this way, to allow Rails stupid tests to always work
  # even if we define a new execute method. Instead of mixing in a new
  # execute, an _execute should be mixed in.
  def _execute(sql, name = nil)
    log_no_bench(sql, name) do
      # Ruby 1.8 `when pattern:` colon syntax (invalid in 1.9+);
      # dispatch to the appropriate native call by statement kind.
      case sql.strip
      when /^(select|show)/i:
        @connection.execute_query(sql)
      when /^insert/i:
        @connection.execute_insert(sql)
      else
        @connection.execute_update(sql)
      end
    end
  end
  def update(sql, name = nil) #:nodoc:
    execute(sql, name)
  end
  def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
    # NOTE(review): `name = nil` inside the call passes nil and discards the
    # caller-supplied name — looks unintentional; confirm before changing.
    id = execute(sql, name = nil)
    id_value || id
  end
  def columns(table_name, name = nil)
    @connection.columns(table_name.to_s)
  end
  def tables
    @connection.tables
  end
  def begin_db_transaction
    @connection.begin
  end
  def commit_db_transaction
    @connection.commit
  end
  def rollback_db_transaction
    @connection.rollback
  end
  private
  def select(sql, name=nil)
    execute(sql,name)
  end
  # Logging wrapper that records the statement without benchmarking it
  # (elapsed time is always reported as 0).
  def log_no_bench(sql, name)
    if block_given?
      if @logger and @logger.level <= Logger::INFO
        result = yield
        log_info(sql, name, 0)
        result
      else
        yield
      end
    else
      log_info(sql, name, 0)
      nil
    end
  rescue Exception => e
    # Log message and raise exception.
    message = "#{e.class.name}: #{e.message}: #{sql}"
    log_info(message, name, 0)
    raise ActiveRecord::StatementInvalid, message
  end
end
end
end
Add a helper to choose between pgsql float4 and float8, corresponding to JDBC REAL and DOUBLE. For this case, prefer REAL to DOUBLE.
git-svn-id: 0d15740d2b2329e9094a4e8932733054e5d3e04c@520 8ba958d5-0c1a-0410-94a6-a65dfc1b28a6
require 'active_record/connection_adapters/abstract_adapter'
require 'java'
require 'active_record/connection_adapters/jdbc_adapter_spec'
require 'jdbc_adapter_internal'
require 'bigdecimal'
module ActiveRecord
# Reopened ActiveRecord::Base: adds the JDBC connection entry point and a
# quoting tweak needed by Oracle/Mimer.
class Base
  # Called by ActiveRecord's connection handling for the "jdbc" adapter.
  def self.jdbc_connection(config)
    connection = ConnectionAdapters::JdbcConnection.new(config)
    ConnectionAdapters::JdbcAdapter.new(connection, logger, config)
  end
  alias :attributes_with_quotes_pre_oracle :attributes_with_quotes
  # For Oracle/Mimer over JDBC, substitute the "?" bind marker when the
  # primary key is nil (presumably so the generated id can be bound by the
  # driver — TODO confirm against JdbcSpec::Oracle/Mimer).
  def attributes_with_quotes(include_primary_key = true) #:nodoc:
    aq = attributes_with_quotes_pre_oracle(include_primary_key)
    if connection.class == ConnectionAdapters::JdbcAdapter && (connection.is_a?(JdbcSpec::Oracle) || connection.is_a?(JdbcSpec::Mimer))
      aq[self.class.primary_key] = "?" if include_primary_key && aq[self.class.primary_key].nil?
    end
    aq
  end
end
module ConnectionAdapters
# Shorthand constants for the java.lang/java.net classes used when loading
# JDBC driver classes under JRuby.
module Java
  Class = java.lang.Class
  URL = java.net.URL
  URLClassLoader = java.net.URLClassLoader
end
# Shorthand constants for the java.sql classes used by the adapter.
module Jdbc
  DriverManager = java.sql.DriverManager
  Statement = java.sql.Statement
  Types = java.sql.Types
  # some symbolic constants for the benefit of the JDBC-based
  # JdbcConnection#indexes method
  # (these are 1-based column positions into DatabaseMetaData result sets)
  module IndexMetaData
    INDEX_NAME = 6
    NON_UNIQUE = 4
    TABLE_NAME = 3
    COLUMN_NAME = 9
  end
  # Column positions for DatabaseMetaData#getTables result sets.
  module TableMetaData
    TABLE_CAT = 1
    TABLE_SCHEM = 2
    TABLE_NAME = 3
    TABLE_TYPE = 4
  end
  # Column position for DatabaseMetaData#getPrimaryKeys result sets.
  module PrimaryKeyMetaData
    COLUMN_NAME = 4
  end
end
# I want to use JDBC's DatabaseMetaData#getTypeInfo to choose the best native types to
# use for ActiveRecord's Adapter#native_database_types in a database-independent way,
# but apparently a database driver can return multiple types for a given
# java.sql.Types constant. So this type converter uses some heuristics to try to pick
# the best (most common) type to use. It's not great, it would be better to just
# delegate to each database's existin AR adapter's native_database_types method, but I
# wanted to try to do this in a way that didn't pull in all the other adapters as
# dependencies. Suggestions appreciated.
class JdbcTypeConverter
# The basic ActiveRecord types, mapped to an array of procs that are used to #select
# the best type. The procs are used as selectors in order until there is only one
# type left. If all the selectors are applied and there is still more than one
# type, an exception will be raised.
AR_TO_JDBC_TYPES = {
:string => [ lambda {|r| Jdbc::Types::VARCHAR == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^varchar/i},
lambda {|r| r['type_name'] =~ /^varchar$/i},
lambda {|r| r['type_name'] =~ /varying/i}],
:text => [ lambda {|r| [Jdbc::Types::LONGVARCHAR, Jdbc::Types::CLOB].include?(r['data_type'].to_i)},
lambda {|r| r['type_name'] =~ /^(text|clob)/i},
lambda {|r| r['type_name'] =~ /^character large object$/i},
lambda {|r| r['sql_data_type'] == 2005}],
:integer => [ lambda {|r| Jdbc::Types::INTEGER == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^integer$/i},
lambda {|r| r['type_name'] =~ /^int4$/i},
lambda {|r| r['type_name'] =~ /^int$/i}],
:decimal => [ lambda {|r| Jdbc::Types::DECIMAL == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^decimal$/i},
lambda {|r| r['type_name'] =~ /^numeric$/i},
lambda {|r| r['type_name'] =~ /^number$/i},
lambda {|r| r['precision'] == '38'},
lambda {|r| r['data_type'] == '2'}],
:float => [ lambda {|r| [Jdbc::Types::FLOAT,Jdbc::Types::DOUBLE, Jdbc::Types::REAL].include?(r['data_type'].to_i)},
lambda {|r| r['data_type'].to_i == Jdbc::Types::REAL}, #Prefer REAL to DOUBLE for Postgresql
lambda {|r| r['type_name'] =~ /^float/i},
lambda {|r| r['type_name'] =~ /^double$/i},
lambda {|r| r['type_name'] =~ /^real$/i},
lambda {|r| r['precision'] == '15'}],
:datetime => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^datetime/i},
lambda {|r| r['type_name'] =~ /^timestamp$/i}],
:timestamp => [ lambda {|r| Jdbc::Types::TIMESTAMP == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^timestamp$/i},
lambda {|r| r['type_name'] =~ /^datetime/i} ],
:time => [ lambda {|r| Jdbc::Types::TIME == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^time$/i},
lambda {|r| r['type_name'] =~ /^datetime$/i}],
:date => [ lambda {|r| Jdbc::Types::DATE == r['data_type'].to_i},
lambda {|r| r['type_name'] =~ /^date$/i}],
:binary => [ lambda {|r| [Jdbc::Types::LONGVARBINARY,Jdbc::Types::BINARY,Jdbc::Types::BLOB].include?(r['data_type'].to_i)},
lambda {|r| r['type_name'] =~ /^blob/i},
lambda {|r| r['type_name'] =~ /sub_type 0$/i}, # For FireBird
lambda {|r| r['type_name'] =~ /^varbinary$/i}, # We want this sucker for Mimer
lambda {|r| r['type_name'] =~ /^binary$/i}, ],
:boolean => [ lambda {|r| [Jdbc::Types::TINYINT].include?(r['data_type'].to_i)},
lambda {|r| r['type_name'] =~ /^bool/i},
lambda {|r| r['type_name'] =~ /^tinyint$/i},
lambda {|r| r['type_name'] =~ /^decimal$/i}],
}
# types - an Array of JDBC type-metadata rows (hash-like, keyed by
# 'type_name', 'data_type', 'precision', ...) to select from later.
def initialize(types)
  @types = types
end
# Builds a map from ActiveRecord type symbols to the best-matching native
# type descriptor, e.g. { :integer => { :name => "int4", :limit => 10 } }.
# :limit is recorded only for sized types; booleans are pinned to limit 1.
def choose_best_types
  AR_TO_JDBC_TYPES.keys.each_with_object({}) do |ar_type, map|
    row = choose_type(ar_type)
    entry = { :name => row['type_name'].downcase }
    entry[:limit] = row['precision'] && row['precision'].to_i if [:integer, :string, :decimal].include?(ar_type)
    entry[:limit] = 1 if ar_type == :boolean
    map[ar_type] = entry
  end
end
# Successively narrows the candidate rows with the predicate list
# registered for +ar_type+, returning as soon as exactly one row remains.
# A predicate that would eliminate every candidate is ignored (the prior
# candidate set is kept). Raises if no predicate yields a unique match.
def choose_type(ar_type)
  candidates = @types
  AR_TO_JDBC_TYPES[ar_type].each do |predicate|
    narrowed = candidates.select(&predicate)
    return narrowed.first if narrowed.size == 1
    candidates = narrowed unless narrowed.empty?
  end
  raise "unable to choose type from: #{candidates.collect{|t| [t['type_name'],t]}.inspect} for #{ar_type}"
end
end
class JdbcDriver
  # Ensures the given JDBC driver class is imported under the Jdbc module
  # (as a constant derived from its dotted Java name) and registered with
  # the JDBC DriverManager. Repeated calls for the same driver are no-ops.
  def self.load(driver)
    const_name = (driver[0...1].capitalize + driver[1..driver.length]).gsub(/\./, '_')
    return if Jdbc.const_defined?(const_name)
    Jdbc.module_eval do
      include_class(driver) { |p, c| const_name }
    end
    Jdbc::DriverManager.registerDriver(Jdbc.const_get(const_name).new)
  end
end
class JdbcColumn < Column
  attr_writer :limit, :precision
  # Maps driver-name patterns to procs that mix the corresponding
  # database-specific column behavior into a column instance.
  COLUMN_TYPES = {
    /oracle/i => lambda {|cfg,col| col.extend(JdbcSpec::Oracle::Column)},
    /mysql/i => lambda {|cfg,col| col.extend(JdbcSpec::MySQL::Column)},
    /postgre/i => lambda {|cfg,col| col.extend(JdbcSpec::PostgreSQL::Column)},
    /sqlserver|tds/i => lambda {|cfg,col| col.extend(JdbcSpec::MsSQL::Column)},
    /hsqldb|\.h2\./i => lambda {|cfg,col| col.extend(JdbcSpec::HSQLDB::Column)},
    /derby/i => lambda {|cfg,col| col.extend(JdbcSpec::Derby::Column)},
    /db2/i => lambda {|cfg,col|
      # Derby network-server URLs use the DB2 driver; treat them as Derby.
      if cfg[:url] =~ /^jdbc:derby:net:/
        col.extend(JdbcSpec::Derby::Column)
      else
        col.extend(JdbcSpec::DB2::Column)
      end }
  }
  # Extends the new column with every driver-specific module whose pattern
  # matches config[:driver], then defers to the base Column initializer.
  def initialize(config, name, default, *args)
    ds = config[:driver].to_s
    # FIX(idiom): `for ... in` replaced with #each so loop locals don't
    # leak into the method scope.
    COLUMN_TYPES.each do |reg, func|
      func.call(config, self) if reg === ds
    end
    super(name, default_value(default), *args)
    init_column(name, default, *args)
  end
  # Hook for driver-specific modules to post-process the column; no-op here.
  def init_column(*args)
  end
  # Hook for driver-specific default-value coercion; identity by default.
  def default_value(val)
    val
  end
end
class JdbcConnection
  attr_reader :adapter
  # Opens the underlying JDBC connection (via JNDI when config[:jndi] is
  # set, otherwise via driver class + URL), then caches the database's
  # native type metadata and an empty prepared-statement cache.
  def initialize(config)
    @config = config.symbolize_keys!
    if @config[:jndi]
      configure_jndi
    else
      configure_jdbc
    end
    set_native_database_types
    @stmts = {}
  rescue Exception => e
    # NOTE(review): rescuing Exception (not StandardError) is very broad;
    # presumably intended to also wrap Java-native driver errors under
    # JRuby — confirm before narrowing.
    raise "The driver encountered an error: #{e}"
  end
  # Stores the owning adapter and hands it a copy of the native type map
  # (leaf values dup'ed where possible) so adapter-specific tweaks don't
  # mutate the cached @native_types.
  def adapter=(adapt)
    @adapter = adapt
    @tps = {}
    @native_types.each_pair {|k,v| @tps[k] = v.inject({}) {|memo,kv| memo.merge({kv.first => (kv.last.dup rescue kv.last)})}}
    adapt.modify_types(@tps)
  end
  # Default JDBC introspection for index metadata on the JdbcConnection.
  # This is currently used for migrations by JdbcSpec::HSQDLB and JdbcSpec::Derby
  # indexes with a little filtering tacked on.
  #
  # JDBC index metadata is denormalized (multiple rows may be returned for
  # one index, one row per column in the index), so a simple block-based
  # filter like that used for tables doesn't really work here. Callers
  # should filter the return from this method instead.
  def indexes(table_name, name = nil)
    metadata = @connection.getMetaData
    unless String === table_name
      table_name = table_name.to_s
    else
      table_name = table_name.dup
    end
    # Match the database's identifier-case convention before the lookup.
    table_name.upcase! if metadata.storesUpperCaseIdentifiers
    table_name.downcase! if metadata.storesLowerCaseIdentifiers
    resultset = metadata.getIndexInfo(nil, nil, table_name, false, false)
    primary_keys = primary_keys(table_name)
    indexes = []
    current_index = nil
    while resultset.next
      index_name = resultset.get_string(Jdbc::IndexMetaData::INDEX_NAME).downcase
      column_name = resultset.get_string(Jdbc::IndexMetaData::COLUMN_NAME).downcase
      next if primary_keys.include? column_name
      # We are working on a new index
      if current_index != index_name
        current_index = index_name
        table_name = resultset.get_string(Jdbc::IndexMetaData::TABLE_NAME).downcase
        non_unique = resultset.get_boolean(Jdbc::IndexMetaData::NON_UNIQUE)
        # empty list for column names, we'll add to that in just a bit
        indexes << IndexDefinition.new(table_name, index_name, !non_unique, [])
      end
      # One or more columns can be associated with an index
      indexes.last.columns << column_name
    end
    resultset.close
    indexes
  rescue
    # Retry once after reconnecting when the JDBC connection went away.
    if @connection.is_closed
      reconnect!
      retry
    else
      raise
    end
  end
  private
  # Looks up a DataSource via JNDI and derives the driver name from the
  # live connection's metadata when not configured explicitly.
  def configure_jndi
    jndi = @config[:jndi].to_s
    ctx = javax.naming.InitialContext.new
    ds = ctx.lookup(jndi)
    set_connection ds.connection
    unless @config[:driver]
      @config[:driver] = @connection.meta_data.connection.java_class.name
    end
  end
  # Connects directly through DriverManager using the configured driver
  # class, URL and credentials.
  def configure_jdbc
    driver = @config[:driver].to_s
    user = @config[:username].to_s
    pass = @config[:password].to_s
    url = @config[:url].to_s
    # NOTE(review): driver/url are Strings here, so `driver && url` is
    # always truthy; presumably meant to test non-emptiness — confirm.
    unless driver && url
      raise ArgumentError, "jdbc adapter requires driver class and url"
    end
    if driver =~ /mysql/i
      # Normalize MySQL behavior: NULL for zero dates, no strict truncation.
      div = url =~ /\?/ ? '&' : '?'
      url = "#{url}#{div}zeroDateTimeBehavior=convertToNull&jdbcCompliantTruncation=false"
      @config[:url] = url
    end
    JdbcDriver.load(driver)
    set_connection Jdbc::DriverManager.getConnection(url, user, pass)
  end
end
class JdbcAdapter < AbstractAdapter
ADAPTER_TYPES = {
/oracle/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Oracle)},
/mimer/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Mimer)},
/postgre/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::PostgreSQL)},
/mysql/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::MySQL)},
/sqlserver|tds/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::MsSQL)},
/hsqldb|\.h2\./i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::HSQLDB)},
/derby/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::Derby)},
/db2/i => lambda{|cfg,adapt|
if cfg[:url] =~ /^jdbc:derby:net:/
adapt.extend(JdbcSpec::Derby)
else
adapt.extend(JdbcSpec::DB2)
end},
/firebird/i => lambda{|cfg,adapt| adapt.extend(JdbcSpec::FireBird)}
}
# Extends this adapter with every driver-specific spec module whose
# pattern matches config[:driver], then registers itself with the
# connection so the native type map can be customized.
def initialize(connection, logger, config)
  super(connection, logger)
  @config = config
  ds = config[:driver].to_s
  # FIX(idiom): `for ... in` replaced with #each so loop locals don't leak.
  ADAPTER_TYPES.each do |reg, func|
    func.call(@config, self) if reg === ds
  end
  connection.adapter = self
end
def modify_types(tp)
tp
end
def adapter_name #:nodoc:
'JDBC'
end
def supports_migrations?
true
end
def native_database_types #:nodoc:
@connection.native_database_types
end
def database_name #:nodoc:
@connection.database_name
end
def native_sql_to_type(tp)
if /^(.*?)\(([0-9]+)\)/ =~ tp
tname = $1
limit = $2.to_i
ntype = native_database_types
if ntype[:primary_key] == tp
return :primary_key,nil
else
ntype.each do |name,val|
if name == :primary_key
next
end
if val[:name].downcase == tname.downcase && (val[:limit].nil? || val[:limit].to_i == limit)
return name,limit
end
end
end
elsif /^(.*?)/ =~ tp
tname = $1
ntype = native_database_types
if ntype[:primary_key] == tp
return :primary_key,nil
else
ntype.each do |name,val|
if val[:name].downcase == tname.downcase && val[:limit].nil?
return name,nil
end
end
end
else
return :string,255
end
return nil,nil
end
def reconnect!
@connection.close rescue nil
@connection = JdbcConnection.new(@config)
end
def select_all(sql, name = nil)
select(sql, name)
end
def select_one(sql, name = nil)
select(sql, name).first
end
def execute(sql, name = nil)
_execute(sql,name)
end
# we need to do it this way, to allow Rails stupid tests to always work
# even if we define a new execute method. Instead of mixing in a new
# execute, an _execute should be mixed in.
# Dispatches the statement to the query/insert/update execution path
# based on the leading SQL keyword, logging via log_no_bench.
def _execute(sql, name = nil)
  log_no_bench(sql, name) do
    # FIX: `when expr:` is Ruby 1.8 syntax that was removed in 1.9;
    # use `when expr then` so this parses on modern Ruby.
    case sql.strip
    when /^(select|show)/i then @connection.execute_query(sql)
    when /^insert/i then @connection.execute_insert(sql)
    else @connection.execute_update(sql)
    end
  end
end
def update(sql, name = nil) #:nodoc:
execute(sql, name)
end
# Executes an INSERT. Returns the caller-supplied id_value when given,
# otherwise the value produced by the execute call.
def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
  # FIX: the original called execute(sql, name = nil), which assigned nil
  # to +name+ and discarded the caller's statement name before logging.
  id = execute(sql, name)
  id_value || id
end
def columns(table_name, name = nil)
@connection.columns(table_name.to_s)
end
def tables
@connection.tables
end
def begin_db_transaction
@connection.begin
end
def commit_db_transaction
@connection.commit
end
def rollback_db_transaction
@connection.rollback
end
private
def select(sql, name=nil)
execute(sql,name)
end
# Runs the given block (the actual statement execution) while logging the
# SQL without benchmarking. Failures are logged and re-raised as
# ActiveRecord::StatementInvalid.
def log_no_bench(sql, name)
  if block_given?
    # FIX(idiom): use && rather than `and` to avoid low-precedence traps.
    if @logger && @logger.level <= Logger::INFO
      result = yield
      log_info(sql, name, 0)
      result
    else
      yield
    end
  else
    log_info(sql, name, 0)
    nil
  end
rescue Exception => e
  # NOTE(review): rescuing Exception is very broad, but is likely
  # deliberate so JRuby/Java-native errors are wrapped too — confirm.
  # Log message and raise exception.
  message = "#{e.class.name}: #{e.message}: #{sql}"
  log_info(message, name, 0)
  raise ActiveRecord::StatementInvalid, message
end
end
end
end
|
module Bitcoin::Storage::Backends::SequelMigrations
  # Creates every table/view of the sequel storage schema that does not
  # exist yet. Safe to call repeatedly.
  def migrate
    unless @db.tables.include?(:blk)
      @db.create_table :blk do
        primary_key :id
        column :hash, :bytea, :null => false, :unique => true, :index => true
        column :depth, :int, :null => false, :index => true
        column :version, :bigint, :null => false
        column :prev_hash, :bytea, :null => false, :index => true
        column :mrkl_root, :bytea, :null => false
        column :time, :bigint, :null => false
        column :bits, :bigint, :null => false
        column :nonce, :bigint, :null => false
        column :blk_size, :int, :null => false
        column :chain, :int, :null => false
        # FIX: cumulative chain work quickly exceeds the 32-bit int range;
        # store it as bigint to avoid overflow.
        column :work, :bigint, :index => true
      end
    end
    unless @db.tables.include?(:tx)
      @db.create_table :tx do
        primary_key :id
        column :hash, :bytea, :null => false, :unique => true, :index => true
        column :version, :bigint, :null => false
        column :lock_time, :bigint, :null => false
        column :coinbase, :bool, :null => false
        column :tx_size, :int, :null => false
      end
    end
    unless @db.tables.include?(:blk_tx)
      @db.create_table :blk_tx do
        column :blk_id, :int, :null => false, :index => true
        column :tx_id, :int, :null => false, :index => true
        column :idx, :int, :null => false
      end
    end
    unless @db.tables.include?(:txin)
      @db.create_table :txin do
        primary_key :id
        column :tx_id, :int, :null => false, :index => true
        column :tx_idx, :int, :null => false
        column :script_sig, :bytea, :null => false
        column :prev_out, :bytea, :null => false, :index => true
        column :prev_out_index, :bigint, :null => false
        column :sequence, :bigint, :null => false
      end
    end
    unless @db.tables.include?(:txout)
      @db.create_table :txout do
        primary_key :id
        column :tx_id, :int, :null => false, :index => true
        column :tx_idx, :int, :null => false
        column :pk_script, :bytea, :null => false, :index => true
        column :value, :bigint
        column :type, :int, :null => false, :index => true
      end
    end
    unless @db.tables.include?(:addr)
      @db.create_table :addr do
        primary_key :id
        column :hash160, String, :null => false, :index => true
      end
    end
    unless @db.tables.include?(:addr_txout)
      @db.create_table :addr_txout do
        column :addr_id, :int, :null => false, :index => true
        column :txout_id, :int, :null => false, :index => true
      end
    end
    unless @db.views.include?(:unconfirmed)
      @db.create_view(:unconfirmed,
        "SELECT * FROM tx WHERE NOT EXISTS " +
        "(SELECT 1 FROM blk_tx WHERE blk_tx.tx_id = tx.id)" +
        "ORDER BY tx.id DESC")
    end
  end
end
Store cumulative block work as bigint, since accumulated proof-of-work exceeds the 32-bit integer range.
# Idempotent schema setup for the Sequel storage backend: each table/view
# is created only when it does not exist yet.
module Bitcoin::Storage::Backends::SequelMigrations
  def migrate
    # Block headers; :work is bigint because cumulative proof-of-work
    # exceeds 32-bit range.
    unless @db.tables.include?(:blk)
      @db.create_table :blk do
        primary_key :id
        column :hash, :bytea, :null => false, :unique => true, :index => true
        column :depth, :int, :null => false, :index => true
        column :version, :bigint, :null => false
        column :prev_hash, :bytea, :null => false, :index => true
        column :mrkl_root, :bytea, :null => false
        column :time, :bigint, :null => false
        column :bits, :bigint, :null => false
        column :nonce, :bigint, :null => false
        column :blk_size, :int, :null => false
        column :chain, :int, :null => false
        column :work, :bigint, :index => true
      end
    end
    # Transactions.
    unless @db.tables.include?(:tx)
      @db.create_table :tx do
        primary_key :id
        column :hash, :bytea, :null => false, :unique => true, :index => true
        column :version, :bigint, :null => false
        column :lock_time, :bigint, :null => false
        column :coinbase, :bool, :null => false
        column :tx_size, :int, :null => false
      end
    end
    # Join table: position (:idx) of each tx inside a block.
    unless @db.tables.include?(:blk_tx)
      @db.create_table :blk_tx do
        column :blk_id, :int, :null => false, :index => true
        column :tx_id, :int, :null => false, :index => true
        column :idx, :int, :null => false
      end
    end
    # Transaction inputs.
    unless @db.tables.include?(:txin)
      @db.create_table :txin do
        primary_key :id
        column :tx_id, :int, :null => false, :index => true
        column :tx_idx, :int, :null => false
        column :script_sig, :bytea, :null => false
        column :prev_out, :bytea, :null => false, :index => true
        column :prev_out_index, :bigint, :null => false
        column :sequence, :bigint, :null => false
      end
    end
    # Transaction outputs.
    unless @db.tables.include?(:txout)
      @db.create_table :txout do
        primary_key :id
        column :tx_id, :int, :null => false, :index => true
        column :tx_idx, :int, :null => false
        column :pk_script, :bytea, :null => false, :index => true
        column :value, :bigint
        column :type, :int, :null => false, :index => true
      end
    end
    # Addresses (hash160) and their link to outputs.
    unless @db.tables.include?(:addr)
      @db.create_table :addr do
        primary_key :id
        column :hash160, String, :null => false, :index => true
      end
    end
    unless @db.tables.include?(:addr_txout)
      @db.create_table :addr_txout do
        column :addr_id, :int, :null => false, :index => true
        column :txout_id, :int, :null => false, :index => true
      end
    end
    # View of transactions not yet included in any block.
    unless @db.views.include?(:unconfirmed)
      @db.create_view(:unconfirmed,
        "SELECT * FROM tx WHERE NOT EXISTS " +
        "(SELECT 1 FROM blk_tx WHERE blk_tx.tx_id = tx.id)" +
        "ORDER BY tx.id DESC")
    end
  end
end
|
class ActionController::Base
# Use this with CanCan's load_resource to permit a set of params before
# it tries to build or update a resource with them.
#
# Usage:
# class BooksController < ApplicationController
# load_resource :book
# permit_params book: [:title, :isbn]
# end
#
# Or:
# class BooksController < ApplicationController
# load_resource
# permit_params :title, :isbn
# end
#
# the second form should work in the simple case where you don't have to
# supply a resource name for #load_resource
#
def self.permit_params *keys
filter_strong_params :permit, [:create, :update], keys
end
# Like permit_params, but only applies to create action
#
def self.permit_params_on_create *keys
filter_strong_params :permit, :create, keys
end
# Like permit_params, but only applies to update action
#
def self.permit_params_on_update *keys
filter_strong_params :permit, :update, keys
end
# Like permit_params, but marks the params required
#
def self.require_params *keys
filter_strong_params :require, [:create, :update], keys
end
# Like require_params, but only applies to create action
#
def self.require_params_on_create *keys
filter_strong_params :require, :create, keys
end
# Like require_params, but only applies to update action
#
def self.require_params_on_update *keys
filter_strong_params :require, :update, keys
end
# Does a permit! at every level of the params to let everything through
#
def self.permit_all_params options = {}
prepend_before_filter options.reverse_merge(:only => [:create, :update]) do
self.params.deep_permit!
end
end
# Installs a prepend_before_filter that applies +method+ (:permit or
# :require) to the request params for the given +actions+.
# keys - plain attribute keys and/or a trailing options hash describing
#        nested-model whitelists.
def self.filter_strong_params method, actions, keys # :nodoc:
  hash = keys.extract_options!
  keys.flatten!
  # Handle attributes if permitted attributes are given for nested models
  if (hash.present? && keys.present?) || (hash.select{|k,v| v.is_a?(Array)} == hash)
    prepend_before_filter :only => actions do
      resource_name = self.class.resource_name
      hash = self.class.attributized(hash)
      self.params[resource_name] = params[resource_name].send method, *[*keys.flatten, hash]
    end
  elsif hash.present?
    # Options hash only: filter the top-level params with it.
    prepend_before_filter :only => actions do
      self.params.merge! params.send(method, hash)
    end
  else
    # Plain key list: filter the nested resource params when present,
    # otherwise filter the top-level params object itself.
    prepend_before_filter :only => actions do
      resource_name = self.class.resource_name
      if params.has_key?(resource_name)
        self.params[resource_name] = params[resource_name].send method, *keys
      else
        self.params = params.send method, *keys
      end
    end
  end
end
def self.resource_name
self.to_s.sub("Controller", "").underscore.split('/').last.singularize
end
# Rewrites { :book => [...] } into { :book_attributes => [...] } so the
# whitelist matches Rails' nested-attributes parameter naming.
def self.attributized(hash)
  hash.each_with_object({}) do |(key, value), result|
    result[:"#{key}_attributes"] = value
  end
end
# Normalizes a whitelist given as an Array (with an optional trailing
# options hash) into a Hash whose plain keys map to true.
def self.hashified(whitelist)
  hash = whitelist.extract_options! if whitelist.is_a?(Array)
  array = whitelist.dup
  whitelist = (hash || {}) if hash
  # FIX(idiom): use #each, not #map, for a pure side-effect loop — the
  # mapped array was built and discarded.
  array.each {|v| whitelist[v] = true }
  whitelist
end
end
module ActionController
  class Parameters < ActiveSupport::HashWithIndifferentAccess
    # Recursively calls permit! on this params object and every nested
    # hash, wrapping plain hashes in Parameters first so they respond
    # to permit!.
    def deep_permit!
      each do |key, value|
        next unless value.is_a?(Hash)
        unless value.respond_to?(:permit!)
          value = ActionController::Parameters.new(value)
          self[key] = value
        end
        value.deep_permit!
      end
      permit!
    end
  end
end
Further cleanup for the next version.
class ActionController::Base
# Use this with CanCan's load_resource to permit a set of params before
# it tries to build or update a resource with them.
#
# Usage:
# class BooksController < ApplicationController
# load_resource :book
# permit_params book: [:title, :isbn]
# end
#
# Or:
# class BooksController < ApplicationController
# load_resource
# permit_params :title, :isbn
# end
#
# the second form should work in the simple case where you don't have to
# supply a resource name for #load_resource
#
def self.permit_params *keys
filter_strong_params :permit, [:create, :update], keys
end
# Like permit_params, but only applies to create action
#
def self.permit_params_on_create *keys
filter_strong_params :permit, :create, keys
end
# Like permit_params, but only applies to update action
#
def self.permit_params_on_update *keys
filter_strong_params :permit, :update, keys
end
# Like permit_params, but marks the params required
#
def self.require_params *keys
filter_strong_params :require, [:create, :update], keys
end
# Like require_params, but only applies to create action
#
def self.require_params_on_create *keys
filter_strong_params :require, :create, keys
end
# Like require_params, but only applies to update action
#
def self.require_params_on_update *keys
filter_strong_params :require, :update, keys
end
# Does a permit! at every level of the params to let everything through
#
def self.permit_all_params options = {}
prepend_before_filter options.reverse_merge(:only => [:create, :update]) do
self.params.deep_permit!
end
end
def self.filter_strong_params method, actions, keys # :nodoc:
hash = keys.extract_options!
keys.flatten!
# Handle attributes if permitted attributes are given for nested models
if (hash.present? && keys.present?) || (hash.select{|k,v| v.is_a?(Array)} == hash)
prepend_before_filter :only => actions do
resource_name = self.class.resource_name
hash = self.class.attributized(hash)
self.params[resource_name] = params[resource_name].send method, *[*keys.flatten, hash]
end
elsif hash.present?
prepend_before_filter :only => actions do
self.params.merge! params.send(method, hash)
end
else
prepend_before_filter :only => actions do
resource_name = self.class.resource_name
if params.has_key?(resource_name)
self.params[resource_name] = params[resource_name].send method, *keys
else
self.params = params.send method, *keys
end
end
end
end
def self.resource_name
self.to_s.sub("Controller", "").underscore.split('/').last.singularize
end
def self.attributized(hash)
Hash.new.tap do |h|
hash.each do |k,v|
h[:"#{k}_attributes"] = v
end
end
end
end
module ActionController
class Parameters < ActiveSupport::HashWithIndifferentAccess
def deep_permit!
self.each do |key, value|
if value.is_a?(Hash)
if !value.respond_to?(:permit!)
self[key] = value = ActionController::Parameters.new(value)
end
value.deep_permit!
end
end
permit!
end
end
end
|
module Chore
  module Strategy
    # Strategy that runs at most one worker at a time.
    class SingleWorkerStrategy
      def initialize(manager)
        @manager = manager
        @worker = nil
      end
      # Nothing to set up for the single-worker case.
      def start;end
      # Starts a worker for +work+ when the slot is free; returns true on
      # success, nil when a worker is already active.
      def assign(work)
        if workers_available?
          # FIX: start via Worker.start(work) and keep the handle; the
          # previous new/start/nil sequence discarded the worker
          # immediately, so workers_available? could never report busy.
          @worker = Worker.start(work)
          true
        end
      end
      # The single slot is free iff no worker has been started.
      def workers_available?
        @worker.nil?
      end
    end
  end
end
Fix SingleWorker's job startup call
module Chore
  module Strategy
    # Strategy that runs a single worker at a time, inline.
    class SingleWorkerStrategy
      def initialize(manager)
        @manager = manager
        @worker = nil
      end
      # No background machinery to boot for this strategy.
      def start; end
      # Starts a worker for +work+ when the slot is free; returns true on
      # success, nil when a worker is already active.
      def assign(work)
        return unless workers_available?
        @worker = Worker.start(work)
        true
      end
      # The single slot is free iff no worker has been started.
      def workers_available?
        @worker.nil?
      end
    end
  end
end
|
module Fastlane
  module IncrementVersionCode
    # Gem version of the increment_version_code fastlane plugin.
    VERSION = "0.1.0"
  end
end
Update version number
module Fastlane
  module IncrementVersionCode
    # Gem version of the increment_version_code fastlane plugin.
    VERSION = "0.2.0"
  end
end
|
module Fog
  module Compute
    class Octocloud
      class Real
        # Clones the box (cube) +boxname+ into a new VM directory named
        # +vmname+: copies the VMX and all VMDK disks, then resets the
        # clone's identity fields. Returns true.
        def local_create_vm(boxname, vmname)
          target = @vm_dir.join(vmname)
          target.mkdir unless target.exist?
          source = @box_dir.join(boxname)
          # FIX: fail fast with a clear message when the requested cube is
          # missing; otherwise Pathname.glob(...).first is nil and the cp
          # below raises an opaque TypeError.
          raise "Invalid Cube Specified" unless source.exist?
          # Copy the VMX over
          FileUtils.cp(Pathname.glob(source.join("*.vmx")).first,
                       target.join(vmname + ".vmx"))
          # Copy all VMDK's over
          Pathname.glob(source.join('*.vmdk')) do |f|
            FileUtils.cp f.expand_path, target
          end
          # TODO: Need to reset MAC/ID/name etc!!
          setup_uuid_mac(vmname)
          true
        end
        private
        # Strips host-generated identity (MAC addresses, UUIDs) from the
        # clone's VMX so VMware regenerates them, and normalizes the
        # display name to "tenderloin-<name>".
        def setup_uuid_mac(name)
          vmx = vmx_for_vm(name)
          VMXFile.with_vmx_data(vmx) do |data|
            data.delete "ethernet0.addressType"
            data.delete "uuid.location"
            data.delete "uuid.bios"
            data.delete "ethernet0.generatedAddress"
            data.delete "ethernet1.generatedAddress"
            data.delete "ethernet0.generatedAddressOffset"
            data.delete "ethernet1.generatedAddressOffset"
            data.delete 'displayname'
            data['displayName'] = "tenderloin-" + name
          end
        end
      end
    end
  end
end
Raise if cube doesn't exist.
module Fog
  module Compute
    class Octocloud
      class Real
        # Clones the box (cube) +boxname+ into a new VM directory named
        # +vmname+: copies the VMX and all VMDK disks, then resets the
        # clone's identity fields. Raises when the cube does not exist;
        # returns true on success.
        def local_create_vm(boxname, vmname)
          target = @vm_dir.join(vmname)
          target.mkdir unless target.exist?
          source = @box_dir.join(boxname)
          raise "Invalid Cube Specified" unless source.exist?
          # Copy the VMX over
          FileUtils.cp(Pathname.glob(source.join("*.vmx")).first,
          target.join(vmname + ".vmx"))
          # Copy all VMDK's over
          Pathname.glob(source.join('*.vmdk')) do |f|
          FileUtils.cp f.expand_path, target
          end
          # TODO: Need to reset MAC/ID/name etc!!
          setup_uuid_mac(vmname)
          true
        end
        private
        # Strips host-generated identity (MAC addresses, UUIDs) from the
        # clone's VMX so VMware regenerates them, and normalizes the
        # display name to "tenderloin-<name>".
        def setup_uuid_mac(name)
          vmx = vmx_for_vm(name)
          VMXFile.with_vmx_data(vmx) do |data|
          data.delete "ethernet0.addressType"
          data.delete "uuid.location"
          data.delete "uuid.bios"
          data.delete "ethernet0.generatedAddress"
          data.delete "ethernet1.generatedAddress"
          data.delete "ethernet0.generatedAddressOffset"
          data.delete "ethernet1.generatedAddressOffset"
          data.delete 'displayname'
          data['displayName'] = "tenderloin-" + name
          end
        end
      end
    end
  end
end
|
require 'active_support/inflector'
require 'util/miq-exception'
module OpenstackHandle
class Handle
attr_accessor :username, :password, :address, :port, :api_version, :security_protocol, :connection_options
attr_reader :project_name
attr_writer :default_tenant_name
SERVICE_FALL_BACK = {
"Network" => "Compute",
"Image" => "Compute",
"Volume" => "Compute",
"Storage" => nil,
"Metering" => nil
}
SERVICE_NAME_MAP = {
"Compute" => :nova,
"Network" => :neutron,
"Image" => :glance,
"Volume" => :cinder,
"Storage" => :swift,
"Metering" => :ceilometer,
"Baremetal" => :baremetal,
"Orchestration" => :orchestration,
"Planning" => :planning
}
# Yields the URL scheme ("https"/"http") and Excon connection options
# appropriate for the requested security protocol.
# ssl_options - optional extra Excon SSL options (e.g. :ssl_ca_file),
#               merged in only when certificate validation is requested.
#               Defaults to {} so existing callers are unaffected.
def self.try_connection(security_protocol, ssl_options = {})
  if security_protocol.blank? || security_protocol == 'ssl'
    # For backwards compatibility take blank security_protocol as SSL
    yield "https", {:ssl_verify_peer => false}
  elsif security_protocol == 'ssl-with-validation'
    yield "https", {:ssl_verify_peer => true}.merge(ssl_options)
  else
    yield "http", {}
  end
end
def self.raw_connect_try_ssl(username, password, address, port, service = "Compute", opts = nil, api_version = nil,
security_protocol = nil)
try_connection(security_protocol) do |scheme, connection_options|
auth_url = auth_url(address, port, scheme, api_version)
opts[:connection_options] = (opts[:connection_options] || {}).merge(connection_options)
raw_connect(username, password, auth_url, service, opts)
end
end
def self.raw_connect(username, password, auth_url, service = "Compute", extra_opts = nil)
opts = {
:provider => 'OpenStack',
:openstack_auth_url => auth_url,
:openstack_username => username,
:openstack_api_key => password,
:openstack_endpoint_type => 'publicURL',
}
opts.merge!(extra_opts) if extra_opts
# Workaround for a bug in Fog
# https://github.com/fog/fog/issues/3112
# Ensure the that if the Storage service is not available, it will not
# throw an error trying to build an connection error message.
opts[:openstack_service_type] = ["object-store"] if service == "Storage"
if service == "Planning"
# Special behaviour for Planning service Tuskar, since it is OpenStack specific service, there is no
# Fog::Planning module, only Fog::OpenStack::Planning
Fog::Openstack.const_get(service).new(opts)
else
Fog.const_get(service).new(opts)
end
rescue Fog::Errors::NotFound => err
raise MiqException::ServiceNotAvailable if err.message.include?("Could not find service")
raise
end
# Maps the keystone API version identifier to its auth-token request path.
# Unknown versions yield nil, matching the original case/when fallthrough.
def self.path_for_api_version(api_version)
  { 'v2' => '/v2.0/tokens', 'v3' => '/v3/auth/tokens' }[api_version]
end
def self.auth_url(address, port = 5000, scheme = "http", api_version = 'v2')
url(address, port, scheme, path_for_api_version(api_version))
end
# Builds "scheme://address:port/path". The hostname is assigned after
# URI::Generic.build, as in the original, so the builder's host
# validation is bypassed.
def self.url(address, port = 5000, scheme = "http", path = "")
  uri = URI::Generic.build(:scheme => scheme, :port => port.to_i, :path => path)
  uri.hostname = address
  uri.to_s
end
class << self
attr_writer :connection_options
end
class << self
attr_reader :connection_options
end
def initialize(username, password, address, port = nil, api_version = nil, security_protocol = nil)
@username = username
@password = password
@address = address
@port = port || 5000
@api_version = api_version || 'v2'
@security_protocol = security_protocol || 'ssl'
@connection_cache = {}
@connection_options = self.class.connection_options
end
def browser_url
"http://#{address}/dashboard"
end
def connect(options = {})
opts = options.dup
service = (opts.delete(:service) || "Compute").to_s.camelize
tenant = opts.delete(:tenant_name)
# TODO(lsmola) figure out from where to take the project name and domain name
domain = opts.delete(:domain_name) || 'admin_domain'
# Do not send auth_type to fog, it throws warning
opts.delete(:auth_type)
unless tenant
tenant = "any_tenant" if service == "Identity"
tenant ||= default_tenant_name
end
unless service == "Identity"
opts[:openstack_tenant] = tenant
# For identity ,there is only domain scope, with project_name nil
opts[:openstack_project_name] = @project_name = tenant
end
opts[:openstack_domain_name] = domain
svc_cache = (@connection_cache[service] ||= {})
svc_cache[tenant] ||= begin
opts[:connection_options] = connection_options if connection_options
raw_service = self.class.raw_connect_try_ssl(username, password, address, port, service, opts, api_version,
security_protocol)
service_wrapper_name = "#{service}Delegate"
# Allow openstack to define new services without explicitly requiring a
# service wrapper.
if OpenstackHandle.const_defined?(service_wrapper_name)
OpenstackHandle.const_get(service_wrapper_name).new(raw_service, self, SERVICE_NAME_MAP[service])
else
raw_service
end
end
end
def baremetal_service(tenant_name = nil)
connect(:service => "Baremetal", :tenant_name => tenant_name)
end
def detect_baremetal_service(tenant_name = nil)
detect_service("Baremetal", tenant_name)
end
def orchestration_service(tenant_name = nil)
connect(:service => "Orchestration", :tenant_name => tenant_name)
end
def detect_orchestration_service(tenant_name = nil)
detect_service("Orchestration", tenant_name)
end
def planning_service(tenant_name = nil)
connect(:service => "Planning", :tenant_name => tenant_name)
end
def detect_planning_service(tenant_name = nil)
detect_service("Planning", tenant_name)
end
def compute_service(tenant_name = nil)
connect(:service => "Compute", :tenant_name => tenant_name)
end
alias_method :connect_compute, :compute_service
def identity_service
connect(:service => "Identity")
end
alias_method :connect_identity, :identity_service
def network_service(tenant_name = nil)
connect(:service => "Network", :tenant_name => tenant_name)
end
alias_method :connect_network, :network_service
def detect_network_service(tenant_name = nil)
detect_service("Network", tenant_name)
end
def image_service(tenant_name = nil)
connect(:service => "Image", :tenant_name => tenant_name)
end
alias_method :connect_image, :image_service
def detect_image_service(tenant_name = nil)
detect_service("Image", tenant_name)
end
def volume_service(tenant_name = nil)
connect(:service => "Volume", :tenant_name => tenant_name)
end
alias_method :connect_volume, :volume_service
def detect_volume_service(tenant_name = nil)
detect_service("Volume", tenant_name)
end
def storage_service(tenant_name = nil)
connect(:service => "Storage", :tenant_name => tenant_name)
end
alias_method :connect_storage, :storage_service
def detect_storage_service(tenant_name = nil)
detect_service("Storage", tenant_name)
end
def detect_service(service, tenant_name = nil)
connect(:service => service, :tenant_name => tenant_name)
rescue MiqException::ServiceNotAvailable
unless (fbs = SERVICE_FALL_BACK[service])
return nil
end
svc = connect(:service => fbs, :tenant_name => tenant_name)
end
def tenants
@tenants ||= identity_service.visible_tenants
end
def tenant_names
@tenant_names ||= tenants.collect(&:name)
end
def accessible_tenants
@accessible_tenants ||= tenants.select do |t|
# avoid 401 Unauth errors when checking for accessible tenants
# the "services" tenant is a special tenant in openstack reserved
# specifically for the various services
next if t.name == "services"
begin
compute_service(t.name)
true
rescue Excon::Errors::Unauthorized
false
end
end
end
def accessible_tenant_names
@accessible_tenant_names ||= accessible_tenants.collect(&:name)
end
def default_tenant_name
return @default_tenant_name ||= "admin" if accessible_tenant_names.include?("admin")
@default_tenant_name ||= accessible_tenant_names.detect { |tn| tn != "services" }
end
def service_for_each_accessible_tenant(service_name, &block)
accessible_tenants.each do |tenant|
service = detect_service(service_name, tenant.name)
if service
case block.arity
when 1 then block.call(service)
when 2 then block.call(service, tenant)
else raise "Unexpected number of block args: #{block.arity}"
end
else
$fog_log.warn("MIQ(#{self.class.name}##{__method__}) "\
"Could not access service #{service_name} for tenant #{tenant.name} on OpenStack #{@address}")
end
end
end
def accessor_for_accessible_tenants(service, accessor, uniq_id, array_accessor = true)
ra = []
service_for_each_accessible_tenant(service) do |svc, project|
not_found_error = Fog.const_get(service)::OpenStack::NotFound
rv = begin
if accessor.kind_of?(Proc)
accessor.call(svc)
else
array_accessor ? svc.send(accessor).to_a : svc.send(accessor)
end
rescue not_found_error => e
$fog_log.warn("MIQ(#{self.class.name}.#{__method__}) HTTP 404 Error during OpenStack request. " \
"Skipping inventory item #{service} #{accessor}\n#{e}")
nil
end
if !rv.blank? && array_accessor && rv.last.kind_of?(Fog::Model)
# If possible, store which project(tenant) was used for obtaining of the Fog::Model
rv.map { |x| x.project = project }
end
if rv
array_accessor ? ra.concat(rv) : ra << rv
end
end
if uniq_id.blank? && array_accessor && !ra.blank?
# Take uniq ID from Fog::Model definition
last_object = ra.last
# TODO(lsmola) change to last_object.identity_name once the new fog-core is released
uniq_id = last_object.class.instance_variable_get("@identity") if last_object.kind_of?(Fog::Model)
end
return ra unless uniq_id
ra.uniq { |i| i.kind_of?(Hash) ? i[uniq_id] : i.send(uniq_id) }
end
end
end
OpenStack: allow specifying a path to a CA certificate
Allow OpenStack connections to specify a path to a CA certificate,
which will be passed through to Excon.
require 'active_support/inflector'
require 'util/miq-exception'
module OpenstackHandle
class Handle
attr_accessor :username, :password, :address, :port, :api_version, :security_protocol, :connection_options
attr_reader :project_name
attr_writer :default_tenant_name
SERVICE_FALL_BACK = {
"Network" => "Compute",
"Image" => "Compute",
"Volume" => "Compute",
"Storage" => nil,
"Metering" => nil
}
SERVICE_NAME_MAP = {
"Compute" => :nova,
"Network" => :neutron,
"Image" => :glance,
"Volume" => :cinder,
"Storage" => :swift,
"Metering" => :ceilometer,
"Baremetal" => :baremetal,
"Orchestration" => :orchestration,
"Planning" => :planning
}
def self.try_connection(security_protocol, ssl_options = {})
if security_protocol.blank? || security_protocol == 'ssl'
# For backwards compatibility take blank security_protocol as SSL
yield "https", {:ssl_verify_peer => false}
elsif security_protocol == 'ssl-with-validation'
excon_options = {:ssl_verify_peer => true}.merge(ssl_options)
yield "https", excon_options
else
yield "http", {}
end
end
# Builds a raw Fog connection, deriving the URL scheme and Excon SSL
# options from the requested security protocol (see .try_connection).
# Note: mutates +opts+ (removes :ssl_options, sets :connection_options).
def self.raw_connect_try_ssl(username, password, address, port, service = "Compute", opts = nil, api_version = nil,
                             security_protocol = nil)
  excon_ssl_options = opts.delete(:ssl_options)
  try_connection(security_protocol, excon_ssl_options) do |scheme, connection_options|
    endpoint = auth_url(address, port, scheme, api_version)
    merged_connection_options = (opts[:connection_options] || {}).merge(connection_options)
    opts[:connection_options] = merged_connection_options
    raw_connect(username, password, endpoint, service, opts)
  end
end
# Opens a Fog connection for +service+ against the given Keystone auth URL.
# Raises MiqException::ServiceNotAvailable when the catalog lacks the service.
def self.raw_connect(username, password, auth_url, service = "Compute", extra_opts = nil)
  opts = {
    :provider                => 'OpenStack',
    :openstack_auth_url      => auth_url,
    :openstack_username      => username,
    :openstack_api_key       => password,
    :openstack_endpoint_type => 'publicURL',
  }
  opts.merge!(extra_opts) if extra_opts
  # Workaround for a bug in Fog (https://github.com/fog/fog/issues/3112):
  # if the Storage service is not available, pin the service type so Fog
  # does not raise while building a connection error message.
  opts[:openstack_service_type] = ["object-store"] if service == "Storage"
  # Tuskar (Planning) is an OpenStack-specific service: there is no
  # Fog::Planning module, only Fog::OpenStack::Planning.
  namespace = service == "Planning" ? Fog::Openstack : Fog
  namespace.const_get(service).new(opts)
rescue Fog::Errors::NotFound => err
  raise MiqException::ServiceNotAvailable if err.message.include?("Could not find service")
  raise
end
# Maps a Keystone API version to its token-request path (nil if unknown).
def self.path_for_api_version(api_version)
  case api_version
  when 'v2' then '/v2.0/tokens'
  when 'v3' then '/v3/auth/tokens'
  end
end

# Full Keystone authentication URL for the given endpoint.
def self.auth_url(address, port = 5000, scheme = "http", api_version = 'v2')
  url(address, port, scheme, path_for_api_version(api_version))
end

# Builds "scheme://address:port/path"; URI handles IPv6 hostname bracketing.
def self.url(address, port = 5000, scheme = "http", path = "")
  uri = URI::Generic.build(:scheme => scheme, :port => port.to_i, :path => path)
  uri.hostname = address
  uri.to_s
end
# Class-level connection options shared as the default for every new
# Handle instance (read in #initialize). The original opened the
# singleton class twice — once for the writer, once for the reader —
# which is just attr_accessor split in half.
class << self
  attr_accessor :connection_options
end
# Captures connection parameters, applying defaults for port (5000),
# API version ('v2') and security protocol ('ssl'), and primes the
# per-service/per-tenant connection cache.
def initialize(username, password, address, port = nil, api_version = nil, security_protocol = nil,
               extra_options = {})
  @username, @password, @address = username, password, address
  @port              = port || 5000
  @api_version       = api_version || 'v2'
  @security_protocol = security_protocol || 'ssl'
  @extra_options     = extra_options
  @connection_cache  = {}
  # Class-level options act as the default for every new handle.
  @connection_options = self.class.connection_options
end
# Lazily assembles the Excon SSL options hash from @extra_options.
# Memoized: once the hash is non-blank it is returned unchanged on
# subsequent calls.
def ssl_options
@ssl_options ||= {}
return @ssl_options unless @ssl_options.blank?
@ssl_options[:ssl_ca_file] = @extra_options[:ssl_ca_file] unless @extra_options[:ssl_ca_file].blank?
@ssl_options[:ssl_ca_path] = @extra_options[:ssl_ca_path] unless @extra_options[:ssl_ca_path].blank?
# ssl_cert_store is dependent on the presence of ssl_ca_file
@ssl_options[:ssl_cert_store] = @extra_options[:ssl_cert_store] unless @extra_options[:ssl_ca_file].blank?
@ssl_options
end
# Horizon dashboard URL for this OpenStack endpoint (always plain HTTP).
def browser_url
"http://#{address}/dashboard"
end
# Returns a (cached) connection for the requested service and tenant,
# wrapped in a <Service>Delegate when one is defined under OpenstackHandle.
# options: :service (default "Compute"), :tenant_name, :domain_name,
# :auth_type (dropped before reaching Fog).
def connect(options = {})
opts = options.dup
service = (opts.delete(:service) || "Compute").to_s.camelize
tenant = opts.delete(:tenant_name)
# TODO(lsmola) figure out from where to take the project name and domain name
domain = opts.delete(:domain_name) || 'admin_domain'
# Do not send auth_type to fog, it throws warning
opts.delete(:auth_type)
unless tenant
tenant = "any_tenant" if service == "Identity"
tenant ||= default_tenant_name
end
unless service == "Identity"
opts[:openstack_tenant] = tenant
# For Identity there is only domain scope, so project_name stays nil.
opts[:openstack_project_name] = @project_name = tenant
end
opts[:openstack_domain_name] = domain
# Cache connections per service, then per tenant.
svc_cache = (@connection_cache[service] ||= {})
svc_cache[tenant] ||= begin
opts[:connection_options] = connection_options if connection_options
opts[:ssl_options] = ssl_options
raw_service = self.class.raw_connect_try_ssl(username, password, address, port, service, opts, api_version,
security_protocol)
service_wrapper_name = "#{service}Delegate"
# Allow openstack to define new services without explicitly requiring a
# service wrapper.
if OpenstackHandle.const_defined?(service_wrapper_name)
OpenstackHandle.const_get(service_wrapper_name).new(raw_service, self, SERVICE_NAME_MAP[service])
else
raw_service
end
end
end
# Per-service connection helpers. The *_service methods connect directly
# (raising when the service is unavailable); the detect_* variants go
# through #detect_service, which falls back per SERVICE_FALL_BACK and
# returns nil when nothing is available.
def baremetal_service(tenant_name = nil)
connect(:service => "Baremetal", :tenant_name => tenant_name)
end
def detect_baremetal_service(tenant_name = nil)
detect_service("Baremetal", tenant_name)
end
def orchestration_service(tenant_name = nil)
connect(:service => "Orchestration", :tenant_name => tenant_name)
end
def detect_orchestration_service(tenant_name = nil)
detect_service("Orchestration", tenant_name)
end
def planning_service(tenant_name = nil)
connect(:service => "Planning", :tenant_name => tenant_name)
end
def detect_planning_service(tenant_name = nil)
detect_service("Planning", tenant_name)
end
def compute_service(tenant_name = nil)
connect(:service => "Compute", :tenant_name => tenant_name)
end
alias_method :connect_compute, :compute_service
# Identity is domain-scoped, so no tenant parameter here.
def identity_service
connect(:service => "Identity")
end
alias_method :connect_identity, :identity_service
def network_service(tenant_name = nil)
connect(:service => "Network", :tenant_name => tenant_name)
end
alias_method :connect_network, :network_service
def detect_network_service(tenant_name = nil)
detect_service("Network", tenant_name)
end
def image_service(tenant_name = nil)
connect(:service => "Image", :tenant_name => tenant_name)
end
alias_method :connect_image, :image_service
def detect_image_service(tenant_name = nil)
detect_service("Image", tenant_name)
end
def volume_service(tenant_name = nil)
connect(:service => "Volume", :tenant_name => tenant_name)
end
alias_method :connect_volume, :volume_service
def detect_volume_service(tenant_name = nil)
detect_service("Volume", tenant_name)
end
def storage_service(tenant_name = nil)
connect(:service => "Storage", :tenant_name => tenant_name)
end
alias_method :connect_storage, :storage_service
def detect_storage_service(tenant_name = nil)
detect_service("Storage", tenant_name)
end
# Connects to +service+, falling back to the service listed in
# SERVICE_FALL_BACK (e.g. Network -> Compute) when it is unavailable.
# Returns nil when the service is unavailable and has no fallback.
# (The original assigned the fallback connection to an unused local.)
def detect_service(service, tenant_name = nil)
  connect(:service => service, :tenant_name => tenant_name)
rescue MiqException::ServiceNotAvailable
  fallback = SERVICE_FALL_BACK[service]
  return nil unless fallback
  connect(:service => fallback, :tenant_name => tenant_name)
end
# All tenants visible to this user (memoized).
def tenants
@tenants ||= identity_service.visible_tenants
end
def tenant_names
@tenant_names ||= tenants.collect(&:name)
end
# Tenants the user can actually authenticate against, probed by opening a
# Compute connection per tenant (memoized).
def accessible_tenants
@accessible_tenants ||= tenants.select do |t|
# avoid 401 Unauth errors when checking for accessible tenants
# the "services" tenant is a special tenant in openstack reserved
# specifically for the various services
next if t.name == "services"
begin
compute_service(t.name)
true
rescue Excon::Errors::Unauthorized
false
end
end
end
def accessible_tenant_names
@accessible_tenant_names ||= accessible_tenants.collect(&:name)
end
# Prefers "admin"; otherwise the first accessible tenant that is not the
# reserved "services" tenant. May also be set explicitly via the writer.
def default_tenant_name
return @default_tenant_name ||= "admin" if accessible_tenant_names.include?("admin")
@default_tenant_name ||= accessible_tenant_names.detect { |tn| tn != "services" }
end
# Yields a connected +service_name+ for every accessible tenant.
# The block may take one arg (service) or two (service, tenant);
# tenants where the service cannot be reached are logged and skipped.
def service_for_each_accessible_tenant(service_name, &block)
accessible_tenants.each do |tenant|
service = detect_service(service_name, tenant.name)
if service
case block.arity
when 1 then block.call(service)
when 2 then block.call(service, tenant)
else raise "Unexpected number of block args: #{block.arity}"
end
else
$fog_log.warn("MIQ(#{self.class.name}##{__method__}) "\
"Could not access service #{service_name} for tenant #{tenant.name} on OpenStack #{@address}")
end
end
end
# Collects +accessor+ results for +service+ across every accessible tenant,
# de-duplicating by +uniq_id+ (or, when uniq_id is blank, by the Fog::Model
# identity attribute of the collected objects). +accessor+ is either a
# method name sent to the service or a Proc called with it. With
# array_accessor the per-tenant results are concatenated; otherwise each
# raw result is appended. HTTP 404s from a tenant are logged and skipped.
def accessor_for_accessible_tenants(service, accessor, uniq_id, array_accessor = true)
  ra = []
  service_for_each_accessible_tenant(service) do |svc, project|
    not_found_error = Fog.const_get(service)::OpenStack::NotFound
    rv = begin
      if accessor.kind_of?(Proc)
        accessor.call(svc)
      else
        array_accessor ? svc.send(accessor).to_a : svc.send(accessor)
      end
    rescue not_found_error => e
      $fog_log.warn("MIQ(#{self.class.name}.#{__method__}) HTTP 404 Error during OpenStack request. " \
        "Skipping inventory item #{service} #{accessor}\n#{e}")
      nil
    end
    if !rv.blank? && array_accessor && rv.last.kind_of?(Fog::Model)
      # If possible, store which project (tenant) was used to obtain each
      # Fog::Model. `each`, not `map`: only the side effect is wanted
      # (the original used map and discarded the resulting array).
      rv.each { |x| x.project = project }
    end
    if rv
      array_accessor ? ra.concat(rv) : ra << rv
    end
  end
  if uniq_id.blank? && array_accessor && !ra.blank?
    # Take the uniq ID from the Fog::Model definition.
    last_object = ra.last
    # TODO(lsmola) change to last_object.identity_name once the new fog-core is released
    uniq_id = last_object.class.instance_variable_get("@identity") if last_object.kind_of?(Fog::Model)
  end
  return ra unless uniq_id
  ra.uniq { |i| i.kind_of?(Hash) ? i[uniq_id] : i.send(uniq_id) }
end
end
end
|
# Namespace for species-counterpoint rulesets.
module HeadMusic::Style::Rulesets
end

# Ruleset applied to the melody line in first-species counterpoint.
class HeadMusic::Style::Rulesets::FirstSpeciesMelody
  # Frozen so the shared rule list cannot be mutated at runtime.
  RULESET = [
    HeadMusic::Style::Annotations::OneToOne,
    HeadMusic::Style::Annotations::NotesSameLength,
    HeadMusic::Style::Annotations::SingableIntervals,
    HeadMusic::Style::Annotations::StartOnPerfectConsonance,
    HeadMusic::Style::Annotations::EndOnTonic,
    HeadMusic::Style::Annotations::StepUpToFinalNote,
    HeadMusic::Style::Annotations::SingableRange,
    HeadMusic::Style::Annotations::LimitOctaveLeaps,
    HeadMusic::Style::Annotations::DistinctVoices, # TODO: move to harmony or composition ruleset
  ].freeze

  # Instantiates every rule against +voice+, returning the annotations.
  def self.analyze(voice)
    RULESET.map { |rule| rule.new(voice) }
  end
end
Remove TODO comment.
# Namespace for species-counterpoint rulesets.
module HeadMusic::Style::Rulesets
end

# Ruleset applied to the melody line in first-species counterpoint.
class HeadMusic::Style::Rulesets::FirstSpeciesMelody
  # Frozen so the shared rule list cannot be mutated at runtime.
  RULESET = [
    HeadMusic::Style::Annotations::OneToOne,
    HeadMusic::Style::Annotations::NotesSameLength,
    HeadMusic::Style::Annotations::SingableIntervals,
    HeadMusic::Style::Annotations::StartOnPerfectConsonance,
    HeadMusic::Style::Annotations::EndOnTonic,
    HeadMusic::Style::Annotations::StepUpToFinalNote,
    HeadMusic::Style::Annotations::SingableRange,
    HeadMusic::Style::Annotations::LimitOctaveLeaps,
    HeadMusic::Style::Annotations::DistinctVoices,
  ].freeze

  # Instantiates every rule against +voice+, returning the annotations.
  def self.analyze(voice)
    RULESET.map { |rule| rule.new(voice) }
  end
end
|
require "paperclip/storage/dropbox/url_generator"
module Paperclip
module Storage
module Dropbox
# Builds public download URLs for attachments stored in Dropbox's
# legacy /Public folder.
class PublicUrlGenerator < UrlGenerator
# Returns the dl.dropboxusercontent.com URL for +style+.
# NOTE(review): the attachment path is merged into the URI without
# escaping, so paths containing non-ASCII characters (e.g. Cyrillic)
# raise URI::InvalidURIError — confirm and escape before merging.
def file_url(style)
url = URI.parse("https://dl.dropboxusercontent.com/u/#{user_id}/")
path = @attachment.path(style)
# Strip the leading "Public/" segment; nil when the path is not public.
path = path.match(/^Public\//).try(:post_match)
url.merge!(path)
url.to_s
end
end
end
end
end
cyrillic filename bug fix
require "paperclip/storage/dropbox/url_generator"
module Paperclip
  module Storage
    module Dropbox
      # Builds public download URLs for attachments stored in Dropbox's
      # legacy /Public folder.
      class PublicUrlGenerator < UrlGenerator
        # Returns the dl.dropboxusercontent.com URL for +style+.
        def file_url(style)
          url = URI.parse("https://dl.dropboxusercontent.com/u/#{user_id}/")
          path = @attachment.path(style)
          # Strip the leading "Public/" segment; nil when the path is not public.
          path = path.match(/^Public\//).try(:post_match)
          # Escape non-ASCII (e.g. Cyrillic) filenames before merging.
          # URI.encode was deprecated in Ruby 2.7 and removed in 3.0;
          # URI::DEFAULT_PARSER.escape is its direct replacement.
          url.merge!(URI::DEFAULT_PARSER.escape(path))
          url.to_s
        end
      end
    end
  end
end
|
module TaskAtHand
@default_task_timeout = 20
@task_queue = []
def desired_state(state, curr_state)
current_set_state = @setState
@setState = state.to_s
@two_step_in_progress = false
set_task_at_hand(state)
save_state
# end
SystemDebug.debug(SystemDebug.engine_tasks, 'Task at Hand:' + state.to_s + ' Current set state:' + current_set_state.to_s + ' going for:' + @setState + ' with ' + @task_at_hand.to_s + ' in ' + curr_state)
return true
rescue StandardError => e
log_exception(e)
end
def in_progress(action)
if @steps_to_go.nil?
@steps_to_go = 1
elsif @steps_to_go <= 0
@steps_to_go = 1
end
curr_state = read_state
SystemDebug.debug(SystemDebug.engine_tasks, :read_state, curr_state)
# FIX ME Finx the source 0 :->:
curr_state.sub!(/\:->\:/,'')
case action
when :create
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :stop
return desired_state('stopped', curr_state) if curr_state== 'running'
when :start
return desired_state('running', curr_state) if curr_state== 'stopped'
when :pause
return desired_state('paused', curr_state) if curr_state== 'running'
when :restart
if curr_state == 'running'
@steps = [:start,:stop]
@steps_to_go = 2
return desired_state('stopped', curr_state)
end
return desired_state('running', curr_state)
when :unpause
return desired_state('running', curr_state) if curr_state== 'paused'
when :recreate
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('running', curr_state)
end
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :rebuild
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('nocontainer', curr_state)
end
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :reinstall
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('running', curr_state)
end
@steps_to_go = 2
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :build
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :delete
return desired_state('nocontainer', curr_state) if curr_state== 'stopped'
# desired_state('noimage')
when :destroy
return desired_state('nocontainer', curr_state) if curr_state== 'stopped' || curr_state== 'nocontainer'
end
return log_error_mesg('not in matching state want _' + tasks_final_state(action).to_s + '_but in ' + curr_state.class.name + ' ',curr_state )
# Perhaps ?return clear_task_at_hand
rescue StandardError => e
log_exception(e)
end
def task_complete(action)
return if action == 'create'
@last_task = action
SystemDebug.debug(SystemDebug.engine_tasks, :task_complete, ' ', action.to_s + ' as action for task ' + task_at_hand.to_s + " " + @steps_to_go.to_s + 'steps to go ',@steps)
expire_engine_info
clear_task_at_hand
SystemDebug.debug(SystemDebug.builder, :last_task, @last_task)
save_state unless @last_task == :delete
# FixMe Kludge unless docker event listener
ContainerStateFiles.delete_container_configs(container) if @last_task == :delete
return true
rescue StandardError => e
log_exception(e)
end
def task_at_hand
fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
return nil unless File.exist?(fn)
task = File.read(fn)
if task_has_expired?(task)
expire_task_at_had
return nil
end
r = read_state(raw=true)
if tasks_final_state(task) == r
clear_task_at_hand
return nil
end
task
rescue StandardError => e
log_exception(e)
return nil
# @task_at_hand
end
def expire_task_at_had
SystemDebug.debug(SystemDebug.engine_tasks, 'expire Task ' + @task_at_hand.to_s )
clear_task_at_hand
end
def clear_task_at_hand
@steps_to_go -= 1
if @steps_to_go > 0
SystemDebug.debug(SystemDebug.engine_tasks, 'Multistep Task ' + @task_at_hand.to_s )
@task_at_hand = @steps[@steps_to_go - 1]
SystemDebug.debug(SystemDebug.engine_tasks, 'next Multistep Task ' + @task_at_hand.to_s)
# f = File.new(ContainerStateFiles.container_state_dir(self) + '/task_at_hand','w+')
# f.write(@task_at_hand.to_s)
# f.close
else
SystemDebug.debug(SystemDebug.engine_tasks, 'cleared Task ' + @task_at_hand.to_s)
@task_at_hand = nil
fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
File.delete(fn) if File.exist?(fn)
end
rescue StandardError => e
# log_exception(e) Dont log exception
# well perhaps a perms or disk error but definitly not no such file
return true #possbile exception such file (another process alsop got the eot mesg and removed)
end
# Polls until the current task clears or the task-specific timeout
# elapses. Returns true when the task finished, false on timeout or error.
def wait_for_task(task)
  attempts = 0
  timeout = task_set_timeout(task)
  SystemDebug.debug(SystemDebug.engine_tasks, :wait_for_task, task_at_hand)
  until task_at_hand.nil?
    sleep(0.5)
    attempts += 1
    SystemDebug.debug(SystemDebug.engine_tasks, :wft_loop, ' ', task_at_hand)
    # Polling every 0.5s, so the timeout is expressed as 2 * seconds.
    return false if attempts > timeout * 2
  end
  true
rescue StandardError => e
  log_exception(e)
  false
end
def task_failed(msg)
clear_task_at_hand
SystemDebug.debug(SystemDebug.engine_tasks,:TASK_FAILES______Doing, @task_at_hand)
@last_error = @container_api.last_error unless @container_api.nil?
SystemDebug.debug(SystemDebug.engine_tasks, :WITH, @last_error.to_s, msg.to_s)
task_complete(:failed)
return false
rescue StandardError => e
log_exception(e)
end
# Blocks until the container's task file disappears or +timeout+ seconds
# elapse. Returns true when the file is gone, false on timeout.
# (The original used a local named `loop`, shadowing Kernel#loop.)
def wait_for_container_task(timeout = 90)
  fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
  return true unless File.exist?(fn)
  attempts = 0
  while File.exist?(fn)
    sleep(0.5)
    attempts += 1
    # Polling every 0.5s, so the timeout is expressed as 2 * seconds.
    return false if attempts > timeout * 2
  end
  true
rescue StandardError => e
  log_exception(e)
end
private
# Maps a task symbol to the container state expected once the task
# completes. Returns nil for unknown tasks.
def tasks_final_state(task)
  case task
  when :stop                then 'stopped'
  when :pause               then 'paused'
  when :restart             then 'stopped'
  when :delete              then 'nocontainer'
  when :destroy             then 'destroyed'
  when :create, :start, :unpause,
       :reinstall, :recreate,
       :rebuild, :build     then 'running'
  end
rescue StandardError => e
  log_exception(e)
end
def task_has_expired?(task)
fmtime = File.mtime(ContainerStateFiles.container_state_dir(self) + '/task_at_hand')
SystemDebug.debug(SystemDebug.engine_tasks, "mtime ", fmtime)
SystemDebug.debug(SystemDebug.engine_tasks, "timeout",task_set_timeout(task))
mtime = fmtime + task_set_timeout(task)
SystemDebug.debug(SystemDebug.engine_tasks, "expires ",fmtime)
SystemDebug.debug(SystemDebug.engine_tasks, "now ", Time.now)
if mtime < Time.now
File.delete(ContainerStateFiles.container_state_dir(self) + '/task_at_hand')
return true
end
return false
# no file problem with mtime etc means task has finished in progress and task file has dissapppeared
rescue StandardError => e
SystemDebug.debug(SystemDebug.engine_tasks, e, e.backtrace)
return true
end
def task_set_timeout(task)
@default_task_timeout = 20
@task_timeouts = {}
@task_timeouts[task.to_sym] = @default_task_timeout unless @task_timeouts.key?(task.to_sym)
@task_timeouts[:stop]= 60
@task_timeouts[:start]= 30
@task_timeouts[:restart]= 60
@task_timeouts[:recreate]= 90
@task_timeouts[:create]= 90
@task_timeouts[:build]= 300
@task_timeouts[:rebuild]= 120
@task_timeouts[:pause]= 20
@task_timeouts[:unpause]= 20
@task_timeouts[:destroy]= 30
@task_timeouts[:delete]= 40
@task_timeouts[:running]= 40
SystemDebug.debug(SystemDebug.engine_tasks, :timeout_set_for_task,task.to_sym, @task_timeouts[task.to_sym].to_s + 'secs')
# return @default_task_timeout
return @task_timeouts[task.to_sym]
end
# Records +state+ as the task at hand, both in memory and in the task
# file used for cross-process coordination and mtime-based expiry.
# (The original used File.new/close, leaking the handle if write raised;
# the block form of File.open always closes it.)
def set_task_at_hand(state)
  @task_at_hand = state
  File.open(ContainerStateFiles.container_state_dir(self) + '/task_at_hand', 'w+') do |f|
    f.write(state)
  end
rescue StandardError => e
  log_exception(e)
end
end
wait_for_task now waits for the task file's modification time to change
module TaskAtHand
@default_task_timeout = 20
@task_queue = []
# Records +state+ as the desired container state: persists the task file
# (see #set_task_at_hand), saves state, and logs the transition.
# Always returns true unless an exception is logged.
def desired_state(state, curr_state)
current_set_state = @setState
@setState = state.to_s
@two_step_in_progress = false
set_task_at_hand(state)
save_state
SystemDebug.debug(SystemDebug.engine_tasks, 'Task at Hand:' + state.to_s + ' Current set state:' + current_set_state.to_s + ' going for:' + @setState + ' with ' + @task_at_hand.to_s + ' in ' + curr_state)
return true
rescue StandardError => e
log_exception(e)
end
# Validates +action+ against the container's current state and, when
# legal, records the corresponding desired state. Multi-step actions
# (restart, recreate, rebuild, reinstall) set @steps/@steps_to_go so
# clear_task_at_hand can advance through the sequence. Returns true on
# success, otherwise logs an error naming the state mismatch.
def in_progress(action)
if @steps_to_go.nil?
@steps_to_go = 1
elsif @steps_to_go <= 0
@steps_to_go = 1
end
curr_state = read_state
SystemDebug.debug(SystemDebug.engine_tasks, :read_state, curr_state)
# FIXME: find the source of the stray ":->:" marker instead of stripping it here.
curr_state.sub!(/\:->\:/,'')
case action
when :create
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :stop
return desired_state('stopped', curr_state) if curr_state== 'running'
when :start
return desired_state('running', curr_state) if curr_state== 'stopped'
when :pause
return desired_state('paused', curr_state) if curr_state== 'running'
when :restart
# Running containers restart as stop -> start (two steps).
if curr_state == 'running'
@steps = [:start,:stop]
@steps_to_go = 2
return desired_state('stopped', curr_state)
end
return desired_state('running', curr_state)
when :unpause
return desired_state('running', curr_state) if curr_state== 'paused'
when :recreate
# Stopped containers recreate as destroy -> create (two steps).
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('running', curr_state)
end
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :rebuild
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('nocontainer', curr_state)
end
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :reinstall
if curr_state== 'stopped'
@steps = [:create,:destroy]
@steps_to_go = 2
return desired_state('running', curr_state)
end
@steps_to_go = 2
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :build
return desired_state('running', curr_state) if curr_state== 'nocontainer'
when :delete
return desired_state('nocontainer', curr_state) if curr_state== 'stopped'
# desired_state('noimage')
when :destroy
return desired_state('nocontainer', curr_state) if curr_state== 'stopped' || curr_state== 'nocontainer'
end
return log_error_mesg('not in matching state want _' + tasks_final_state(action).to_s + '_but in ' + curr_state.class.name + ' ',curr_state )
# Perhaps ?return clear_task_at_hand
rescue StandardError => e
log_exception(e)
end
# Finalizes +action+: refreshes engine info, clears (or advances) the
# task at hand, persists state, and removes container configs after a
# delete. Returns true unless an exception is logged.
# NOTE(review): the guard compares against the *string* 'create', but
# other callers pass symbols (e.g. task_complete(:failed)) — confirm
# whether :create was meant to be skipped too.
def task_complete(action)
return if action == 'create'
@last_task = action
SystemDebug.debug(SystemDebug.engine_tasks, :task_complete, ' ', action.to_s + ' as action for task ' + task_at_hand.to_s + " " + @steps_to_go.to_s + 'steps to go ',@steps)
expire_engine_info
clear_task_at_hand
SystemDebug.debug(SystemDebug.builder, :last_task, @last_task)
save_state unless @last_task == :delete
# FIXME: kludge until a docker event listener handles this cleanup.
ContainerStateFiles.delete_container_configs(container) if @last_task == :delete
return true
rescue StandardError => e
log_exception(e)
end
# Returns the current task (string read from the task file), or nil when
# there is no task, the task has expired, or the container has already
# reached the task's final state (in which case the task is cleared).
def task_at_hand
fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
return nil unless File.exist?(fn)
task = File.read(fn)
if task_has_expired?(task)
expire_task_at_had
return nil
end
# raw=true here just documents the flag; it is passed positionally.
r = read_state(raw=true)
if tasks_final_state(task) == r
clear_task_at_hand
return nil
end
task
rescue StandardError => e
log_exception(e)
return nil
end
# Clears a task whose timeout has elapsed.
# NOTE(review): method name has a typo ("had" for "hand") but is kept —
# callers elsewhere use this exact name.
def expire_task_at_had
SystemDebug.debug(SystemDebug.engine_tasks, 'expire Task ' + @task_at_hand.to_s )
clear_task_at_hand
end
# Advances a multi-step task to its next step, or — when no steps remain —
# clears the task and removes the task file.
def clear_task_at_hand
@steps_to_go -= 1
if @steps_to_go > 0
SystemDebug.debug(SystemDebug.engine_tasks, 'Multistep Task ' + @task_at_hand.to_s )
@task_at_hand = @steps[@steps_to_go - 1]
SystemDebug.debug(SystemDebug.engine_tasks, 'next Multistep Task ' + @task_at_hand.to_s)
# f = File.new(ContainerStateFiles.container_state_dir(self) + '/task_at_hand','w+')
# f.write(@task_at_hand.to_s)
# f.close
else
SystemDebug.debug(SystemDebug.engine_tasks, 'cleared Task ' + @task_at_hand.to_s)
@task_at_hand = nil
fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
File.delete(fn) if File.exist?(fn)
end
rescue StandardError => e
# Deliberately not logged: the likely cause is the task file having been
# removed by another process that also received the end-of-task message
# (a permissions or disk error would be swallowed here too).
return true
end
# Waits until the task file's mtime changes (another process advanced the
# task), the task disappears, or the task-specific timeout elapses.
# Returns true when the task progressed or finished, false on timeout.
# Bug fixed: when the task completed and the file was deleted mid-wait,
# File.mtime raised Errno::ENOENT, which the blanket rescue turned into
# false (reported as a timeout) even though the task had succeeded.
def wait_for_task(task)
  attempts = 0
  timeout = task_set_timeout(task)
  SystemDebug.debug(SystemDebug.engine_tasks, :wait_for_task, task_at_hand)
  return true if task_at_hand.nil?
  fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
  fmtime = File.mtime(fn)
  while File.exist?(fn) && fmtime == File.mtime(fn)
    sleep(0.5)
    attempts += 1
    SystemDebug.debug(SystemDebug.engine_tasks, :wft_loop, ' ', task_at_hand)
    # Polling every 0.5s, so the timeout is expressed as 2 * seconds.
    return false if attempts > timeout * 2
  end
  # File touched or gone: the task has progressed.
  true
rescue Errno::ENOENT
  # File removed between the exist? check and the mtime read: task done.
  true
rescue StandardError => e
  log_exception(e)
  false
end
# Aborts the current task: clears it, captures the container API's last
# error, and records the failure via task_complete(:failed). Returns false.
# NOTE(review): clear_task_at_hand here plus the one inside task_complete
# decrements @steps_to_go twice — confirm this double-clear is intended.
def task_failed(msg)
clear_task_at_hand
SystemDebug.debug(SystemDebug.engine_tasks,:TASK_FAILES______Doing, @task_at_hand)
@last_error = @container_api.last_error unless @container_api.nil?
SystemDebug.debug(SystemDebug.engine_tasks, :WITH, @last_error.to_s, msg.to_s)
task_complete(:failed)
return false
rescue StandardError => e
log_exception(e)
end
# Blocks until the container's task file disappears or +timeout+ seconds
# elapse. Returns true when the file is gone, false on timeout.
# (The original used a local named `loop`, shadowing Kernel#loop.)
def wait_for_container_task(timeout = 90)
  fn = ContainerStateFiles.container_state_dir(self) + '/task_at_hand'
  return true unless File.exist?(fn)
  attempts = 0
  while File.exist?(fn)
    sleep(0.5)
    attempts += 1
    # Polling every 0.5s, so the timeout is expressed as 2 * seconds.
    return false if attempts > timeout * 2
  end
  true
rescue StandardError => e
  log_exception(e)
end
private
# Maps a task symbol to the container state expected once the task
# completes. Returns nil for unknown tasks.
def tasks_final_state(task)
  case task
  when :stop                then 'stopped'
  when :pause               then 'paused'
  when :restart             then 'stopped'
  when :delete              then 'nocontainer'
  when :destroy             then 'destroyed'
  when :create, :start, :unpause,
       :reinstall, :recreate,
       :rebuild, :build     then 'running'
  end
rescue StandardError => e
  log_exception(e)
end
# True when the task file is older than the task's timeout; the stale
# file is deleted as a side effect. Any error reading the file (e.g. it
# vanished because the task finished) also counts as expired.
def task_has_expired?(task)
fmtime = File.mtime(ContainerStateFiles.container_state_dir(self) + '/task_at_hand')
SystemDebug.debug(SystemDebug.engine_tasks, "mtime ", fmtime)
SystemDebug.debug(SystemDebug.engine_tasks, "timeout",task_set_timeout(task))
mtime = fmtime + task_set_timeout(task)
# NOTE(review): this "expires" debug line prints fmtime, not the computed
# expiry (mtime) — confirm which was intended.
SystemDebug.debug(SystemDebug.engine_tasks, "expires ",fmtime)
SystemDebug.debug(SystemDebug.engine_tasks, "now ", Time.now)
if mtime < Time.now
File.delete(ContainerStateFiles.container_state_dir(self) + '/task_at_hand')
return true
end
return false
# A missing file (mtime raising) means the task finished and the task
# file has already disappeared.
rescue StandardError => e
SystemDebug.debug(SystemDebug.engine_tasks, e, e.backtrace)
return true
end
# Returns the timeout in seconds for +task+, falling back to the default
# (20s) for unknown tasks. The full table is kept in @task_timeouts.
def task_set_timeout(task)
  @default_task_timeout = 20
  @task_timeouts = {
    :stop     => 60,
    :start    => 30,
    :restart  => 60,
    :recreate => 90,
    :create   => 90,
    :build    => 300,
    :rebuild  => 120,
    :pause    => 20,
    :unpause  => 20,
    :destroy  => 30,
    :delete   => 40,
    :running  => 40
  }
  # Unknown tasks get the default timeout.
  @task_timeouts[task.to_sym] = @default_task_timeout unless @task_timeouts.key?(task.to_sym)
  SystemDebug.debug(SystemDebug.engine_tasks, :timeout_set_for_task,task.to_sym, @task_timeouts[task.to_sym].to_s + 'secs')
  @task_timeouts[task.to_sym]
end
# Records +state+ as the task at hand, both in memory and in the task
# file used for cross-process coordination and mtime-based expiry.
# (The original used File.new/close, leaking the handle if write raised;
# the block form of File.open always closes it.)
def set_task_at_hand(state)
  @task_at_hand = state
  File.open(ContainerStateFiles.container_state_dir(self) + '/task_at_hand', 'w+') do |f|
    f.write(state)
  end
rescue StandardError => e
  log_exception(e)
end
end |
module SchemaGenerator
# Generates the draft-04 JSON schema for messages published on the
# notifications queue for a given content format.
class NotificationSchemaGenerator
def initialize(format, global_definitions)
@format = format
@global_definitions = global_definitions
end
# Returns the complete JSON schema as a Hash.
def generate
{
"$schema" => "http://json-schema.org/draft-04/schema#",
"type" => "object",
"additionalProperties" => false,
"required" => required,
"properties" => properties,
"definitions" => definitions,
}
end
private
attr_reader :format, :global_definitions
# Unpublishing formats only require the default fields; publishing
# formats additionally require the publishing-specific ones.
def required
if unpublishing_format?
default_required_properties.sort
else
(default_required_properties + publishing_required_properties).sort
end
end
def unpublishing_format?
%w[gone redirect vanish].include?(format.schema_name)
end
def properties
default_properties.merge(derived_properties)
end
def default_properties
Jsonnet.load("formats/shared/default_properties/notification.jsonnet")
end
# Properties whose definitions are derived from the format itself.
def derived_properties
{
"base_path" => format.base_path.definition,
"content_id" => format.content_id(frontend: false).definition,
"document_type" => format.document_type.definition,
"description" => format.description.definition,
"details" => format.details.definition,
"expanded_links" => ExpandedLinks.new(format).generate,
"links" => unexpanded_links,
"redirects" => format.redirects.definition,
"rendering_app" => format.rendering_app.definition,
"routes" => format.routes.definition,
"schema_name" => format.schema_name_definition,
"title" => format.title.definition,
}
end
def default_required_properties
%w[
base_path
content_id
locale
document_type
govuk_request_id
payload_version
schema_name
]
end
def publishing_required_properties
%w[
analytics_identifier
description
details
email_document_supertype
expanded_links
first_published_at
government_document_supertype
links
navigation_document_supertype
phase
public_updated_at
publishing_app
redirects
rendering_app
routes
title
update_type
user_journey_document_supertype
]
end
def definitions
all_definitions = global_definitions.merge(format.definitions)
ApplyChangeHistoryDefinitions.call(all_definitions)
DefinitionsResolver.new(properties, all_definitions).call
end
# Links object restricted to the known content/edition link types, each
# an array of content-id GUIDs.
def unexpanded_links
content_links = format.content_links.guid_properties
edition_links = format.edition_links.guid_properties
{
"type" => "object",
"additionalProperties" => false,
"properties" => content_links.merge(edition_links),
}
end
end
end
Deprecate supertypes that aren't used downstream
These supertypes used to be consumed by downstream apps (search-api and
content-data). We've done some work to remove this dependency, so that the
only app still left relying on finding supertypes via a notification is
email-alert-api.
Our strategy is that supertypes are a point-in-time feature of a document
based on its type. We should therefore devolve working these out to apps
that wish to use them, and minimise the complexity of everything else.
- [Content-data-api no longer looks for them](alphagov/content-data-api@48a5c97#diff-cbf8b25d6d853acf58fa9057841f407d29ffe90649c75cf9e3f51b2d6e3aa1d3)
- [Search-api works them out for itself](https://github.com/alphagov/search-api/blob/987fa2313c0b35528eb87b892dccbd1cfd692dca/lib/indexer/document_preparer.rb#L155)
- [Publishing-api still sends them, but we don't want it to](https://github.com/alphagov/publishing-api/blob/a6ec79b0a8d77b991e0dd192f89dd88f9fe0b687/app/presenters/edition_presenter.rb#L52)
Email alert api still uses government_document_supertype and email_document_supertype
so we'll leave them for another day.
https://trello.com/c/hp5BJD2i/117-unused-incorrect-supertypes-in-content-items
module SchemaGenerator
  # Generates the draft-04 JSON schema for messages published on the
  # notifications queue for a given content format.
  class NotificationSchemaGenerator
    def initialize(format, global_definitions)
      @format = format
      @global_definitions = global_definitions
    end

    # Returns the complete JSON schema as a Hash.
    def generate
      {
        "$schema" => "http://json-schema.org/draft-04/schema#",
        "type" => "object",
        "additionalProperties" => false,
        "required" => required,
        "properties" => properties,
        "definitions" => definitions,
      }
    end

    private

    attr_reader :format, :global_definitions

    # Unpublishing formats only require the default fields; publishing
    # formats additionally require the publishing-specific ones.
    def required
      return default_required_properties.sort if unpublishing_format?

      (default_required_properties + publishing_required_properties).sort
    end

    def unpublishing_format?
      %w[gone redirect vanish].include?(format.schema_name)
    end

    def properties
      default_properties.merge(derived_properties)
    end

    def default_properties
      Jsonnet.load("formats/shared/default_properties/notification.jsonnet")
    end

    # Properties whose definitions are derived from the format itself.
    def derived_properties
      {
        "base_path" => format.base_path.definition,
        "content_id" => format.content_id(frontend: false).definition,
        "document_type" => format.document_type.definition,
        "description" => format.description.definition,
        "details" => format.details.definition,
        "expanded_links" => ExpandedLinks.new(format).generate,
        "links" => unexpanded_links,
        "redirects" => format.redirects.definition,
        "rendering_app" => format.rendering_app.definition,
        "routes" => format.routes.definition,
        "schema_name" => format.schema_name_definition,
        "title" => format.title.definition,
      }
    end

    def default_required_properties
      %w[
        base_path
        content_id
        locale
        document_type
        govuk_request_id
        payload_version
        schema_name
      ]
    end

    def publishing_required_properties
      %w[
        analytics_identifier
        description
        details
        email_document_supertype
        expanded_links
        first_published_at
        government_document_supertype
        links
        phase
        public_updated_at
        publishing_app
        redirects
        rendering_app
        routes
        title
        update_type
      ]
    end

    def definitions
      all_definitions = global_definitions.merge(format.definitions)
      ApplyChangeHistoryDefinitions.call(all_definitions)
      DefinitionsResolver.new(properties, all_definitions).call
    end

    # Links object restricted to the known content/edition link types,
    # each an array of content-id GUIDs.
    def unexpanded_links
      link_properties = format.content_links.guid_properties.merge(format.edition_links.guid_properties)
      {
        "type" => "object",
        "additionalProperties" => false,
        "properties" => link_properties,
      }
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.