CombinedText stringlengths 4 3.42M |
|---|
# Homebrew formula: docfx 2.35, a Mono-based documentation generator for
# .NET projects, distributed as a pre-built zip of binaries.
class Docfx < Formula
desc "Tools for building and publishing API documentation for .NET projects"
homepage "https://dotnet.github.io/docfx/"
url "https://github.com/dotnet/docfx/releases/download/v2.35/docfx.zip"
sha256 "738d3ea13e5c14143f986c568c0c4b0bb0cb9a7cbb75515e70c91558b2735c5a"
# Upstream ships pre-built binaries; nothing to compile, so no bottle.
bottle :unneeded
depends_on "mono"
def install
# Keep the whole release payload under libexec and expose only a wrapper.
libexec.install Dir["*"]
# Wrapper script runs the bundled CLR executable through mono.
(bin/"docfx").write <<~EOS
#!/bin/bash
mono #{libexec}/docfx.exe "$@"
EOS
end
test do
# "init -q" scaffolds a project non-interactively; verify the generated
# docfx.json exists.
system bin/"docfx", "init", "-q"
assert_predicate testpath/"docfx_project/docfx.json", :exist?,
"Failed to generate project"
end
end
docfx 2.35.1 (#27113)
# Homebrew formula: docfx 2.35.1 (version bump of the 2.35 formula; same
# Mono wrapper layout, only url/sha256 changed).
class Docfx < Formula
desc "Tools for building and publishing API documentation for .NET projects"
homepage "https://dotnet.github.io/docfx/"
url "https://github.com/dotnet/docfx/releases/download/v2.35.1/docfx.zip"
sha256 "30d4d2eaff5801874676d4cd1ec9d27d16b9b0559b1a234f18aa142c52a34461"
# Upstream ships pre-built binaries; nothing to compile, so no bottle.
bottle :unneeded
depends_on "mono"
def install
# Keep the whole release payload under libexec and expose only a wrapper.
libexec.install Dir["*"]
(bin/"docfx").write <<~EOS
#!/bin/bash
mono #{libexec}/docfx.exe "$@"
EOS
end
test do
# Scaffold a project non-interactively and check the generated config.
system bin/"docfx", "init", "-q"
assert_predicate testpath/"docfx_project/docfx.json", :exist?,
"Failed to generate project"
end
end
|
# Homebrew formula: doctl 1.71.0, built from source with Go; the version is
# injected at link time and shell completions are generated from the binary.
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
url "https://github.com/digitalocean/doctl/archive/v1.71.0.tar.gz"
sha256 "62a14c05bd4b8442f0a610f9eba7237fe67e1afc397750893cdf2b741aebfd54"
license "Apache-2.0"
head "https://github.com/digitalocean/doctl.git", branch: "main"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "38ff8eb380adf54840f16b346f07cfef339e57065aac54ed474cf9dde1bd1be8"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "16a80ceb75a97aa896c53a9afd3dc03badfcd9f0bcffbd52179b196f5944c133"
sha256 cellar: :any_skip_relocation, monterey: "87766c760569b86f5050a3eb94bb01b5bd66aa814b2cc5b85005d652050e6fff"
sha256 cellar: :any_skip_relocation, big_sur: "f2347ee26d1e9e7082eab370564fd706009d7695b6570b538b73602faef902a4"
sha256 cellar: :any_skip_relocation, catalina: "b17a08925d8b86f8003e789a2b61722b5b9d5d95af2e0788e5e48577fbda2809"
sha256 cellar: :any_skip_relocation, x86_64_linux: "f86834dd9a1a937b238a193069b901a435d9d92b3cbefd1dc7661d14619486a2"
end
depends_on "go" => :build
def install
# -X ldflags stamp the version components into the doctl package at link
# time, so `doctl version` reports the formula version.
base_flag = "-X github.com/digitalocean/doctl"
ldflags = %W[
#{base_flag}.Major=#{version.major}
#{base_flag}.Minor=#{version.minor}
#{base_flag}.Patch=#{version.patch}
#{base_flag}.Label=release
]
system "go", "build", *std_go_args(ldflags: ldflags), "./cmd/doctl"
# Completions are produced by running the freshly built binary.
(bash_completion/"doctl").write `#{bin}/doctl completion bash`
(zsh_completion/"_doctl").write `#{bin}/doctl completion zsh`
(fish_completion/"doctl.fish").write `#{bin}/doctl completion fish`
end
test do
assert_match "doctl version #{version}-release", shell_output("#{bin}/doctl version")
end
end
doctl: update 1.71.0 bottle.
# Homebrew formula: doctl 1.71.0 — bottle-update revision (only the bottle
# sha256 values differ from the preceding 1.71.0 formula).
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
url "https://github.com/digitalocean/doctl/archive/v1.71.0.tar.gz"
sha256 "62a14c05bd4b8442f0a610f9eba7237fe67e1afc397750893cdf2b741aebfd54"
license "Apache-2.0"
head "https://github.com/digitalocean/doctl.git", branch: "main"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "7e0bba29801fe489ae24a4a91b31ffe214226fad1077618b7e4426d2781ed6e3"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "d0bd63ff07ea54e37dbc13484f0712219a6dc80b3b5bd79a7c5ea232e64bc03b"
sha256 cellar: :any_skip_relocation, monterey: "12c2dbbd07f350ac7cca39ebd2a8f50fd721546f488e7ba94051a17c2c8885f5"
sha256 cellar: :any_skip_relocation, big_sur: "e50f12562d19cc80d5290c8d17803470913e387bb879ff9091e09d6218536d4d"
sha256 cellar: :any_skip_relocation, catalina: "e901c0246406034f9bf66f65365242b929cfefa2c8a01dc33918f717219eb385"
sha256 cellar: :any_skip_relocation, x86_64_linux: "8f03ba48bb5d2e99b179515defddeb3f5a53737f6c80d82004ca191427128e68"
end
depends_on "go" => :build
def install
# -X ldflags stamp the version into the binary at link time.
base_flag = "-X github.com/digitalocean/doctl"
ldflags = %W[
#{base_flag}.Major=#{version.major}
#{base_flag}.Minor=#{version.minor}
#{base_flag}.Patch=#{version.patch}
#{base_flag}.Label=release
]
system "go", "build", *std_go_args(ldflags: ldflags), "./cmd/doctl"
# Completions are produced by running the freshly built binary.
(bash_completion/"doctl").write `#{bin}/doctl completion bash`
(zsh_completion/"_doctl").write `#{bin}/doctl completion zsh`
(fish_completion/"doctl.fish").write `#{bin}/doctl completion fish`
end
test do
assert_match "doctl version #{version}-release", shell_output("#{bin}/doctl version")
end
end
|
# Linuxbrew-era doctl 1.8.3 formula: installs the pre-built linux-amd64
# binary; other architectures get a self-referencing placeholder URL so the
# formula parses but install aborts with a clear message.
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
stable do
if OS.linux?
case Hardware::CPU.arch
when :x86_64
url "https://github.com/digitalocean/doctl/releases/download/v1.8.3/doctl-1.8.3-linux-amd64.tar.gz"
version "1.8.3"
sha256 "e9eea4b2343ae8e43adc5a9c878ec4b51cfecb389059b8e385ab5bc1f5036851"
when :arm
# No upstream binary for this arch: point at this very file so the
# DSL is satisfied; install detects the sentinel and aborts.
url "file://#{__FILE__}"
version "1.8.3"
end
end
end
def install
odie "Platform not supported." if active_spec.url == "file://#{__FILE__}"
bin.install "doctl"
end
test do
system bin/"doctl", "version"
end
end
doctl 1.11.0
# Linuxbrew-era doctl 1.11.0 formula (version bump of the 1.8.3 binary
# formula; same self-referencing placeholder trick for unsupported arches).
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
stable do
if OS.linux?
case Hardware::CPU.arch
when :x86_64
url "https://github.com/digitalocean/doctl/releases/download/v1.11.0/doctl-1.11.0-linux-amd64.tar.gz"
version "1.11.0"
sha256 "4d17fdb4c225f08db0087addc5c7dafca5561de3b6620a56b19bf95935e14128"
when :arm
# Sentinel URL; install aborts when it sees this value.
url "file://#{__FILE__}"
version "1.11.0"
end
end
end
def install
odie "Platform not supported." if active_spec.url == "file://#{__FILE__}"
bin.install "doctl"
end
test do
system bin/"doctl", "version"
end
end
|
# Homebrew formula: doctl 1.51.0, built from source with Go. Uses the older
# bottle DSL (`cellar :any_skip_relocation` / `=> :catalina`) and the older
# std_go_args form where ldflags are joined and passed manually.
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
url "https://github.com/digitalocean/doctl/archive/v1.51.0.tar.gz"
sha256 "dec16ab128c2724eebfeadac05dd4554cd007a57998d3aae3009c71196578760"
license "Apache-2.0"
head "https://github.com/digitalocean/doctl.git"
bottle do
cellar :any_skip_relocation
sha256 "ab02daf972403a8f59066bd88f86797ebac3357330743b1fa7705969ef71dcf6" => :catalina
sha256 "b05021938d7b7a121257cab476f703b105ec7334b6d0929816193aabe2f52a53" => :mojave
sha256 "471f81ec8f53fe3d6f921426069b36fbd93247544ac589f383c3889681247e1e" => :high_sierra
end
depends_on "go" => :build
def install
# -X ldflags stamp the version components into the binary at link time.
base_flag = "-X github.com/digitalocean/doctl"
ldflags = %W[
#{base_flag}.Major=#{version.major}
#{base_flag}.Minor=#{version.minor}
#{base_flag}.Patch=#{version.patch}
#{base_flag}.Label=release
].join(" ")
system "go", "build", "-ldflags", ldflags, *std_go_args, "github.com/digitalocean/doctl/cmd/doctl"
# Completions are produced by running the freshly built binary.
(bash_completion/"doctl").write `#{bin}/doctl completion bash`
(zsh_completion/"_doctl").write `#{bin}/doctl completion zsh`
(fish_completion/"doctl.fish").write `#{bin}/doctl completion fish`
end
test do
assert_match "doctl version #{version}-release", shell_output("#{bin}/doctl version")
end
end
doctl 1.52.0
Closes #64500.
Signed-off-by: chenrui <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula: doctl 1.52.0 (version bump of 1.51.0; url/sha256 changed).
class Doctl < Formula
desc "Command-line tool for DigitalOcean"
homepage "https://github.com/digitalocean/doctl"
url "https://github.com/digitalocean/doctl/archive/v1.52.0.tar.gz"
sha256 "e4a0a08d5be545b9bb035ed377c0b7e7705e217bf55d4f4423d7dc8afb0a5819"
license "Apache-2.0"
head "https://github.com/digitalocean/doctl.git"
bottle do
cellar :any_skip_relocation
# NOTE(review): these three bottle sha256 values are byte-identical to the
# 1.51.0 formula's — they look stale pending a bottle rebuild for 1.52.0.
sha256 "ab02daf972403a8f59066bd88f86797ebac3357330743b1fa7705969ef71dcf6" => :catalina
sha256 "b05021938d7b7a121257cab476f703b105ec7334b6d0929816193aabe2f52a53" => :mojave
sha256 "471f81ec8f53fe3d6f921426069b36fbd93247544ac589f383c3889681247e1e" => :high_sierra
end
depends_on "go" => :build
def install
# -X ldflags stamp the version components into the binary at link time.
base_flag = "-X github.com/digitalocean/doctl"
ldflags = %W[
#{base_flag}.Major=#{version.major}
#{base_flag}.Minor=#{version.minor}
#{base_flag}.Patch=#{version.patch}
#{base_flag}.Label=release
].join(" ")
system "go", "build", "-ldflags", ldflags, *std_go_args, "github.com/digitalocean/doctl/cmd/doctl"
# Completions are produced by running the freshly built binary.
(bash_completion/"doctl").write `#{bin}/doctl completion bash`
(zsh_completion/"_doctl").write `#{bin}/doctl completion zsh`
(fish_completion/"doctl.fish").write `#{bin}/doctl completion fish`
end
test do
assert_match "doctl version #{version}-release", shell_output("#{bin}/doctl version")
end
end
|
# Homebrew formula: dps8m 1.0, a simulator for the Multics DPS-8/M mainframe.
# The test drives the interactive simulator with an expect script.
class Dps8m < Formula
desc "Simulator for the Multics dps-8/m mainframe"
homepage "https://ringzero.wikidot.com"
url "https://downloads.sourceforge.net/project/dps8m/Release%201.0/source.tgz"
# Tarball name carries no version; declare it explicitly.
version "1.0"
sha256 "51088dd91de888b918644c431eec22318640d28eb3050d9c01cd072aa7cca3c7"
head "https://github.com/charlesUnixPro/dps8m.git"
bottle do
cellar :any
sha256 "0c436100ea3f71f5e7fcb2a39ca6d6e08a4981b661117e6e8d18685df3c8d040" => :high_sierra
sha256 "eab4df90e0f3573923e9baab97f0685d793a5ae200f167be8f99fc1ec957b60a" => :sierra
sha256 "f079087bc567e575f5c7d83890d84cf872a58a3707fb08467b4eda2b6a10ccd7" => :el_capitan
end
depends_on "libuv"
def install
# Reported 23 Jul 2017 "make dosn't create bin directory"
# See https://sourceforge.net/p/dps8m/mailman/message/35960505/
bin.mkpath
system "make", "INSTALL_ROOT=#{prefix}", "install"
end
test do
# Expect script: wait for the "sim>" prompt, ask for help, confirm the
# SKIPBOOT command is listed, then quit; each timeout exits nonzero.
(testpath/"test.exp").write <<~EOS
spawn #{bin}/dps8
set timeout 5
expect {
timeout { exit 1 }
"sim>"
}
send "help\r"
expect {
timeout { exit 2 }
"SKIPBOOT"
}
send "q\r"
expect {
timeout { exit 3 }
eof
}
EOS
assert_equal "Goodbye", shell_output("expect -f test.exp").lines.last.chomp
end
end
dps8m: update 1.0 bottle.
# Homebrew formula: dps8m 1.0 — bottle-update revision (adds a mojave bottle
# to the otherwise unchanged 1.0 formula).
class Dps8m < Formula
desc "Simulator for the Multics dps-8/m mainframe"
homepage "https://ringzero.wikidot.com"
url "https://downloads.sourceforge.net/project/dps8m/Release%201.0/source.tgz"
# Tarball name carries no version; declare it explicitly.
version "1.0"
sha256 "51088dd91de888b918644c431eec22318640d28eb3050d9c01cd072aa7cca3c7"
head "https://github.com/charlesUnixPro/dps8m.git"
bottle do
cellar :any
sha256 "b238a69a4da50c3b951981e5b3c6e4683aeea100acbd0dacf449e723422c649d" => :mojave
sha256 "0c436100ea3f71f5e7fcb2a39ca6d6e08a4981b661117e6e8d18685df3c8d040" => :high_sierra
sha256 "eab4df90e0f3573923e9baab97f0685d793a5ae200f167be8f99fc1ec957b60a" => :sierra
sha256 "f079087bc567e575f5c7d83890d84cf872a58a3707fb08467b4eda2b6a10ccd7" => :el_capitan
end
depends_on "libuv"
def install
# Reported 23 Jul 2017 "make dosn't create bin directory"
# See https://sourceforge.net/p/dps8m/mailman/message/35960505/
bin.mkpath
system "make", "INSTALL_ROOT=#{prefix}", "install"
end
test do
# Drive the interactive simulator with expect; timeouts exit nonzero.
(testpath/"test.exp").write <<~EOS
spawn #{bin}/dps8
set timeout 5
expect {
timeout { exit 1 }
"sim>"
}
send "help\r"
expect {
timeout { exit 2 }
"SKIPBOOT"
}
send "q\r"
expect {
timeout { exit 3 }
eof
}
EOS
assert_equal "Goodbye", shell_output("expect -f test.exp").lines.last.chomp
end
end
|
# Homebrew formula: draco 1.3.5, Google's 3D mesh / point-cloud compression
# library, built out-of-source with CMake.
class Draco < Formula
desc "3D geometric mesh and point cloud compression library"
homepage "https://google.github.io/draco/"
url "https://github.com/google/draco/archive/1.3.5.tar.gz"
sha256 "a3ac692397574166ad99493ff1efcd7b5c69b580e7eb4500d5d181b2f676aa6e"
bottle do
cellar :any_skip_relocation
sha256 "64a93005a9d93ee47b55138b99258e1bc2412c350f5fa0956343ad860b6dd167" => :catalina
sha256 "c1bae68f6c01adcce21376bd26f66e9274fa5e733eed5b2e1e032a61e2641cef" => :mojave
sha256 "fd67b398a18f03ed070ceb6cd3dccd58ce761e2030f6bc75599df3fb49a4d8bf" => :high_sierra
sha256 "13422b36cce4d3e1541441d9a5c82539f13cbe0215cfab8b30c4ffaf43a12931" => :sierra
end
depends_on "cmake" => :build
def install
mkdir "build" do
# Fixed: splat operator was written "* std_cmake_args" with a stray
# space; the conventional (and unambiguous) form is "*std_cmake_args".
system "cmake", "..", *std_cmake_args
system "make", "install"
end
# Ship a sample mesh so the test block has a real input file.
pkgshare.install "testdata/cube_att.ply"
end
test do
system "#{bin}/draco_encoder", "-i", "#{pkgshare}/cube_att.ply",
"-o", "cube_att.drc"
assert_predicate testpath/"cube_att.drc", :exist?
end
end
draco 1.3.6
Closes #51097.
Signed-off-by: Bo Anderson <1d6e1cf70ec6f9ab28d3ea4b27a49a77654d370e@boanderson.me>
# Homebrew formula: draco 1.3.6 (version bump of 1.3.5; url/sha256 changed).
class Draco < Formula
desc "3D geometric mesh and point cloud compression library"
homepage "https://google.github.io/draco/"
url "https://github.com/google/draco/archive/1.3.6.tar.gz"
sha256 "80eaa54ef5fc687c9aeebb9bd24d936d3e6d2c6048f358be8b83fa088ef4b2cb"
bottle do
cellar :any_skip_relocation
# NOTE(review): these bottle sha256 values are byte-identical to the
# 1.3.5 formula's — stale pending a bottle rebuild for 1.3.6.
sha256 "64a93005a9d93ee47b55138b99258e1bc2412c350f5fa0956343ad860b6dd167" => :catalina
sha256 "c1bae68f6c01adcce21376bd26f66e9274fa5e733eed5b2e1e032a61e2641cef" => :mojave
sha256 "fd67b398a18f03ed070ceb6cd3dccd58ce761e2030f6bc75599df3fb49a4d8bf" => :high_sierra
sha256 "13422b36cce4d3e1541441d9a5c82539f13cbe0215cfab8b30c4ffaf43a12931" => :sierra
end
depends_on "cmake" => :build
def install
mkdir "build" do
# Fixed: splat operator was written "* std_cmake_args" with a stray
# space; the conventional (and unambiguous) form is "*std_cmake_args".
system "cmake", "..", *std_cmake_args
system "make", "install"
end
# Ship a sample mesh so the test block has a real input file.
pkgshare.install "testdata/cube_att.ply"
end
test do
system "#{bin}/draco_encoder", "-i", "#{pkgshare}/cube_att.ply",
"-o", "cube_att.drc"
assert_predicate testpath/"cube_att.drc", :exist?
end
end
|
# Homebrew formula: dwarf-ng 0.3.2, an object-file manipulation tool. The
# config-file path is rewritten into the Homebrew prefix before building.
class Dwarf < Formula
desc "Object file manipulation tool"
homepage "https://github.com/elboza/dwarf-ng/"
url "https://github.com/elboza/dwarf-ng/archive/dwarf-0.3.2.tar.gz"
sha256 "dc3db5273c02f0b05beedada0a3935af0c60c011d7213f37c39f43ce088f9436"
bottle do
sha256 "ef1d01f93b35c4661d08b9a8a710cd99092a533735c646db2e205c8f2db93b95" => :high_sierra
sha256 "84c3d641587619c55073a819edf62b23a6437aeed72075e257272df685e226aa" => :sierra
sha256 "053fba2171b46fe1d9fd22f52ca5eee61e462682e2b9340c671505e5351fd5d6" => :el_capitan
sha256 "1a798403cb54f055465e16fe67e7db63dd693ee993d0a871c32c6f143621d7f3" => :yosemite
end
depends_on "flex"
depends_on "readline"
def install
# Point the hard-coded /etc/dwarfrc path at Homebrew's etc in both the
# source and the man pages.
%w[src/libdwarf.c doc/dwarf.man doc/xdwarf.man.html].each do |f|
inreplace f, "/etc/dwarfrc", etc/"dwarfrc"
end
system "make"
system "make", "install", "BINDIR=#{bin}", "MANDIR=#{man1}"
end
test do
# Compile a trivial Mach-O binary and check that dwarf reads its magic.
(testpath/"test.c").write <<-EOS.undent
#include <stdio.h>
int main(int argc, char *argv[]) {
printf("hello world\\n");
}
EOS
system ENV.cc, "test.c", "-o", "test"
output = shell_output("#{bin}/dwarf -c 'print $mac' test")
assert_equal "magic: 0xfeedfacf (-17958193)", output.lines[1].chomp
end
end
dwarf 0.4.0
Closes #18645.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula: dwarf-ng 0.4.0 (version bump; the test uses the new
# `pp` command and the magic line moved to the first output line).
class Dwarf < Formula
desc "Object file manipulation tool"
homepage "https://github.com/elboza/dwarf-ng/"
url "https://github.com/elboza/dwarf-ng/archive/dwarf-0.4.0.tar.gz"
sha256 "a64656f53ded5166041ae25cc4b1ad9ab5046a5c4d4c05b727447e73c0d83da0"
bottle do
# NOTE(review): these bottle sha256 values are byte-identical to the
# 0.3.2 formula's — they look stale pending a bottle rebuild for 0.4.0.
sha256 "ef1d01f93b35c4661d08b9a8a710cd99092a533735c646db2e205c8f2db93b95" => :high_sierra
sha256 "84c3d641587619c55073a819edf62b23a6437aeed72075e257272df685e226aa" => :sierra
sha256 "053fba2171b46fe1d9fd22f52ca5eee61e462682e2b9340c671505e5351fd5d6" => :el_capitan
sha256 "1a798403cb54f055465e16fe67e7db63dd693ee993d0a871c32c6f143621d7f3" => :yosemite
end
depends_on "flex"
depends_on "readline"
def install
# Point the hard-coded /etc/dwarfrc path at Homebrew's etc in both the
# source and the man pages.
%w[src/libdwarf.c doc/dwarf.man doc/xdwarf.man.html].each do |f|
inreplace f, "/etc/dwarfrc", etc/"dwarfrc"
end
system "make"
system "make", "install", "BINDIR=#{bin}", "MANDIR=#{man1}"
end
test do
# Compile a trivial Mach-O binary and check that dwarf reads its magic.
(testpath/"test.c").write <<-EOS.undent
#include <stdio.h>
int main(int argc, char *argv[]) {
printf("hello world\\n");
}
EOS
system ENV.cc, "test.c", "-o", "test"
output = shell_output("#{bin}/dwarf -c 'pp $mac' test")
assert_equal "magic: 0xfeedfacf (-17958193)", output.lines[0].chomp
end
end
|
# Homebrew formula: GNU Emacs 26.3, built terminal-only (no X11, no NS GUI)
# with gnutls, libxml2 and dynamic-module support.
class Emacs < Formula
desc "GNU Emacs text editor"
homepage "https://www.gnu.org/software/emacs/"
url "https://ftp.gnu.org/gnu/emacs/emacs-26.3.tar.xz"
mirror "https://ftpmirror.gnu.org/emacs/emacs-26.3.tar.xz"
sha256 "4d90e6751ad8967822c6e092db07466b9d383ef1653feb2f95c93e7de66d3485"
bottle do
sha256 "9ab33f4386ca5f7326a8c28da1324556ec990f682a7ca88641203da0b42dbdae" => :catalina
sha256 "8162a26246de7db44c53ea0d0ef0a806140318d19c69e8e5e33aa88ce7e823a8" => :mojave
sha256 "6a2629b6deddf99f81abb1990ecd6c87f0242a0eecbb6b6c2e4c3540e421d4c4" => :high_sierra
sha256 "2a47477e71766d7dd6b16c29ad5ba71817ed80d06212e3261ef3c776e7e9f5a2" => :sierra
end
head do
url "https://github.com/emacs-mirror/emacs.git"
depends_on "autoconf" => :build
depends_on "gnu-sed" => :build
depends_on "texinfo" => :build
end
# Removed `disable! if ENV["CI"]`: conditionally disabling a formula on an
# environment variable makes it uninstallable in any CI environment and is
# not a valid use of disable!, which describes the formula itself.
depends_on "pkg-config" => :build
depends_on "gnutls"
uses_from_macos "libxml2"
uses_from_macos "ncurses"
# Removed the duplicated `depends_on "jpeg" unless OS.mac?` line: the
# on_linux block below already declares the same Linux-only dependency.
on_linux do
depends_on "jpeg"
end
def install
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--enable-locallisppath=#{HOMEBREW_PREFIX}/share/emacs/site-lisp
--infodir=#{info}/emacs
--prefix=#{prefix}
--with-gnutls
--without-x
--with-xml2
--without-dbus
--with-modules
--without-ns
--without-imagemagick
]
if build.head?
# HEAD builds bootstrap with autogen.sh, which needs GNU sed.
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
system "./configure", *args
system "make"
system "make", "install"
# Follow MacPorts and don't install ctags from Emacs. This allows Vim
# and Emacs and ctags to play together without violence.
(bin/"ctags").unlink
(man1/"ctags.1.gz").unlink
end
plist_options :manual => "emacs"
# launchd service definition: keep an emacs daemon running in the foreground.
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/emacs</string>
<string>--fg-daemon</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
# Batch-mode evaluation: (+ 2 2) must print 4.
assert_equal "4", shell_output("#{bin}/emacs --batch --eval=\"(print (+ 2 2))\"").strip
end
end
emacs: remove duplicated dependency (#20596)
# Homebrew formula: GNU Emacs 26.3 (deduplicated-dependency revision),
# built terminal-only with gnutls, libxml2 and dynamic-module support.
class Emacs < Formula
desc "GNU Emacs text editor"
homepage "https://www.gnu.org/software/emacs/"
url "https://ftp.gnu.org/gnu/emacs/emacs-26.3.tar.xz"
mirror "https://ftpmirror.gnu.org/emacs/emacs-26.3.tar.xz"
sha256 "4d90e6751ad8967822c6e092db07466b9d383ef1653feb2f95c93e7de66d3485"
bottle do
sha256 "9ab33f4386ca5f7326a8c28da1324556ec990f682a7ca88641203da0b42dbdae" => :catalina
sha256 "8162a26246de7db44c53ea0d0ef0a806140318d19c69e8e5e33aa88ce7e823a8" => :mojave
sha256 "6a2629b6deddf99f81abb1990ecd6c87f0242a0eecbb6b6c2e4c3540e421d4c4" => :high_sierra
sha256 "2a47477e71766d7dd6b16c29ad5ba71817ed80d06212e3261ef3c776e7e9f5a2" => :sierra
end
head do
url "https://github.com/emacs-mirror/emacs.git"
depends_on "autoconf" => :build
depends_on "gnu-sed" => :build
depends_on "texinfo" => :build
end
# Removed `disable! if ENV["CI"]`: conditionally disabling a formula on an
# environment variable makes it uninstallable in any CI environment and is
# not a valid use of disable!, which describes the formula itself.
depends_on "pkg-config" => :build
depends_on "gnutls"
uses_from_macos "libxml2"
uses_from_macos "ncurses"
on_linux do
depends_on "jpeg"
end
def install
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--enable-locallisppath=#{HOMEBREW_PREFIX}/share/emacs/site-lisp
--infodir=#{info}/emacs
--prefix=#{prefix}
--with-gnutls
--without-x
--with-xml2
--without-dbus
--with-modules
--without-ns
--without-imagemagick
]
if build.head?
# HEAD builds bootstrap with autogen.sh, which needs GNU sed.
ENV.prepend_path "PATH", Formula["gnu-sed"].opt_libexec/"gnubin"
system "./autogen.sh"
end
system "./configure", *args
system "make"
system "make", "install"
# Follow MacPorts and don't install ctags from Emacs. This allows Vim
# and Emacs and ctags to play together without violence.
(bin/"ctags").unlink
(man1/"ctags.1.gz").unlink
end
plist_options :manual => "emacs"
# launchd service definition: keep an emacs daemon running in the foreground.
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/emacs</string>
<string>--fg-daemon</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
# Batch-mode evaluation: (+ 2 2) must print 4.
assert_equal "4", shell_output("#{bin}/emacs --batch --eval=\"(print (+ 2 2))\"").strip
end
end
|
# Homebrew formula: exiv2 0.27.2, EXIF/IPTC metadata library and CLI tools,
# built with CMake with XMP, PNG, NLS, webready/curl and ssh support.
class Exiv2 < Formula
desc "EXIF and IPTC metadata manipulation library and tools"
homepage "https://www.exiv2.org/"
url "https://www.exiv2.org/builds/exiv2-0.27.2-Source.tar.gz"
sha256 "2652f56b912711327baff6dc0c90960818211cf7ab79bb5e1eb59320b78d153f"
head "https://github.com/Exiv2/exiv2.git"
bottle do
cellar :any
sha256 "3b78b8fbffcc6d62685bc4a9a0a51855f5ccf6fe7fabc866f0970e1a12ced0b4" => :mojave
sha256 "8a4e65d47307247b11127c00cdad18626425eafb271faaeb1c076beb57298e12" => :high_sierra
sha256 "fe386bc9bfe7270655a6b3163f8e33a6fc6e6f36512e6ac6e6a49a1650a6a485" => :sierra
end
depends_on "cmake" => :build
depends_on "gettext"
depends_on "libssh"
def install
args = std_cmake_args
# Fixed: -DEXIV2_ENABLE_VIDEO=ON was listed twice in this array; the
# duplicate has been removed.
args += %W[
-DEXIV2_ENABLE_XMP=ON
-DEXIV2_ENABLE_VIDEO=ON
-DEXIV2_ENABLE_PNG=ON
-DEXIV2_ENABLE_NLS=ON
-DEXIV2_ENABLE_PRINTUCS2=ON
-DEXIV2_ENABLE_LENSDATA=ON
-DEXIV2_ENABLE_WEBREADY=ON
-DEXIV2_ENABLE_CURL=ON
-DEXIV2_ENABLE_SSH=ON
-DEXIV2_BUILD_SAMPLES=OFF
-DSSH_LIBRARY=#{Formula["libssh"].opt_lib}/libssh.dylib
-DSSH_INCLUDE_DIR=#{Formula["libssh"].opt_include}
..
]
mkdir "build.cmake" do
system "cmake", "-G", "Unix Makefiles", ".", *args
system "make", "install"
end
end
test do
# exiv2 exits 253 when a file has no metadata; still prints the size.
assert_match "288 Bytes", shell_output("#{bin}/exiv2 #{test_fixtures("test.jpg")}", 253)
end
end
exiv2: update 0.27.2 bottle.
# Homebrew formula: exiv2 0.27.2 — bottle-update revision (adds a catalina
# bottle to the otherwise unchanged 0.27.2 formula).
class Exiv2 < Formula
desc "EXIF and IPTC metadata manipulation library and tools"
homepage "https://www.exiv2.org/"
url "https://www.exiv2.org/builds/exiv2-0.27.2-Source.tar.gz"
sha256 "2652f56b912711327baff6dc0c90960818211cf7ab79bb5e1eb59320b78d153f"
head "https://github.com/Exiv2/exiv2.git"
bottle do
cellar :any
sha256 "fdadbb93ae659a651b2e7b899c3c2bf8910b8f0891661903a04a4c81a66ff534" => :catalina
sha256 "3b78b8fbffcc6d62685bc4a9a0a51855f5ccf6fe7fabc866f0970e1a12ced0b4" => :mojave
sha256 "8a4e65d47307247b11127c00cdad18626425eafb271faaeb1c076beb57298e12" => :high_sierra
sha256 "fe386bc9bfe7270655a6b3163f8e33a6fc6e6f36512e6ac6e6a49a1650a6a485" => :sierra
end
depends_on "cmake" => :build
depends_on "gettext"
depends_on "libssh"
def install
args = std_cmake_args
# Fixed: -DEXIV2_ENABLE_VIDEO=ON was listed twice in this array; the
# duplicate has been removed.
args += %W[
-DEXIV2_ENABLE_XMP=ON
-DEXIV2_ENABLE_VIDEO=ON
-DEXIV2_ENABLE_PNG=ON
-DEXIV2_ENABLE_NLS=ON
-DEXIV2_ENABLE_PRINTUCS2=ON
-DEXIV2_ENABLE_LENSDATA=ON
-DEXIV2_ENABLE_WEBREADY=ON
-DEXIV2_ENABLE_CURL=ON
-DEXIV2_ENABLE_SSH=ON
-DEXIV2_BUILD_SAMPLES=OFF
-DSSH_LIBRARY=#{Formula["libssh"].opt_lib}/libssh.dylib
-DSSH_INCLUDE_DIR=#{Formula["libssh"].opt_include}
..
]
mkdir "build.cmake" do
system "cmake", "-G", "Unix Makefiles", ".", *args
system "make", "install"
end
end
test do
# exiv2 exits 253 when a file has no metadata; still prints the size.
assert_match "288 Bytes", shell_output("#{bin}/exiv2 #{test_fixtures("test.jpg")}", 253)
end
end
|
# Homebrew formula: expat 2.2.2, the reference XML 1.0 parser. Keg-only
# because macOS ships its own (older) Expat.
class Expat < Formula
desc "XML 1.0 parser"
homepage "https://libexpat.github.io/"
url "https://downloads.sourceforge.net/project/expat/expat/2.2.2/expat-2.2.2.tar.bz2"
mirror "https://fossies.org/linux/www/expat-2.2.2.tar.bz2"
sha256 "4376911fcf81a23ebd821bbabc26fd933f3ac74833f74924342c29aad2c86046"
head "https://github.com/libexpat/libexpat.git"
bottle do
cellar :any
sha256 "5703d1c1102f1228d6112eec9e127a43568bcf6ad2c5500195cfa6f14245c94a" => :sierra
sha256 "31d368c2d0fbeee449e47af09aa28bc81b4f820a5ec0f240d0da2701fcfb2122" => :el_capitan
sha256 "d58a80f9cca7e993f6b001fb0defeb68ced7b9cd05c39a787a355a4bf915d5f2" => :yosemite
end
keg_only :provided_by_osx, "macOS includes Expat 1.5"
def install
system "./configure", "--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
# Compile a minimal expat client: parse one element and echo the tag
# name and character data through handler callbacks.
(testpath/"test.c").write <<-EOS.undent
#include <stdio.h>
#include "expat.h"
static void XMLCALL my_StartElementHandler(
void *userdata,
const XML_Char *name,
const XML_Char **atts)
{
printf("tag:%s|", name);
}
static void XMLCALL my_CharacterDataHandler(
void *userdata,
const XML_Char *s,
int len)
{
printf("data:%.*s|", len, s);
}
int main()
{
static const char str[] = "<str>Hello, world!</str>";
int result;
XML_Parser parser = XML_ParserCreate("utf-8");
XML_SetElementHandler(parser, my_StartElementHandler, NULL);
XML_SetCharacterDataHandler(parser, my_CharacterDataHandler);
result = XML_Parse(parser, str, sizeof(str), 1);
XML_ParserFree(parser);
return result;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lexpat", "-o", "test"
assert_equal "tag:str|data:Hello, world!|", shell_output("./test")
end
end
expat: update 2.2.2 bottle.
# Homebrew formula: expat 2.2.2 — bottle-update revision (only the bottle
# sha256 values differ from the preceding 2.2.2 formula).
class Expat < Formula
desc "XML 1.0 parser"
homepage "https://libexpat.github.io/"
url "https://downloads.sourceforge.net/project/expat/expat/2.2.2/expat-2.2.2.tar.bz2"
mirror "https://fossies.org/linux/www/expat-2.2.2.tar.bz2"
sha256 "4376911fcf81a23ebd821bbabc26fd933f3ac74833f74924342c29aad2c86046"
head "https://github.com/libexpat/libexpat.git"
bottle do
cellar :any
sha256 "695b20a2db6da52fa0dd6a8a27b625ebbbbde60501d467bfdb2c35c014560f95" => :sierra
sha256 "fc2aeee22d324256fd60e57207a6b25ae73321007ad937b0c4d079c4525872d8" => :el_capitan
sha256 "cdf5f497c262b7ea270e38618ff4c54d7f6da22dc3c97355e977dfcb45223b08" => :yosemite
end
keg_only :provided_by_osx, "macOS includes Expat 1.5"
def install
system "./configure", "--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
# Compile a minimal expat client: parse one element and echo the tag
# name and character data through handler callbacks.
(testpath/"test.c").write <<-EOS.undent
#include <stdio.h>
#include "expat.h"
static void XMLCALL my_StartElementHandler(
void *userdata,
const XML_Char *name,
const XML_Char **atts)
{
printf("tag:%s|", name);
}
static void XMLCALL my_CharacterDataHandler(
void *userdata,
const XML_Char *s,
int len)
{
printf("data:%.*s|", len, s);
}
int main()
{
static const char str[] = "<str>Hello, world!</str>";
int result;
XML_Parser parser = XML_ParserCreate("utf-8");
XML_SetElementHandler(parser, my_StartElementHandler, NULL);
XML_SetCharacterDataHandler(parser, my_CharacterDataHandler);
result = XML_Parse(parser, str, sizeof(str), 1);
XML_ParserFree(parser);
return result;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-lexpat", "-o", "test"
assert_equal "tag:str|data:Hello, world!|", shell_output("./test")
end
end
|
# Homebrew formula: fceux 2.6.1, a NES/Famicom emulator built from a pinned
# git tag+revision with CMake/Qt5; installs a LUA_PATH-setting wrapper.
class Fceux < Formula
desc "All-in-one NES/Famicom Emulator"
homepage "https://fceux.com/"
url "https://github.com/TASEmulators/fceux.git",
tag: "fceux-2.6.1",
revision: "7173d283c3a12f634ad5189c5a90ff495e1d266a"
license "GPL-2.0-only"
head "https://github.com/TASEmulators/fceux.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_monterey: "703f7ec022ed71552e97c4ef5a5360ffce048e67d47fa30afd1f1afa553efff6"
sha256 cellar: :any, arm64_big_sur: "77420fc7beb82bee75341c2f5d3a3dfe345c157d38da236b76f7661240cbc419"
sha256 cellar: :any, big_sur: "f5f782bb0539fbaac000448965e9793700fddeed03f016e5f99b64a4966fd52f"
sha256 cellar: :any, catalina: "362643ca9ed5af946a9ce13a92f92c765c33cca3fbf47e0fcf5d2409c227589f"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "ffmpeg"
depends_on "minizip"
depends_on "qt@5"
depends_on "sdl2"
depends_on "x264"
def install
# PUBLIC_RELEASE suppresses development build markers in stable builds.
ENV["CXXFLAGS"] = "-DPUBLIC_RELEASE=1" if build.stable?
system "cmake", ".", *std_cmake_args
system "make"
# auxlib.lua must sit with the other Lua scripts for LUA_PATH to find it.
cp "src/auxlib.lua", "output/luaScripts"
libexec.install "src/fceux.app/Contents/MacOS/fceux"
pkgshare.install ["output/luaScripts", "output/palettes", "output/tools"]
# Wrapper sets LUA_PATH so bundled Lua scripts resolve at runtime.
(bin/"fceux").write <<~EOS
#!/bin/bash
LUA_PATH=#{pkgshare}/luaScripts/?.lua #{libexec}/fceux "$@"
EOS
end
test do
system "#{bin}/fceux", "--help"
end
end
fceux: revision bump (ffmpeg 5.0)
# Homebrew formula: fceux 2.6.1, revision 1 — rebuild against ffmpeg 5.0;
# adds a Linux gcc dependency and a fails_with gcc: "5" declaration.
class Fceux < Formula
desc "All-in-one NES/Famicom Emulator"
homepage "https://fceux.com/"
url "https://github.com/TASEmulators/fceux.git",
tag: "fceux-2.6.1",
revision: "7173d283c3a12f634ad5189c5a90ff495e1d266a"
license "GPL-2.0-only"
revision 1
head "https://github.com/TASEmulators/fceux.git", branch: "master"
bottle do
# NOTE(review): these bottle sha256 values are byte-identical to the
# pre-revision formula's — they look stale pending a rebuild for _1.
sha256 cellar: :any, arm64_monterey: "703f7ec022ed71552e97c4ef5a5360ffce048e67d47fa30afd1f1afa553efff6"
sha256 cellar: :any, arm64_big_sur: "77420fc7beb82bee75341c2f5d3a3dfe345c157d38da236b76f7661240cbc419"
sha256 cellar: :any, big_sur: "f5f782bb0539fbaac000448965e9793700fddeed03f016e5f99b64a4966fd52f"
sha256 cellar: :any, catalina: "362643ca9ed5af946a9ce13a92f92c765c33cca3fbf47e0fcf5d2409c227589f"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "ffmpeg"
depends_on "minizip"
depends_on "qt@5"
depends_on "sdl2"
depends_on "x264"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
def install
# PUBLIC_RELEASE suppresses development build markers in stable builds.
ENV["CXXFLAGS"] = "-DPUBLIC_RELEASE=1" if build.stable?
system "cmake", ".", *std_cmake_args
system "make"
# auxlib.lua must sit with the other Lua scripts for LUA_PATH to find it.
cp "src/auxlib.lua", "output/luaScripts"
libexec.install "src/fceux.app/Contents/MacOS/fceux"
pkgshare.install ["output/luaScripts", "output/palettes", "output/tools"]
# Wrapper sets LUA_PATH so bundled Lua scripts resolve at runtime.
(bin/"fceux").write <<~EOS
#!/bin/bash
LUA_PATH=#{pkgshare}/luaScripts/?.lua #{libexec}/fceux "$@"
EOS
end
test do
system "#{bin}/fceux", "--help"
end
end
|
# Homebrew formula: fceux 2.6.4, revision 2 — switched to Qt 6 (-DQT6=ON)
# and handles the different binary location on Linux vs macOS.
class Fceux < Formula
desc "All-in-one NES/Famicom Emulator"
homepage "https://fceux.com/"
url "https://github.com/TASEmulators/fceux.git",
tag: "fceux-2.6.4",
revision: "2b8c61802029721229a26592e4578f92efe814fb"
license "GPL-2.0-only"
revision 2
head "https://github.com/TASEmulators/fceux.git", branch: "master"
bottle do
rebuild 1
sha256 cellar: :any, arm64_monterey: "4e0229b41383077bc3ff7a3e9e43b4ebf248ebbb09d38078d89d7ea14d943a40"
sha256 cellar: :any, arm64_big_sur: "92c1e577709978b28cef7875358b6b5b3643f9301c56b5ae9eac98eaefd51bba"
sha256 cellar: :any, monterey: "cf55e27f1976a68608667cb3e5c7968c131e6d9a90e0691152784e1024006c19"
sha256 cellar: :any, big_sur: "e82e9537ee0427af36473a76b0b26198d22e577716c1e3c73ef6be45514c578d"
sha256 cellar: :any, catalina: "ace36678cd5d83047ca0038029e336f1833a80efd5c79ec88c1f02a78c4e1b74"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "ffmpeg"
depends_on "minizip"
depends_on "qt"
depends_on "sdl2"
depends_on "x264"
on_linux do
depends_on "mesa-glu"
end
fails_with gcc: "5"
def install
# PUBLIC_RELEASE suppresses development build markers in stable builds.
ENV["CXXFLAGS"] = "-DPUBLIC_RELEASE=1" if build.stable?
system "cmake", ".", *std_cmake_args, "-DQT6=ON"
system "make"
# auxlib.lua must sit with the other Lua scripts for LUA_PATH to find it.
cp "src/auxlib.lua", "output/luaScripts"
# macOS builds produce an app bundle; Linux builds a bare binary in src.
fceux_path = OS.mac? ? "src/fceux.app/Contents/MacOS" : "src"
libexec.install Pathname.new(fceux_path)/"fceux"
pkgshare.install ["output/luaScripts", "output/palettes", "output/tools"]
# Wrapper sets LUA_PATH so bundled Lua scripts resolve at runtime.
(bin/"fceux").write <<~EOS
#!/bin/bash
LUA_PATH=#{pkgshare}/luaScripts/?.lua #{libexec}/fceux "$@"
EOS
end
test do
# Set QT_QPA_PLATFORM to minimal to avoid error:
# "This application failed to start because no Qt platform plugin could be initialized."
ENV["QT_QPA_PLATFORM"] = "minimal" if OS.linux? && ENV["HOMEBREW_GITHUB_ACTIONS"]
system "#{bin}/fceux", "--help"
end
end
fceux: update 2.6.4_2 bottle.
# Homebrew formula: fceux 2.6.4_2 — bottle-update revision (adds an
# arm64_ventura bottle to the otherwise unchanged 2.6.4 revision-2 formula).
class Fceux < Formula
desc "All-in-one NES/Famicom Emulator"
homepage "https://fceux.com/"
url "https://github.com/TASEmulators/fceux.git",
tag: "fceux-2.6.4",
revision: "2b8c61802029721229a26592e4578f92efe814fb"
license "GPL-2.0-only"
revision 2
head "https://github.com/TASEmulators/fceux.git", branch: "master"
bottle do
rebuild 1
sha256 cellar: :any, arm64_ventura: "756430ce70feb593346978361c819e30c2c73ce4db936af2a0a0a69979907b73"
sha256 cellar: :any, arm64_monterey: "4e0229b41383077bc3ff7a3e9e43b4ebf248ebbb09d38078d89d7ea14d943a40"
sha256 cellar: :any, arm64_big_sur: "92c1e577709978b28cef7875358b6b5b3643f9301c56b5ae9eac98eaefd51bba"
sha256 cellar: :any, monterey: "cf55e27f1976a68608667cb3e5c7968c131e6d9a90e0691152784e1024006c19"
sha256 cellar: :any, big_sur: "e82e9537ee0427af36473a76b0b26198d22e577716c1e3c73ef6be45514c578d"
sha256 cellar: :any, catalina: "ace36678cd5d83047ca0038029e336f1833a80efd5c79ec88c1f02a78c4e1b74"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "ffmpeg"
depends_on "minizip"
depends_on "qt"
depends_on "sdl2"
depends_on "x264"
on_linux do
depends_on "mesa-glu"
end
fails_with gcc: "5"
def install
# PUBLIC_RELEASE suppresses development build markers in stable builds.
ENV["CXXFLAGS"] = "-DPUBLIC_RELEASE=1" if build.stable?
system "cmake", ".", *std_cmake_args, "-DQT6=ON"
system "make"
# auxlib.lua must sit with the other Lua scripts for LUA_PATH to find it.
cp "src/auxlib.lua", "output/luaScripts"
# macOS builds produce an app bundle; Linux builds a bare binary in src.
fceux_path = OS.mac? ? "src/fceux.app/Contents/MacOS" : "src"
libexec.install Pathname.new(fceux_path)/"fceux"
pkgshare.install ["output/luaScripts", "output/palettes", "output/tools"]
# Wrapper sets LUA_PATH so bundled Lua scripts resolve at runtime.
(bin/"fceux").write <<~EOS
#!/bin/bash
LUA_PATH=#{pkgshare}/luaScripts/?.lua #{libexec}/fceux "$@"
EOS
end
test do
# Set QT_QPA_PLATFORM to minimal to avoid error:
# "This application failed to start because no Qt platform plugin could be initialized."
ENV["QT_QPA_PLATFORM"] = "minimal" if OS.linux? && ENV["HOMEBREW_GITHUB_ACTIONS"]
system "#{bin}/fceux", "--help"
end
end
|
require 'formula'
# Homebrew formula (legacy-era DSL): FFmbc, a broadcast-oriented FFmpeg fork.
# Installs command-line tools only to avoid clashing with ffmpeg/libav.
class Ffmbc < Formula
homepage 'http://code.google.com/p/ffmbc/'
url 'https://ffmbc.googlecode.com/files/FFmbc-0.7-rc8.tar.bz2'
sha1 '85a9673ac82a698bb96057fe027222efe6ebae28'
revision 1
bottle do
sha1 "2ec4e61817f6dca744a74f366b9c9d8912fb3d89" => :mavericks
sha1 "f9fd79a535a052862c3695a1525990c6df31e5d4" => :mountain_lion
sha1 "bccbff468429c7af94e8047688b5452184826c22" => :lion
end
option "without-x264", "Disable H.264 encoder"
option "without-lame", "Disable MP3 encoder"
option "without-xvid", "Disable Xvid MPEG-4 video encoder"
# manpages won't be built without texi2html
depends_on 'texi2html' => :build if MacOS.version >= :mountain_lion
depends_on 'yasm' => :build
depends_on 'x264' => :recommended
depends_on 'faac' => :recommended
depends_on 'lame' => :recommended
depends_on 'xvid' => :recommended
depends_on 'freetype' => :optional
depends_on 'theora' => :optional
depends_on 'libvorbis' => :optional
depends_on 'libogg' => :optional
depends_on 'libvpx' => :optional
def install
args = ["--prefix=#{prefix}",
"--disable-debug",
"--disable-shared",
"--enable-gpl",
"--enable-nonfree",
"--cc=#{ENV.cc}"]
# Each optional codec maps to its corresponding ./configure flag.
args << "--enable-libx264" if build.with? 'x264'
args << "--enable-libfaac" if build.with? 'faac'
args << "--enable-libmp3lame" if build.with? 'lame'
args << "--enable-libxvid" if build.with? 'xvid'
args << "--enable-libfreetype" if build.with? 'freetype'
args << "--enable-libtheora" if build.with? 'theora'
args << "--enable-libvorbis" if build.with? 'libvorbis'
args << "--enable-libogg" if build.with? 'libogg'
args << "--enable-libvpx" if build.with? 'libvpx'
system "./configure", *args
system "make"
# ffmbc's lib and bin names conflict with ffmpeg and libav
# This formula will only install the commandline tools
mv "ffprobe", "ffprobe-bc"
bin.install "ffmbc", "ffprobe-bc"
end
def caveats
<<-EOS.undent
Due to naming conflicts with other FFmpeg forks, this formula installs
only static binaries - no shared libraries are built.
The `ffprobe` program has been renamed to `ffprobe-bc` to avoid name
conflicts with the FFmpeg executable of the same name.
EOS
end
test do
system "#{bin}/ffmbc", "-h"
end
end
ffmbc: fix man page generation
Fixes Homebrew/homebrew#34244.
require 'formula'
# Homebrew formula (legacy-era DSL): FFmbc with a man-page-generation patch
# applied from the __END__ DATA section below (upstream texi2pod.pl fix).
class Ffmbc < Formula
homepage 'http://code.google.com/p/ffmbc/'
url 'https://ffmbc.googlecode.com/files/FFmbc-0.7-rc8.tar.bz2'
sha1 '85a9673ac82a698bb96057fe027222efe6ebae28'
revision 1
bottle do
sha1 "2ec4e61817f6dca744a74f366b9c9d8912fb3d89" => :mavericks
sha1 "f9fd79a535a052862c3695a1525990c6df31e5d4" => :mountain_lion
sha1 "bccbff468429c7af94e8047688b5452184826c22" => :lion
end
option "without-x264", "Disable H.264 encoder"
option "without-lame", "Disable MP3 encoder"
option "without-xvid", "Disable Xvid MPEG-4 video encoder"
# manpages won't be built without texi2html
depends_on 'texi2html' => :build if MacOS.version >= :mountain_lion
depends_on 'yasm' => :build
depends_on 'x264' => :recommended
depends_on 'faac' => :recommended
depends_on 'lame' => :recommended
depends_on 'xvid' => :recommended
depends_on 'freetype' => :optional
depends_on 'theora' => :optional
depends_on 'libvorbis' => :optional
depends_on 'libogg' => :optional
depends_on 'libvpx' => :optional
patch :DATA # fix man page generation, fixed in upstream ffmpeg
def install
args = ["--prefix=#{prefix}",
"--disable-debug",
"--disable-shared",
"--enable-gpl",
"--enable-nonfree",
"--cc=#{ENV.cc}"]
# Each optional codec maps to its corresponding ./configure flag.
args << "--enable-libx264" if build.with? 'x264'
args << "--enable-libfaac" if build.with? 'faac'
args << "--enable-libmp3lame" if build.with? 'lame'
args << "--enable-libxvid" if build.with? 'xvid'
args << "--enable-libfreetype" if build.with? 'freetype'
args << "--enable-libtheora" if build.with? 'theora'
args << "--enable-libvorbis" if build.with? 'libvorbis'
args << "--enable-libogg" if build.with? 'libogg'
args << "--enable-libvpx" if build.with? 'libvpx'
system "./configure", *args
system "make"
# ffmbc's lib and bin names conflict with ffmpeg and libav
# This formula will only install the commandline tools
mv "ffprobe", "ffprobe-bc"
bin.install "ffmbc", "ffprobe-bc"
end
def caveats
<<-EOS.undent
Due to naming conflicts with other FFmpeg forks, this formula installs
only static binaries - no shared libraries are built.
The `ffprobe` program has been renamed to `ffprobe-bc` to avoid name
conflicts with the FFmpeg executable of the same name.
EOS
end
test do
system "#{bin}/ffmbc", "-h"
end
end
__END__
diff --git a/doc/texi2pod.pl b/doc/texi2pod.pl
index 18531be..88b0a3f 100755
--- a/doc/texi2pod.pl
+++ b/doc/texi2pod.pl
@@ -297,6 +297,8 @@ $inf = pop @instack;
die "No filename or title\n" unless defined $fn && defined $tl;
+print "=encoding utf8\n\n";
+
$sects{NAME} = "$fn \- $tl\n";
$sects{FOOTNOTES} .= "=back\n" if exists $sects{FOOTNOTES};
|
# Homebrew formula: ffms2, an FFmpeg-based frame-accurate source library.
class Ffms2 < Formula
desc "Libav/ffmpeg based source library and Avisynth plugin"
homepage "https://github.com/FFMS/ffms2"
url "https://github.com/FFMS/ffms2/archive/2.23.tar.gz"
mirror "https://deb.debian.org/debian/pool/main/f/ffms2/ffms2_2.23.orig.tar.gz"
sha256 "b09b2aa2b1c6f87f94a0a0dd8284b3c791cbe77f0f3df57af99ddebcd15273ed"
revision 3
bottle do
cellar :any
sha256 "09986c3c0185b4947c078a755d52117b309b82fb271ced093f7a29c97a1d5c4b" => :catalina
sha256 "52e80b7671459161fcd103e4330e2641fe3701560dc08ddb924b562066ae465a" => :mojave
sha256 "a32ddc2efaf4ce050c2d3decc2bba75d94d40d26f11460a706b57eadb1ecea4f" => :high_sierra
end
# HEAD builds come from git and need autotools to generate ./configure.
head do
url "https://github.com/FFMS/ffms2.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
depends_on "pkg-config" => :build
depends_on "ffmpeg"
# Small AVI sample used by the test block to exercise ffmsindex.
resource "videosample" do
url "https://samples.mplayerhq.hu/V-codecs/lm20.avi"
sha256 "a0ab512c66d276fd3932aacdd6073f9734c7e246c8747c48bf5d9dd34ac8b392"
end
def install
# For Mountain Lion
ENV.libcxx
args = %W[
--disable-debug
--disable-dependency-tracking
--enable-avresample
--prefix=#{prefix}
]
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
system "make", "install"
end
test do
# download small sample and check that the index was created
resource("videosample").stage do
system bin/"ffmsindex", "lm20.avi"
assert_predicate Pathname.pwd/"lm20.avi.ffindex", :exist?
end
end
end
ffms2: revision for x265
# Homebrew formula: ffms2 2.23, revision 4 (rebuilt against updated x265).
class Ffms2 < Formula
desc "Libav/ffmpeg based source library and Avisynth plugin"
homepage "https://github.com/FFMS/ffms2"
url "https://github.com/FFMS/ffms2/archive/2.23.tar.gz"
mirror "https://deb.debian.org/debian/pool/main/f/ffms2/ffms2_2.23.orig.tar.gz"
sha256 "b09b2aa2b1c6f87f94a0a0dd8284b3c791cbe77f0f3df57af99ddebcd15273ed"
revision 4
bottle do
cellar :any
sha256 "09986c3c0185b4947c078a755d52117b309b82fb271ced093f7a29c97a1d5c4b" => :catalina
sha256 "52e80b7671459161fcd103e4330e2641fe3701560dc08ddb924b562066ae465a" => :mojave
sha256 "a32ddc2efaf4ce050c2d3decc2bba75d94d40d26f11460a706b57eadb1ecea4f" => :high_sierra
end
# HEAD builds come from git and need autotools to generate ./configure.
head do
url "https://github.com/FFMS/ffms2.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
depends_on "pkg-config" => :build
depends_on "ffmpeg"
# Small AVI sample used by the test block to exercise ffmsindex.
resource "videosample" do
url "https://samples.mplayerhq.hu/V-codecs/lm20.avi"
sha256 "a0ab512c66d276fd3932aacdd6073f9734c7e246c8747c48bf5d9dd34ac8b392"
end
def install
# For Mountain Lion
ENV.libcxx
args = %W[
--disable-debug
--disable-dependency-tracking
--enable-avresample
--prefix=#{prefix}
]
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
system "make", "install"
end
test do
# download small sample and check that the index was created
resource("videosample").stage do
system bin/"ffmsindex", "lm20.avi"
assert_predicate Pathname.pwd/"lm20.avi.ffindex", :exist?
end
end
end
|
# Homebrew formula: ffms2 2.23, revision 4 (bottles not yet rebuilt here).
class Ffms2 < Formula
desc "Libav/ffmpeg based source library and Avisynth plugin"
homepage "https://github.com/FFMS/ffms2"
url "https://github.com/FFMS/ffms2/archive/2.23.tar.gz"
mirror "https://deb.debian.org/debian/pool/main/f/ffms2/ffms2_2.23.orig.tar.gz"
sha256 "b09b2aa2b1c6f87f94a0a0dd8284b3c791cbe77f0f3df57af99ddebcd15273ed"
revision 4
bottle do
cellar :any
sha256 "09986c3c0185b4947c078a755d52117b309b82fb271ced093f7a29c97a1d5c4b" => :catalina
sha256 "52e80b7671459161fcd103e4330e2641fe3701560dc08ddb924b562066ae465a" => :mojave
sha256 "a32ddc2efaf4ce050c2d3decc2bba75d94d40d26f11460a706b57eadb1ecea4f" => :high_sierra
end
# HEAD builds come from git and need autotools to generate ./configure.
head do
url "https://github.com/FFMS/ffms2.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
depends_on "pkg-config" => :build
depends_on "ffmpeg"
# Small AVI sample used by the test block to exercise ffmsindex.
resource "videosample" do
url "https://samples.mplayerhq.hu/V-codecs/lm20.avi"
sha256 "a0ab512c66d276fd3932aacdd6073f9734c7e246c8747c48bf5d9dd34ac8b392"
end
def install
# For Mountain Lion
ENV.libcxx
args = %W[
--disable-debug
--disable-dependency-tracking
--enable-avresample
--prefix=#{prefix}
]
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
system "make", "install"
end
test do
# download small sample and check that the index was created
resource("videosample").stage do
system bin/"ffmsindex", "lm20.avi"
assert_predicate Pathname.pwd/"lm20.avi.ffindex", :exist?
end
end
end
ffms2: update 2.23_4 bottle.
# Homebrew formula: ffms2 2.23, revision 4 with rebuilt bottle checksums.
class Ffms2 < Formula
desc "Libav/ffmpeg based source library and Avisynth plugin"
homepage "https://github.com/FFMS/ffms2"
url "https://github.com/FFMS/ffms2/archive/2.23.tar.gz"
mirror "https://deb.debian.org/debian/pool/main/f/ffms2/ffms2_2.23.orig.tar.gz"
sha256 "b09b2aa2b1c6f87f94a0a0dd8284b3c791cbe77f0f3df57af99ddebcd15273ed"
revision 4
bottle do
cellar :any
sha256 "b6495a6e71b67427d075abbf334d41179593fd1576ab230f7a7da1f02f329500" => :catalina
sha256 "4e445388ec5eadeec544cc4f6dc119bd2c321194c1d7628ca61413d9ebdbe749" => :mojave
sha256 "8ff0f417a1455cc0c6f823ebb916c3be18f0a4cf8edecfda6970351060c07665" => :high_sierra
end
# HEAD builds come from git and need autotools to generate ./configure.
head do
url "https://github.com/FFMS/ffms2.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
end
depends_on "pkg-config" => :build
depends_on "ffmpeg"
# Small AVI sample used by the test block to exercise ffmsindex.
resource "videosample" do
url "https://samples.mplayerhq.hu/V-codecs/lm20.avi"
sha256 "a0ab512c66d276fd3932aacdd6073f9734c7e246c8747c48bf5d9dd34ac8b392"
end
def install
# For Mountain Lion
ENV.libcxx
args = %W[
--disable-debug
--disable-dependency-tracking
--enable-avresample
--prefix=#{prefix}
]
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
system "make", "install"
end
test do
# download small sample and check that the index was created
resource("videosample").stage do
system bin/"ffmsindex", "lm20.avi"
assert_predicate Pathname.pwd/"lm20.avi.ffindex", :exist?
end
end
end
|
# Homebrew formula: fibjs 0.34.0, a fiber-based JavaScript runtime.
class Fibjs < Formula
desc "JavaScript on Fiber"
homepage "https://fibjs.org/"
url "https://github.com/fibjs/fibjs/releases/download/v0.34.0/fullsrc.zip"
sha256 "57ff82526307274a59cf5d373f57d2aa7690e5b3e4c31a916de4f048fd84bf04"
license "GPL-3.0-only"
head "https://github.com/fibjs/fibjs.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "e5a997e76a3fa661cbdf75cf61e3e53e093451c2c5e9a1e55a16e6b8742fb55d"
sha256 cellar: :any_skip_relocation, catalina: "a93fbe33a0f6feca011ea1f21f69f2d5d72be12cc4b2ca11f217e53ad1a09c72"
sha256 cellar: :any_skip_relocation, mojave: "bef936d8399cb135a7073b6983ee41b0fb41e967c893a92db785f5d319ea453e"
end
depends_on "cmake" => :build
# LLVM is added as a test dependency to work around limitation in Homebrew's
# test compiler selection when using fails_with. Can remove :test when fixed.
# Issue ref: https://github.com/Homebrew/brew/issues/11795
uses_from_macos "llvm" => [:build, :test]
on_linux do
depends_on "libx11"
end
# https://github.com/fibjs/fibjs/blob/master/BUILDING.md
fails_with :gcc do
cause "Upstream does not support gcc."
end
def install
# help find X11 headers: fatal error: 'X11/Xlib.h' file not found
on_linux { ENV.append "CXXFLAGS", "-I#{HOMEBREW_PREFIX}/include" }
# the build script breaks when CI is set by Homebrew
with_env(CI: nil) do
system "./build", "clean"
system "./build", "release", "-j#{ENV.make_jobs}"
end
# Upstream's build script writes the binary into an OS-specific directory.
os = OS.mac? ? "Darwin" : "Linux"
bin.install "bin/#{os}_amd64_release/fibjs"
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
output = shell_output("#{bin}/fibjs #{path}").strip
assert_equal "hello", output
end
end
fibjs: update 0.34.0 bottle.
# Homebrew formula: fibjs 0.34.0 with updated bottles (adds x86_64_linux).
class Fibjs < Formula
desc "JavaScript on Fiber"
homepage "https://fibjs.org/"
url "https://github.com/fibjs/fibjs/releases/download/v0.34.0/fullsrc.zip"
sha256 "57ff82526307274a59cf5d373f57d2aa7690e5b3e4c31a916de4f048fd84bf04"
license "GPL-3.0-only"
head "https://github.com/fibjs/fibjs.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "5b94c5a2291ada6c8987b23357ea3c0be4306d2ead6caca2b236a73643947d7f"
sha256 cellar: :any_skip_relocation, catalina: "74a446f80b494dee49981e4fa0dc68fc1653d81ba09dbf316e5bde3100360030"
sha256 cellar: :any_skip_relocation, mojave: "0489b047454da54566d071fddfc011c91ca8c44620631a1aebe359c887fb65fb"
sha256 cellar: :any_skip_relocation, x86_64_linux: "7992458fb9fc9ab2aab41cdf12f7f68896c21202c9499d9a0900b32d1dfc7dd0"
end
depends_on "cmake" => :build
# LLVM is added as a test dependency to work around limitation in Homebrew's
# test compiler selection when using fails_with. Can remove :test when fixed.
# Issue ref: https://github.com/Homebrew/brew/issues/11795
uses_from_macos "llvm" => [:build, :test]
on_linux do
depends_on "libx11"
end
# https://github.com/fibjs/fibjs/blob/master/BUILDING.md
fails_with :gcc do
cause "Upstream does not support gcc."
end
def install
# help find X11 headers: fatal error: 'X11/Xlib.h' file not found
on_linux { ENV.append "CXXFLAGS", "-I#{HOMEBREW_PREFIX}/include" }
# the build script breaks when CI is set by Homebrew
with_env(CI: nil) do
system "./build", "clean"
system "./build", "release", "-j#{ENV.make_jobs}"
end
# Upstream's build script writes the binary into an OS-specific directory.
os = OS.mac? ? "Darwin" : "Linux"
bin.install "bin/#{os}_amd64_release/fibjs"
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
output = shell_output("#{bin}/fibjs #{path}").strip
assert_equal "hello", output
end
end
|
# Homebrew formula (legacy-era DSL): fibjs 0.1.7, macOS-only build.
class Fibjs < Formula
desc "JavaScript on Fiber"
homepage "http://fibjs.org"
url "https://github.com/xicilion/fibjs/releases/download/v0.1.7/fibjs-0.1.7-fullsrc.zip"
sha256 "92dfef31422c848816f992bd651f029837d4788c66acb83c8b93a084dec257ae"
head "https://github.com/xicilion/fibjs.git"
bottle do
cellar :any_skip_relocation
revision 1
sha256 "c72b7627b6e3a8bca4219abd2e3f2d41261815f5df2a1a4d4386077a434c7aa9" => :el_capitan
sha256 "40878d250cee1260b1262c124e1736d2a4c1f9526edcf7e707402e92c1965faa" => :yosemite
sha256 "67a6f871801208598b647160af3779525fa366f8bfb47d663972a43091ebc881" => :mavericks
end
depends_on "cmake" => :build
def install
system "./build", "Release", "-j#{ENV.make_jobs}"
bin.install "bin/Darwin_Release/fibjs"
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
# Backtick capture; $? is checked afterwards to assert a clean exit status.
output = `#{bin}/fibjs #{path}`.strip
assert_equal "hello", output
assert_equal 0, $?.exitstatus
end
end
fibjs 0.1.8
Closes Homebrew/homebrew#46421.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
# Homebrew formula (legacy-era DSL): fibjs 0.1.8; bottle shas carried over
# from 0.1.7 pending rebuild.
class Fibjs < Formula
desc "JavaScript on Fiber"
homepage "http://fibjs.org"
url "https://github.com/xicilion/fibjs/releases/download/v0.1.8/fibjs-0.1.8-fullsrc.zip"
sha256 "42ee9fa64a4259d88d648b8e5538226b2158a2d3546468b14a14b81c10ee8ed0"
head "https://github.com/xicilion/fibjs.git"
bottle do
cellar :any_skip_relocation
revision 1
sha256 "c72b7627b6e3a8bca4219abd2e3f2d41261815f5df2a1a4d4386077a434c7aa9" => :el_capitan
sha256 "40878d250cee1260b1262c124e1736d2a4c1f9526edcf7e707402e92c1965faa" => :yosemite
sha256 "67a6f871801208598b647160af3779525fa366f8bfb47d663972a43091ebc881" => :mavericks
end
depends_on "cmake" => :build
def install
system "./build", "Release", "-j#{ENV.make_jobs}"
bin.install "bin/Darwin_Release/fibjs"
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
# Backtick capture; $? is checked afterwards to assert a clean exit status.
output = `#{bin}/fibjs #{path}`.strip
assert_equal "hello", output
assert_equal 0, $?.exitstatus
end
end
|
# Homebrew formula: fizsh, a fish-like front end layered on top of zsh.
class Fizsh < Formula
desc "Fish-like front end for ZSH"
homepage "https://github.com/zsh-users/fizsh"
url "https://downloads.sourceforge.net/project/fizsh/fizsh-1.0.9.tar.gz"
sha256 "dbbbe03101f82e62f1dfe1f8af7cde23bc043833679bc74601a0a3d58a117b07"
license "BSD-3-Clause"
# Ruby 1.9+ hash syntax (`using:` instead of `:using =>`) per the community
# style guide and `brew audit`/RuboCop Style/HashSyntax.
head "https://github.com/zsh-users/fizsh", using: :git
bottle do
cellar :any_skip_relocation
sha256 "c1cb674d5a7219ea4c41f3becd0997cf55dfd26a06a17d739f14a3d59da7df75" => :catalina
sha256 "02457429b5257b916207bc7f46acd5577f8e01183437ef03b594991ba3e69466" => :mojave
sha256 "7916e571aaf891561a5a6be1ef9708e63ee17ecb41fe60b75129c765d3dad1cb" => :high_sierra
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :sierra
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :el_capitan
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :yosemite
sha256 "d25f3467f58d11b38efac3595c9e9244fcd558823d2d2e64a0a6744c8330099a" => :x86_64_linux # glibc 2.19
end
depends_on "zsh"
def install
# Standard autotools build into the formula prefix.
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
# Run a trivial command through fizsh and check its output.
assert_equal "hello", shell_output("#{bin}/fizsh -c \"echo hello\"").strip
end
end
fizsh: fix RuboCop style.
See https://github.com/Homebrew/brew/pull/7867.
# Homebrew formula: fizsh, a fish-like front end layered on top of zsh
# (RuboCop-style hash syntax applied to the head URL options).
class Fizsh < Formula
desc "Fish-like front end for ZSH"
homepage "https://github.com/zsh-users/fizsh"
url "https://downloads.sourceforge.net/project/fizsh/fizsh-1.0.9.tar.gz"
sha256 "dbbbe03101f82e62f1dfe1f8af7cde23bc043833679bc74601a0a3d58a117b07"
license "BSD-3-Clause"
head "https://github.com/zsh-users/fizsh", using: :git
bottle do
cellar :any_skip_relocation
sha256 "c1cb674d5a7219ea4c41f3becd0997cf55dfd26a06a17d739f14a3d59da7df75" => :catalina
sha256 "02457429b5257b916207bc7f46acd5577f8e01183437ef03b594991ba3e69466" => :mojave
sha256 "7916e571aaf891561a5a6be1ef9708e63ee17ecb41fe60b75129c765d3dad1cb" => :high_sierra
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :sierra
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :el_capitan
sha256 "334ceff9d649e87f87be9d3f561ee38221f8c87712a3b506b145191dc51fc4bd" => :yosemite
sha256 "d25f3467f58d11b38efac3595c9e9244fcd558823d2d2e64a0a6744c8330099a" => :x86_64_linux # glibc 2.19
end
depends_on "zsh"
def install
# Standard autotools build into the formula prefix.
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
# Run a trivial command through fizsh and check its output.
assert_equal "hello", shell_output("#{bin}/fizsh -c \"echo hello\"").strip
end
end
|
# Homebrew formula: FoBiS.py 2.9.3, installed into a Python virtualenv.
class Fobis < Formula
include Language::Python::Virtualenv
# Typo fixed: "automaticaly" -> "automatically".
desc "KISS build tool for automatically building modern Fortran projects"
homepage "https://github.com/szaghi/FoBiS"
url "https://files.pythonhosted.org/packages/2f/7e/dd1bf258ea12f28b38f7416fec75792307bb624a939e255eec261e01fa89/FoBiS.py-2.9.3.tar.gz"
sha256 "ea3d064039fb08f690e86b66dbb12616a41304eaaf6caa2fa9f84b71bb27bdbf"
revision 1
bottle do
cellar :any_skip_relocation
sha256 "a2c2ad4f9320ea0017b34356fc36d23a2b3f87d39e5fd44e0c9918b152144d89" => :mojave
sha256 "34f1802723d27f435fd2ed72fceb75fbda38d350efc227753d05a53f2c876b7a" => :high_sierra
sha256 "38cda93369dddfd8f52f43271f4d0eea5b78506a4b74265a16ec5e09c60ca532" => :sierra
end
depends_on "gcc" # for gfortran
depends_on "graphviz"
depends_on "python"
# Python dependencies vendored into the virtualenv.
resource "pygooglechart" do
url "https://files.pythonhosted.org/packages/95/88/54f91552de1e1b0085c02b96671acfba6e351915de3a12a398533fc82e20/pygooglechart-0.4.0.tar.gz"
sha256 "018d4dd800eea8e0e42a4b3af2a3d5d6b2a2b39e366071b7f270e9628b5f6454"
end
resource "graphviz" do
url "https://files.pythonhosted.org/packages/fa/d1/63b62dee9e55368f60b5ea445e6afb361bb47e692fc27553f3672e16efb8/graphviz-0.8.2.zip"
sha256 "606741c028acc54b1a065b33045f8c89ee0927ea77273ec409ac988f2c3d1091"
end
def install
venv = virtualenv_create(libexec, "python3")
venv.pip_install "pygooglechart"
venv.pip_install "graphviz"
venv.pip_install_and_link buildpath
end
test do
# Build a two-file Fortran project with gfortran and run the result.
(testpath/"test-mod.f90").write <<~EOS
module fobis_test_m
implicit none
character(*), parameter :: message = "Hello FoBiS"
end module
EOS
(testpath/"test-prog.f90").write <<~EOS
program fobis_test
use iso_fortran_env, only: stdout => output_unit
use fobis_test_m, only: message
implicit none
write(stdout,'(A)') message
end program
EOS
system "#{bin}/FoBiS.py", "build", "-compiler", "gnu"
# String matcher avoids the "ambiguous first argument" warning raised by a
# bare regexp literal passed without parentheses (assert_match /.../, ...).
assert_match "Hello FoBiS", shell_output(testpath/"test-prog")
end
end
fobis 2.9.5
Closes #40295.
Signed-off-by: Izaak Beekman <63a262980226ea55b5aaaf5a7502ec30d6edcc7a@gmail.com>
# Homebrew formula: FoBiS.py 2.9.5, installed into a Python virtualenv.
class Fobis < Formula
include Language::Python::Virtualenv
# Typo fixed: "automaticaly" -> "automatically".
desc "KISS build tool for automatically building modern Fortran projects"
homepage "https://github.com/szaghi/FoBiS"
url "https://files.pythonhosted.org/packages/7a/49/9ccbc08da74f0c37901b07e00aa8e6419895c45723b80119994d89a72eec/FoBiS.py-2.9.5.tar.gz"
sha256 "0f27bad2c662d2df666ede8bfdd9b1f3fb41e293cb7008da388c52efef060335"
bottle do
cellar :any_skip_relocation
sha256 "a2c2ad4f9320ea0017b34356fc36d23a2b3f87d39e5fd44e0c9918b152144d89" => :mojave
sha256 "34f1802723d27f435fd2ed72fceb75fbda38d350efc227753d05a53f2c876b7a" => :high_sierra
sha256 "38cda93369dddfd8f52f43271f4d0eea5b78506a4b74265a16ec5e09c60ca532" => :sierra
end
depends_on "gcc" # for gfortran
depends_on "graphviz"
depends_on "python"
# Python dependencies vendored into the virtualenv.
resource "pygooglechart" do
url "https://files.pythonhosted.org/packages/95/88/54f91552de1e1b0085c02b96671acfba6e351915de3a12a398533fc82e20/pygooglechart-0.4.0.tar.gz"
sha256 "018d4dd800eea8e0e42a4b3af2a3d5d6b2a2b39e366071b7f270e9628b5f6454"
end
resource "graphviz" do
url "https://files.pythonhosted.org/packages/fa/d1/63b62dee9e55368f60b5ea445e6afb361bb47e692fc27553f3672e16efb8/graphviz-0.8.2.zip"
sha256 "606741c028acc54b1a065b33045f8c89ee0927ea77273ec409ac988f2c3d1091"
end
def install
venv = virtualenv_create(libexec, "python3")
venv.pip_install "pygooglechart"
venv.pip_install "graphviz"
venv.pip_install_and_link buildpath
end
test do
# Build a two-file Fortran project with gfortran and run the result.
(testpath/"test-mod.f90").write <<~EOS
module fobis_test_m
implicit none
character(*), parameter :: message = "Hello FoBiS"
end module
EOS
(testpath/"test-prog.f90").write <<~EOS
program fobis_test
use iso_fortran_env, only: stdout => output_unit
use fobis_test_m, only: message
implicit none
write(stdout,'(A)') message
end program
EOS
system "#{bin}/FoBiS.py", "build", "-compiler", "gnu"
# String matcher avoids the "ambiguous first argument" warning raised by a
# bare regexp literal passed without parentheses (assert_match /.../, ...).
assert_match "Hello FoBiS", shell_output(testpath/"test-prog")
end
end
|
# Homebrew formula: folly v2017.10.16.00, autotools-based build.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2017.10.16.00.tar.gz"
sha256 "725d98bae22c2930231773909ddd87063fe4b4dd822d65422c19bd028c2f6362"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "912aaf99d0e8450a02030fe504ce830ba30e870576cb9f7ad93347c9854cbd15" => :high_sierra
sha256 "936276693cf96e289af8cb2483e515be8e94e009671613fed834fc8d432c2ee5" => :sierra
sha256 "017ab3164a932fae4b64d887f1c82154257df1642cb1ca29e879a121e8088119" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "double-conversion"
depends_on "glog"
depends_on "gflags"
depends_on "boost"
depends_on "libevent"
depends_on "xz"
depends_on "snappy"
depends_on "lz4"
depends_on "openssl"
# https://github.com/facebook/folly/issues/451
depends_on :macos => :el_capitan
needs :cxx11
# Known issue upstream. They're working on it:
# https://github.com/facebook/folly/pull/445
fails_with :gcc => "6"
def install
ENV.cxx11
# The autotools build lives in the `folly` subdirectory of the tarball.
cd "folly" do
system "autoreconf", "-fvi"
system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
"--disable-dependency-tracking"
system "make"
system "make", "install"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++11", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
folly 2017.10.23.00
Closes #19757.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula: folly v2017.10.23.00; bottle shas carried over from the
# previous release pending rebuild.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2017.10.23.00.tar.gz"
sha256 "69185a8985082e1f56ba706947cdb0bf3bfe1e65d00a854302520d0ddb51c869"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "912aaf99d0e8450a02030fe504ce830ba30e870576cb9f7ad93347c9854cbd15" => :high_sierra
sha256 "936276693cf96e289af8cb2483e515be8e94e009671613fed834fc8d432c2ee5" => :sierra
sha256 "017ab3164a932fae4b64d887f1c82154257df1642cb1ca29e879a121e8088119" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "double-conversion"
depends_on "glog"
depends_on "gflags"
depends_on "boost"
depends_on "libevent"
depends_on "xz"
depends_on "snappy"
depends_on "lz4"
depends_on "openssl"
# https://github.com/facebook/folly/issues/451
depends_on :macos => :el_capitan
needs :cxx11
# Known issue upstream. They're working on it:
# https://github.com/facebook/folly/pull/445
fails_with :gcc => "6"
def install
ENV.cxx11
# The autotools build lives in the `folly` subdirectory of the tarball.
cd "folly" do
system "autoreconf", "-fvi"
system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
"--disable-dependency-tracking"
system "make"
system "make", "install"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++11", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
|
# Homebrew formula: folly v2017.09.18.00, autotools-based build
# (uses the legacy <<-EOS.undent heredoc style of this era).
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2017.09.18.00.tar.gz"
sha256 "588b9bf141b91387e45b5a4a86ecf1ddd2c35078a4ce2b50df6dfcefe407313e"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "d7ed7d7defc5f5b4888733c77d2a7324f658418beaa5695fefab50a8eab8e29c" => :sierra
sha256 "abfd0aa06ddd2d766d0eb948dd590d3a5197161cc93544f296f90e882e1c54e0" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "double-conversion"
depends_on "glog"
depends_on "gflags"
depends_on "boost"
depends_on "libevent"
depends_on "xz"
depends_on "snappy"
depends_on "lz4"
depends_on "openssl"
# https://github.com/facebook/folly/issues/451
depends_on :macos => :el_capitan
needs :cxx11
# Known issue upstream. They're working on it:
# https://github.com/facebook/folly/pull/445
fails_with :gcc => "6"
def install
ENV.cxx11
# The autotools build lives in the `folly` subdirectory of the tarball.
cd "folly" do
system "autoreconf", "-fvi"
system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
"--disable-dependency-tracking"
system "make"
system "make", "install"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<-EOS.undent
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++11", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
folly: update 2017.09.18.00 bottle.
# Homebrew formula: folly v2017.09.18.00 with rebuilt bottle checksums.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2017.09.18.00.tar.gz"
sha256 "588b9bf141b91387e45b5a4a86ecf1ddd2c35078a4ce2b50df6dfcefe407313e"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "ac9e0747a7faa53d99b31c4dea839be3c3280548e3b6e6563a55738591a14ec0" => :sierra
sha256 "3831309e570852c498e5fcbd94d088de0f817c1b9d0fbd9a1c7d8f4ce56490bc" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "double-conversion"
depends_on "glog"
depends_on "gflags"
depends_on "boost"
depends_on "libevent"
depends_on "xz"
depends_on "snappy"
depends_on "lz4"
depends_on "openssl"
# https://github.com/facebook/folly/issues/451
depends_on :macos => :el_capitan
needs :cxx11
# Known issue upstream. They're working on it:
# https://github.com/facebook/folly/pull/445
fails_with :gcc => "6"
def install
ENV.cxx11
# The autotools build lives in the `folly` subdirectory of the tarball.
cd "folly" do
system "autoreconf", "-fvi"
system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
"--disable-dependency-tracking"
system "make"
system "make", "install"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<-EOS.undent
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++11", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
|
# Homebrew formula: folly v2020.12.14.00, CMake-based build that installs
# both shared and static libraries.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2020.12.14.00.tar.gz"
sha256 "a3590caac887575bc0a936ae2380126c0edf754c9bdaef7ed84316f68665c791"
license "Apache-2.0"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "caf7341407834c3906042837ed6b9f9c8dd188a9b99e27dc4e3433dfbc9de7a6" => :big_sur
sha256 "e06eac57d1193bd42d1a0cd24803596b1deda9e906bd70b8684593e0e0166511" => :arm64_big_sur
sha256 "9a87ddf555a3fc95c0431edd9f2afb3be37ab297c5cfef9a79519fbfd5eab190" => :catalina
sha256 "64cad7b489cb4bc0822cbd639cb1e7b2b5a15f8ee5fb7c6895946cdc9761f10a" => :mojave
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "double-conversion"
depends_on "fmt"
depends_on "gflags"
depends_on "glog"
depends_on "libevent"
depends_on "lz4"
# https://github.com/facebook/folly/issues/966
depends_on macos: :high_sierra
depends_on "openssl@1.1"
depends_on "snappy"
depends_on "xz"
depends_on "zstd"
def install
mkdir "_build" do
args = std_cmake_args + %w[
-DFOLLY_USE_JEMALLOC=OFF
]
# Build twice: shared libraries are installed via `make install`, then the
# tree is reconfigured statically so the .a archives can be installed too.
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=ON"
system "make"
system "make", "install"
system "make", "clean"
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=OFF"
system "make"
lib.install "libfolly.a", "folly/libfollybenchmark.a"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
folly 2021.01.18.01
# Homebrew formula: folly v2021.01.18.01, CMake-based build; bottle shas
# carried over from the previous release pending rebuild.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2021.01.18.01.tar.gz"
sha256 "5f69ed08e13ffb849526eff26118fea3f8f075e29951b1530081c45b63327ee4"
license "Apache-2.0"
head "https://github.com/facebook/folly.git"
bottle do
cellar :any
sha256 "caf7341407834c3906042837ed6b9f9c8dd188a9b99e27dc4e3433dfbc9de7a6" => :big_sur
sha256 "e06eac57d1193bd42d1a0cd24803596b1deda9e906bd70b8684593e0e0166511" => :arm64_big_sur
sha256 "9a87ddf555a3fc95c0431edd9f2afb3be37ab297c5cfef9a79519fbfd5eab190" => :catalina
sha256 "64cad7b489cb4bc0822cbd639cb1e7b2b5a15f8ee5fb7c6895946cdc9761f10a" => :mojave
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "double-conversion"
depends_on "fmt"
depends_on "gflags"
depends_on "glog"
depends_on "libevent"
depends_on "lz4"
# https://github.com/facebook/folly/issues/966
depends_on macos: :high_sierra
depends_on "openssl@1.1"
depends_on "snappy"
depends_on "xz"
depends_on "zstd"
def install
mkdir "_build" do
args = std_cmake_args + %w[
-DFOLLY_USE_JEMALLOC=OFF
]
# Build twice: shared libraries are installed via `make install`, then the
# tree is reconfigured statically so the .a archives can be installed too.
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=ON"
system "make"
system "make", "install"
system "make", "clean"
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=OFF"
system "make"
lib.install "libfolly.a", "folly/libfollybenchmark.a"
end
end
test do
# Compile and run a small program against folly's fbvector.
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
|
# Homebrew formula for Folly v2022.02.14.00. Compared with earlier revisions,
# the hard macOS floor is replaced by a conditional LLVM dependency plus
# `fails_with` declarations for old Clang/GCC toolchains.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2022.02.14.00.tar.gz"
sha256 "6ec28672a524e1d9d89cdb528d64362b939b4f0afc526767d93aa1ebb2639fc1"
license "Apache-2.0"
head "https://github.com/facebook/folly.git", branch: "main"
bottle do
sha256 cellar: :any,                 arm64_monterey: "296dd25f8bc82c034b44d6e312b48c0395ad4cd091f0a4aa9035699377114039"
sha256 cellar: :any,                 arm64_big_sur:  "a7d321f22a73a9ea0f73d971c96f646695fcbac3c56ff954312d668ab64f25fe"
sha256 cellar: :any,                 monterey:       "48d2b6afc5d1a74af4d26002c6627e905f3cae06ea166e0f9401d68932ac7397"
sha256 cellar: :any,                 big_sur:        "b22b6ef90b85d3d508ef921506630088d7b7ab53a05c9816c4e5816c8a2e3f85"
sha256 cellar: :any,                 catalina:       "34b08153bc0d6261879a83bd640f58bcaa11c0231ea51bd78feb14c3d6ab8b97"
sha256 cellar: :any_skip_relocation, x86_64_linux:   "3e5174aac08d90a97296997b91e70cff1c5c1827ba3334b2473b30ee760e9480"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "double-conversion"
depends_on "fmt"
depends_on "gflags"
depends_on "glog"
depends_on "libevent"
depends_on "lz4"
depends_on "openssl@1.1"
depends_on "snappy"
depends_on "xz"
depends_on "zstd"
# Older Apple Clang (build <= 1100) cannot build this release; pull in
# Homebrew LLVM instead on those systems.
on_macos do
depends_on "llvm" if DevelopmentTools.clang_build_version <= 1100
end
on_linux do
depends_on "gcc"
end
fails_with :clang do
build 1100
# https://github.com/facebook/folly/issues/1545
cause <<-EOS
Undefined symbols for architecture x86_64:
"std::__1::__fs::filesystem::path::lexically_normal() const"
EOS
end
fails_with gcc: "5"
# Two-pass build (shared then static); jemalloc disabled.
def install
# Switch to Homebrew LLVM's clang when the system compiler is too old.
ENV.llvm_clang if OS.mac? && (DevelopmentTools.clang_build_version <= 1100)
mkdir "_build" do
args = std_cmake_args + %w[
-DFOLLY_USE_JEMALLOC=OFF
]
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=ON"
system "make"
system "make", "install"
system "make", "clean"
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=OFF"
system "make"
lib.install "libfolly.a", "folly/libfollybenchmark.a"
end
end
test do
# Force use of Clang rather than LLVM Clang
ENV.clang if OS.mac?
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
folly: revision bump (boost 1.78.0)
# Homebrew formula for Folly v2022.02.14.00, revision 1 (rebuild against
# boost 1.78.0 — see the accompanying commit message).
# NOTE(review): the bottle sha256s below are identical to the pre-bump block;
# presumably they are stale until CI publishes rebuilt bottles — verify.
class Folly < Formula
desc "Collection of reusable C++ library artifacts developed at Facebook"
homepage "https://github.com/facebook/folly"
url "https://github.com/facebook/folly/archive/v2022.02.14.00.tar.gz"
sha256 "6ec28672a524e1d9d89cdb528d64362b939b4f0afc526767d93aa1ebb2639fc1"
license "Apache-2.0"
revision 1
head "https://github.com/facebook/folly.git", branch: "main"
bottle do
sha256 cellar: :any,                 arm64_monterey: "296dd25f8bc82c034b44d6e312b48c0395ad4cd091f0a4aa9035699377114039"
sha256 cellar: :any,                 arm64_big_sur:  "a7d321f22a73a9ea0f73d971c96f646695fcbac3c56ff954312d668ab64f25fe"
sha256 cellar: :any,                 monterey:       "48d2b6afc5d1a74af4d26002c6627e905f3cae06ea166e0f9401d68932ac7397"
sha256 cellar: :any,                 big_sur:        "b22b6ef90b85d3d508ef921506630088d7b7ab53a05c9816c4e5816c8a2e3f85"
sha256 cellar: :any,                 catalina:       "34b08153bc0d6261879a83bd640f58bcaa11c0231ea51bd78feb14c3d6ab8b97"
sha256 cellar: :any_skip_relocation, x86_64_linux:   "3e5174aac08d90a97296997b91e70cff1c5c1827ba3334b2473b30ee760e9480"
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "boost"
depends_on "double-conversion"
depends_on "fmt"
depends_on "gflags"
depends_on "glog"
depends_on "libevent"
depends_on "lz4"
depends_on "openssl@1.1"
depends_on "snappy"
depends_on "xz"
depends_on "zstd"
# Old Apple Clang cannot build this release; use Homebrew LLVM instead.
on_macos do
depends_on "llvm" if DevelopmentTools.clang_build_version <= 1100
end
on_linux do
depends_on "gcc"
end
fails_with :clang do
build 1100
# https://github.com/facebook/folly/issues/1545
cause <<-EOS
Undefined symbols for architecture x86_64:
"std::__1::__fs::filesystem::path::lexically_normal() const"
EOS
end
fails_with gcc: "5"
# Two-pass build (shared then static); jemalloc disabled.
def install
ENV.llvm_clang if OS.mac? && (DevelopmentTools.clang_build_version <= 1100)
mkdir "_build" do
args = std_cmake_args + %w[
-DFOLLY_USE_JEMALLOC=OFF
]
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=ON"
system "make"
system "make", "install"
system "make", "clean"
system "cmake", "..", *args, "-DBUILD_SHARED_LIBS=OFF"
system "make"
lib.install "libfolly.a", "folly/libfollybenchmark.a"
end
end
test do
# Force use of Clang rather than LLVM Clang
ENV.clang if OS.mac?
(testpath/"test.cc").write <<~EOS
#include <folly/FBVector.h>
int main() {
folly::fbvector<int> numbers({0, 1, 2, 3});
numbers.reserve(10);
for (int i = 4; i < 10; i++) {
numbers.push_back(i * 2);
}
assert(numbers[6] == 12);
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cc", "-I#{include}", "-L#{lib}",
"-lfolly", "-o", "test"
system "./test"
end
end
|
# Homebrew formula for fplll 5.4.1 (lattice reduction library and tools).
# NOTE(review): `license "LGPL-2.1"` is a deprecated SPDX identifier;
# modern formulae use "LGPL-2.1-only"/"LGPL-2.1-or-later" — confirm intent.
class Fplll < Formula
desc "Lattice algorithms using floating-point arithmetic"
homepage "https://github.com/fplll/fplll"
url "https://github.com/fplll/fplll/releases/download/5.4.1/fplll-5.4.1.tar.gz"
sha256 "7bd887957173aa592091772c1c36f6aa606b3b2ace0d14e2c26c7463dcf2deb7"
license "LGPL-2.1"
bottle do
sha256 arm64_big_sur: "9818320e953a6a251958ee097350481f86952bfac55e9f8219ecac92071738fe"
sha256 big_sur:       "3683b4c40387324054f6eb43ee129cd28cb7107e21cab7425b9da5fc1834578f"
sha256 catalina:      "de1d71773f6fe6baaf83e6b7c8cbc1521842854536242482a35b70b1c37a4b7b"
sha256 mojave:        "dc27cc471e40516aba9bd490813f5853a9fe326ea490ee27f6cf57f5c916f1fb"
end
depends_on "automake" => :build
# pkg-config is only needed to compile the test program below.
depends_on "pkg-config" => :test
depends_on "gmp"
depends_on "mpfr"
# Standard autotools build.
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
# Exercises the CLI (LLL reduction and CVP) and compile-checks the C++ API.
test do
(testpath/"m1.fplll").write("[[10 11][11 12]]")
assert_equal "[[0 1 ]\n[1 0 ]\n]\n", `#{bin/"fplll"} m1.fplll`
(testpath/"m2.fplll").write("[[17 42 4][50 75 108][11 47 33]][100 101 102]")
assert_equal "[107 88 96]\n", `#{bin/"fplll"} -a cvp m2.fplll`
(testpath/"test.cpp").write <<~EOS
#include <fplll.h>
#include <vector>
#include <stdio.h>
int main(int c, char **v) {
ZZ_mat<mpz_t> b;
std::vector<Z_NR<mpz_t>> sol_coord;
if (c > 1) { // just a compile test
shortest_vector(b, sol_coord);
}
return 0;
}
EOS
system "pkg-config", "fplll", "--cflags"
system "pkg-config", "fplll", "--libs"
pkg_config_flags = `pkg-config --cflags --libs gmp mpfr fplll`.chomp.split
system ENV.cxx, "-std=c++11", "test.cpp", *pkg_config_flags, "-o", "test"
system "./test"
end
end
fplll: update 5.4.1 bottle.
# Homebrew formula for fplll 5.4.1 — bottle checksum refresh only; the build
# and test logic is unchanged from the previous revision.
class Fplll < Formula
desc "Lattice algorithms using floating-point arithmetic"
homepage "https://github.com/fplll/fplll"
url "https://github.com/fplll/fplll/releases/download/5.4.1/fplll-5.4.1.tar.gz"
sha256 "7bd887957173aa592091772c1c36f6aa606b3b2ace0d14e2c26c7463dcf2deb7"
license "LGPL-2.1"
bottle do
sha256 arm64_big_sur: "377b5606bb5c319ad73824afcc907f946a1c7c60ddb5a0e181d257fcafc5c5ce"
sha256 big_sur:       "6290a0d579e8ffba1dab159d70f9e10f58d6600cad8564469fe066b24aa8f170"
sha256 catalina:      "bf39fa78c92642f571b4514a61ebe43c5aef39ff25d0e4de969cbf6bf323cc11"
sha256 mojave:        "2219fe4523fced68e1f9a8038848cf54d0588cc652229bcbf2bf03e696f7b971"
end
depends_on "automake" => :build
# pkg-config is only needed to compile the test program below.
depends_on "pkg-config" => :test
depends_on "gmp"
depends_on "mpfr"
# Standard autotools build.
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
# Exercises the CLI (LLL reduction and CVP) and compile-checks the C++ API.
test do
(testpath/"m1.fplll").write("[[10 11][11 12]]")
assert_equal "[[0 1 ]\n[1 0 ]\n]\n", `#{bin/"fplll"} m1.fplll`
(testpath/"m2.fplll").write("[[17 42 4][50 75 108][11 47 33]][100 101 102]")
assert_equal "[107 88 96]\n", `#{bin/"fplll"} -a cvp m2.fplll`
(testpath/"test.cpp").write <<~EOS
#include <fplll.h>
#include <vector>
#include <stdio.h>
int main(int c, char **v) {
ZZ_mat<mpz_t> b;
std::vector<Z_NR<mpz_t>> sol_coord;
if (c > 1) { // just a compile test
shortest_vector(b, sol_coord);
}
return 0;
}
EOS
system "pkg-config", "fplll", "--cflags"
system "pkg-config", "fplll", "--libs"
pkg_config_flags = `pkg-config --cflags --libs gmp mpfr fplll`.chomp.split
system ENV.cxx, "-std=c++11", "test.cpp", *pkg_config_flags, "-o", "test"
system "./test"
end
end
|
# Homebrew formula for g3log 1.3.4, an asynchronous crash-safe C++ logger.
class G3log < Formula
desc "Asynchronous, 'crash safe', logger that is easy to use"
homepage "https://github.com/KjellKod/g3log"
url "https://github.com/KjellKod/g3log/archive/1.3.4.tar.gz"
sha256 "2fe8815e5f5afec6b49bdfedfba1e86b8e58a5dc89fd97f4868fb7f3141aed19"
license "Unlicense"
bottle do
cellar :any
sha256 "5e24eda970bf16a1d737e0112ef7e86651c6cdd29b14b6dd4beec2faf9f9d292" => :big_sur
sha256 "d6ab9b85de4f0bc70d278210ac4a89c2780b4a271dc474fdd2a4ac16933a3d38" => :arm64_big_sur
sha256 "3325a5a22c63c02f6c3a7d9b35f533e579f369ff2871f7152d0ca4994bb049d3" => :catalina
sha256 "f44e98ef652573827da51288539acb1122af634b79f61f8ec2687b7b5184e971" => :mojave
end
depends_on "cmake" => :build
depends_on macos: :el_capitan # needs thread-local storage
# In-tree CMake build using Homebrew's standard arguments.
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
# Compiles a small program that logs one message, runs it, and asserts that
# a log file was actually produced.
test do
(testpath/"test.cpp").write <<~EOS.gsub(/TESTDIR/, testpath)
#include <g3log/g3log.hpp>
#include <g3log/logworker.hpp>
int main()
{
using namespace g3;
auto worker = LogWorker::createLogWorker();
worker->addDefaultLogger("test", "TESTDIR");
g3::initializeLogging(worker.get());
LOG(DEBUG) << "Hello World";
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cpp", "-L#{lib}", "-lg3log", "-o", "test"
system "./test"
# Bug fix: the original ended with `Dir.glob(...).any?`, whose boolean
# result was discarded, so a missing log file never failed the test.
refute_empty Dir.glob(testpath/"test.g3log.*.log"),
"g3log did not create a log file"
end
end
g3log: remove old fixes
# Homebrew formula for g3log 1.3.4 with obsolete macOS-version fixes removed.
class G3log < Formula
desc "Asynchronous, 'crash safe', logger that is easy to use"
homepage "https://github.com/KjellKod/g3log"
url "https://github.com/KjellKod/g3log/archive/1.3.4.tar.gz"
sha256 "2fe8815e5f5afec6b49bdfedfba1e86b8e58a5dc89fd97f4868fb7f3141aed19"
license "Unlicense"
bottle do
cellar :any
sha256 "5e24eda970bf16a1d737e0112ef7e86651c6cdd29b14b6dd4beec2faf9f9d292" => :big_sur
sha256 "d6ab9b85de4f0bc70d278210ac4a89c2780b4a271dc474fdd2a4ac16933a3d38" => :arm64_big_sur
sha256 "3325a5a22c63c02f6c3a7d9b35f533e579f369ff2871f7152d0ca4994bb049d3" => :catalina
sha256 "f44e98ef652573827da51288539acb1122af634b79f61f8ec2687b7b5184e971" => :mojave
end
depends_on "cmake" => :build
# In-tree CMake build using Homebrew's standard arguments.
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
end
# Compiles a small program that logs one message, runs it, and asserts that
# a log file was actually produced.
test do
(testpath/"test.cpp").write <<~EOS.gsub(/TESTDIR/, testpath)
#include <g3log/g3log.hpp>
#include <g3log/logworker.hpp>
int main()
{
using namespace g3;
auto worker = LogWorker::createLogWorker();
worker->addDefaultLogger("test", "TESTDIR");
g3::initializeLogging(worker.get());
LOG(DEBUG) << "Hello World";
return 0;
}
EOS
system ENV.cxx, "-std=c++14", "test.cpp", "-L#{lib}", "-lg3log", "-o", "test"
system "./test"
# Bug fix: the original ended with `Dir.glob(...).any?`, whose boolean
# result was discarded, so a missing log file never failed the test.
refute_empty Dir.glob(testpath/"test.g3log.*.log"),
"g3log did not create a log file"
end
end
|
# Homebrew formula for Gammu 1.40.0 (revision 2) — pre-OpenSSL-1.1 state;
# the following commit migrates the dependency to openssl@1.1.
class Gammu < Formula
desc "Command-line utility to control a phone"
homepage "https://wammu.eu/gammu/"
url "https://dl.cihar.com/gammu/releases/gammu-1.40.0.tar.xz"
sha256 "a760a3520d9f3a16a4ed73cefaabdbd86125bec73c6fa056ca3f0a4be8478dd6"
revision 2
head "https://github.com/gammu/gammu.git"
bottle do
sha256 "2101eb2a42043451ccb30f7bb2626f5d4c653ad1d025e8778155c4e26099f1b5" => :mojave
sha256 "b7592492869a4e30486465923ac75de80ececb472caaf11315a320b68aa30a74" => :high_sierra
sha256 "7f3e7d721eadda817e77e463bccf34fdbf2574796eefc4da53e2d2cb5ef49839" => :sierra
end
depends_on "cmake" => :build
depends_on "glib"
depends_on "openssl"
def install
# Disable opportunistic linking against Postgres
inreplace "CMakeLists.txt", "macro_optional_find_package (Postgres)", ""
mkdir "build" do
# Point bash completions at Homebrew's completion directory.
system "cmake", "..", "-DBASH_COMPLETION_COMPLETIONSDIR:PATH=#{bash_completion}", *std_cmake_args
system "make", "install"
end
end
# Minimal smoke test: the binary runs and prints usage.
test do
system bin/"gammu", "--help"
end
end
gammu: move to OpenSSL 1.1
# Homebrew formula for Gammu 1.40.0, revision 3: dependency moved from the
# legacy "openssl" formula to "openssl@1.1" (revision bumped accordingly).
# NOTE(review): bottle sha256s match the revision-2 block; presumably stale
# until CI rebuilds — verify against the published bottles.
class Gammu < Formula
desc "Command-line utility to control a phone"
homepage "https://wammu.eu/gammu/"
url "https://dl.cihar.com/gammu/releases/gammu-1.40.0.tar.xz"
sha256 "a760a3520d9f3a16a4ed73cefaabdbd86125bec73c6fa056ca3f0a4be8478dd6"
revision 3
head "https://github.com/gammu/gammu.git"
bottle do
sha256 "2101eb2a42043451ccb30f7bb2626f5d4c653ad1d025e8778155c4e26099f1b5" => :mojave
sha256 "b7592492869a4e30486465923ac75de80ececb472caaf11315a320b68aa30a74" => :high_sierra
sha256 "7f3e7d721eadda817e77e463bccf34fdbf2574796eefc4da53e2d2cb5ef49839" => :sierra
end
depends_on "cmake" => :build
depends_on "glib"
depends_on "openssl@1.1"
def install
# Disable opportunistic linking against Postgres
inreplace "CMakeLists.txt", "macro_optional_find_package (Postgres)", ""
mkdir "build" do
# Point bash completions at Homebrew's completion directory.
system "cmake", "..", "-DBASH_COMPLETION_COMPLETIONSDIR:PATH=#{bash_completion}", *std_cmake_args
system "make", "install"
end
end
# Minimal smoke test: the binary runs and prints usage.
test do
system bin/"gammu", "--help"
end
end
|
# Homebrew formula for Gauge 1.3.2, a Go-based test automation tool.
class Gauge < Formula
desc "Test automation tool that supports executable documentation"
homepage "https://gauge.org"
url "https://github.com/getgauge/gauge/archive/v1.3.2.tar.gz"
sha256 "d4cfdf3cf07a17149b6ee8dfd8b2a8f5082923238f18909bc46d870863dddc4a"
license "Apache-2.0"
head "https://github.com/getgauge/gauge.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "571362c12c48900fc53a8e898f2cf4c7fdca02ed20808e456a91235691936dbc"
sha256 cellar: :any_skip_relocation, big_sur:       "005cbc0be81d61a78109cb9987403f07b3e70bfc298f1427b167c23f73b137d9"
sha256 cellar: :any_skip_relocation, catalina:      "4ae99ffca72f63f41097fa089d5a530df6d0768717126f42d906703542265fcd"
sha256 cellar: :any_skip_relocation, mojave:        "d264f669c99b83305f195d9532f88c3588eb85f591b40b0253c07ac460a9bf61"
sha256 cellar: :any_skip_relocation, x86_64_linux:  "33f427b92d86c373cfd84586d41b1782264db5b2c6397e8178f61eac9a67eab0"
end
depends_on "go" => :build
# Upstream's build driver is a Go program (build/make.go): first pass builds,
# second pass installs into the formula prefix.
def install
system "go", "run", "build/make.go"
system "go", "run", "build/make.go", "--install", "--prefix", prefix
end
# Installs the html-report plugin from a manifest, toggles a config flag,
# and checks the reported version.
test do
(testpath/"manifest.json").write <<~EOS
{
"Plugins": [
"html-report"
]
}
EOS
system("#{bin}/gauge", "install")
assert_predicate testpath/".gauge/plugins", :exist?
system("#{bin}/gauge", "config", "check_updates", "false")
assert_match "false", shell_output("#{bin}/gauge config check_updates")
assert_match version.to_s, shell_output("#{bin}/gauge -v 2>&1")
end
end
gauge: update 1.3.2 bottle.
# Homebrew formula for Gauge 1.3.2 — bottle checksum refresh only; build and
# test logic unchanged from the previous revision.
class Gauge < Formula
desc "Test automation tool that supports executable documentation"
homepage "https://gauge.org"
url "https://github.com/getgauge/gauge/archive/v1.3.2.tar.gz"
sha256 "d4cfdf3cf07a17149b6ee8dfd8b2a8f5082923238f18909bc46d870863dddc4a"
license "Apache-2.0"
head "https://github.com/getgauge/gauge.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "81884a878b76b1c09bab8adbca6548aba40cdc1ea65f5c826ed28560345135a1"
sha256 cellar: :any_skip_relocation, big_sur:       "aadf40951be3277c2ddd808866b4e917db93dc81e92889eceb1a9260a67bc8df"
sha256 cellar: :any_skip_relocation, catalina:      "998d2288299f24fa24500020164e57702b0242f39d5203c2595867c6de310a28"
sha256 cellar: :any_skip_relocation, mojave:        "820b15a2c99f17a1e1769e48504ea746d3fcf68a1cf9efee7a34b1501609503f"
sha256 cellar: :any_skip_relocation, x86_64_linux:  "5f695cc25af482c7e6bd0d6b56e81254125546f880b9abf14ebf6eca77263e6b"
end
depends_on "go" => :build
# Upstream's build driver is a Go program (build/make.go).
def install
system "go", "run", "build/make.go"
system "go", "run", "build/make.go", "--install", "--prefix", prefix
end
# Installs the html-report plugin, toggles a config flag, checks version.
test do
(testpath/"manifest.json").write <<~EOS
{
"Plugins": [
"html-report"
]
}
EOS
system("#{bin}/gauge", "install")
assert_predicate testpath/".gauge/plugins", :exist?
system("#{bin}/gauge", "config", "check_updates", "false")
assert_match "false", shell_output("#{bin}/gauge config check_updates")
assert_match version.to_s, shell_output("#{bin}/gauge -v 2>&1")
end
end
|
# Homebrew formula for GCC 5.5.0 (versioned gcc@5, revision 6). macOS-only
# (capped at High Sierra) with a series of Xcode-compatibility patches.
class GccAT5 < Formula
desc "GNU Compiler Collection"
homepage "https://gcc.gnu.org/"
url "https://ftp.gnu.org/gnu/gcc/gcc-5.5.0/gcc-5.5.0.tar.xz"
mirror "https://ftpmirror.gnu.org/gcc/gcc-5.5.0/gcc-5.5.0.tar.xz"
sha256 "530cea139d82fe542b358961130c69cfde8b3d14556370b65823d2f91f0ced87"
revision 6
livecheck do
url :stable
regex(%r{href=.*?gcc[._-]v?(5(?:\.\d+)+)(?:/?["' >]|\.t)}i)
end
bottle do
sha256 "01a2818d89c25b22bdf8b6c597186cf986f4d2c2175741a98967ec933e5e7907" => :high_sierra
end
# The bottles are built on systems with the CLT installed, and do not work
# out of the box on Xcode-only systems due to an incorrect sysroot.
pour_bottle? do
reason "The bottle needs the Xcode CLT to be installed."
satisfy { MacOS::CLT.installed? }
end
depends_on maximum_macos: [:high_sierra, :build]
depends_on "gmp"
depends_on "isl@0.18"
depends_on "libmpc"
depends_on "mpfr"
uses_from_macos "zlib"
# GCC bootstraps itself, so it is OK to have an incompatible C++ stdlib
cxxstdlib_check :skip
# Fix build with Xcode 9
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=82091
if DevelopmentTools.clang_build_version >= 900
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/078797f1b9/gcc%405/xcode9.patch"
sha256 "e1546823630c516679371856338abcbab381efaf9bd99511ceedcce3cf7c0199"
end
end
# Fix Apple headers, otherwise they trigger a build failure in libsanitizer
# GCC bug report: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83531
# Apple radar 36176941
if MacOS.version == :high_sierra
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/413cfac6/gcc%405/10.13_headers.patch"
sha256 "94aaec20c8c7bfd3c41ef8fb7725bd524b1c0392d11a411742303a3465d18d09"
end
end
# Patch for Xcode bug, taken from https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89864#c43
# This should be removed in the next release of GCC if fixed by apple; this is an xcode bug,
# but this patch is a work around committed to GCC trunk
if MacOS::Xcode.version >= "10.2"
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/91d57ebe88e17255965fa88b53541335ef16f64a/gcc%405/gcc5-xcode10.2.patch"
sha256 "6834bec30c54ab1cae645679e908713102f376ea0fc2ee993b3c19995832fe56"
end
end
# Full GCC bootstrap build. Executables and man pages are suffixed with the
# major version ("-5") so multiple GCC formulae can coexist.
def install
# GCC will suffer build errors if forced to use a particular linker.
ENV.delete "LD"
# C, C++, ObjC and Fortran compilers are always built
languages = %w[c c++ fortran objc obj-c++]
version_suffix = version.major.to_s
# Even when suffixes are appended, the info pages conflict when
# install-info is run so pretend we have an outdated makeinfo
# to prevent their build.
ENV["gcc_cv_prog_makeinfo_modern"] = "no"
args = [
"--build=x86_64-apple-darwin#{OS.kernel_version}",
"--prefix=#{prefix}",
"--libdir=#{lib}/gcc/#{version_suffix}",
"--enable-languages=#{languages.join(",")}",
# Make most executables versioned to avoid conflicts.
"--program-suffix=-#{version_suffix}",
"--with-gmp=#{Formula["gmp"].opt_prefix}",
"--with-mpfr=#{Formula["mpfr"].opt_prefix}",
"--with-mpc=#{Formula["libmpc"].opt_prefix}",
"--with-isl=#{Formula["isl@0.18"].opt_prefix}",
"--with-system-zlib",
"--enable-libstdcxx-time=yes",
"--enable-stage1-checking",
"--enable-checking=release",
"--enable-lto",
"--enable-plugin",
# A no-op unless --HEAD is built because in head warnings will
# raise errors. But still a good idea to include.
"--disable-werror",
"--disable-nls",
"--with-pkgversion=Homebrew GCC #{pkg_version} #{build.used_options*" "}".strip,
"--with-bugurl=https://github.com/Homebrew/homebrew-core/issues",
"--enable-multilib",
]
# System headers may not be in /usr/include
sdk = MacOS.sdk_path_if_needed
if sdk
args << "--with-native-system-header-dir=/usr/include"
args << "--with-sysroot=#{sdk}"
end
# Avoid reference to sed shim
args << "SED=/usr/bin/sed"
# Ensure correct install names when linking against libgcc_s;
# see discussion in https://github.com/Homebrew/homebrew/pull/34303
inreplace "libgcc/config/t-slibgcc-darwin", "@shlib_slibdir@", "#{HOMEBREW_PREFIX}/lib/gcc/#{version_suffix}"
mkdir "build" do
system "../configure", *args
system "make", "bootstrap"
system "make", "install"
end
# Handle conflicts between GCC formulae.
# Rename man7.
Dir.glob(man7/"*.7") { |file| add_suffix file, version_suffix }
# Even when we disable building info pages some are still installed.
info.rmtree
end
# Renames `file` to carry a "-suffix" before its extension, e.g.
# foo.7 -> foo-5.7, so versioned GCC man pages do not collide.
def add_suffix(file, suffix)
dir = File.dirname(file)
ext = File.extname(file)
base = File.basename(file, ext)
File.rename file, "#{dir}/#{base}-#{suffix}#{ext}"
end
# Compile-and-run check with the versioned gcc binary.
test do
(testpath/"hello-c.c").write <<~EOS
#include <stdio.h>
int main()
{
puts("Hello, world!");
return 0;
}
EOS
system bin/"gcc-#{version.major}", "-o", "hello-c", "hello-c.c"
assert_equal "Hello, world!\n", `./hello-c`
end
end
gcc@5: update 5.5.0_6 bottle.
# Homebrew formula for GCC 5.5.0 (gcc@5, revision 6) — bottle checksum
# refresh only; build and test logic unchanged from the previous revision.
class GccAT5 < Formula
desc "GNU Compiler Collection"
homepage "https://gcc.gnu.org/"
url "https://ftp.gnu.org/gnu/gcc/gcc-5.5.0/gcc-5.5.0.tar.xz"
mirror "https://ftpmirror.gnu.org/gcc/gcc-5.5.0/gcc-5.5.0.tar.xz"
sha256 "530cea139d82fe542b358961130c69cfde8b3d14556370b65823d2f91f0ced87"
revision 6
livecheck do
url :stable
regex(%r{href=.*?gcc[._-]v?(5(?:\.\d+)+)(?:/?["' >]|\.t)}i)
end
bottle do
sha256 "dcc9059b725fd7c87842287bbedf60a28745417652d42a300dcd944e15986f36" => :high_sierra
end
# The bottles are built on systems with the CLT installed, and do not work
# out of the box on Xcode-only systems due to an incorrect sysroot.
pour_bottle? do
reason "The bottle needs the Xcode CLT to be installed."
satisfy { MacOS::CLT.installed? }
end
depends_on maximum_macos: [:high_sierra, :build]
depends_on "gmp"
depends_on "isl@0.18"
depends_on "libmpc"
depends_on "mpfr"
uses_from_macos "zlib"
# GCC bootstraps itself, so it is OK to have an incompatible C++ stdlib
cxxstdlib_check :skip
# Fix build with Xcode 9
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=82091
if DevelopmentTools.clang_build_version >= 900
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/078797f1b9/gcc%405/xcode9.patch"
sha256 "e1546823630c516679371856338abcbab381efaf9bd99511ceedcce3cf7c0199"
end
end
# Fix Apple headers, otherwise they trigger a build failure in libsanitizer
# GCC bug report: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83531
# Apple radar 36176941
if MacOS.version == :high_sierra
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/413cfac6/gcc%405/10.13_headers.patch"
sha256 "94aaec20c8c7bfd3c41ef8fb7725bd524b1c0392d11a411742303a3465d18d09"
end
end
# Patch for Xcode bug, taken from https://gcc.gnu.org/bugzilla/show_bug.cgi?id=89864#c43
# This should be removed in the next release of GCC if fixed by apple; this is an xcode bug,
# but this patch is a work around committed to GCC trunk
if MacOS::Xcode.version >= "10.2"
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/91d57ebe88e17255965fa88b53541335ef16f64a/gcc%405/gcc5-xcode10.2.patch"
sha256 "6834bec30c54ab1cae645679e908713102f376ea0fc2ee993b3c19995832fe56"
end
end
# Full GCC bootstrap build; executables/man pages suffixed with "-5".
def install
# GCC will suffer build errors if forced to use a particular linker.
ENV.delete "LD"
# C, C++, ObjC and Fortran compilers are always built
languages = %w[c c++ fortran objc obj-c++]
version_suffix = version.major.to_s
# Even when suffixes are appended, the info pages conflict when
# install-info is run so pretend we have an outdated makeinfo
# to prevent their build.
ENV["gcc_cv_prog_makeinfo_modern"] = "no"
args = [
"--build=x86_64-apple-darwin#{OS.kernel_version}",
"--prefix=#{prefix}",
"--libdir=#{lib}/gcc/#{version_suffix}",
"--enable-languages=#{languages.join(",")}",
# Make most executables versioned to avoid conflicts.
"--program-suffix=-#{version_suffix}",
"--with-gmp=#{Formula["gmp"].opt_prefix}",
"--with-mpfr=#{Formula["mpfr"].opt_prefix}",
"--with-mpc=#{Formula["libmpc"].opt_prefix}",
"--with-isl=#{Formula["isl@0.18"].opt_prefix}",
"--with-system-zlib",
"--enable-libstdcxx-time=yes",
"--enable-stage1-checking",
"--enable-checking=release",
"--enable-lto",
"--enable-plugin",
# A no-op unless --HEAD is built because in head warnings will
# raise errors. But still a good idea to include.
"--disable-werror",
"--disable-nls",
"--with-pkgversion=Homebrew GCC #{pkg_version} #{build.used_options*" "}".strip,
"--with-bugurl=https://github.com/Homebrew/homebrew-core/issues",
"--enable-multilib",
]
# System headers may not be in /usr/include
sdk = MacOS.sdk_path_if_needed
if sdk
args << "--with-native-system-header-dir=/usr/include"
args << "--with-sysroot=#{sdk}"
end
# Avoid reference to sed shim
args << "SED=/usr/bin/sed"
# Ensure correct install names when linking against libgcc_s;
# see discussion in https://github.com/Homebrew/homebrew/pull/34303
inreplace "libgcc/config/t-slibgcc-darwin", "@shlib_slibdir@", "#{HOMEBREW_PREFIX}/lib/gcc/#{version_suffix}"
mkdir "build" do
system "../configure", *args
system "make", "bootstrap"
system "make", "install"
end
# Handle conflicts between GCC formulae.
# Rename man7.
Dir.glob(man7/"*.7") { |file| add_suffix file, version_suffix }
# Even when we disable building info pages some are still installed.
info.rmtree
end
# Renames `file` to carry a "-suffix" before its extension, e.g.
# foo.7 -> foo-5.7, so versioned GCC man pages do not collide.
def add_suffix(file, suffix)
dir = File.dirname(file)
ext = File.extname(file)
base = File.basename(file, ext)
File.rename file, "#{dir}/#{base}-#{suffix}#{ext}"
end
# Compile-and-run check with the versioned gcc binary.
test do
(testpath/"hello-c.c").write <<~EOS
#include <stdio.h>
int main()
{
puts("Hello, world!");
return 0;
}
EOS
system bin/"gcc-#{version.major}", "-o", "hello-c", "hello-c.c"
assert_equal "Hello, world!\n", `./hello-c`
end
end
|
# Homebrew(-tap style) formula for GDAL 2.1.3, kept separately from the main
# "gdal" formula (hence the name and keg_only below). The class continues
# beyond this section.
class Gdal2 < Formula
desc "GDAL: Geospatial Data Abstraction Library"
homepage "http://www.gdal.org/"
url "http://download.osgeo.org/gdal/2.1.3/gdal-2.1.3.tar.gz"
sha256 "ae6a0a0dc6eb45a981a46db27e3dfe16c644fcf04732557e2cb315776974074a"
revision 2
# Bottle block intentionally commented out (private bottle host).
# bottle do
#   root_url "http://qgis.dakotacarto.com/bottles"
#   sha256 "9b818da55cd28d5b7b0e5263108e1ce9576943557d30137ce2061a5c75d7534b" => :sierra
# end
head do
url "https://svn.osgeo.org/gdal/trunk/gdal"
depends_on "doxygen" => :build
end
# Relative subdirectory for GDAL plugins, keyed by the formula's
# major.minor version, e.g. "gdalplugins/2.1" for version 2.1.3.
def plugins_subdirectory
major, minor = version.to_s.split(".")
"gdalplugins/#{major}.#{minor}"
end
keg_only "Older version of gdal is in main tap and installs similar components"
# Build options — this formula predates homebrew-core's removal of options
# and uses the legacy option/deprecated_option DSL.
option "with-complete", "Use additional Homebrew libraries to provide more drivers."
option "with-qhull", "Build with internal qhull libary support"
option "with-opencl", "Build with OpenCL acceleration."
option "with-armadillo", "Build with Armadillo accelerated TPS transforms."
option "with-unsupported", "Allow configure to drag in any library it can find. Invoke this at your own risk."
option "with-mdb", "Build with Access MDB driver (requires Java 1.6+ JDK/JRE, from Apple or Oracle)."
option "with-gnm", "Build with Geographic Network Model support"
option "with-libkml", "Build with Google's libkml driver (requires libkml --HEAD or >= 1.3)"
option "with-swig-java", "Build the swig java bindings"
deprecated_option "enable-opencl" => "with-opencl"
deprecated_option "enable-armadillo" => "with-armadillo"
deprecated_option "enable-unsupported" => "with-unsupported"
deprecated_option "enable-mdb" => "with-mdb"
deprecated_option "complete" => "with-complete"
# Core raster/vector format dependencies.
depends_on "libpng"
depends_on "jpeg"
depends_on "giflib"
depends_on "libtiff"
depends_on "libgeotiff"
depends_on "proj"
depends_on "geos"
depends_on "sqlite" # To ensure compatibility with SpatiaLite.
depends_on "pcre" # for REGEXP operator in SQLite/Spatialite driver
depends_on "freexl"
depends_on "libspatialite"
depends_on "postgresql" => :optional
depends_on "mysql" => :optional
depends_on "homebrew/science/armadillo" if build.with? "armadillo"
# libkml is built from source (see resource below), so autotools are needed.
if build.with? "libkml"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
# "--with-complete" pulls in every optional raster/vector backend available
# in Homebrew at the time.
if build.with? "complete"
# Raster libraries
depends_on "homebrew/science/netcdf" # Also brings in HDF5
depends_on "jasper"
depends_on "webp"
depends_on "homebrew/science/cfitsio"
depends_on "epsilon"
depends_on "libdap"
depends_on "libxml2"
depends_on "openjpeg"
# Vector libraries
depends_on "unixodbc" # OS X version is not complete enough
depends_on "xerces-c"
# Other libraries
depends_on "xz" # get liblzma compression algorithm library from XZutils
depends_on "poppler"
depends_on "podofo"
depends_on "json-c"
end
depends_on :java => ["1.7+", :optional, :build]
if build.with? "swig-java"
depends_on "ant" => :build
depends_on "swig" => :build
end
# Pinned libkml master commit, vendored because no stable 1.3 release
# existed at the time.
resource "libkml" do
# Until 1.3 is stable, use master branch
url "https://github.com/google/libkml.git",
:revision => "9b50572641f671194e523ad21d0171ea6537426e"
version "1.3-dev"
end
# Assembles the full ./configure argument list for GDAL from the formula's
# build options. Pure function of `build` state; returns an Array of strings.
def configure_args
args = [
# Base configuration.
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-local=#{prefix}",
"--with-threads",
"--with-libtool",
# GDAL native backends.
"--with-pcraster=internal",
"--with-pcidsk=internal",
"--with-bsb",
"--with-grib",
"--with-pam",
# Backends supported by OS X.
"--with-libiconv-prefix=/usr",
"--with-libz=/usr",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-expat=/usr",
"--with-curl=/usr/bin/curl-config",
# Default Homebrew backends.
"--with-jpeg=#{HOMEBREW_PREFIX}",
"--without-jpeg12", # Needs specially configured JPEG and TIFF libraries.
"--with-gif=#{HOMEBREW_PREFIX}",
"--with-libtiff=#{HOMEBREW_PREFIX}",
"--with-geotiff=#{HOMEBREW_PREFIX}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-freexl=#{HOMEBREW_PREFIX}",
"--with-spatialite=#{HOMEBREW_PREFIX}",
"--with-geos=#{HOMEBREW_PREFIX}/bin/geos-config",
"--with-static-proj4=#{HOMEBREW_PREFIX}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
# GRASS backend explicitly disabled. Creates a chicken-and-egg problem.
# Should be installed separately after GRASS installation using the
# official GDAL GRASS plugin.
"--without-grass",
"--without-libgrass",
]
# Optional Homebrew packages supporting additional formats.
supported_backends = %w[
liblzma
cfitsio
hdf5
netcdf
jasper
xerces
odbc
dods-root
epsilon
webp
openjpeg
podofo
pdfium
]
# With "complete": enable everything (liblzma/pdfium get bare =yes flags,
# the rest point at HOMEBREW_PREFIX). Otherwise explicitly disable each
# backend unless "--with-unsupported" lets configure probe freely.
if build.with? "complete"
supported_backends.delete "liblzma"
args << "--with-liblzma=yes"
supported_backends.delete "pdfium"
args << "--with-pdfium=yes"
args.concat supported_backends.map { |b| "--with-" + b + "=" + HOMEBREW_PREFIX }
elsif build.without? "unsupported"
args.concat supported_backends.map { |b| "--without-" + b }
end
# The following libraries are either proprietary, not available for public
# download or have no stable version in the Homebrew core that is
# compatible with GDAL. Interested users will have to install such software
# manually and most likely have to tweak the install routine.
#
# Podofo is disabled because Poppler provides the same functionality and
# then some.
unsupported_backends = %w[
gta
ogdi
fme
hdf4
fgdb
ecw
kakadu
mrsid
jp2mrsid
mrsid_lidar
msg
oci
ingres
dwgdirect
idb
sde
rasdaman
sosi
]
args.concat unsupported_backends.map { |b| "--without-" + b } if build.without? "unsupported"
# Database support.
args << (build.with?("postgresql") ? "--with-pg=#{HOMEBREW_PREFIX}/bin/pg_config" : "--without-pg")
args << (build.with?("mysql") ? "--with-mysql=#{HOMEBREW_PREFIX}/bin/mysql_config" : "--without-mysql")
if build.with? "mdb"
args << "--with-java=yes"
# The rpath is only embedded for Oracle (non-framework) installs
args << "--with-jvm-lib-add-rpath=yes"
args << "--with-mdb=yes"
end
# libkml is vendored into libexec by the install step when requested.
args << "--with-libkml=#{libexec}" if build.with? "libkml"
args << "--with-qhull=#{build.with?("qhull") ? "internal" : "no"}"
args << "--with-gnm" if build.with? "gnm"
# Python is installed manually to ensure everything is properly sandboxed.
# see
args << "--without-python"
# Scripting APIs that have not been re-worked to respect Homebrew prefixes.
#
# Currently disabled as they install willy-nilly into locations outside of
# the Homebrew prefix. Enable if you feel like it, but uninstallation may be
# a manual affair.
#
# TODO: Fix installation of script bindings so they install into the
# Homebrew prefix.
args << "--without-perl"
args << "--without-php"
args << "--without-ruby"
args << (build.with?("opencl") ? "--with-opencl" : "--without-opencl")
args << (build.with?("armadillo") ? "--with-armadillo=#{Formula["armadillo"].opt_prefix}" : "--with-armadillo=no")
args
end
def install
if build.with? "complete"
# patch to "Handle prefix renaming in jasper 1.900.28" (not yet reported)
# use inreplace due to CRLF patching issue
inreplace "frmts/jpeg2000/jpeg2000_vsil_io.cpp" do |s|
# replace order matters here!
s.gsub! "uchar", "jas_uchar"
s.gsub! "unsigned char", "jas_uchar"
end
inreplace "frmts/jpeg2000/jpeg2000_vsil_io.h" do |s|
s.sub! %r{(<jasper/jasper\.h>)}, "\\1\n\n" + <<-EOS.undent
/* Handle prefix renaming in jasper 1.900.28 */
#ifndef jas_uchar
#define jas_uchar unsigned char
#endif
EOS
end
end
if build.with? "libkml"
resource("libkml").stage do
# See main `libkml` formula for info on patches
inreplace "configure.ac", "-Werror", ""
inreplace "third_party/Makefile.am" do |s|
s.sub! /(lib_LTLIBRARIES =) libminizip.la liburiparser.la/, "\\1"
s.sub! /(noinst_LTLIBRARIES = libgtest.la libgtest_main.la)/,
"\\1 libminizip.la liburiparser.la"
s.sub! /(libminizip_la_LDFLAGS =)/, "\\1 -static"
s.sub! /(liburiparser_la_LDFLAGS =)/, "\\1 -static"
end
system "./autogen.sh"
system "./configure", "--prefix=#{libexec}"
system "make", "install"
end
end
# Linking flags for SQLite are not added at a critical moment when the GDAL
# library is being assembled. This causes the build to fail due to missing
# symbols. Also, ensure Homebrew SQLite is used so that Spatialite is
# functional.
#
# Fortunately, this can be remedied using LDFLAGS.
sqlite = Formula["sqlite"]
ENV.append "LDFLAGS", "-L#{sqlite.opt_lib} -lsqlite3"
ENV.append "CFLAGS", "-I#{sqlite.opt_include}"
# Reset ARCHFLAGS to match how we build.
ENV["ARCHFLAGS"] = "-arch #{MacOS.preferred_arch}"
# Fix hardcoded mandir: http://trac.osgeo.org/gdal/ticket/5092
inreplace "configure", %r[^mandir='\$\{prefix\}/man'$], ""
# These libs are statically linked in vendored libkml and libkml formula
inreplace "configure", " -lminizip -luriparser", "" if build.with? "libkml"
system "./configure", *configure_args
system "make"
system "make", "install"
# Add GNM headers for gdal2-python swig wrapping
include.install Dir["gnm/**/*.h"] if build.with? "gnm"
# Create versioned plugins path for other formulae
(HOMEBREW_PREFIX/"lib/#{plugins_subdirectory}").mkpath
if build.with? "swig-java"
cd "swig/java" do
inreplace "java.opt", "linux", "darwin"
inreplace "java.opt", "#JAVA_HOME = /usr/lib/jvm/java-6-openjdk/", "JAVA_HOME=$(shell echo $$JAVA_HOME)"
system "make"
system "make", "install"
# Install the jar that complements the native JNI bindings
system "ant"
lib.install "gdal.jar"
end
end
system "make", "man" if build.head?
system "make", "install-man"
# Clean up any stray doxygen files.
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
def caveats
s = <<-EOS.undent
Plugins for this version of GDAL/OGR, generated by other formulae, should
be symlinked to the following directory:
#{HOMEBREW_PREFIX}/lib/#{plugins_subdirectory}
You may need to set the following enviroment variable:
export GDAL_DRIVER_PATH=#{HOMEBREW_PREFIX}/lib/gdalplugins
PYTHON BINDINGS are now built in a separate formula: gdal2-python
EOS
if build.with? "mdb"
s += <<-EOS.undent
To have a functional MDB driver, install supporting .jar files in:
`/Library/Java/Extensions/`
See: `http://www.gdal.org/ogr/drv_mdb.html`
EOS
end
s
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
end
end
gdal2: add sierra bottle
[ci skip]
class Gdal2 < Formula
desc "GDAL: Geospatial Data Abstraction Library"
homepage "http://www.gdal.org/"
url "http://download.osgeo.org/gdal/2.1.3/gdal-2.1.3.tar.gz"
sha256 "ae6a0a0dc6eb45a981a46db27e3dfe16c644fcf04732557e2cb315776974074a"
revision 2
bottle do
root_url "http://qgis.dakotacarto.com/bottles"
sha256 "42484c9457275d2e8c76c7fe745233074d4a7864c54b2107295bdcb56105dc90" => :sierra
end
head do
url "https://svn.osgeo.org/gdal/trunk/gdal"
depends_on "doxygen" => :build
end
def plugins_subdirectory
gdal_ver_list = version.to_s.split(".")
"gdalplugins/#{gdal_ver_list[0]}.#{gdal_ver_list[1]}"
end
keg_only "Older version of gdal is in main tap and installs similar components"
option "with-complete", "Use additional Homebrew libraries to provide more drivers."
option "with-qhull", "Build with internal qhull libary support"
option "with-opencl", "Build with OpenCL acceleration."
option "with-armadillo", "Build with Armadillo accelerated TPS transforms."
option "with-unsupported", "Allow configure to drag in any library it can find. Invoke this at your own risk."
option "with-mdb", "Build with Access MDB driver (requires Java 1.6+ JDK/JRE, from Apple or Oracle)."
option "with-gnm", "Build with Geographic Network Model support"
option "with-libkml", "Build with Google's libkml driver (requires libkml --HEAD or >= 1.3)"
option "with-swig-java", "Build the swig java bindings"
deprecated_option "enable-opencl" => "with-opencl"
deprecated_option "enable-armadillo" => "with-armadillo"
deprecated_option "enable-unsupported" => "with-unsupported"
deprecated_option "enable-mdb" => "with-mdb"
deprecated_option "complete" => "with-complete"
depends_on "libpng"
depends_on "jpeg"
depends_on "giflib"
depends_on "libtiff"
depends_on "libgeotiff"
depends_on "proj"
depends_on "geos"
depends_on "sqlite" # To ensure compatibility with SpatiaLite.
depends_on "pcre" # for REGEXP operator in SQLite/Spatialite driver
depends_on "freexl"
depends_on "libspatialite"
depends_on "postgresql" => :optional
depends_on "mysql" => :optional
depends_on "homebrew/science/armadillo" if build.with? "armadillo"
if build.with? "libkml"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
if build.with? "complete"
# Raster libraries
depends_on "homebrew/science/netcdf" # Also brings in HDF5
depends_on "jasper"
depends_on "webp"
depends_on "homebrew/science/cfitsio"
depends_on "epsilon"
depends_on "libdap"
depends_on "libxml2"
depends_on "openjpeg"
# Vector libraries
depends_on "unixodbc" # OS X version is not complete enough
depends_on "xerces-c"
# Other libraries
depends_on "xz" # get liblzma compression algorithm library from XZutils
depends_on "poppler"
depends_on "podofo"
depends_on "json-c"
end
depends_on :java => ["1.7+", :optional, :build]
if build.with? "swig-java"
depends_on "ant" => :build
depends_on "swig" => :build
end
resource "libkml" do
# Until 1.3 is stable, use master branch
url "https://github.com/google/libkml.git",
:revision => "9b50572641f671194e523ad21d0171ea6537426e"
version "1.3-dev"
end
def configure_args
args = [
# Base configuration.
"--prefix=#{prefix}",
"--mandir=#{man}",
"--disable-debug",
"--with-local=#{prefix}",
"--with-threads",
"--with-libtool",
# GDAL native backends.
"--with-pcraster=internal",
"--with-pcidsk=internal",
"--with-bsb",
"--with-grib",
"--with-pam",
# Backends supported by OS X.
"--with-libiconv-prefix=/usr",
"--with-libz=/usr",
"--with-png=#{Formula["libpng"].opt_prefix}",
"--with-expat=/usr",
"--with-curl=/usr/bin/curl-config",
# Default Homebrew backends.
"--with-jpeg=#{HOMEBREW_PREFIX}",
"--without-jpeg12", # Needs specially configured JPEG and TIFF libraries.
"--with-gif=#{HOMEBREW_PREFIX}",
"--with-libtiff=#{HOMEBREW_PREFIX}",
"--with-geotiff=#{HOMEBREW_PREFIX}",
"--with-sqlite3=#{Formula["sqlite"].opt_prefix}",
"--with-freexl=#{HOMEBREW_PREFIX}",
"--with-spatialite=#{HOMEBREW_PREFIX}",
"--with-geos=#{HOMEBREW_PREFIX}/bin/geos-config",
"--with-static-proj4=#{HOMEBREW_PREFIX}",
"--with-libjson-c=#{Formula["json-c"].opt_prefix}",
# GRASS backend explicitly disabled. Creates a chicken-and-egg problem.
# Should be installed separately after GRASS installation using the
# official GDAL GRASS plugin.
"--without-grass",
"--without-libgrass",
]
# Optional Homebrew packages supporting additional formats.
supported_backends = %w[
liblzma
cfitsio
hdf5
netcdf
jasper
xerces
odbc
dods-root
epsilon
webp
openjpeg
podofo
pdfium
]
if build.with? "complete"
supported_backends.delete "liblzma"
args << "--with-liblzma=yes"
supported_backends.delete "pdfium"
args << "--with-pdfium=yes"
args.concat supported_backends.map { |b| "--with-" + b + "=" + HOMEBREW_PREFIX }
elsif build.without? "unsupported"
args.concat supported_backends.map { |b| "--without-" + b }
end
# The following libraries are either proprietary, not available for public
# download or have no stable version in the Homebrew core that is
# compatible with GDAL. Interested users will have to install such software
# manually and most likely have to tweak the install routine.
#
# Podofo is disabled because Poppler provides the same functionality and
# then some.
unsupported_backends = %w[
gta
ogdi
fme
hdf4
fgdb
ecw
kakadu
mrsid
jp2mrsid
mrsid_lidar
msg
oci
ingres
dwgdirect
idb
sde
rasdaman
sosi
]
args.concat unsupported_backends.map { |b| "--without-" + b } if build.without? "unsupported"
# Database support.
args << (build.with?("postgresql") ? "--with-pg=#{HOMEBREW_PREFIX}/bin/pg_config" : "--without-pg")
args << (build.with?("mysql") ? "--with-mysql=#{HOMEBREW_PREFIX}/bin/mysql_config" : "--without-mysql")
if build.with? "mdb"
args << "--with-java=yes"
# The rpath is only embedded for Oracle (non-framework) installs
args << "--with-jvm-lib-add-rpath=yes"
args << "--with-mdb=yes"
end
args << "--with-libkml=#{libexec}" if build.with? "libkml"
args << "--with-qhull=#{build.with?("qhull") ? "internal" : "no"}"
args << "--with-gnm" if build.with? "gnm"
# Python is installed manually to ensure everything is properly sandboxed.
# see
args << "--without-python"
# Scripting APIs that have not been re-worked to respect Homebrew prefixes.
#
# Currently disabled as they install willy-nilly into locations outside of
# the Homebrew prefix. Enable if you feel like it, but uninstallation may be
# a manual affair.
#
# TODO: Fix installation of script bindings so they install into the
# Homebrew prefix.
args << "--without-perl"
args << "--without-php"
args << "--without-ruby"
args << (build.with?("opencl") ? "--with-opencl" : "--without-opencl")
args << (build.with?("armadillo") ? "--with-armadillo=#{Formula["armadillo"].opt_prefix}" : "--with-armadillo=no")
args
end
def install
if build.with? "complete"
# patch to "Handle prefix renaming in jasper 1.900.28" (not yet reported)
# use inreplace due to CRLF patching issue
inreplace "frmts/jpeg2000/jpeg2000_vsil_io.cpp" do |s|
# replace order matters here!
s.gsub! "uchar", "jas_uchar"
s.gsub! "unsigned char", "jas_uchar"
end
inreplace "frmts/jpeg2000/jpeg2000_vsil_io.h" do |s|
s.sub! %r{(<jasper/jasper\.h>)}, "\\1\n\n" + <<-EOS.undent
/* Handle prefix renaming in jasper 1.900.28 */
#ifndef jas_uchar
#define jas_uchar unsigned char
#endif
EOS
end
end
if build.with? "libkml"
resource("libkml").stage do
# See main `libkml` formula for info on patches
inreplace "configure.ac", "-Werror", ""
inreplace "third_party/Makefile.am" do |s|
s.sub! /(lib_LTLIBRARIES =) libminizip.la liburiparser.la/, "\\1"
s.sub! /(noinst_LTLIBRARIES = libgtest.la libgtest_main.la)/,
"\\1 libminizip.la liburiparser.la"
s.sub! /(libminizip_la_LDFLAGS =)/, "\\1 -static"
s.sub! /(liburiparser_la_LDFLAGS =)/, "\\1 -static"
end
system "./autogen.sh"
system "./configure", "--prefix=#{libexec}"
system "make", "install"
end
end
# Linking flags for SQLite are not added at a critical moment when the GDAL
# library is being assembled. This causes the build to fail due to missing
# symbols. Also, ensure Homebrew SQLite is used so that Spatialite is
# functional.
#
# Fortunately, this can be remedied using LDFLAGS.
sqlite = Formula["sqlite"]
ENV.append "LDFLAGS", "-L#{sqlite.opt_lib} -lsqlite3"
ENV.append "CFLAGS", "-I#{sqlite.opt_include}"
# Reset ARCHFLAGS to match how we build.
ENV["ARCHFLAGS"] = "-arch #{MacOS.preferred_arch}"
# Fix hardcoded mandir: http://trac.osgeo.org/gdal/ticket/5092
inreplace "configure", %r[^mandir='\$\{prefix\}/man'$], ""
# These libs are statically linked in vendored libkml and libkml formula
inreplace "configure", " -lminizip -luriparser", "" if build.with? "libkml"
system "./configure", *configure_args
system "make"
system "make", "install"
# Add GNM headers for gdal2-python swig wrapping
include.install Dir["gnm/**/*.h"] if build.with? "gnm"
# Create versioned plugins path for other formulae
(HOMEBREW_PREFIX/"lib/#{plugins_subdirectory}").mkpath
if build.with? "swig-java"
cd "swig/java" do
inreplace "java.opt", "linux", "darwin"
inreplace "java.opt", "#JAVA_HOME = /usr/lib/jvm/java-6-openjdk/", "JAVA_HOME=$(shell echo $$JAVA_HOME)"
system "make"
system "make", "install"
# Install the jar that complements the native JNI bindings
system "ant"
lib.install "gdal.jar"
end
end
system "make", "man" if build.head?
system "make", "install-man"
# Clean up any stray doxygen files.
Dir.glob("#{bin}/*.dox") { |p| rm p }
end
def caveats
s = <<-EOS.undent
Plugins for this version of GDAL/OGR, generated by other formulae, should
be symlinked to the following directory:
#{HOMEBREW_PREFIX}/lib/#{plugins_subdirectory}
You may need to set the following enviroment variable:
export GDAL_DRIVER_PATH=#{HOMEBREW_PREFIX}/lib/gdalplugins
PYTHON BINDINGS are now built in a separate formula: gdal2-python
EOS
if build.with? "mdb"
s += <<-EOS.undent
To have a functional MDB driver, install supporting .jar files in:
`/Library/Java/Extensions/`
See: `http://www.gdal.org/ogr/drv_mdb.html`
EOS
end
s
end
test do
# basic tests to see if third-party dylibs are loading OK
system "#{bin}/gdalinfo", "--formats"
system "#{bin}/ogrinfo", "--formats"
end
end
|
class Geph2 < Formula
desc "Modular Internet censorship circumvention system"
homepage "https://geph.io"
url "https://github.com/geph-official/geph2/archive/v0.22.2.tar.gz"
sha256 "dd1ccd9c5aac06b46d57b9ba7aab00b6f42b3ec8fde85d00f09e2e474e7c1dc1"
license "GPL-3.0-only"
bottle do
cellar :any_skip_relocation
sha256 "7c48c5f3498c0baa1aacd187d715e12ef0625eed6012af544e8a10ff3768a2ef" => :catalina
sha256 "7c48c5f3498c0baa1aacd187d715e12ef0625eed6012af544e8a10ff3768a2ef" => :mojave
sha256 "7c48c5f3498c0baa1aacd187d715e12ef0625eed6012af544e8a10ff3768a2ef" => :high_sierra
end
depends_on "go" => :build
def install
bin_path = buildpath/"src/github.com/geph-official/geph2"
bin_path.install Dir["*"]
cd bin_path/"cmd/geph-client" do
ENV["CGO_ENABLED"] = "0"
system "go", "build", "-o",
bin/"geph-client", "-v", "-trimpath"
end
end
test do
assert_match "username = homebrew", shell_output("#{bin}/geph-client -username homebrew -dumpflags")
end
end
geph2: update 0.22.2 bottle.
class Geph2 < Formula
desc "Modular Internet censorship circumvention system"
homepage "https://geph.io"
url "https://github.com/geph-official/geph2/archive/v0.22.2.tar.gz"
sha256 "dd1ccd9c5aac06b46d57b9ba7aab00b6f42b3ec8fde85d00f09e2e474e7c1dc1"
license "GPL-3.0-only"
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "6a596d4f5c73bdaf166874f9ad4c8721d8ccbbe39da45541b00293a4b55675d1" => :catalina
sha256 "6a596d4f5c73bdaf166874f9ad4c8721d8ccbbe39da45541b00293a4b55675d1" => :mojave
sha256 "6a596d4f5c73bdaf166874f9ad4c8721d8ccbbe39da45541b00293a4b55675d1" => :high_sierra
end
depends_on "go" => :build
def install
bin_path = buildpath/"src/github.com/geph-official/geph2"
bin_path.install Dir["*"]
cd bin_path/"cmd/geph-client" do
ENV["CGO_ENABLED"] = "0"
system "go", "build", "-o",
bin/"geph-client", "-v", "-trimpath"
end
end
test do
assert_match "username = homebrew", shell_output("#{bin}/geph-client -username homebrew -dumpflags")
end
end
|
class Ghcup < Formula
desc "Installer for the general purpose language Haskell"
homepage "https://www.haskell.org/ghcup/"
# There is a tarball at Hackage, but that doesn't include the shell completions.
url "https://gitlab.haskell.org/haskell/ghcup-hs/-/archive/v0.1.18.0/ghcup-hs-v0.1.18.0.tar.bz2"
sha256 "fac7e5fd0ec6d95c3d2daa56b4d77ec8daa37b179b43e62c528d90053b01aeb9"
license "LGPL-3.0-only"
head "https://gitlab.haskell.org/haskell/ghcup-hs.git", branch: "master"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "c8fd25d92e26b13e31a78cd0185258321cfa381375ed0e29bb16c023d7d1763d"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a6c374ee90254045c3df618268852ed7182d38a404a36656d8c91a3176007331"
sha256 cellar: :any_skip_relocation, monterey: "e5739de269fbe1ff0b03ae564fef3714b030caca20fcd5f500132e308fdca885"
sha256 cellar: :any_skip_relocation, big_sur: "ace684548a140c8db60e51e40e750dccc711e73ca2976b919b46eabeeb83097e"
sha256 cellar: :any_skip_relocation, catalina: "39d01f386acc5221527cbbc3509030a161049b252bf8b62507b00fb87e93805a"
sha256 cellar: :any_skip_relocation, x86_64_linux: "970569f896725c826a38193e32fe89cb4521eedcf4a8f1e4f704d98f3809811b"
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
uses_from_macos "ncurses"
uses_from_macos "zlib"
# upstream mr: https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/277
patch do
url "https://gitlab.haskell.org/fishtreesugar/ghcup-hs/-/commit/22f0081303b14ea1da10e6ec5020a41dab591668.diff"
sha256 "ae513910d39f5d6b3d00de5d5f4da1420263c581168dabd221f2fe4f941c7c65"
end
def install
system "cabal", "v2-update"
# `+disable-upgrade` disables the self-upgrade feature.
system "cabal", "v2-install", *std_cabal_v2_args, "--flags=+disable-upgrade"
bash_completion.install "scripts/shell-completions/bash" => "ghcup"
fish_completion.install "scripts/shell-completions/fish" => "ghcup.fish"
zsh_completion.install "scripts/shell-completions/zsh" => "_ghcup"
end
test do
assert_match "ghc", shell_output("#{bin}/ghcup list")
end
end
ghcup: update 0.1.18.0 bottle.
class Ghcup < Formula
desc "Installer for the general purpose language Haskell"
homepage "https://www.haskell.org/ghcup/"
# There is a tarball at Hackage, but that doesn't include the shell completions.
url "https://gitlab.haskell.org/haskell/ghcup-hs/-/archive/v0.1.18.0/ghcup-hs-v0.1.18.0.tar.bz2"
sha256 "fac7e5fd0ec6d95c3d2daa56b4d77ec8daa37b179b43e62c528d90053b01aeb9"
license "LGPL-3.0-only"
head "https://gitlab.haskell.org/haskell/ghcup-hs.git", branch: "master"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "c8fd25d92e26b13e31a78cd0185258321cfa381375ed0e29bb16c023d7d1763d"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a6c374ee90254045c3df618268852ed7182d38a404a36656d8c91a3176007331"
sha256 cellar: :any_skip_relocation, ventura: "8cae0d1d4a20e8fa9a6dba80bfd44fcfa56417fd4dc91ef46c792ff8f86ae166"
sha256 cellar: :any_skip_relocation, monterey: "e5739de269fbe1ff0b03ae564fef3714b030caca20fcd5f500132e308fdca885"
sha256 cellar: :any_skip_relocation, big_sur: "ace684548a140c8db60e51e40e750dccc711e73ca2976b919b46eabeeb83097e"
sha256 cellar: :any_skip_relocation, catalina: "39d01f386acc5221527cbbc3509030a161049b252bf8b62507b00fb87e93805a"
sha256 cellar: :any_skip_relocation, x86_64_linux: "970569f896725c826a38193e32fe89cb4521eedcf4a8f1e4f704d98f3809811b"
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
uses_from_macos "ncurses"
uses_from_macos "zlib"
# upstream mr: https://gitlab.haskell.org/haskell/ghcup-hs/-/merge_requests/277
patch do
url "https://gitlab.haskell.org/fishtreesugar/ghcup-hs/-/commit/22f0081303b14ea1da10e6ec5020a41dab591668.diff"
sha256 "ae513910d39f5d6b3d00de5d5f4da1420263c581168dabd221f2fe4f941c7c65"
end
def install
system "cabal", "v2-update"
# `+disable-upgrade` disables the self-upgrade feature.
system "cabal", "v2-install", *std_cabal_v2_args, "--flags=+disable-upgrade"
bash_completion.install "scripts/shell-completions/bash" => "ghcup"
fish_completion.install "scripts/shell-completions/fish" => "ghcup.fish"
zsh_completion.install "scripts/shell-completions/zsh" => "_ghcup"
end
test do
assert_match "ghc", shell_output("#{bin}/ghcup list")
end
end
|
require 'formula'
class Gitsh < Formula
SYSTEM_RUBY_PATH = '/usr/bin/ruby'
HOMEBREW_RUBY_PATH = "#{HOMEBREW_PREFIX}/bin/ruby"
homepage 'http://thoughtbot.github.io/gitsh/'
url 'http://thoughtbot.github.io/gitsh/gitsh-0.9.tar.gz'
sha1 'd6920cfbb7f2f974b38b4478105c837b356345a5'
def self.old_system_ruby?
system_ruby_version = `#{SYSTEM_RUBY_PATH} -e "puts RUBY_VERSION"`.chomp
system_ruby_version < '1.9.3'
end
if old_system_ruby?
depends_on 'Ruby'
end
def install
set_ruby_path
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/gitsh", "--version"
end
private
def set_ruby_path
if self.class.old_system_ruby? || File.exist?(HOMEBREW_RUBY_PATH)
ENV['RUBY'] = HOMEBREW_RUBY_PATH
else
ENV['RUBY'] = SYSTEM_RUBY_PATH
end
end
end
gitsh: Release version 0.10
require 'formula'
class Gitsh < Formula
SYSTEM_RUBY_PATH = '/usr/bin/ruby'
HOMEBREW_RUBY_PATH = "#{HOMEBREW_PREFIX}/bin/ruby"
homepage 'http://thoughtbot.github.io/gitsh/'
url 'http://thoughtbot.github.io/gitsh/gitsh-0.10.tar.gz'
sha1 'f3b62fb6bf6b7cce003df6324512f67cdc3a9061'
def self.old_system_ruby?
system_ruby_version = `#{SYSTEM_RUBY_PATH} -e "puts RUBY_VERSION"`.chomp
system_ruby_version < '1.9.3'
end
if old_system_ruby?
depends_on 'Ruby'
end
def install
set_ruby_path
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system "#{bin}/gitsh", "--version"
end
private
def set_ruby_path
if self.class.old_system_ruby? || File.exist?(HOMEBREW_RUBY_PATH)
ENV['RUBY'] = HOMEBREW_RUBY_PATH
else
ENV['RUBY'] = SYSTEM_RUBY_PATH
end
end
end
|
class Gitui < Formula
desc "Blazing fast terminal-ui for git written in rust"
homepage "https://github.com/extrawurst/gitui"
url "https://github.com/extrawurst/gitui/archive/v0.21.0.tar.gz"
sha256 "da99defad08bd455c12398438e846aa71c160acfbcc60d06b9c852c5d7ef1d99"
license "MIT"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_ventura: "94ec4ffddc9ff28779be048ceeacec1f126d7f9a9bb5d9f03c00ad755dd57853"
sha256 cellar: :any_skip_relocation, arm64_monterey: "12d9e243bd1aca529db25c40bffe2b5da5aa6c3be6427d4d614babd7e48915ca"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "7a306856b92c55080ec447be66c6a03e36428154d0e7b63922d60ba6c3f8624c"
sha256 cellar: :any_skip_relocation, monterey: "66c45e185fc3c3494b1c965bd4373a2ea34996b26fc8e839548eccca81f18271"
sha256 cellar: :any_skip_relocation, big_sur: "ebe75d0d97989cd3f3ae83cda32cc545bed4f2f9f798dff2039737e4e3ec6cdb"
sha256 cellar: :any_skip_relocation, x86_64_linux: "c592a405dcef3c5325063972bbac70b6fd599e02f9f5191a71ddb90e2e84d25a"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
end
test do
system "git", "clone", "https://github.com/extrawurst/gitui.git"
(testpath/"gitui").cd { system "git", "checkout", "v0.7.0" }
input, _, wait_thr = Open3.popen2 "script -q screenlog.ansi"
input.puts "stty rows 80 cols 130"
input.puts "env LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 TERM=xterm #{bin}/gitui -d gitui"
sleep 1
# select log tab
input.puts "2"
sleep 1
# inspect commit (return + right arrow key)
input.puts "\r"
sleep 1
input.puts "\e[C"
sleep 1
input.close
screenlog = (testpath/"screenlog.ansi").read
# remove ANSI colors
screenlog.encode!("UTF-8", "binary",
invalid: :replace,
undef: :replace,
replace: "")
screenlog.gsub!(/\e\[([;\d]+)?m/, "")
assert_match "Author: Stephan Dilly", screenlog
assert_match "Date: 2020-06-15", screenlog
assert_match "Sha: 9c2a31846c417d8775a346ceaf38e77b710d3aab", screenlog
ensure
Process.kill("TERM", wait_thr.pid)
end
end
gitui: update 0.21.0 bottle.
class Gitui < Formula
desc "Blazing fast terminal-ui for git written in rust"
homepage "https://github.com/extrawurst/gitui"
url "https://github.com/extrawurst/gitui/archive/v0.21.0.tar.gz"
sha256 "da99defad08bd455c12398438e846aa71c160acfbcc60d06b9c852c5d7ef1d99"
license "MIT"
bottle do
rebuild 1
sha256 cellar: :any_skip_relocation, arm64_ventura: "94ec4ffddc9ff28779be048ceeacec1f126d7f9a9bb5d9f03c00ad755dd57853"
sha256 cellar: :any_skip_relocation, arm64_monterey: "12d9e243bd1aca529db25c40bffe2b5da5aa6c3be6427d4d614babd7e48915ca"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "7a306856b92c55080ec447be66c6a03e36428154d0e7b63922d60ba6c3f8624c"
sha256 cellar: :any_skip_relocation, monterey: "66c45e185fc3c3494b1c965bd4373a2ea34996b26fc8e839548eccca81f18271"
sha256 cellar: :any_skip_relocation, big_sur: "ebe75d0d97989cd3f3ae83cda32cc545bed4f2f9f798dff2039737e4e3ec6cdb"
sha256 cellar: :any_skip_relocation, catalina: "d0641624fb4122d90fdac164f0813ad83667c8db06759d9bc814d16ea2519c3b"
sha256 cellar: :any_skip_relocation, x86_64_linux: "c592a405dcef3c5325063972bbac70b6fd599e02f9f5191a71ddb90e2e84d25a"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
end
test do
system "git", "clone", "https://github.com/extrawurst/gitui.git"
(testpath/"gitui").cd { system "git", "checkout", "v0.7.0" }
input, _, wait_thr = Open3.popen2 "script -q screenlog.ansi"
input.puts "stty rows 80 cols 130"
input.puts "env LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 TERM=xterm #{bin}/gitui -d gitui"
sleep 1
# select log tab
input.puts "2"
sleep 1
# inspect commit (return + right arrow key)
input.puts "\r"
sleep 1
input.puts "\e[C"
sleep 1
input.close
screenlog = (testpath/"screenlog.ansi").read
# remove ANSI colors
screenlog.encode!("UTF-8", "binary",
invalid: :replace,
undef: :replace,
replace: "")
screenlog.gsub!(/\e\[([;\d]+)?m/, "")
assert_match "Author: Stephan Dilly", screenlog
assert_match "Date: 2020-06-15", screenlog
assert_match "Sha: 9c2a31846c417d8775a346ceaf38e77b710d3aab", screenlog
ensure
Process.kill("TERM", wait_thr.pid)
end
end
|
class Gitui < Formula
desc "Blazing fast terminal-ui for git written in rust"
homepage "https://github.com/extrawurst/gitui"
url "https://github.com/extrawurst/gitui/archive/v0.16.0.tar.gz"
sha256 "181ee579613daf5f7cde39d29c464a14332614c25951765b16ad65fdc6e00188"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "3d50103b6ee31fe0adbb2fec8e3b618f0929c9df41671fb3c2ead438a750de75"
sha256 cellar: :any_skip_relocation, big_sur: "c2a1d2fc613bfc00eb3d4e23cf64c9c877995ed3b37895de4b0bb30a66db2d4c"
sha256 cellar: :any_skip_relocation, catalina: "0dde0cdbb92472ec0f77226c868f0584c9f4bbb37f6b64c7858a88321aa12197"
sha256 cellar: :any_skip_relocation, mojave: "b854ed73adc6a072c55fa794188560e27fde7e9bd89502cdd02312a53e6ba771"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
end
test do
system "git", "clone", "https://github.com/extrawurst/gitui.git"
(testpath/"gitui").cd { system "git", "checkout", "v0.7.0" }
input, _, wait_thr = Open3.popen2 "script -q screenlog.ansi"
input.puts "stty rows 80 cols 130"
input.puts "env LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 TERM=xterm #{bin}/gitui -d gitui"
sleep 1
# select log tab
input.puts "2"
sleep 1
# inspect commit (return + right arrow key)
input.puts "\r"
sleep 1
input.puts "\e[C"
sleep 1
input.close
screenlog = (testpath/"screenlog.ansi").read
# remove ANSI colors
screenlog.encode!("UTF-8", "binary",
invalid: :replace,
undef: :replace,
replace: "")
screenlog.gsub!(/\e\[([;\d]+)?m/, "")
assert_match "Author: Stephan Dilly", screenlog
assert_match "Date: 2020-06-15", screenlog
assert_match "Sha: 9c2a31846c417d8775a346ceaf38e77b710d3aab", screenlog
ensure
Process.kill("TERM", wait_thr.pid)
end
end
gitui: update 0.16.0 bottle.
class Gitui < Formula
desc "Blazing fast terminal-ui for git written in rust"
homepage "https://github.com/extrawurst/gitui"
url "https://github.com/extrawurst/gitui/archive/v0.16.0.tar.gz"
sha256 "181ee579613daf5f7cde39d29c464a14332614c25951765b16ad65fdc6e00188"
license "MIT"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "922b02ea87791bf93ef8bda43f1d486eae11ea7d09cc38172d850b4c2689a39b"
sha256 cellar: :any_skip_relocation, big_sur: "ae28951e0b9eb23f44bdf4f4b928c8f3b2516ce852ec92b01fb012240d64c984"
sha256 cellar: :any_skip_relocation, catalina: "98e8623f0f2c16a89baf87873706b871b9d1537c1b9548441aca11037b2ddfa6"
sha256 cellar: :any_skip_relocation, mojave: "a3b4548d4cf4ce96d5e6830a5cfc3a30f530107fa0c2093f0cd125c9e8341d69"
end
depends_on "rust" => :build
uses_from_macos "zlib"
def install
system "cargo", "install", *std_cargo_args
end
test do
system "git", "clone", "https://github.com/extrawurst/gitui.git"
(testpath/"gitui").cd { system "git", "checkout", "v0.7.0" }
input, _, wait_thr = Open3.popen2 "script -q screenlog.ansi"
input.puts "stty rows 80 cols 130"
input.puts "env LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8 TERM=xterm #{bin}/gitui -d gitui"
sleep 1
# select log tab
input.puts "2"
sleep 1
# inspect commit (return + right arrow key)
input.puts "\r"
sleep 1
input.puts "\e[C"
sleep 1
input.close
screenlog = (testpath/"screenlog.ansi").read
# remove ANSI colors
screenlog.encode!("UTF-8", "binary",
invalid: :replace,
undef: :replace,
replace: "")
screenlog.gsub!(/\e\[([;\d]+)?m/, "")
assert_match "Author: Stephan Dilly", screenlog
assert_match "Date: 2020-06-15", screenlog
assert_match "Sha: 9c2a31846c417d8775a346ceaf38e77b710d3aab", screenlog
ensure
Process.kill("TERM", wait_thr.pid)
end
end
|
class Gleam < Formula
desc "✨ A statically typed language for the Erlang VM"
homepage "https://gleam.run"
url "https://github.com/lpil/gleam/archive/v0.10.0.tar.gz"
sha256 "f909fc7c89c950f7a8fffc943733dc4bac3b0b88eb7d7760eb04732347459ba1"
bottle do
cellar :any_skip_relocation
sha256 "70777be1c8558f699d379114b7105b1666855ee95b3fd69b7af439a44038d0bd" => :catalina
sha256 "2d8c0593e9f1a7435f126687b16422eb5f1e5edd7d0c4132c332fdd16a6c3108" => :mojave
sha256 "e95b247a6e79610200134cdad6754663bedacad6a06efaac808a4d4e22f1e8c1" => :high_sierra
end
depends_on "rust" => :build
depends_on "erlang"
depends_on "rebar3"
on_linux do
depends_on "pkg-config" => :build
end
def install
system "cargo", "install", *std_cargo_args
end
test do
Dir.chdir testpath
system "#{bin}/gleam", "new", "test_project"
Dir.chdir "test_project"
system "rebar3", "eunit"
end
end
gleam: update 0.10.0 bottle.
class Gleam < Formula
desc "✨ A statically typed language for the Erlang VM"
homepage "https://gleam.run"
url "https://github.com/lpil/gleam/archive/v0.10.0.tar.gz"
sha256 "f909fc7c89c950f7a8fffc943733dc4bac3b0b88eb7d7760eb04732347459ba1"
bottle do
cellar :any_skip_relocation
sha256 "d6cc30c84004f50f76c4d7501814e5311482619c96b56e7e51f9b706e3490c25" => :catalina
sha256 "724d94e8bf2556d9485423de6ed21efbc53326114f245d9ac24d96e20f57a96d" => :mojave
sha256 "4735f65ab3a0ae5614feaf1433cd103edc1ea6ee2de59c2383e051047834ff55" => :high_sierra
end
depends_on "rust" => :build
depends_on "erlang"
depends_on "rebar3"
on_linux do
depends_on "pkg-config" => :build
end
def install
system "cargo", "install", *std_cargo_args
end
test do
Dir.chdir testpath
system "#{bin}/gleam", "new", "test_project"
Dir.chdir "test_project"
system "rebar3", "eunit"
end
end
|
require "os/linux/glibc"
class BrewedGlibcNotOlderRequirement < Requirement
fatal true
satisfy(build_env: false) do
Glibc.version >= OS::Linux::Glibc.system_version
end
def message
<<~EOS
Your system's glibc version is #{OS::Linux::Glibc.system_version}, and Homebrew's glibc version is #{Glibc.version}.
Installing a version of glibc that is older than your system's can break formulae installed from source.
EOS
end
def display_s
"System glibc < #{Glibc.version}"
end
end
class GlibcBaseRequirement < Requirement
def message
tool = self.class::TOOL
version = self.class::VERSION
<<~EOS
#{[tool, version].compact.join(" ")} is required to build glibc.
Install #{tool} with your host package manager if you have sudo access:
sudo apt-get install #{tool}
sudo yum install #{tool}
EOS
end
def display_s
"#{self.class::TOOL} #{self.class::VERSION}".strip
end
end
class GawkRequirement < GlibcBaseRequirement
fatal true
satisfy(build_env: false) { which(TOOL).present? }
TOOL = "gawk".freeze
VERSION = "3.1.2 (or later)".freeze
end
class MakeRequirement < GlibcBaseRequirement
fatal true
satisfy(build_env: false) { which(TOOL).present? }
TOOL = "make".freeze
VERSION = "3.79 (or later)".freeze
end
# Host sed is needed to build glibc; no minimum version is enforced.
class SedRequirement < GlibcBaseRequirement
  fatal true
  satisfy(build_env: false) { which(TOOL).present? }
  TOOL = "sed".freeze
  VERSION = nil
end
# glibc requires a minimum Linux kernel version on the running host.
class LinuxKernelRequirement < Requirement
  fatal true

  MINIMUM_LINUX_KERNEL_VERSION = "2.6.32".freeze

  satisfy(build_env: false) do
    OS.kernel_version >= MINIMUM_LINUX_KERNEL_VERSION
  end

  def message
    <<~EOS
      Linux kernel version #{MINIMUM_LINUX_KERNEL_VERSION} or later is required by glibc.
      Your system has Linux kernel version #{OS.kernel_version}.
    EOS
  end

  def display_s
    "Linux kernel #{MINIMUM_LINUX_KERNEL_VERSION} (or later)"
  end
end
# Homebrew-on-Linux glibc formula, pinned to 2.23 to match Homebrew CI.
class Glibc < Formula
  desc "GNU C Library"
  homepage "https://www.gnu.org/software/libc/"
  url "https://ftp.gnu.org/gnu/glibc/glibc-2.23.tar.gz"
  sha256 "2bd08abb24811cda62e17e61e9972f091f02a697df550e2e44ddcfb2255269d2"
  license all_of: ["GPL-2.0-or-later", "LGPL-2.1-or-later"]

  livecheck do
    skip "glibc is pinned to the version present in Homebrew CI"
  end

  depends_on "binutils" => :build
  depends_on GawkRequirement => :build
  depends_on "linux-headers" => :build
  depends_on MakeRequirement => :build
  depends_on SedRequirement => :build
  depends_on BrewedGlibcNotOlderRequirement
  depends_on :linux
  depends_on LinuxKernelRequirement

  # GCC 4.7 or later is required.
  fails_with gcc: "4.6"

  def install
    # Fix Error: `loc1@GLIBC_2.2.5' can't be versioned to common symbol 'loc1'
    # See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=
    # Fixed in glibc 2.24
    inreplace "misc/regexp.c", /^(char \*loc[12s]);$/, "\\1 __attribute__ ((nocommon));"

    # Setting RPATH breaks glibc.
    %w[
      LDFLAGS LD_LIBRARY_PATH LD_RUN_PATH LIBRARY_PATH
      HOMEBREW_DYNAMIC_LINKER HOMEBREW_LIBRARY_PATHS HOMEBREW_RPATH_PATHS
    ].each { |x| ENV.delete x }

    # Use brewed ld.so.preload rather than the host's /etc/ld.so.preload
    inreplace "elf/rtld.c", '= "/etc/ld.so.preload";', '= SYSCONFDIR "/ld.so.preload";'

    # glibc must be configured and built from a separate build directory.
    mkdir "build" do
      args = [
        "--disable-debug",
        "--disable-dependency-tracking",
        "--disable-silent-rules",
        "--prefix=#{prefix}",
        "--enable-obsolete-rpc",
        "--without-selinux",
        "--with-binutils=#{Formula["binutils"].bin}",
        "--with-headers=#{Formula["linux-headers"].include}",
      ]
      system "../configure", *args
      system "make", "all"
      system "make", "install"
      # Provide the lib64 layout some software expects.
      prefix.install_symlink "lib" => "lib64"
    end

    # Install ld.so symlink.
    ln_sf lib/"ld-linux-x86-64.so.2", HOMEBREW_PREFIX/"lib/ld.so"
  end

  def post_install
    # Compile locale definition files
    mkdir_p lib/"locale"

    # Get all extra installed locales from the system, except C locales
    locales = ENV.map do |k, v|
      v if k[/^LANG$|^LC_/] && v != "C" && !v.start_with?("C.")
    end.compact

    # en_US.UTF-8 is required by gawk make check
    locales = (locales + ["en_US.UTF-8"]).sort.uniq
    ohai "Installing locale data for #{locales.join(" ")}"
    locales.each do |locale|
      lang, charmap = locale.split(".", 2)
      if charmap.present?
        # localedef expects the canonical charmap spelling.
        charmap = "UTF-8" if charmap == "utf8"
        system bin/"localedef", "-i", lang, "-f", charmap, locale
      else
        system bin/"localedef", "-i", lang, locale
      end
    end

    # Set the local time zone
    # NOTE(review): this guard requires brew_localtime to already exist,
    # while the zoneinfo branch below guards on !exist? — verify the
    # intended condition.
    sys_localtime = Pathname("/etc/localtime")
    brew_localtime = prefix/"etc/localtime"
    (prefix/"etc").install_symlink sys_localtime if sys_localtime.exist? && brew_localtime.exist?

    # Set zoneinfo correctly using the system installed zoneinfo
    sys_zoneinfo = Pathname("/usr/share/zoneinfo")
    brew_zoneinfo = share/"zoneinfo"
    share.install_symlink sys_zoneinfo if sys_zoneinfo.exist? && !brew_zoneinfo.exist?
  end

  test do
    # ld.so prints usage and exits 127 when run without a program.
    assert_match "Usage", shell_output("#{lib}/ld-#{version}.so 2>&1", 127)
    safe_system "#{lib}/libc-#{version}.so", "--version"
    safe_system "#{bin}/locale", "--version"
  end
end
glibc: create symlink to ld.so in post install (#78299)
require "os/linux/glibc"
# Refuses to install a Homebrew glibc older than the host system's glibc,
# since downgrading glibc can break formulae built from source.
class BrewedGlibcNotOlderRequirement < Requirement
  fatal true

  # Satisfied when the pinned formula version is >= the host's glibc version.
  satisfy(build_env: false) do
    Glibc.version >= OS::Linux::Glibc.system_version
  end

  def message
    <<~EOS
      Your system's glibc version is #{OS::Linux::Glibc.system_version}, and Homebrew's glibc version is #{Glibc.version}.
      Installing a version of glibc that is older than your system's can break formulae installed from source.
    EOS
  end

  def display_s
    "System glibc < #{Glibc.version}"
  end
end
# Shared base class for host build-tool requirements. Subclasses define the
# TOOL and VERSION constants referenced here via self.class.
class GlibcBaseRequirement < Requirement
  def message
    tool = self.class::TOOL
    version = self.class::VERSION
    <<~EOS
      #{[tool, version].compact.join(" ")} is required to build glibc.
      Install #{tool} with your host package manager if you have sudo access:
        sudo apt-get install #{tool}
        sudo yum install #{tool}
    EOS
  end

  def display_s
    # VERSION may be nil (e.g. sed), so strip the trailing space in that case.
    "#{self.class::TOOL} #{self.class::VERSION}".strip
  end
end
# Host gawk is needed to build glibc; satisfied when found on PATH.
# The constants are defined after `satisfy`, which is fine because the
# satisfy block is evaluated lazily.
class GawkRequirement < GlibcBaseRequirement
  fatal true
  satisfy(build_env: false) { which(TOOL).present? }
  TOOL = "gawk".freeze
  VERSION = "3.1.2 (or later)".freeze
end
# Host make is needed to build glibc; satisfied when found on PATH.
class MakeRequirement < GlibcBaseRequirement
  fatal true
  satisfy(build_env: false) { which(TOOL).present? }
  TOOL = "make".freeze
  VERSION = "3.79 (or later)".freeze
end
# Host sed is needed to build glibc; no minimum version is enforced.
class SedRequirement < GlibcBaseRequirement
  fatal true
  satisfy(build_env: false) { which(TOOL).present? }
  TOOL = "sed".freeze
  VERSION = nil
end
# glibc requires a minimum Linux kernel version on the running host.
class LinuxKernelRequirement < Requirement
  fatal true

  MINIMUM_LINUX_KERNEL_VERSION = "2.6.32".freeze

  satisfy(build_env: false) do
    OS.kernel_version >= MINIMUM_LINUX_KERNEL_VERSION
  end

  def message
    <<~EOS
      Linux kernel version #{MINIMUM_LINUX_KERNEL_VERSION} or later is required by glibc.
      Your system has Linux kernel version #{OS.kernel_version}.
    EOS
  end

  def display_s
    "Linux kernel #{MINIMUM_LINUX_KERNEL_VERSION} (or later)"
  end
end
# Homebrew-on-Linux glibc formula, pinned to 2.23 to match Homebrew CI.
# In this revision the ld.so symlink is created in post_install instead of
# install, so it is (re)created even when installing from a bottle.
class Glibc < Formula
  desc "GNU C Library"
  homepage "https://www.gnu.org/software/libc/"
  url "https://ftp.gnu.org/gnu/glibc/glibc-2.23.tar.gz"
  sha256 "2bd08abb24811cda62e17e61e9972f091f02a697df550e2e44ddcfb2255269d2"
  license all_of: ["GPL-2.0-or-later", "LGPL-2.1-or-later"]

  livecheck do
    skip "glibc is pinned to the version present in Homebrew CI"
  end

  depends_on "binutils" => :build
  depends_on GawkRequirement => :build
  depends_on "linux-headers" => :build
  depends_on MakeRequirement => :build
  depends_on SedRequirement => :build
  depends_on BrewedGlibcNotOlderRequirement
  depends_on :linux
  depends_on LinuxKernelRequirement

  # GCC 4.7 or later is required.
  fails_with gcc: "4.6"

  def install
    # Fix Error: `loc1@GLIBC_2.2.5' can't be versioned to common symbol 'loc1'
    # See https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=
    # Fixed in glibc 2.24
    inreplace "misc/regexp.c", /^(char \*loc[12s]);$/, "\\1 __attribute__ ((nocommon));"

    # Setting RPATH breaks glibc.
    %w[
      LDFLAGS LD_LIBRARY_PATH LD_RUN_PATH LIBRARY_PATH
      HOMEBREW_DYNAMIC_LINKER HOMEBREW_LIBRARY_PATHS HOMEBREW_RPATH_PATHS
    ].each { |x| ENV.delete x }

    # Use brewed ld.so.preload rather than the host's /etc/ld.so.preload
    inreplace "elf/rtld.c", '= "/etc/ld.so.preload";', '= SYSCONFDIR "/ld.so.preload";'

    # glibc must be configured and built from a separate build directory.
    mkdir "build" do
      args = [
        "--disable-debug",
        "--disable-dependency-tracking",
        "--disable-silent-rules",
        "--prefix=#{prefix}",
        "--enable-obsolete-rpc",
        "--without-selinux",
        "--with-binutils=#{Formula["binutils"].bin}",
        "--with-headers=#{Formula["linux-headers"].include}",
      ]
      system "../configure", *args
      system "make", "all"
      system "make", "install"
      # Provide the lib64 layout some software expects.
      prefix.install_symlink "lib" => "lib64"
    end
  end

  def post_install
    # Install ld.so symlink.
    ln_sf lib/"ld-linux-x86-64.so.2", HOMEBREW_PREFIX/"lib/ld.so"

    # Compile locale definition files
    mkdir_p lib/"locale"

    # Get all extra installed locales from the system, except C locales
    locales = ENV.map do |k, v|
      v if k[/^LANG$|^LC_/] && v != "C" && !v.start_with?("C.")
    end.compact

    # en_US.UTF-8 is required by gawk make check
    locales = (locales + ["en_US.UTF-8"]).sort.uniq
    ohai "Installing locale data for #{locales.join(" ")}"
    locales.each do |locale|
      lang, charmap = locale.split(".", 2)
      if charmap.present?
        # localedef expects the canonical charmap spelling.
        charmap = "UTF-8" if charmap == "utf8"
        system bin/"localedef", "-i", lang, "-f", charmap, locale
      else
        system bin/"localedef", "-i", lang, locale
      end
    end

    # Set the local time zone
    # NOTE(review): this guard requires brew_localtime to already exist,
    # while the zoneinfo branch below guards on !exist? — verify the
    # intended condition.
    sys_localtime = Pathname("/etc/localtime")
    brew_localtime = prefix/"etc/localtime"
    (prefix/"etc").install_symlink sys_localtime if sys_localtime.exist? && brew_localtime.exist?

    # Set zoneinfo correctly using the system installed zoneinfo
    sys_zoneinfo = Pathname("/usr/share/zoneinfo")
    brew_zoneinfo = share/"zoneinfo"
    share.install_symlink sys_zoneinfo if sys_zoneinfo.exist? && !brew_zoneinfo.exist?
  end

  test do
    # ld.so prints usage and exits 127 when run without a program.
    assert_match "Usage", shell_output("#{lib}/ld-#{version}.so 2>&1", 127)
    safe_system "#{lib}/libc-#{version}.so", "--version"
    safe_system "#{bin}/locale", "--version"
  end
end
|
# Versioned formula for GMT 4 (Generic Mapping Tools).
class GmtAT4 < Formula
  desc "Manipulation of geographic and Cartesian data sets"
  homepage "https://gmt.soest.hawaii.edu/"
  url "ftp://ftp.soest.hawaii.edu/gmt/gmt-4.5.18-src.tar.bz2"
  mirror "https://fossies.org/linux/misc/GMT/gmt-4.5.18-src.tar.bz2"
  mirror "https://mirrors.ustc.edu.cn/gmt/gmt-4.5.18-src.tar.bz2"
  sha256 "27c30b516c317fed8e44efa84a0262f866521d80cfe76a61bf12952efb522b63"
  revision 1

  bottle do
    sha256 "ed486f18b1de6cbf1470cbe9a3a3800628ed8ee02fd1e773afdd4d6e48664e27" => :high_sierra
    sha256 "00131f50e07e8d62408b4a5deeb15f01f8f4d58227f59092cc00fe3aa64c31c7" => :sierra
    sha256 "2365d6c8e21218609bcd0dbd9974b314f777f2214fb5d67df3a075929964f2ae" => :el_capitan
  end

  # Keg-only so this versioned formula does not shadow the current gmt.
  keg_only :versioned_formula

  depends_on "gdal"
  depends_on "netcdf"

  # GSHHG coastline data, installed into the shared data dir below.
  resource "gshhg" do
    url "ftp://ftp.soest.hawaii.edu/gmt/gshhg-gmt-2.3.7.tar.gz"
    mirror "https://fossies.org/linux/misc/GMT/gshhg-gmt-2.3.7.tar.gz"
    mirror "https://mirrors.ustc.edu.cn/gmt/gshhg-gmt-2.3.7.tar.gz"
    sha256 "9bb1a956fca0718c083bef842e625797535a00ce81f175df08b042c2a92cfe7f"
  end

  def install
    ENV.deparallelize # Parallel builds don't work due to missing makefile dependencies
    system "./configure", "--prefix=#{prefix}",
                          "--datadir=#{share}/gmt4",
                          "--enable-gdal=#{Formula["gdal"].opt_prefix}",
                          "--enable-netcdf=#{Formula["netcdf"].opt_prefix}",
                          "--enable-shared",
                          "--enable-triangle",
                          "--disable-xgrid",
                          "--disable-mex"
    system "make"
    system "make", "install-gmt", "install-data", "install-suppl", "install-man"
    (share/"gmt4").install resource("gshhg")
  end

  test do
    # Render a small coastline map and check the PostScript output exists.
    system "#{bin}/gmt pscoast -R-90/-70/0/20 -JM6i -P -Ba5 -Gchocolate > test.ps"
    assert_predicate testpath/"test.ps", :exist?
  end
end
gmt@4: update 4.5.18_1 bottle.
# Versioned formula for GMT 4 (Generic Mapping Tools).
class GmtAT4 < Formula
  desc "Manipulation of geographic and Cartesian data sets"
  homepage "https://gmt.soest.hawaii.edu/"
  url "ftp://ftp.soest.hawaii.edu/gmt/gmt-4.5.18-src.tar.bz2"
  mirror "https://fossies.org/linux/misc/GMT/gmt-4.5.18-src.tar.bz2"
  mirror "https://mirrors.ustc.edu.cn/gmt/gmt-4.5.18-src.tar.bz2"
  sha256 "27c30b516c317fed8e44efa84a0262f866521d80cfe76a61bf12952efb522b63"
  revision 1

  bottle do
    sha256 "bf936092777345ea6977e2e7c2856bee1226d7a1a29f17c9ad1a9d47e87d27c7" => :mojave
    sha256 "ed486f18b1de6cbf1470cbe9a3a3800628ed8ee02fd1e773afdd4d6e48664e27" => :high_sierra
    sha256 "00131f50e07e8d62408b4a5deeb15f01f8f4d58227f59092cc00fe3aa64c31c7" => :sierra
    sha256 "2365d6c8e21218609bcd0dbd9974b314f777f2214fb5d67df3a075929964f2ae" => :el_capitan
  end

  # Keg-only so this versioned formula does not shadow the current gmt.
  keg_only :versioned_formula

  depends_on "gdal"
  depends_on "netcdf"

  # GSHHG coastline data, installed into the shared data dir below.
  resource "gshhg" do
    url "ftp://ftp.soest.hawaii.edu/gmt/gshhg-gmt-2.3.7.tar.gz"
    mirror "https://fossies.org/linux/misc/GMT/gshhg-gmt-2.3.7.tar.gz"
    mirror "https://mirrors.ustc.edu.cn/gmt/gshhg-gmt-2.3.7.tar.gz"
    sha256 "9bb1a956fca0718c083bef842e625797535a00ce81f175df08b042c2a92cfe7f"
  end

  def install
    ENV.deparallelize # Parallel builds don't work due to missing makefile dependencies
    system "./configure", "--prefix=#{prefix}",
                          "--datadir=#{share}/gmt4",
                          "--enable-gdal=#{Formula["gdal"].opt_prefix}",
                          "--enable-netcdf=#{Formula["netcdf"].opt_prefix}",
                          "--enable-shared",
                          "--enable-triangle",
                          "--disable-xgrid",
                          "--disable-mex"
    system "make"
    system "make", "install-gmt", "install-data", "install-suppl", "install-man"
    (share/"gmt4").install resource("gshhg")
  end

  test do
    # Render a small coastline map and check the PostScript output exists.
    system "#{bin}/gmt pscoast -R-90/-70/0/20 -JM6i -P -Ba5 -Gchocolate > test.ps"
    assert_predicate testpath/"test.ps", :exist?
  end
end
|
require 'rubygems'
require 'rake'
# Note that Haml's gem-compilation process requires access to the filesystem.
# This means that it cannot be automatically run by e.g. GitHub's gem system.
# However, a build server automatically packages the master branch
# every time it's pushed to; this is made available as the haml-edge gem.
# Gem specification for Haml (with bundled Sass). Builds the haml-edge gem
# instead when an EDGE_GEM_VERSION marker file is present.
HAML_GEMSPEC = Gem::Specification.new do |spec|
  spec.rubyforge_project = 'haml'
  spec.name = File.exist?('EDGE_GEM_VERSION') ? 'haml-edge' : 'haml'
  spec.summary = "An elegant, structured XHTML/XML templating engine.\nComes with Sass, a similar CSS templating engine."
  spec.version = File.read('VERSION').strip
  spec.authors = ['Nathan Weizenbaum', 'Hampton Catlin']
  spec.email = 'haml@googlegroups.com'
  spec.description = <<-END
      Haml (HTML Abstraction Markup Language) is a layer on top of XHTML or XML
      that's designed to express the structure of XHTML or XML documents
      in a non-repetitive, elegant, easy way,
      using indentation rather than closing tags
      and allowing Ruby to be embedded with ease.
      It was originally envisioned as a plugin for Ruby on Rails,
      but it can function as a stand-alone templating engine.
    END

  spec.add_development_dependency 'yard', '>= 0.5.3'
  spec.add_development_dependency 'maruku', '>= 0.5.9'

  # We need the revision file to exist,
  # so we just create it if it doesn't.
  # It'll usually just get overwritten, though.
  File.open('REVISION', 'w') { |f| f.puts "(unknown)" } unless File.exist?('REVISION')

  # README-style files: exclude anything containing lowercase words
  # (and TODO), but always ship REVISION.
  readmes = FileList.new('*') do |list|
    list.exclude(/(^|[^.a-z])[a-z]+/)
    list.exclude('TODO')
    list.include('REVISION')
  end.to_a

  spec.executables = ['haml', 'html2haml', 'sass', 'css2sass']
  spec.files = FileList['rails/init.rb', 'lib/**/*', 'bin/*', 'test/**/*',
                        'extra/**/*', 'Rakefile', 'init.rb', '.yardopts'].to_a + readmes
  spec.homepage = 'http://haml-lang.com/'
  spec.has_rdoc = true
  spec.extra_rdoc_files = readmes
  spec.rdoc_options += [
    '--title', 'Haml',
    '--main', 'README.rdoc',
    '--exclude', 'lib/haml/buffer.rb',
    '--line-numbers',
    '--inline-source'
  ]
  spec.test_files = FileList['test/**/*_test.rb'].to_a
end
[Sass] Include FSSM in the gemspec.
require 'rubygems'
require 'rake'
# Note that Haml's gem-compilation process requires access to the filesystem.
# This means that it cannot be automatically run by e.g. GitHub's gem system.
# However, a build server automatically packages the master branch
# every time it's pushed to; this is made available as the haml-edge gem.
# Gem specification for Haml (with bundled Sass). Builds the haml-edge gem
# instead when an EDGE_GEM_VERSION marker file is present. This revision
# also packages vendor/ (bundled FSSM for Sass).
HAML_GEMSPEC = Gem::Specification.new do |spec|
  spec.rubyforge_project = 'haml'
  spec.name = File.exist?('EDGE_GEM_VERSION') ? 'haml-edge' : 'haml'
  spec.summary = "An elegant, structured XHTML/XML templating engine.\nComes with Sass, a similar CSS templating engine."
  spec.version = File.read('VERSION').strip
  spec.authors = ['Nathan Weizenbaum', 'Hampton Catlin']
  spec.email = 'haml@googlegroups.com'
  spec.description = <<-END
      Haml (HTML Abstraction Markup Language) is a layer on top of XHTML or XML
      that's designed to express the structure of XHTML or XML documents
      in a non-repetitive, elegant, easy way,
      using indentation rather than closing tags
      and allowing Ruby to be embedded with ease.
      It was originally envisioned as a plugin for Ruby on Rails,
      but it can function as a stand-alone templating engine.
    END

  spec.add_development_dependency 'yard', '>= 0.5.3'
  spec.add_development_dependency 'maruku', '>= 0.5.9'

  # We need the revision file to exist,
  # so we just create it if it doesn't.
  # It'll usually just get overwritten, though.
  File.open('REVISION', 'w') { |f| f.puts "(unknown)" } unless File.exist?('REVISION')

  # README-style files: exclude anything containing lowercase words
  # (and TODO), but always ship REVISION.
  readmes = FileList.new('*') do |list|
    list.exclude(/(^|[^.a-z])[a-z]+/)
    list.exclude('TODO')
    list.include('REVISION')
  end.to_a

  spec.executables = ['haml', 'html2haml', 'sass', 'css2sass']
  spec.files = FileList['rails/init.rb', 'lib/**/*', 'vendor/**/*',
                        'bin/*', 'test/**/*', 'extra/**/*', 'Rakefile', 'init.rb',
                        '.yardopts'].to_a + readmes
  spec.homepage = 'http://haml-lang.com/'
  spec.has_rdoc = true
  spec.extra_rdoc_files = readmes
  spec.rdoc_options += [
    '--title', 'Haml',
    '--main', 'README.rdoc',
    '--exclude', 'lib/haml/buffer.rb',
    '--line-numbers',
    '--inline-source'
  ]
  spec.test_files = FileList['test/**/*_test.rb'].to_a
end
|
# Gobby collaborative editor; built against GTK 3.
class Gobby < Formula
  desc "Multi-platform collaborative text editor"
  homepage "http://gobby.0x539.de"
  url "http://releases.0x539.de/gobby/gobby-0.5.0.tar.gz"
  sha256 "8ceb3598d27cfccdf9c9889b781c4c5c8e1731ca6beb183f5d4555644c06bd98"
  revision 3
  head "git://git.0x539.de/git/gobby.git"

  depends_on "pkg-config" => :build
  depends_on "intltool" => :build
  depends_on "gtkmm3"
  depends_on "gsasl"
  depends_on "gnutls"
  depends_on "libxml++"
  depends_on "gtksourceview3"
  depends_on "gettext"
  depends_on "hicolor-icon-theme"
  depends_on "libinfinity"

  needs :cxx11

  # Necessary to remove mandatory gtk-mac-integration
  # it's badly broken as it depends on an ancient version of ige-mac-integration
  # since it depends on gtk3, it doesn't even need gtk-mac-integration anymore
  # This has already been fixed upstream: gtk2 support has been dropped completely
  # and all traces of ige-mac-integration have been removed from the code
  patch :DATA

  def install
    ENV.cxx11
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}", "--with-gtk3"
    system "make", "install"
  end

  test do
    # executable (GUI)
    system bin/"gobby-0.5", "--version"
  end
end
__END__
diff --git a/code/core/header.cpp b/code/core/header.cpp
index eba2fb1..510608e 100644
--- a/code/core/header.cpp
+++ b/code/core/header.cpp
@@ -24,10 +24,6 @@
#include <gtksourceview/gtksourcelanguage.h>
#include <gdk/gdkkeysyms.h>
-#ifdef PLATFORM_OSX_NATIVE
-#include <ige-mac-menu.h>
-#endif
-
namespace {
Glib::ustring ui_desc =
"<ui>"
@@ -519,28 +515,7 @@ Gobby::Header::Header(Preferences& preferences,
);
}
-#ifdef PLATFORM_OSX_NATIVE
- ige_mac_menu_set_menu_bar(GTK_MENU_SHELL(m_menubar->gobj()));
-
- ige_mac_menu_set_quit_menu_item(GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuFile/FileQuit")->gobj()));
-
- ige_mac_menu_add_app_menu_item(
- ige_mac_menu_add_app_menu_group(), GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuHelp/HelpAbout")->gobj()),
- NULL);
-
- ige_mac_menu_add_app_menu_item(
- ige_mac_menu_add_app_menu_group(), GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuEdit/EditPreferences")
- ->gobj()),
- NULL);
-#else
pack_start(*m_menubar, Gtk::PACK_SHRINK);
-#endif
pack_start(*m_toolbar, Gtk::PACK_SHRINK);
m_toolbar->set_toolbar_style(preferences.appearance.toolbar_style);
diff --git a/configure b/configure
index 7dabb26..0987444 100755
--- a/configure
+++ b/configure
@@ -4955,7 +4955,7 @@ $as_echo "#define PLATFORM_OSX 1" >>confdefs.h
$as_echo "#define PLATFORM_OSX_NATIVE 1" >>confdefs.h
- required_libs="$required_libs ige-mac-integration"
+ required_libs="$required_libs"
fi
fi
cat >confcache <<\_ACEOF
gobby: add 0.5.0_3 bottle.
# Gobby collaborative editor; built against GTK 3.
class Gobby < Formula
  desc "Multi-platform collaborative text editor"
  homepage "http://gobby.0x539.de"
  url "http://releases.0x539.de/gobby/gobby-0.5.0.tar.gz"
  sha256 "8ceb3598d27cfccdf9c9889b781c4c5c8e1731ca6beb183f5d4555644c06bd98"
  revision 3
  head "git://git.0x539.de/git/gobby.git"

  bottle do
    sha256 "78045e763ef4add614e757f3c22b4bdd6266930550b80850ac3967955eb0a24e" => :sierra
    sha256 "21e2f4df3800d200209ab46be26e30a29579a93395cb2e8b385c23d41906f12e" => :el_capitan
    sha256 "dfb1e48d8d013fa4f969d2617b816d02bc019fe396d3d789634a146374faf10f" => :yosemite
  end

  depends_on "pkg-config" => :build
  depends_on "intltool" => :build
  depends_on "gtkmm3"
  depends_on "gsasl"
  depends_on "gnutls"
  depends_on "libxml++"
  depends_on "gtksourceview3"
  depends_on "gettext"
  depends_on "hicolor-icon-theme"
  depends_on "libinfinity"

  needs :cxx11

  # Necessary to remove mandatory gtk-mac-integration
  # it's badly broken as it depends on an ancient version of ige-mac-integration
  # since it depends on gtk3, it doesn't even need gtk-mac-integration anymore
  # This has already been fixed upstream: gtk2 support has been dropped completely
  # and all traces of ige-mac-integration have been removed from the code
  patch :DATA

  def install
    ENV.cxx11
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}", "--with-gtk3"
    system "make", "install"
  end

  test do
    # executable (GUI)
    system bin/"gobby-0.5", "--version"
  end
end
__END__
diff --git a/code/core/header.cpp b/code/core/header.cpp
index eba2fb1..510608e 100644
--- a/code/core/header.cpp
+++ b/code/core/header.cpp
@@ -24,10 +24,6 @@
#include <gtksourceview/gtksourcelanguage.h>
#include <gdk/gdkkeysyms.h>
-#ifdef PLATFORM_OSX_NATIVE
-#include <ige-mac-menu.h>
-#endif
-
namespace {
Glib::ustring ui_desc =
"<ui>"
@@ -519,28 +515,7 @@ Gobby::Header::Header(Preferences& preferences,
);
}
-#ifdef PLATFORM_OSX_NATIVE
- ige_mac_menu_set_menu_bar(GTK_MENU_SHELL(m_menubar->gobj()));
-
- ige_mac_menu_set_quit_menu_item(GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuFile/FileQuit")->gobj()));
-
- ige_mac_menu_add_app_menu_item(
- ige_mac_menu_add_app_menu_group(), GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuHelp/HelpAbout")->gobj()),
- NULL);
-
- ige_mac_menu_add_app_menu_item(
- ige_mac_menu_add_app_menu_group(), GTK_MENU_ITEM(
- m_ui_manager->get_widget(
- "/MenuMainBar/MenuEdit/EditPreferences")
- ->gobj()),
- NULL);
-#else
pack_start(*m_menubar, Gtk::PACK_SHRINK);
-#endif
pack_start(*m_toolbar, Gtk::PACK_SHRINK);
m_toolbar->set_toolbar_style(preferences.appearance.toolbar_style);
diff --git a/configure b/configure
index 7dabb26..0987444 100755
--- a/configure
+++ b/configure
@@ -4955,7 +4955,7 @@ $as_echo "#define PLATFORM_OSX 1" >>confdefs.h
$as_echo "#define PLATFORM_OSX_NATIVE 1" >>confdefs.h
- required_libs="$required_libs ige-mac-integration"
+ required_libs="$required_libs"
fi
fi
cat >confcache <<\_ACEOF
|
require 'formula'
# gpsim: simulator for Microchip PIC microcontrollers (GUI disabled).
class Gpsim < Formula
  homepage 'http://gpsim.sourceforge.net/'
  url 'http://sourceforge.net/projects/gpsim/files/gpsim/0.25.0/gpsim-0.25.0.tar.gz'
  sha1 'bff4122ad29adbd64c6ee37159698dfd0d6ca503'

  depends_on 'pkg-config' => :build
  depends_on 'popt'
  depends_on 'glib'

  def install
    system "./configure", "--disable-dependency-tracking",
                          "--disable-gui",
                          "--disable-shared",
                          "--prefix=#{prefix}"
    # Pass the make target as a separate argument so the command is run
    # directly instead of through an intermediate shell ("make all" as a
    # single string forces shell interpretation).
    system "make", "all"
    system "make", "install"
  end
end
gpsim 0.26.1
Closes Homebrew/homebrew#19612.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# gpsim: simulator for Microchip PIC microcontrollers (GUI disabled).
class Gpsim < Formula
  homepage 'http://gpsim.sourceforge.net/'
  url 'http://sourceforge.net/projects/gpsim/files/gpsim/0.26.0/gpsim-0.26.1.tar.gz'
  sha1 '7e1c3cc5a821b3458717a94a09bc484bf6937b25'

  head 'svn://svn.code.sf.net/p/gpsim/code/trunk'

  depends_on 'pkg-config' => :build
  depends_on 'popt'
  depends_on 'glib'

  # Patch is upstream; test if it is needed in next release
  def patches
    DATA
  end

  def install
    system "./configure", "--disable-gui",
                          "--disable-shared",
                          "--prefix=#{prefix}"
    # Pass the make target as a separate argument so the command is run
    # directly instead of through an intermediate shell ("make all" as a
    # single string forces shell interpretation).
    system "make", "all"
    system "make", "install"
  end
end
__END__
diff -Naur gpsim-0.26.1/configure.ac gpsim-0.26.1-patch/configure.ac
--- gpsim-0.26.1/configure.ac 2011-04-26 07:13:37.000000000 -0300
+++ gpsim-0.26.1-patch/configure.ac 2013-04-23 10:42:52.000000000 -0300
@@ -79,15 +79,20 @@
else
dnl gtk2 checks
- $PKGCONFIG --exists gtkextra-2.0
+ GTKEXTRAMOD="gtkextra-2.0"
+ $PKGCONFIG --exists $GTKEXTRAMOD
if test $? != 0; then
- AC_MSG_ERROR(Cannot find gtkextra-2.0 package)
+ GTKEXTRAMOD="gtkextra-3.0"
+ $PKGCONFIG --exists $GTKEXTRAMOD
+ if test $? != 0; then
+ AC_MSG_ERROR(Cannot find gtkextra-2.0 or gtkextra-3.0 package)
+ fi
fi
X_LDFLAGS=`$PKGCONFIG --libs gtk+-2.0 gthread-2.0`
X_CFLAGS=`$PKGCONFIG --cflags gtk+-2.0`
- Y_LDFLAGS=`$PKGCONFIG --libs gtkextra-2.0`
- Y_CFLAGS=`$PKGCONFIG --cflags gtkextra-2.0`
+ Y_LDFLAGS=`$PKGCONFIG --libs $GTKEXTRAMOD`
+ Y_CFLAGS=`$PKGCONFIG --cflags $GTKEXTRAMOD`
GTK_VERSION_T=`$PKGCONFIG --modversion gtk+-2.0`
echo linking with gtk-$GTK_VERSION_T
AC_DEFINE_UNQUOTED([GTK_VERSION],"$GTK_VERSION_T",[gtk version])
diff -Naur gpsim-0.26.1/src/bitlog.h gpsim-0.26.1-patch/src/bitlog.h
--- gpsim-0.26.1/src/bitlog.h 2010-06-05 03:46:30.000000000 -0300
+++ gpsim-0.26.1-patch/src/bitlog.h 2013-04-23 10:37:09.000000000 -0300
@@ -25,7 +25,7 @@
// include the absolute minimum portion of GLIB to get the definitions
// for guint64, etc.
-#include <glibconfig.h>
+#include <glib.h>
/**********************************************************************
* boolean event logging
|
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.67'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
adding some routes
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.68'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
|
require 'formula'
# Supplementary data archive for GrADS, installed into lib/grads below.
class GradsSupplementary < Formula
  url 'ftp://cola.gmu.edu/grads/data2.tar.gz'
  sha1 'e1cd5f9c4fe8d6ed344a29ee00413aeb6323b7cd'
end
# Binary distribution of GrADS (Grid Analysis and Display System).
class Grads < Formula
  homepage 'http://www.iges.org/grads/grads.html'
  url 'ftp://cola.gmu.edu/grads/2.0/grads-2.0.2-bin-darwin9.8-intel.tar.gz'
  sha1 '0d42581c614ae627f4b53113c16c0695bd233362'

  def install
    rm 'bin/INSTALL'
    prefix.install 'bin/COPYRIGHT'
    prefix.install 'bin'
    # Install the required supplementary files
    GradsSupplementary.new.brew{ (lib+'grads').install Dir['*'] }
  end

  def caveats
    # GADDIR only needs to be set when Homebrew is not at the default
    # /usr/local prefix; returns nil (no caveat) otherwise.
    if HOMEBREW_PREFIX.to_s != '/usr/local' then <<-EOS.undent
      In order to use the GrADS tools, you will need to set the GADDIR
      environment variable to:
        #{HOMEBREW_PREFIX}/lib/grads
      EOS
    end
  end
end
grads: point to documentation in caveats
require 'formula'
# Supplementary data archive for GrADS, installed into lib/grads below.
class GradsSupplementary < Formula
  url 'ftp://cola.gmu.edu/grads/data2.tar.gz'
  sha1 'e1cd5f9c4fe8d6ed344a29ee00413aeb6323b7cd'
end
# Binary distribution of GrADS (Grid Analysis and Display System).
class Grads < Formula
  homepage 'http://www.iges.org/grads/grads.html'
  url 'ftp://cola.gmu.edu/grads/2.0/grads-2.0.2-bin-darwin9.8-intel.tar.gz'
  sha1 '0d42581c614ae627f4b53113c16c0695bd233362'

  def install
    rm 'bin/INSTALL'
    prefix.install 'bin/COPYRIGHT'
    prefix.install 'bin'
    # Install the required supplementary files
    GradsSupplementary.new.brew{ (lib+'grads').install Dir['*'] }
  end

  # Always point users at the upstream docs for environment setup.
  def caveats; <<-EOS.undent
    In order to use the GrADS tools, you may need to set some environmental
    variables. See the documentation at:
      http://www.iges.org/grads/gadoc/gradcomdgrads.html
    EOS
  end
end
|
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.93'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
fixed revision problems
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.94'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
|
# gSOAP toolkit: wsdl2h/soapcpp2 code generators for C/C++ SOAP bindings.
class Gsoap < Formula
  desc "SOAP stub and skeleton compiler for C and C++"
  homepage "https://www.genivia.com/products.html"
  url "https://downloads.sourceforge.net/project/gsoap2/gsoap-2.8/gsoap_2.8.104.zip"
  sha256 "60fcd137c59a7640470f873d9a0a29c62896a15e8517a2f5a03eb2d7eebc0c52"

  bottle do
    sha256 "6721742878f94c04756d18ca5d4555d43cfbae6d33add3a1afb4c0f20c0c3684" => :catalina
    sha256 "a6c9a0b6057e9117b678a0f9232d33f88770cf838365fbdbc628b7d9fc92b8bb" => :mojave
    sha256 "0baeffa7065bfafc8572ff3bb23cd85cf82b49aa1fe9f25f3b02171d1f1100dc" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "openssl@1.1"

  uses_from_macos "bison"
  uses_from_macos "flex"
  uses_from_macos "zlib"

  def install
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Generate a header from a remote WSDL, compile stubs from it, and
    # check the sample request XML was emitted.
    system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
    system "#{bin}/soapcpp2", "calc.h"
    assert_predicate testpath/"calc.add.req.xml", :exist?
  end
end
gsoap: update 2.8.104 bottle.
# gSOAP toolkit: wsdl2h/soapcpp2 code generators for C/C++ SOAP bindings.
class Gsoap < Formula
  desc "SOAP stub and skeleton compiler for C and C++"
  homepage "https://www.genivia.com/products.html"
  url "https://downloads.sourceforge.net/project/gsoap2/gsoap-2.8/gsoap_2.8.104.zip"
  sha256 "60fcd137c59a7640470f873d9a0a29c62896a15e8517a2f5a03eb2d7eebc0c52"

  bottle do
    sha256 "a180cea83603a954af06249e89902c1ce1870a2a173ca5fcb38256dd1f166699" => :catalina
    sha256 "8d76726a053764e6469eb396a5dc841b0652bdf7a0d0162eb0f1478d30602da0" => :mojave
    sha256 "5cba55bdc1815a5967bd20ffb945a83daff49c06baf99786c57bac21d27486ad" => :high_sierra
  end

  depends_on "autoconf" => :build
  depends_on "openssl@1.1"

  uses_from_macos "bison"
  uses_from_macos "flex"
  uses_from_macos "zlib"

  def install
    system "./configure", "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Generate a header from a remote WSDL, compile stubs from it, and
    # check the sample request XML was emitted.
    system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
    system "#{bin}/soapcpp2", "calc.h"
    assert_predicate testpath/"calc.add.req.xml", :exist?
  end
end
|
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.13'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
version bump
# encoding: UTF-8
# Gemspec for the spree_designer_dashboard Spree extension
# (designer dashboard functionality for Scout & Nimble).
Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.name = 'spree_designer_dashboard'
  s.version = '1.2.16'
  s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
  s.description = ''
  s.required_ruby_version = '>= 1.9.3'

  s.author = 'Jamie Stephens'
  s.email = 'jamie@blueroot.com'
  # s.homepage = 'http://www.spreecommerce.com'

  #s.files = `git ls-files`.split("\n")
  #s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.require_path = 'lib'
  s.requirements << 'none'

  #s.add_dependency 'spree_core', '~> 2.1.0.beta'
  #s.add_dependency 'spree_core', '~> 2.0.3'

  s.add_development_dependency 'capybara', '1.0.1'
  s.add_development_dependency 'factory_girl', '~> 2.6.4'
  s.add_development_dependency 'ffaker'
  s.add_development_dependency 'rspec-rails', '~> 2.9'
  s.add_development_dependency 'sqlite3'
end
|
class Gsoap < Formula
desc "SOAP stub and skeleton compiler for C and C++"
homepage "https://www.genivia.com/products.html"
url "https://downloads.sourceforge.net/project/gsoap2/gsoap-2.8/gsoap_2.8.69.zip"
sha256 "df0cc9ab66dce85f1842ca07abebaf9b04ca48a0a39013f90571e8172d4b0c7b"
bottle do
sha256 "e065cba0eadc2b19e3d539a2cbd76f0dda1c9a00c28d37d0fcaf3a13b3903345" => :mojave
sha256 "c5969928177671d714dca7317f4bc8688d86f14bd6c0b8821576dd127e1065e2" => :high_sierra
sha256 "a70c67df1b4d6ab270deb5d202fc035f8879db31262d2c65767dd4de2589f7e8" => :sierra
sha256 "66189df3d92cdb93a80ae564be571e2e450ce9600bf3ecfe3bf5a54be776710b" => :el_capitan
end
depends_on "openssl"
def install
# Contacted upstream by email and been told this should be fixed by 2.8.37,
# it is due to the compilation of symbol2.c and soapcpp2_yacc.h not being
# ordered correctly in parallel.
ENV.deparallelize
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
system "#{bin}/soapcpp2", "calc.h"
assert_predicate testpath/"calc.add.req.xml", :exist?
end
end
gsoap 2.8.70
Closes #31647.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
class Gsoap < Formula
desc "SOAP stub and skeleton compiler for C and C++"
homepage "https://www.genivia.com/products.html"
url "https://downloads.sourceforge.net/project/gsoap2/gsoap-2.8/gsoap_2.8.70.zip"
sha256 "5b6933394ae1c76faa9a4814c44f74fc8aeef521b57f18d62ae952ecf38d0edd"
bottle do
sha256 "e065cba0eadc2b19e3d539a2cbd76f0dda1c9a00c28d37d0fcaf3a13b3903345" => :mojave
sha256 "c5969928177671d714dca7317f4bc8688d86f14bd6c0b8821576dd127e1065e2" => :high_sierra
sha256 "a70c67df1b4d6ab270deb5d202fc035f8879db31262d2c65767dd4de2589f7e8" => :sierra
sha256 "66189df3d92cdb93a80ae564be571e2e450ce9600bf3ecfe3bf5a54be776710b" => :el_capitan
end
depends_on "openssl"
def install
# Contacted upstream by email and been told this should be fixed by 2.8.37,
# it is due to the compilation of symbol2.c and soapcpp2_yacc.h not being
# ordered correctly in parallel.
ENV.deparallelize
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
system "#{bin}/soapcpp2", "calc.h"
assert_predicate testpath/"calc.add.req.xml", :exist?
end
end
|
# encoding: UTF-8
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_designer_dashboard'
s.version = '1.2.56'
s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
s.description = ''
s.required_ruby_version = '>= 1.9.3'
s.author = 'Jamie Stephens'
s.email = 'jamie@blueroot.com'
# s.homepage = 'http://www.spreecommerce.com'
#s.files = `git ls-files`.split("\n")
#s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
#s.add_dependency 'spree_core', '~> 2.1.0.beta'
#s.add_dependency 'spree_core', '~> 2.0.3'
s.add_development_dependency 'capybara', '1.0.1'
s.add_development_dependency 'factory_girl', '~> 2.6.4'
s.add_development_dependency 'ffaker'
s.add_development_dependency 'rspec-rails', '~> 2.9'
s.add_development_dependency 'sqlite3'
end
version bump
s Please enter the commit message for your changes. Lines starting
# encoding: UTF-8
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = 'spree_designer_dashboard'
s.version = '1.2.57'
s.summary = 'Designer Dashboard Functionality for Scout & Nimble'
s.description = ''
s.required_ruby_version = '>= 1.9.3'
s.author = 'Jamie Stephens'
s.email = 'jamie@blueroot.com'
# s.homepage = 'http://www.spreecommerce.com'
#s.files = `git ls-files`.split("\n")
#s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.require_path = 'lib'
s.requirements << 'none'
#s.add_dependency 'spree_core', '~> 2.1.0.beta'
#s.add_dependency 'spree_core', '~> 2.0.3'
s.add_development_dependency 'capybara', '1.0.1'
s.add_development_dependency 'factory_girl', '~> 2.6.4'
s.add_development_dependency 'ffaker'
s.add_development_dependency 'rspec-rails', '~> 2.9'
s.add_development_dependency 'sqlite3'
end
|
class Gsoap < Formula
desc "SOAP stub and skeleton compiler for C and C++"
homepage "https://www.genivia.com/products.html"
url "https://downloads.sourceforge.net/project/gsoap2/gsoap_2.8.117.zip"
sha256 "7cadf8808cfd982629948fe09e4fa6cd18e23cafd40df0aaaff1b1f5b695c442"
license any_of: ["GPL-2.0-or-later", "gSOAP-1.3b"]
livecheck do
url :stable
regex(%r{url=.*?/gsoap[._-]v?(\d+(?:\.\d+)+)\.zip}i)
end
bottle do
sha256 arm64_monterey: "64a4384227bd706458260fcd39352c1065bed4458aafe88bee947cd79bc0d632"
sha256 arm64_big_sur: "fb074963567f15e35317ba190540835a9701746ba9a590002415ae6dc4083d96"
sha256 monterey: "2d615817bed23a54c53aee8b9efef6629f1b7980305d5e3580873db125968607"
sha256 big_sur: "84f24f86809047fdd972a9d192514b0d9ab08a27821ce07a3e60f33a44697459"
sha256 catalina: "6bee22cc8b7c43f3d41b51bd75c50cf0f444feed51d9bf5a4f7dd288d662a42d"
sha256 mojave: "d90706cf140e9b273d56007f6e56db0933686d50afae9a9703b862af3bce5b55"
sha256 x86_64_linux: "2ecb0911a0996e51f00d7e35a3729865bc19eeec3aaf5189ce54ee7e18c5d5a5"
end
depends_on "autoconf" => :build
depends_on "openssl@1.1"
uses_from_macos "bison"
uses_from_macos "flex"
uses_from_macos "zlib"
def install
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
system "#{bin}/soapcpp2", "calc.h"
assert_predicate testpath/"calc.add.req.xml", :exist?
end
end
gsoap 2.8.118
Closes #92030.
Signed-off-by: Thierry Moisan <8bf87a6c4caed0437859f8c8fafc6782533e4540@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
class Gsoap < Formula
desc "SOAP stub and skeleton compiler for C and C++"
homepage "https://www.genivia.com/products.html"
url "https://downloads.sourceforge.net/project/gsoap2/gsoap_2.8.118.zip"
sha256 "f233f06a307a664ab1ecafd8f6d87b9c905a98c497c9f5da9083790e5b921f50"
license any_of: ["GPL-2.0-or-later", "gSOAP-1.3b"]
livecheck do
url :stable
regex(%r{url=.*?/gsoap[._-]v?(\d+(?:\.\d+)+)\.zip}i)
end
bottle do
sha256 arm64_monterey: "64a4384227bd706458260fcd39352c1065bed4458aafe88bee947cd79bc0d632"
sha256 arm64_big_sur: "fb074963567f15e35317ba190540835a9701746ba9a590002415ae6dc4083d96"
sha256 monterey: "2d615817bed23a54c53aee8b9efef6629f1b7980305d5e3580873db125968607"
sha256 big_sur: "84f24f86809047fdd972a9d192514b0d9ab08a27821ce07a3e60f33a44697459"
sha256 catalina: "6bee22cc8b7c43f3d41b51bd75c50cf0f444feed51d9bf5a4f7dd288d662a42d"
sha256 mojave: "d90706cf140e9b273d56007f6e56db0933686d50afae9a9703b862af3bce5b55"
sha256 x86_64_linux: "2ecb0911a0996e51f00d7e35a3729865bc19eeec3aaf5189ce54ee7e18c5d5a5"
end
depends_on "autoconf" => :build
depends_on "openssl@1.1"
uses_from_macos "bison"
uses_from_macos "flex"
uses_from_macos "zlib"
def install
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
system "#{bin}/wsdl2h", "-o", "calc.h", "https://www.genivia.com/calc.wsdl"
system "#{bin}/soapcpp2", "calc.h"
assert_predicate testpath/"calc.add.req.xml", :exist?
end
end
|
class Gssdp < Formula
desc "GUPnP library for resource discovery and announcement over SSDP"
homepage "https://wiki.gnome.org/GUPnP/"
url "https://download.gnome.org/sources/gssdp/1.2/gssdp-1.2.0.tar.xz"
sha256 "22cbef547f522f0b062933e302482ebdb397e2f3703899757562ddffbbfd00d1"
bottle do
sha256 "ba0e685fdd43e1a7077acc89f426094cbac4bbd6ef8a7c9a41c019d51edb48a6" => :mojave
sha256 "f8ffc61329914be5de59373b9d2aacc7a762bc902730e0202ddf82f1c6cf6186" => :high_sierra
sha256 "e5e69427ba9125550e2e07d0ce98c02311d9cf21535244b5ea78bd01e8494271" => :sierra
end
depends_on "gobject-introspection" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "glib"
depends_on "libsoup"
# to be removed when next release is out
patch do
url "https://gitlab.gnome.org/GNOME/gssdp/commit/3b085a7e2c94119519d848c4f4f1434bbea3d937.patch"
sha256 "7d9b36c81bbbeca390c417f86e5e287c0ba350350928ec37617b8182db548f9c"
end
# submitted upstream as https://gitlab.gnome.org/GNOME/gssdp/merge_requests/2
patch :DATA
def install
mkdir "build" do
system "meson", "--prefix=#{prefix}", "-Dsniffer=false", ".."
system "ninja"
system "ninja", "install"
end
end
test do
(testpath/"test.c").write <<~EOS
#include <libgssdp/gssdp.h>
int main(int argc, char *argv[]) {
GType type = gssdp_client_get_type();
return 0;
}
EOS
gettext = Formula["gettext"]
glib = Formula["glib"]
flags = %W[
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}/gssdp-1.2
-D_REENTRANT
-L#{lib}
-lgssdp-1.2
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
__END__
diff --git a/libgssdp/meson.build b/libgssdp/meson.build
index aa66def..a022609 100644
--- a/libgssdp/meson.build
+++ b/libgssdp/meson.build
@@ -48,8 +48,18 @@ if generic_unix
sources += 'gssdp-net-posix.c'
endif
+version = '0.0.0'
+version_arr = version.split('.')
+major_version = version_arr[0].to_int()
+minor_version = version_arr[1].to_int()
+micro_version = version_arr[2].to_int()
+current = major_version + minor_version + 1
+interface_age = micro_version
+darwin_versions = [current, '@0@.@1@'.format(current, interface_age)]
+
libgssdp = library('gssdp-1.2', sources + enums,
- version : '0.0.0',
+ version : version,
+ darwin_versions : darwin_versions,
dependencies : dependencies + system_deps,
include_directories : include_directories('..'),
install : true)
diff --git a/meson.build b/meson.build
index 7e898eb..3d75cc9 100644
--- a/meson.build
+++ b/meson.build
@@ -1,4 +1,4 @@
-project('gssdp', 'c', version: '1.2.0')
+project('gssdp', 'c', version: '1.2.0', meson_version : '>= 0.48.0')
gnome = import('gnome')
pkg = import('pkgconfig')
gssdp 1.2.1
class Gssdp < Formula
desc "GUPnP library for resource discovery and announcement over SSDP"
homepage "https://wiki.gnome.org/GUPnP/"
url "https://download.gnome.org/sources/gssdp/1.2/gssdp-1.2.1.tar.xz"
sha256 "6b57b79a96e229367981b6f00474e4bbc795909a2d3160c748cba3395b3556d3"
bottle do
sha256 "ba0e685fdd43e1a7077acc89f426094cbac4bbd6ef8a7c9a41c019d51edb48a6" => :mojave
sha256 "f8ffc61329914be5de59373b9d2aacc7a762bc902730e0202ddf82f1c6cf6186" => :high_sierra
sha256 "e5e69427ba9125550e2e07d0ce98c02311d9cf21535244b5ea78bd01e8494271" => :sierra
end
depends_on "gobject-introspection" => :build
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "glib"
depends_on "libsoup"
# submitted upstream as https://gitlab.gnome.org/GNOME/gssdp/merge_requests/2
patch :DATA
def install
mkdir "build" do
system "meson", "--prefix=#{prefix}", "-Dsniffer=false", ".."
system "ninja"
system "ninja", "install"
end
end
test do
(testpath/"test.c").write <<~EOS
#include <libgssdp/gssdp.h>
int main(int argc, char *argv[]) {
GType type = gssdp_client_get_type();
return 0;
}
EOS
gettext = Formula["gettext"]
glib = Formula["glib"]
flags = %W[
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}/gssdp-1.2
-D_REENTRANT
-L#{lib}
-lgssdp-1.2
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
__END__
diff --git a/libgssdp/meson.build b/libgssdp/meson.build
index aa66def..a022609 100644
--- a/libgssdp/meson.build
+++ b/libgssdp/meson.build
@@ -48,8 +48,18 @@ if generic_unix
sources += 'gssdp-net-posix.c'
endif
+version = '0.0.0'
+version_arr = version.split('.')
+major_version = version_arr[0].to_int()
+minor_version = version_arr[1].to_int()
+micro_version = version_arr[2].to_int()
+current = major_version + minor_version + 1
+interface_age = micro_version
+darwin_versions = [current, '@0@.@1@'.format(current, interface_age)]
+
libgssdp = library('gssdp-1.2', sources + enums,
- version : '0.0.0',
+ version : version,
+ darwin_versions : darwin_versions,
dependencies : dependencies + system_deps,
include_directories : include_directories('..'),
install : true)
diff --git a/meson.build b/meson.build
index 7e898eb..3d75cc9 100644
--- a/meson.build
+++ b/meson.build
@@ -1,4 +1,4 @@
-project('gssdp', 'c', version: '1.2.1')
+project('gssdp', 'c', version: '1.2.1', meson_version : '>= 0.48.0')
gnome = import('gnome')
pkg = import('pkgconfig')
|
class Gtkx3 < Formula
desc "Toolkit for creating graphical user interfaces"
homepage "http://gtk.org/"
url "https://download.gnome.org/sources/gtk+/3.18/gtk+-3.18.1.tar.xz"
sha256 "bd279cbb19f3fda074c89cf0edf9e7d95eee8b889b6889d16c2f7f0f6bdeba92"
bottle do
sha256 "61053b37213071d20002ad242090ab4d3f654c0c7d0747d0b75aedbf3f257526" => :el_capitan
sha256 "17d463cdaba4bc1ec587b7f05a0ec003f2d0fb809b73508f74831b3ff124362e" => :yosemite
sha256 "04300b5a6be784901fa31691237b2a4c7f307416ce620ab974559398a0411937" => :mavericks
end
option :universal
option "with-quartz-relocation", "Build with quartz relocation support"
depends_on "pkg-config" => :build
depends_on "gdk-pixbuf"
depends_on "jasper" => :optional
depends_on "atk"
depends_on "gobject-introspection"
depends_on "libepoxy"
depends_on "gsettings-desktop-schemas" => :recommended
depends_on "pango"
depends_on "glib"
depends_on "hicolor-icon-theme"
def install
ENV.universal_binary if build.universal?
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--disable-glibtest
--enable-introspection=yes
--disable-schemas-compile
--enable-quartz-backend
--disable-x11-backend
]
args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
system "./configure", *args
# necessary to avoid gtk-update-icon-cache not being found during make install
bin.mkpath
ENV.prepend_path "PATH", "#{bin}"
system "make", "install"
# Prevent a conflict between this and Gtk+2
mv bin/"gtk-update-icon-cache", bin/"gtk3-update-icon-cache"
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
end
test do
(testpath/"test.c").write <<-EOS.undent
#include <gtk/gtk.h>
int main(int argc, char *argv[]) {
gtk_disable_setlocale();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
libepoxy = Formula["libepoxy"]
libpng = Formula["libpng"]
pango = Formula["pango"]
pixman = Formula["pixman"]
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/gio-unix-2.0/
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}
-I#{include}/gtk-3.0
-I#{libepoxy.opt_include}
-I#{libpng.opt_include}/libpng16
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lcairo-gobject
-lgdk-3
-lgdk_pixbuf-2.0
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lgtk-3
-lintl
-lpango-1.0
-lpangocairo-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
gtk+3 3.18.2
Closes Homebrew/homebrew#44865.
Signed-off-by: Dominyk Tiller <53e438f55903875d07efdd98a8aaf887e7208dd3@gmail.com>
class Gtkx3 < Formula
desc "Toolkit for creating graphical user interfaces"
homepage "http://gtk.org/"
url "https://download.gnome.org/sources/gtk+/3.18/gtk+-3.18.2.tar.xz"
sha256 "5dbec561c4a00070073bf9cf4cfdd61fab4a14c8ff5b15d700bd378f8185e152"
bottle do
sha256 "61053b37213071d20002ad242090ab4d3f654c0c7d0747d0b75aedbf3f257526" => :el_capitan
sha256 "17d463cdaba4bc1ec587b7f05a0ec003f2d0fb809b73508f74831b3ff124362e" => :yosemite
sha256 "04300b5a6be784901fa31691237b2a4c7f307416ce620ab974559398a0411937" => :mavericks
end
option :universal
option "with-quartz-relocation", "Build with quartz relocation support"
depends_on "pkg-config" => :build
depends_on "gdk-pixbuf"
depends_on "jasper" => :optional
depends_on "atk"
depends_on "gobject-introspection"
depends_on "libepoxy"
depends_on "gsettings-desktop-schemas" => :recommended
depends_on "pango"
depends_on "glib"
depends_on "hicolor-icon-theme"
def install
ENV.universal_binary if build.universal?
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--disable-glibtest
--enable-introspection=yes
--disable-schemas-compile
--enable-quartz-backend
--disable-x11-backend
]
args << "--enable-quartz-relocation" if build.with?("quartz-relocation")
system "./configure", *args
# necessary to avoid gtk-update-icon-cache not being found during make install
bin.mkpath
ENV.prepend_path "PATH", "#{bin}"
system "make", "install"
# Prevent a conflict between this and Gtk+2
mv bin/"gtk-update-icon-cache", bin/"gtk3-update-icon-cache"
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
end
test do
(testpath/"test.c").write <<-EOS.undent
#include <gtk/gtk.h>
int main(int argc, char *argv[]) {
gtk_disable_setlocale();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
libepoxy = Formula["libepoxy"]
libpng = Formula["libpng"]
pango = Formula["pango"]
pixman = Formula["pixman"]
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/gio-unix-2.0/
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{include}
-I#{include}/gtk-3.0
-I#{libepoxy.opt_include}
-I#{libpng.opt_include}/libpng16
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lcairo-gobject
-lgdk-3
-lgdk_pixbuf-2.0
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lgtk-3
-lintl
-lpango-1.0
-lpangocairo-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
|
require 'formula'
class Gtkx3 < Formula
homepage 'http://gtk.org/'
url 'http://ftp.gnome.org/pub/gnome/sources/gtk+/3.14/gtk+-3.14.1.tar.xz'
sha256 '7e86eb7c8acc18524d7758ca2340b723ddeee1d0cd2cadd56de5a13322770a52'
bottle do
end
depends_on :x11 => ['2.5', :recommended] # needs XInput2, introduced in libXi 1.3
depends_on 'pkg-config' => :build
depends_on 'glib'
depends_on 'jpeg'
depends_on 'libtiff'
depends_on 'gdk-pixbuf'
depends_on 'pango'
depends_on 'cairo'
depends_on 'jasper' => :optional
depends_on 'atk'
depends_on 'at-spi2-atk'
depends_on 'gobject-introspection'
depends_on 'gsettings-desktop-schemas' => :recommended
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--disable-glibtest
--enable-introspection=yes
--disable-schemas-compile
]
if build.without? "x11"
args << "--enable-quartz-backend" << "--enable-quartz-relocation"
else
args << "--enable-x11-backend"
end
system "./configure", *args
system "make install"
# Prevent a conflict between this and Gtk+2
mv bin/'gtk-update-icon-cache', bin/'gtk3-update-icon-cache'
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
end
end
gtk+3: update 3.14.1 bottle.
require 'formula'
class Gtkx3 < Formula
homepage 'http://gtk.org/'
url 'http://ftp.gnome.org/pub/gnome/sources/gtk+/3.14/gtk+-3.14.1.tar.xz'
sha256 '7e86eb7c8acc18524d7758ca2340b723ddeee1d0cd2cadd56de5a13322770a52'
bottle do
sha1 "f86088908060d19c73afe2883dea0d7f2b9db7f7" => :mavericks
sha1 "8cb0138bbfee3942db106f413c3ed84c7d431e7e" => :mountain_lion
sha1 "f14082ff43736ec1b5dfa96fff9c64f493207625" => :lion
end
depends_on :x11 => ['2.5', :recommended] # needs XInput2, introduced in libXi 1.3
depends_on 'pkg-config' => :build
depends_on 'glib'
depends_on 'jpeg'
depends_on 'libtiff'
depends_on 'gdk-pixbuf'
depends_on 'pango'
depends_on 'cairo'
depends_on 'jasper' => :optional
depends_on 'atk'
depends_on 'at-spi2-atk'
depends_on 'gobject-introspection'
depends_on 'gsettings-desktop-schemas' => :recommended
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--disable-glibtest
--enable-introspection=yes
--disable-schemas-compile
]
if build.without? "x11"
args << "--enable-quartz-backend" << "--enable-quartz-relocation"
else
args << "--enable-x11-backend"
end
system "./configure", *args
system "make install"
# Prevent a conflict between this and Gtk+2
mv bin/'gtk-update-icon-cache', bin/'gtk3-update-icon-cache'
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
end
end
|
# encoding: utf-8
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'rubocop/version'
require 'English'
Gem::Specification.new do |s|
s.name = 'rubocop'
s.version = Rubocop::Version::STRING
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 1.9.2'
s.authors = ['Bozhidar Batsov']
s.description = <<-EOF
Automatic Ruby code style checking tool.
Aims to enforce the community-driven Ruby Style Guide.
EOF
s.email = 'bozhidar@batsov.com'
s.files = `git ls-files`.split($RS)
s.test_files = s.files.grep(/^spec\//)
s.executables = s.files.grep(/^bin\//) { |f| File.basename(f) }
s.extra_rdoc_files = ['LICENSE.txt', 'README.md']
s.homepage = 'http://github.com/bbatsov/rubocop'
s.licenses = ['MIT']
s.require_paths = ['lib']
s.rubygems_version = '1.8.23'
s.summary = 'Automatic Ruby code style checking tool.'
s.add_runtime_dependency('rainbow', '>= 1.1.4')
s.add_runtime_dependency('parser', '~> 2.0.0.pre6')
s.add_runtime_dependency('backports', '~> 3.3.3')
s.add_runtime_dependency('powerpack', '0.0.1')
s.add_development_dependency('rake', '~> 10.1')
s.add_development_dependency('rspec', '~> 2.14')
s.add_development_dependency('yard', '~> 0.8')
s.add_development_dependency('bundler', '~> 1.3')
s.add_development_dependency('simplecov', '~> 0.7')
end
Bump powerpack dep
# encoding: utf-8
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'rubocop/version'
require 'English'
Gem::Specification.new do |s|
s.name = 'rubocop'
s.version = Rubocop::Version::STRING
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 1.9.2'
s.authors = ['Bozhidar Batsov']
s.description = <<-EOF
Automatic Ruby code style checking tool.
Aims to enforce the community-driven Ruby Style Guide.
EOF
s.email = 'bozhidar@batsov.com'
s.files = `git ls-files`.split($RS)
s.test_files = s.files.grep(/^spec\//)
s.executables = s.files.grep(/^bin\//) { |f| File.basename(f) }
s.extra_rdoc_files = ['LICENSE.txt', 'README.md']
s.homepage = 'http://github.com/bbatsov/rubocop'
s.licenses = ['MIT']
s.require_paths = ['lib']
s.rubygems_version = '1.8.23'
s.summary = 'Automatic Ruby code style checking tool.'
s.add_runtime_dependency('rainbow', '>= 1.1.4')
s.add_runtime_dependency('parser', '~> 2.0.0.pre6')
s.add_runtime_dependency('backports', '~> 3.3.3')
s.add_runtime_dependency('powerpack', '~> 0.0.3')
s.add_development_dependency('rake', '~> 10.1')
s.add_development_dependency('rspec', '~> 2.14')
s.add_development_dependency('yard', '~> 0.8')
s.add_development_dependency('bundler', '~> 1.3')
s.add_development_dependency('simplecov', '~> 0.7')
end
|
require 'formula'
class Guile < Formula
homepage 'http://www.gnu.org/software/guile/'
url 'http://ftpmirror.gnu.org/guile/guile-2.0.9.tar.gz'
mirror 'http://ftp.gnu.org/gnu/guile/guile-2.0.9.tar.gz'
sha1 'fc5d770e8b1d364b2f222a8f8c96ccf740b2956f'
head do
url 'http://git.sv.gnu.org/r/guile.git'
depends_on 'autoconf' => :build
depends_on 'automake' => :build
depends_on 'gettext' => :build
end
depends_on 'pkg-config' => :build
depends_on :libtool
depends_on 'libffi'
depends_on 'libunistring'
depends_on 'bdw-gc'
depends_on 'gmp'
# GNU Readline is required; libedit won't work.
depends_on 'readline'
fails_with :llvm do
build 2336
cause "Segfaults during compilation"
end
fails_with :clang do
build 211
cause "Segfaults during compilation"
end
# Only for 2.0.9: Fix shebang shell in build-aux/install-sh.
# http://debbugs.gnu.org/cgi/bugreport.cgi?bug=14201#19
def patches; DATA; end
def install
system './autogen.sh' if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libreadline-prefix=#{Formula.factory('readline').prefix}"
system "make install"
# A really messed up workaround required on OS X --mkhl
lib.cd { Dir["*.dylib"].each {|p| ln_sf p, File.basename(p, ".dylib")+".so" }}
end
test do
hello = testpath/'hello.scm'
hello.write <<-EOS.undent
(display "Hello World")
(newline)
EOS
ENV['GUILE_AUTO_COMPILE'] = '0'
system bin/'guile', hello
end
end
__END__
--- guile-2.0.9.orig/build-aux/install-sh 2013-01-28 12:35:24.000000000 -0800
+++ guile-2.0.9/build-aux/install-sh 2013-04-21 08:41:10.000000000 -0700
@@ -1,4 +1,4 @@
-#!/nix/store/ryk1ywzz31kp4biclxq3yq6hpjycalyy-bash-4.2/bin/sh
+#!/bin/sh
# install - install a program, script, or datafile
scriptversion=2011-11-20.07; # UTC
guile: specify path to libgmp
Closes Homebrew/homebrew#26111.
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require 'formula'
class Guile < Formula
homepage 'http://www.gnu.org/software/guile/'
url 'http://ftpmirror.gnu.org/guile/guile-2.0.9.tar.gz'
mirror 'http://ftp.gnu.org/gnu/guile/guile-2.0.9.tar.gz'
sha1 'fc5d770e8b1d364b2f222a8f8c96ccf740b2956f'
head do
url 'http://git.sv.gnu.org/r/guile.git'
depends_on 'autoconf' => :build
depends_on 'automake' => :build
depends_on 'gettext' => :build
end
depends_on 'pkg-config' => :build
depends_on :libtool
depends_on 'libffi'
depends_on 'libunistring'
depends_on 'bdw-gc'
depends_on 'gmp'
# GNU Readline is required; libedit won't work.
depends_on 'readline'
fails_with :llvm do
build 2336
cause "Segfaults during compilation"
end
fails_with :clang do
build 211
cause "Segfaults during compilation"
end
# Only for 2.0.9: Fix shebang shell in build-aux/install-sh.
# http://debbugs.gnu.org/cgi/bugreport.cgi?bug=14201#19
def patches; DATA; end
def install
system './autogen.sh' if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libreadline-prefix=#{Formula.factory('readline').prefix}",
"--with-libgmp-prefix=#{Formula.factory('gmp').prefix}"
system "make install"
# A really messed up workaround required on OS X --mkhl
lib.cd { Dir["*.dylib"].each {|p| ln_sf p, File.basename(p, ".dylib")+".so" }}
end
test do
hello = testpath/'hello.scm'
hello.write <<-EOS.undent
(display "Hello World")
(newline)
EOS
ENV['GUILE_AUTO_COMPILE'] = '0'
system bin/'guile', hello
end
end
__END__
--- guile-2.0.9.orig/build-aux/install-sh 2013-01-28 12:35:24.000000000 -0800
+++ guile-2.0.9/build-aux/install-sh 2013-04-21 08:41:10.000000000 -0700
@@ -1,4 +1,4 @@
-#!/nix/store/ryk1ywzz31kp4biclxq3yq6hpjycalyy-bash-4.2/bin/sh
+#!/bin/sh
# install - install a program, script, or datafile
scriptversion=2011-11-20.07; # UTC
|
# encoding: utf-8
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'rubocop/version'
require 'English'
Gem::Specification.new do |s|
s.name = 'rubocop'
s.version = RuboCop::Version::STRING
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 1.9.2'
s.authors = ['Bozhidar Batsov', 'Jonas Arvidsson', 'Yuji Nakayama']
s.description = <<-EOF
Automatic Ruby code style checking tool.
Aims to enforce the community-driven Ruby Style Guide.
EOF
s.email = 'rubocop@googlegroups.com'
s.files = `git ls-files`.split($RS)
s.test_files = s.files.grep(/^spec\//)
s.executables = s.files.grep(/^bin\//) { |f| File.basename(f) }
s.extra_rdoc_files = ['LICENSE.txt', 'README.md']
s.homepage = 'http://github.com/bbatsov/rubocop'
s.licenses = ['MIT']
s.require_paths = ['lib']
s.rubygems_version = '1.8.23'
s.summary = 'Automatic Ruby code style checking tool.'
s.add_runtime_dependency('rainbow', '>= 1.99.1', '< 3.0')
s.add_runtime_dependency('parser', '>= 2.2.0.pre.3', '< 3.0')
s.add_runtime_dependency('powerpack', '~> 0.0.6')
s.add_runtime_dependency('json', '>= 1.7.7', '< 2')
s.add_runtime_dependency('ruby-progressbar', '~> 1.4')
s.add_development_dependency('rake', '~> 10.1')
s.add_development_dependency('rspec', '~> 3.0')
s.add_development_dependency('yard', '~> 0.8')
s.add_development_dependency('bundler', '~> 1.3')
s.add_development_dependency('simplecov', '~> 0.7')
end
Revert "Add json gem as a runtime dependency for Ruby 1.9.2"
This reverts commit 71ad28d77749eccbbe52034c7909bdfd8f266a8f.
Conflicts:
rubocop.gemspec
# encoding: utf-8
$LOAD_PATH.unshift File.expand_path('../lib', __FILE__)
require 'rubocop/version'
require 'English'
Gem::Specification.new do |s|
s.name = 'rubocop'
s.version = RuboCop::Version::STRING
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 1.9.2'
s.authors = ['Bozhidar Batsov', 'Jonas Arvidsson', 'Yuji Nakayama']
s.description = <<-EOF
Automatic Ruby code style checking tool.
Aims to enforce the community-driven Ruby Style Guide.
EOF
s.email = 'rubocop@googlegroups.com'
s.files = `git ls-files`.split($RS)
s.test_files = s.files.grep(/^spec\//)
s.executables = s.files.grep(/^bin\//) { |f| File.basename(f) }
s.extra_rdoc_files = ['LICENSE.txt', 'README.md']
s.homepage = 'http://github.com/bbatsov/rubocop'
s.licenses = ['MIT']
s.require_paths = ['lib']
s.rubygems_version = '1.8.23'
s.summary = 'Automatic Ruby code style checking tool.'
s.add_runtime_dependency('rainbow', '>= 1.99.1', '< 3.0')
s.add_runtime_dependency('parser', '>= 2.2.0.pre.3', '< 3.0')
s.add_runtime_dependency('powerpack', '~> 0.0.6')
s.add_runtime_dependency('ruby-progressbar', '~> 1.4')
s.add_development_dependency('rake', '~> 10.1')
s.add_development_dependency('rspec', '~> 3.0')
s.add_development_dependency('yard', '~> 0.8')
s.add_development_dependency('bundler', '~> 1.3')
s.add_development_dependency('simplecov', '~> 0.7')
end
|
# GUPnP 1.4.0: GNOME's object-oriented framework for building UPnP devices
# and control points. Built out-of-tree with Meson/Ninja; installs a
# Python helper (gupnp-binding-tool) whose shebang is rewritten to the
# brewed interpreter.
class Gupnp < Formula
  # Supplies detected_python_shebang / rewrite_shebang used in install below.
  include Language::Python::Shebang

  desc "Framework for creating UPnP devices and control points"
  homepage "https://wiki.gnome.org/Projects/GUPnP"
  url "https://download.gnome.org/sources/gupnp/1.4/gupnp-1.4.0.tar.xz"
  sha256 "590ffb02b84da2a1aec68fd534bc40af1b37dd3f6223f9d1577fc48ab48be36f"
  license "LGPL-2.0-or-later"
  # Rebuild counter: same upstream version, changed packaging/dependencies.
  revision 1

  bottle do
    sha256 cellar: :any, arm64_big_sur: "74d32f0c86134b6ae8680e88eca4cdf16c0b3c7ad39003f36801e9328832e577"
    sha256 cellar: :any, big_sur: "cc65c455a851a56d7724da20529e17b9f6a891d7611b1ace26c91522f2261632"
    sha256 cellar: :any, catalina: "dc95e5115ede011c9b59a5f1cc0a669321a11297fdb7e0a2d0fb05267480a7f2"
    sha256 x86_64_linux: "7d3212cbd0a5c39317ea485330ed228a0f8a9a02080daf08688ecad592cfd77f"
  end

  # Build-time tools only.
  depends_on "docbook-xsl" => :build
  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  # Runtime libraries.
  depends_on "gettext"
  depends_on "glib"
  depends_on "gssdp"
  depends_on "libsoup"
  depends_on "python@3.9"

  def install
    # Out-of-tree Meson build in a scratch directory.
    mkdir "build" do
      # Point XML tooling at Homebrew's catalog so docbook-xsl is found.
      ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
      system "meson", *std_meson_args, ".."
      system "ninja"
      system "ninja", "install"
      # Pathname#find walks everything installed into bin; each entry is
      # passed to rewrite_shebang so Python scripts use the brewed python.
      bin.find { |f| rewrite_shebang detected_python_shebang, f }
    end
  end

  test do
    # The binding tool is named by API version (1.2), not package version.
    system bin/"gupnp-binding-tool-1.2", "--help"
    # Minimal control-point program: create a context and a control point,
    # allocate and release a main loop, then free everything.
    (testpath/"test.c").write <<~EOS
      #include <libgupnp/gupnp-control-point.h>
      static GMainLoop *main_loop;
      int main (int argc, char **argv)
      {
      GUPnPContext *context;
      GUPnPControlPoint *cp;
      context = gupnp_context_new (NULL, 0, NULL);
      cp = gupnp_control_point_new
      (context, "urn:schemas-upnp-org:service:WANIPConnection:1");
      main_loop = g_main_loop_new (NULL, FALSE);
      g_main_loop_unref (main_loop);
      g_object_unref (cp);
      g_object_unref (context);
      return 0;
      }
    EOS
    # libxml2 headers come from the macOS SDK by default; on Linux the
    # on_linux block reassigns the captured local to Homebrew's libxml2.
    libxml2 = "-I#{MacOS.sdk_path}/usr/include/libxml2"
    on_linux do
      libxml2 = "-I#{Formula["libxml2"].include}/libxml2"
    end
    system ENV.cc, testpath/"test.c", "-I#{include}/gupnp-1.2", "-L#{lib}", "-lgupnp-1.2",
      "-I#{Formula["gssdp"].opt_include}/gssdp-1.2",
      "-L#{Formula["gssdp"].opt_lib}", "-lgssdp-1.2",
      "-I#{Formula["glib"].opt_include}/glib-2.0",
      "-I#{Formula["glib"].opt_lib}/glib-2.0/include",
      "-L#{Formula["glib"].opt_lib}",
      "-lglib-2.0", "-lgobject-2.0",
      "-I#{Formula["libsoup"].opt_include}/libsoup-2.4",
      libxml2, "-o", testpath/"test"
    system "./test"
  end
end
gupnp: revision bump (libsoup 3.0.0)
# GUPnP 1.4.0, pinned to the libsoup 2 API: this revision switches the
# dependency from "libsoup" to "libsoup@2" (revision bump after libsoup
# 3.0.0 landed) and steers pkg-config/XDG lookups at the versioned keg.
class Gupnp < Formula
  # Supplies detected_python_shebang / rewrite_shebang used in install below.
  include Language::Python::Shebang

  desc "Framework for creating UPnP devices and control points"
  homepage "https://wiki.gnome.org/Projects/GUPnP"
  url "https://download.gnome.org/sources/gupnp/1.4/gupnp-1.4.0.tar.xz"
  sha256 "590ffb02b84da2a1aec68fd534bc40af1b37dd3f6223f9d1577fc48ab48be36f"
  license "LGPL-2.0-or-later"
  # Rebuild counter: same upstream version, changed packaging/dependencies.
  revision 1

  bottle do
    sha256 cellar: :any, arm64_big_sur: "74d32f0c86134b6ae8680e88eca4cdf16c0b3c7ad39003f36801e9328832e577"
    sha256 cellar: :any, big_sur: "cc65c455a851a56d7724da20529e17b9f6a891d7611b1ace26c91522f2261632"
    sha256 cellar: :any, catalina: "dc95e5115ede011c9b59a5f1cc0a669321a11297fdb7e0a2d0fb05267480a7f2"
    sha256 x86_64_linux: "7d3212cbd0a5c39317ea485330ed228a0f8a9a02080daf08688ecad592cfd77f"
  end

  # Build-time tools only.
  depends_on "docbook-xsl" => :build
  depends_on "gobject-introspection" => :build
  depends_on "meson" => :build
  depends_on "ninja" => :build
  depends_on "pkg-config" => :build
  depends_on "vala" => :build
  # Runtime libraries. libsoup is pinned to the 2.x line; gupnp 1.4 does
  # not build against libsoup 3.
  depends_on "gettext"
  depends_on "glib"
  depends_on "gssdp"
  depends_on "libsoup@2"
  depends_on "libxml2"
  depends_on "python@3.9"

  def install
    # libsoup@2 is a versioned formula, so its pkg-config files and shared
    # data are not linked into the default search paths — prepend them.
    ENV.prepend_path "PKG_CONFIG_PATH", Formula["libsoup@2"].opt_lib/"pkgconfig"
    ENV.prepend_path "XDG_DATA_DIRS", Formula["libsoup@2"].opt_share
    ENV.prepend_path "XDG_DATA_DIRS", HOMEBREW_PREFIX/"share"
    # Out-of-tree Meson build in a scratch directory.
    mkdir "build" do
      # Point XML tooling at Homebrew's catalog so docbook-xsl is found.
      ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
      system "meson", *std_meson_args, ".."
      system "ninja"
      system "ninja", "install"
      # Pathname#find walks everything installed into bin; each entry is
      # passed to rewrite_shebang so Python scripts use the brewed python.
      bin.find { |f| rewrite_shebang detected_python_shebang, f }
    end
  end

  test do
    # The binding tool is named by API version (1.2), not package version.
    system bin/"gupnp-binding-tool-1.2", "--help"
    # Minimal control-point program: create a context and a control point,
    # allocate and release a main loop, then free everything.
    (testpath/"test.c").write <<~EOS
      #include <libgupnp/gupnp-control-point.h>
      static GMainLoop *main_loop;
      int main (int argc, char **argv)
      {
      GUPnPContext *context;
      GUPnPControlPoint *cp;
      context = gupnp_context_new (NULL, 0, NULL);
      cp = gupnp_control_point_new
      (context, "urn:schemas-upnp-org:service:WANIPConnection:1");
      main_loop = g_main_loop_new (NULL, FALSE);
      g_main_loop_unref (main_loop);
      g_object_unref (cp);
      g_object_unref (context);
      return 0;
      }
    EOS
    # libxml2 headers come from the macOS SDK by default; on Linux the
    # on_linux block reassigns the captured local to Homebrew's libxml2.
    libxml2 = "-I#{MacOS.sdk_path}/usr/include/libxml2"
    on_linux do
      libxml2 = "-I#{Formula["libxml2"].include}/libxml2"
    end
    system ENV.cc, testpath/"test.c", "-I#{include}/gupnp-1.2", "-L#{lib}", "-lgupnp-1.2",
      "-I#{Formula["gssdp"].opt_include}/gssdp-1.2",
      "-L#{Formula["gssdp"].opt_lib}", "-lgssdp-1.2",
      "-I#{Formula["glib"].opt_include}/glib-2.0",
      "-I#{Formula["glib"].opt_lib}/glib-2.0/include",
      "-L#{Formula["glib"].opt_lib}",
      "-lglib-2.0", "-lgobject-2.0",
      "-I#{Formula["libsoup@2"].opt_include}/libsoup-2.4",
      libxml2, "-o", testpath/"test"
    system "./test"
  end
end
|
# Legacy-era formula (explicit require, sha1 checksum) for Apache HBase
# 0.94.0. The distribution is staged whole under libexec and its launch
# scripts exposed through exec shims in bin.
require 'formula'

class Hbase < Formula
  homepage 'http://hbase.apache.org'
  # Apache mirror-chooser URL; resolves to the nearest mirror at download time.
  url 'http://www.apache.org/dyn/closer.cgi?path=hbase/hbase-0.94.0/hbase-0.94.0.tar.gz'
  sha1 'a77f4b70bddbf8555bfe11e4db197d3cbb0c20d1'

  depends_on 'hadoop'

  def install
    # Windows batch launchers are useless on macOS.
    rm_f Dir["bin/*.bat"]
    # Keep the whole distribution private under libexec; only shims go in bin.
    libexec.install %w[bin conf docs lib hbase-webapps]
    libexec.install Dir['*.jar']
    bin.write_exec_script Dir["#{libexec}/bin/*"]
    # Uncomment/replace JAVA_HOME so the stock scripts locate the macOS JDK.
    inreplace "#{libexec}/conf/hbase-env.sh",
      "# export JAVA_HOME=/usr/java/jdk1.6.0/",
      "export JAVA_HOME=\"$(/usr/libexec/java_home)\""
  end

  # Post-install notes printed to the user.
  def caveats; <<-EOS.undent
    Requires Java 1.6.0 or greater.
    You must also edit the configs in:
    #{libexec}/conf
    to reflect your environment.
    For more details:
    http://wiki.apache.org/hadoop/Hbase
    EOS
  end
end
HBase 0.94.2
Closes Homebrew/homebrew#15819.
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'

# Apache HBase 0.94.2. The binary distribution lives under libexec; the
# bundled launch scripts are exposed via exec shims in bin, and the stock
# environment file is patched to find the macOS JDK.
class Hbase < Formula
  homepage 'http://hbase.apache.org'
  url 'http://www.apache.org/dyn/closer.cgi?path=hbase/hbase-0.94.2/hbase-0.94.2.tar.gz'
  sha1 'c6731729e1bc47cbdfc0e95ebce8c039bfebfa8e'

  depends_on 'hadoop'

  def install
    # Windows batch launchers serve no purpose here — drop each one.
    Dir["bin/*.bat"].each { |bat| rm_f bat }

    # Stage the distribution privately under libexec; bin only gets shims.
    libexec.install "bin", "conf", "docs", "lib", "hbase-webapps"
    libexec.install Dir['*.jar']
    bin.write_exec_script Dir["#{libexec}/bin/*"]

    # Replace the commented-out JAVA_HOME stub with the macOS JDK lookup.
    java_home_line = 'export JAVA_HOME="$(/usr/libexec/java_home)"'
    inreplace "#{libexec}/conf/hbase-env.sh",
              "# export JAVA_HOME=/usr/java/jdk1.6.0/",
              java_home_line
  end

  # Post-install notes printed to the user.
  def caveats; <<-EOS.undent
    Requires Java 1.6.0 or greater.
    You must also edit the configs in:
    #{libexec}/conf
    to reflect your environment.
    For more details:
    http://wiki.apache.org/hadoop/Hbase
    EOS
  end
end
|
require 'test_helper'
# Verifies that UsersHelper#role_help documents every user role.
#
# role_help returns a description list of [term, description] pairs, so
# asserting inclusion against the raw return value compares pairs to
# strings and always fails. Extract the terms (first element of each
# pair) and assert against those instead.
class UsersHelperTest < ActionView::TestCase
  include UsersHelper

  test 'role help is a description list' do
    role_items = role_help.map(&:first)
    assert_includes role_items, "Guest"
    assert_includes role_items, "Manager"
    assert_includes role_items, "Admin"
  end
end
Fix UsersHelperTest: role_help returns [term, description] pairs, so assert against the extracted role names
require 'test_helper'
# UsersHelper#role_help yields a description list of [term, description]
# pairs; every role name must appear among the terms.
class UsersHelperTest < ActionView::TestCase
  include UsersHelper

  test 'role help is a description list' do
    terms = role_help.map(&:first)
    %w[Guest Manager Admin].each do |role|
      assert_includes terms, role
    end
  end
end
|
# Apache HBase 1.1.5. The binary distribution is staged under libexec;
# optional LZO compression is compiled from the hadoop-lzo resource, the
# stock configs are patched for out-of-the-box standalone use, and a
# launchd plist is provided to run the master daemon.
class Hbase < Formula
  desc "Hadoop database: a distributed, scalable, big data store"
  homepage "https://hbase.apache.org"
  url "https://www.apache.org/dyn/closer.cgi?path=hbase/1.1.5/hbase-1.1.5-bin.tar.gz"
  sha256 "e06fa399c7ba48acd14b71ba6316d272f14b18b5b7398d6a3d60b6b8a9073f0e"

  bottle do
    sha256 "9885dc9e22c35e7d682da9c3a98fbe6152bb936ef417af8ab853b71df1d84b57" => :el_capitan
    sha256 "8fd13d83dfe5bfab04c771f23dfbec48232d07386162835c87319f453c7a2236" => :yosemite
    sha256 "f665d23df81a50de0b7e6c0168ba2d3a626d50288c1e63967b57f08ff8c52eb9" => :mavericks
  end

  depends_on :java => "1.7+"
  depends_on "hadoop" => :optional
  depends_on "lzo" => :recommended
  # ant is only needed to compile the hadoop-lzo native extension.
  depends_on "ant" => :build if build.with? "lzo"
  depends_on :arch => :x86_64 if build.with? "lzo"
  # 64 bit is required because of three things:
  # the lzo jar has a native extension
  # building native extensions requires a version of java that matches the architecture
  # there is no 32 bit version of java for OS X since Java 1.7, and 1.7+ is required for hbase

  resource "hadoop-lzo" do
    url "https://github.com/cloudera/hadoop-lzo/archive/0.4.14.tar.gz"
    sha256 "aa8ddbb8b3f9e1c4b8cc3523486acdb7841cd97c002a9f2959c5b320c7bb0e6c"
  end

  def install
    ENV.java_cache if build.with? "lzo"
    # Drop Windows launchers, stage the distribution under libexec, and
    # expose the launch scripts through exec shims in bin.
    rm_f Dir["bin/*.cmd", "conf/*.cmd"]
    libexec.install %w[bin conf docs lib hbase-webapps]
    bin.write_exec_script Dir["#{libexec}/bin/*"]
    if build.with? "lzo"
      resource("hadoop-lzo").stage do
        # Fixed upstream: https://github.com/cloudera/hadoop-lzo/blob/master/build.xml#L235
        inreplace "build.xml",
          %r{(<class name="com.hadoop.compression.lzo.LzoDecompressor" />)},
          "\\1\n<classpath refid=\"classpath\"/>"
        # Compile the JNI bits against the hadoop-common jar bundled with
        # HBase, forcing a 64-bit build (see the note above the resource).
        ENV["CLASSPATH"] = Dir["#{libexec}/lib/hadoop-common-*.jar"].first
        ENV["CFLAGS"] = "-m64"
        ENV["CXXFLAGS"] = "-m64"
        ENV["CPPFLAGS"] = "-I/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers"
        system "ant", "compile-native", "tar"
        (libexec/"lib").install Dir["build/hadoop-lzo-*/hadoop-lzo-*.jar"]
        (libexec/"lib/native").install Dir["build/hadoop-lzo-*/lib/native/*"]
      end
    end
    inreplace "#{libexec}/conf/hbase-env.sh" do |s|
      # upstream bugs for ipv6 incompatibility:
      # https://issues.apache.org/jira/browse/HADOOP-8568
      # https://issues.apache.org/jira/browse/HADOOP-3619
      s.gsub!("export HBASE_OPTS=\"-XX:+UseConcMarkSweepGC\"",
        "export HBASE_OPTS=\"-Djava.net.preferIPv4Stack=true -XX:+UseConcMarkSweepGC\"")
      s.gsub!("# export JAVA_HOME=/usr/java/jdk1.6.0/",
        "export JAVA_HOME=\"$(/usr/libexec/java_home)\"")
    end
    # makes hbase usable out of the box
    # upstream has been provided this patch
    # https://issues.apache.org/jira/browse/HBASE-15426
    # (rootdir points at HDFS when built --with-hadoop, else at var;
    # the dns.interface keys pin all daemons to loopback)
    inreplace "#{libexec}/conf/hbase-site.xml",
      /<configuration>/,
      <<-EOS.undent
      <configuration>
      <property>
      <name>hbase.rootdir</name>
      <value>#{(build.with? "hadoop") ? "hdfs://localhost:9000" : "file://"+var}/hbase</value>
      </property>
      <property>
      <name>hbase.zookeeper.property.clientPort</name>
      <value>2181</value>
      </property>
      <property>
      <name>hbase.zookeeper.property.dataDir</name>
      <value>#{var}/zookeeper</value>
      </property>
      <property>
      <name>hbase.zookeeper.dns.interface</name>
      <value>lo0</value>
      </property>
      <property>
      <name>hbase.regionserver.dns.interface</name>
      <value>lo0</value>
      </property>
      <property>
      <name>hbase.master.dns.interface</name>
      <value>lo0</value>
      </property>
      EOS
    (libexec/"logs").mkpath
  end

  def post_install
    # PID directory for the launchd-managed daemon.
    (var/"run/hbase").mkpath
  end

  plist_options :manual => "#{HOMEBREW_PREFIX}/opt/hbase/bin/start-hbase.sh"

  # launchd job for the HBase master. KeepAlive is tied to the hadoop job
  # when built --with-hadoop, otherwise unconditional.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>KeepAlive</key>
    #{(build.without? "hadoop") ? "<true/>" : "<dict>\n <key>OtherJobEnabled</key>\n <string>"+Formula["hadoop"].plist_name+"</string>\n </dict>"}
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>EnvironmentVariables</key>
    <dict>
    <key>HBASE_MASTER_OPTS</key><string> -XX:PermSize=128m -XX:MaxPermSize=128m</string>
    <key>HBASE_LOG_DIR</key><string>#{var}/hbase</string>
    <key>HBASE_HOME</key><string>#{opt_libexec}</string>
    <key>HBASE_SECURITY_LOGGER</key><string>INFO,RFAS</string>
    <key>HBASE_PID_DIR</key><string>#{var}/run/hbase</string>
    <key>HBASE_NICENESS</key><string>0</string>
    <key>HBASE_IDENT_STRING</key><string>root</string>
    <key>HBASE_REGIONSERVER_OPTS</key><string> -XX:PermSize=128m -XX:MaxPermSize=128m</string>
    <key>HBASE_OPTS</key><string>-XX:+UseConcMarkSweepGC</string>
    <key>HBASE_ROOT_LOGGER</key><string>INFO,RFA</string>
    <key>HBASE_LOG_PREFIX</key><string>hbase-root-master</string>
    <key>HBASE_LOGFILE</key><string>hbase-root-master.log</string>
    </dict>
    <key>ProgramArguments</key>
    <array>
    <string>#{opt_bin}/hbase</string>
    <string>--config</string>
    <string>#{opt_libexec}/conf</string>
    <string>master</string>
    <string>start</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>WorkingDirectory</key>
    <string>#{HOMEBREW_PREFIX}</string>
    <key>StandardOutPath</key>
    <string>#{var}/hbase/hbase.log</string>
    <key>StandardErrorPath</key>
    <string>#{var}/hbase/hbase.err</string>
    </dict>
    </plist>
    EOS
  end

  test do
    # mapredcp prints a classpath that embeds the version string.
    assert_match /#{version}/, shell_output("#{bin}/hbase mapredcp")
    # Spin up a standalone instance rooted at testpath and poke the
    # embedded ZooKeeper with its four-letter "stats" command.
    cp_r (libexec/"conf"), testpath
    inreplace (testpath/"conf/hbase-site.xml") do |s|
      s.gsub! /(hbase.rootdir.*)\n.*/, "\\1\n<value>file://#{testpath}/hbase</value>"
      s.gsub! /(hbase.zookeeper.property.dataDir.*)\n.*/, "\\1\n<value>#{testpath}/zookeeper</value>"
    end
    ENV["HBASE_LOG_DIR"] = (testpath/"logs")
    ENV["HBASE_CONF_DIR"] = (testpath/"conf")
    ENV["HBASE_PID_DIR"] = (testpath/"pid")
    system "#{bin}/start-hbase.sh"
    sleep 2
    begin
      assert_match /Zookeeper/, pipe_output("nc 127.0.0.1 2181 2>&1", "stats")
    ensure
      # Always stop the daemon, even when the assertion fails.
      system "#{bin}/stop-hbase.sh"
    end
  end
end
hbase: remove superfluous parentheses in test
# Apache HBase 1.1.5 (test tidied: Pathname values assigned to ENV
# without superfluous parentheses). The binary distribution is staged
# under libexec; optional LZO compression is compiled from the hadoop-lzo
# resource, the stock configs are patched for out-of-the-box standalone
# use, and a launchd plist is provided to run the master daemon.
class Hbase < Formula
  desc "Hadoop database: a distributed, scalable, big data store"
  homepage "https://hbase.apache.org"
  url "https://www.apache.org/dyn/closer.cgi?path=hbase/1.1.5/hbase-1.1.5-bin.tar.gz"
  sha256 "e06fa399c7ba48acd14b71ba6316d272f14b18b5b7398d6a3d60b6b8a9073f0e"

  bottle do
    sha256 "9885dc9e22c35e7d682da9c3a98fbe6152bb936ef417af8ab853b71df1d84b57" => :el_capitan
    sha256 "8fd13d83dfe5bfab04c771f23dfbec48232d07386162835c87319f453c7a2236" => :yosemite
    sha256 "f665d23df81a50de0b7e6c0168ba2d3a626d50288c1e63967b57f08ff8c52eb9" => :mavericks
  end

  depends_on :java => "1.7+"
  depends_on "hadoop" => :optional
  depends_on "lzo" => :recommended
  # ant is only needed to compile the hadoop-lzo native extension.
  depends_on "ant" => :build if build.with? "lzo"
  depends_on :arch => :x86_64 if build.with? "lzo"
  # 64 bit is required because of three things:
  # the lzo jar has a native extension
  # building native extensions requires a version of java that matches the architecture
  # there is no 32 bit version of java for OS X since Java 1.7, and 1.7+ is required for hbase

  resource "hadoop-lzo" do
    url "https://github.com/cloudera/hadoop-lzo/archive/0.4.14.tar.gz"
    sha256 "aa8ddbb8b3f9e1c4b8cc3523486acdb7841cd97c002a9f2959c5b320c7bb0e6c"
  end

  def install
    ENV.java_cache if build.with? "lzo"
    # Drop Windows launchers, stage the distribution under libexec, and
    # expose the launch scripts through exec shims in bin.
    rm_f Dir["bin/*.cmd", "conf/*.cmd"]
    libexec.install %w[bin conf docs lib hbase-webapps]
    bin.write_exec_script Dir["#{libexec}/bin/*"]
    if build.with? "lzo"
      resource("hadoop-lzo").stage do
        # Fixed upstream: https://github.com/cloudera/hadoop-lzo/blob/master/build.xml#L235
        inreplace "build.xml",
          %r{(<class name="com.hadoop.compression.lzo.LzoDecompressor" />)},
          "\\1\n<classpath refid=\"classpath\"/>"
        # Compile the JNI bits against the hadoop-common jar bundled with
        # HBase, forcing a 64-bit build (see the note above the resource).
        ENV["CLASSPATH"] = Dir["#{libexec}/lib/hadoop-common-*.jar"].first
        ENV["CFLAGS"] = "-m64"
        ENV["CXXFLAGS"] = "-m64"
        ENV["CPPFLAGS"] = "-I/System/Library/Frameworks/JavaVM.framework/Versions/Current/Headers"
        system "ant", "compile-native", "tar"
        (libexec/"lib").install Dir["build/hadoop-lzo-*/hadoop-lzo-*.jar"]
        (libexec/"lib/native").install Dir["build/hadoop-lzo-*/lib/native/*"]
      end
    end
    inreplace "#{libexec}/conf/hbase-env.sh" do |s|
      # upstream bugs for ipv6 incompatibility:
      # https://issues.apache.org/jira/browse/HADOOP-8568
      # https://issues.apache.org/jira/browse/HADOOP-3619
      s.gsub!("export HBASE_OPTS=\"-XX:+UseConcMarkSweepGC\"",
        "export HBASE_OPTS=\"-Djava.net.preferIPv4Stack=true -XX:+UseConcMarkSweepGC\"")
      s.gsub!("# export JAVA_HOME=/usr/java/jdk1.6.0/",
        "export JAVA_HOME=\"$(/usr/libexec/java_home)\"")
    end
    # makes hbase usable out of the box
    # upstream has been provided this patch
    # https://issues.apache.org/jira/browse/HBASE-15426
    # (rootdir points at HDFS when built --with-hadoop, else at var;
    # the dns.interface keys pin all daemons to loopback)
    inreplace "#{libexec}/conf/hbase-site.xml",
      /<configuration>/,
      <<-EOS.undent
      <configuration>
      <property>
      <name>hbase.rootdir</name>
      <value>#{(build.with? "hadoop") ? "hdfs://localhost:9000" : "file://"+var}/hbase</value>
      </property>
      <property>
      <name>hbase.zookeeper.property.clientPort</name>
      <value>2181</value>
      </property>
      <property>
      <name>hbase.zookeeper.property.dataDir</name>
      <value>#{var}/zookeeper</value>
      </property>
      <property>
      <name>hbase.zookeeper.dns.interface</name>
      <value>lo0</value>
      </property>
      <property>
      <name>hbase.regionserver.dns.interface</name>
      <value>lo0</value>
      </property>
      <property>
      <name>hbase.master.dns.interface</name>
      <value>lo0</value>
      </property>
      EOS
    (libexec/"logs").mkpath
  end

  def post_install
    # PID directory for the launchd-managed daemon.
    (var/"run/hbase").mkpath
  end

  plist_options :manual => "#{HOMEBREW_PREFIX}/opt/hbase/bin/start-hbase.sh"

  # launchd job for the HBase master. KeepAlive is tied to the hadoop job
  # when built --with-hadoop, otherwise unconditional.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
    <key>KeepAlive</key>
    #{(build.without? "hadoop") ? "<true/>" : "<dict>\n <key>OtherJobEnabled</key>\n <string>"+Formula["hadoop"].plist_name+"</string>\n </dict>"}
    <key>Label</key>
    <string>#{plist_name}</string>
    <key>EnvironmentVariables</key>
    <dict>
    <key>HBASE_MASTER_OPTS</key><string> -XX:PermSize=128m -XX:MaxPermSize=128m</string>
    <key>HBASE_LOG_DIR</key><string>#{var}/hbase</string>
    <key>HBASE_HOME</key><string>#{opt_libexec}</string>
    <key>HBASE_SECURITY_LOGGER</key><string>INFO,RFAS</string>
    <key>HBASE_PID_DIR</key><string>#{var}/run/hbase</string>
    <key>HBASE_NICENESS</key><string>0</string>
    <key>HBASE_IDENT_STRING</key><string>root</string>
    <key>HBASE_REGIONSERVER_OPTS</key><string> -XX:PermSize=128m -XX:MaxPermSize=128m</string>
    <key>HBASE_OPTS</key><string>-XX:+UseConcMarkSweepGC</string>
    <key>HBASE_ROOT_LOGGER</key><string>INFO,RFA</string>
    <key>HBASE_LOG_PREFIX</key><string>hbase-root-master</string>
    <key>HBASE_LOGFILE</key><string>hbase-root-master.log</string>
    </dict>
    <key>ProgramArguments</key>
    <array>
    <string>#{opt_bin}/hbase</string>
    <string>--config</string>
    <string>#{opt_libexec}/conf</string>
    <string>master</string>
    <string>start</string>
    </array>
    <key>RunAtLoad</key>
    <true/>
    <key>WorkingDirectory</key>
    <string>#{HOMEBREW_PREFIX}</string>
    <key>StandardOutPath</key>
    <string>#{var}/hbase/hbase.log</string>
    <key>StandardErrorPath</key>
    <string>#{var}/hbase/hbase.err</string>
    </dict>
    </plist>
    EOS
  end

  test do
    # mapredcp prints a classpath that embeds the version string.
    assert_match /#{version}/, shell_output("#{bin}/hbase mapredcp")
    # Spin up a standalone instance rooted at testpath and poke the
    # embedded ZooKeeper with its four-letter "stats" command.
    cp_r (libexec/"conf"), testpath
    inreplace (testpath/"conf/hbase-site.xml") do |s|
      s.gsub! /(hbase.rootdir.*)\n.*/, "\\1\n<value>file://#{testpath}/hbase</value>"
      s.gsub! /(hbase.zookeeper.property.dataDir.*)\n.*/, "\\1\n<value>#{testpath}/zookeeper</value>"
    end
    ENV["HBASE_LOG_DIR"] = testpath/"logs"
    ENV["HBASE_CONF_DIR"] = testpath/"conf"
    ENV["HBASE_PID_DIR"] = testpath/"pid"
    system "#{bin}/start-hbase.sh"
    sleep 2
    begin
      assert_match /Zookeeper/, pipe_output("nc 127.0.0.1 2181 2>&1", "stats")
    ensure
      # Always stop the daemon, even when the assertion fails.
      system "#{bin}/stop-hbase.sh"
    end
  end
end
require File.join(File.expand_path(File.dirname(__FILE__)), '..', 'test_helper')
require 'cc_deville'
class FacebookItemTest < ActiveSupport::TestCase
test "should get canonical URL parsed from facebook html" do
media1 = create_media url: 'https://www.facebook.com/photo.php?fbid=10155446238011949&set=a.10151842779956949&type=3&theater'
media2 = create_media url: 'https://www.facebook.com/photo.php?fbid=10155446238011949&set=a.10151842779956949&type=3'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
test "should get canonical URL parsed from facebook html when it is relative" do
media1 = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
media2 = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
test "should get canonical URL parsed from facebook html when it is a page" do
media1 = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479?pnref=story.unseen-section'
media2 = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
test "should get canonical URL from facebook object 2" do
media = Media.new(url: 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406')
media.as_json({ force: 1 })
assert_equal 'https://www.facebook.com/54212446406/photos/a.10154534110871407/10154534111016407/?type=3', media.url
end
test "should get canonical URL from facebook object 3" do
expected = 'https://www.facebook.com/54212446406/photos/a.10154534110871407/10154534111016407/?type=3'
variations = %w(
https://www.facebook.com/54212446406/photos/a.10154534110871407.1073742048.54212446406/10154534111016407/?type=3
https://www.facebook.com/54212446406/photos/a.10154534110871407.1073742048.54212446406/10154534111016407?type=3
)
variations.each do |url|
media = Media.new(url: url)
media.as_json({ force: 1 })
assert_equal expected, media.url
end
end
test "should parse facebook url with a photo album" do
expected = {
url: 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/613639175480416/?type=3',
title: 'Classic',
username: 'Classic.mou',
author_name: 'Classic',
author_url: 'http://facebook.com/136985363145802',
author_picture: 'https://graph.facebook.com/136985363145802/picture',
picture: /613639175480416_2497518582358260577/,
description: /Classic added a new photo/
}.with_indifferent_access
variations = %w(
https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/?type=3&theater
https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/
)
variations.each do |url|
media = Media.new(url: url)
data = media.as_json
expected.each do |key, value|
assert_match value, data[key]
end
end
end
test "should parse Facebook live post from mobile URL" do
m = create_media url: 'https://m.facebook.com/story.php?story_fbid=10154584426664820&id=355665009819%C2%ACif_t=live_video%C2%ACif_id=1476846578702256&ref=bookmarks'
data = m.as_json
assert_match /South China Morning Post/, data['title']
assert_match /SCMP #FacebookLive amid chaotic scenes in #HongKong Legco/, data['description']
assert_not_nil data['published_at']
assert_equal 'South China Morning Post', data['author_name']
assert_match 'http://facebook.com/355665009819', data['author_url']
assert_match /355665009819/, data['author_picture']
assert !data['picture'].blank?
end
test "should create Facebook post from mobile URL" do
m = create_media url: 'https://m.facebook.com/KIKOLOUREIROofficial/photos/a.10150618138397252/10152555300292252/?type=3&theater'
d = m.as_json
assert_match /Bolívia/, d['text']
assert_equal 'Kiko Loureiro', d['author_name']
assert_equal 1, d['media_count']
assert_equal '20/11/2014', Time.parse(d['published_at']).strftime("%d/%m/%Y")
end
test "should parse Facebook pure text post url" do
m = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
d = m.as_json
assert_match /Dina Samak/, d['title']
assert_not_nil d['description']
assert_not_nil d['author_picture']
assert_not_nil Time.parse(d['published_at'])
end
test "should parse Facebook live post" do
m = create_media url: 'https://www.facebook.com/cbcnews/videos/10154783484119604/'
data = m.as_json
assert_equal 'https://www.facebook.com/cbcnews/videos/10154783484119604/', m.url
assert_match /CBC News/, data['title']
assert_match /Live now: This is the National for Monday, Oct. 31, 2016./, data['description']
assert_not_nil data['published_at']
assert_equal 'cbcnews', data['username']
assert_match 'http://facebook.com/5823419603', data['author_url']
assert_match /5823419603/, data['author_picture']
assert_match /^https/, data['picture']
assert_match /10154783812779604/, data['picture']
end
test "should parse Facebook removed live post" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1538843716180215/'
data = m.as_json
assert_equal 'https://www.facebook.com/teste637621352/posts/1538843716180215', m.url
assert_match /Not Identified/, data['title']
assert_equal '', data['description']
assert_equal '', data['published_at']
assert_equal 'teste637621352', data['username']
assert_match 'http://facebook.com/749262715138323', data['author_url']
assert_match /749262715138323/, data['author_picture']
end
test "should parse Facebook livemap" do
variations = %w(
https://www.facebook.com/livemap/#@-12.991858482361014,-38.521747589110994,4z
https://www.facebook.com/live/map/#@37.777053833008,-122.41587829590001,4z
https://www.facebook.com/live/discover/map/#@37.777053833008,-122.41587829590001,4z
)
request = 'http://localhost'
request.expects(:base_url).returns('http://localhost')
variations.each do |url|
m = create_media url: url, request: request
data = m.as_json
assert_match /facebook\.com/, m.url
assert_match /Facebook/, data['title']
assert_not_nil data['published_at']
end
end
test "should parse Facebook event post" do
m = create_media url: 'https://www.facebook.com/events/364677040588691/permalink/376287682760960/?ref=1&action_history=null'
data = m.as_json
variations = %w(
https://www.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null
https://www.facebook.com/events/zawya/zawyas-tribute-to-mohamed-khan-%D9%85%D9%88%D8%B9%D8%AF-%D9%85%D8%B9-%D8%AE%D8%A7%D9%86/364677040588691/
https://web.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null&_rdc=1&_rdr
)
assert_includes variations, m.url
assert_not_nil data['published_at']
assert_match /#{data['user_uuid']}/, data['author_url']
assert_match /#{data['user_uuid']}/, data['author_picture']
assert_match /^https:/, data['picture']
assert_match /Zawya/, data['title']
assert_equal 'Zawya', data['username']
end
test "should parse Facebook video url from a page" do
m = create_media url: 'https://www.facebook.com/144585402276277/videos/1127489833985824'
d = m.as_json
assert_match /Trent Aric - Meteorologist/, d['title']
assert_match /MATTHEW YOU ARE DRUNK...GO HOME!/, d['description']
assert_equal 'item', d['type']
assert_not_nil d['picture']
assert_not_nil Time.parse(d['published_at'])
end
test "should parse Facebook video url from a page with another url pattern" do
m = create_media url: 'https://www.facebook.com/democrats/videos/10154268929856943'
d = m.as_json
assert_match /Democratic Party/, d['title']
assert_match /On National Voter Registration Day/, d['description']
assert_equal 'item', d['type']
assert_not_nil d['picture']
assert_not_nil Time.parse(d['published_at'])
end
test "should parse Facebook video url from a profile" do
m = create_media url: 'https://www.facebook.com/edwinscott143/videos/vb.737361619/10154242961741620/?type=2&theater'
d = m.as_json
assert_match /Eddie/, d['title']
assert_equal 'item', d['type']
assert_match /^http/, d['picture']
assert_match /10154242963196620/, d['picture']
assert_not_nil d['author_picture']
assert_not_nil Time.parse(d['published_at'])
end
test "should parse Facebook video on page album" do
m = create_media url: 'https://www.facebook.com/scmp/videos/vb.355665009819/10154584426664820/?type=2&theater'
d = m.as_json
assert_match /South China Morning Post/, d['title']
assert_match /SCMP #FacebookLive/, d['description']
assert_equal 'scmp', d['username']
assert_match /355665009819/, d['author_picture']
assert_match /10154584445939820/, d['picture']
assert_match 'http://facebook.com/355665009819', d['author_url']
assert_not_nil Time.parse(d['published_at'])
assert_match /South China Morning Post/, d['author_name']
end
test "should parse Facebook gif photo url" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/posts/1095740107184121'
d = m.as_json
assert_match /New Quoted Pictures Everyday/, d['title']
assert_not_nil d['description']
assert_match /giphy.gif/, d['photos'].first
end
test "should parse album post with a permalink" do
m = create_media url: 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406'
d = m.as_json
assert_match /Mariano Rajoy Brey/, d['title']
assert_equal 'item', d['type']
assert_match /54212446406/, d['author_picture']
assert_match /14543767_10154534111016407_5167486558738906371/, d['picture']
assert_not_nil Time.parse(d['published_at'])
assert_equal '10154534111016407', d['object_id']
end
test "should parse facebook user post" do
m = create_media url: 'https://www.facebook.com/dina.hawary/posts/10158416884740321'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_match /Dina El Hawary/, d['title']
assert_match /ربنا يزيدهن فوق القوة قوة/, d['description']
assert_not_nil d['published_at']
assert_equal 'Dina El Hawary', d['author_name']
assert_equal 'dina.hawary', d['username']
assert_match 'http://facebook.com/813705320', d['author_url']
assert_match /813705320/, d['author_picture']
assert_not_nil d['picture']
assert_nil d['error']
assert_equal 'https://www.facebook.com/dina.hawary/posts/10158416884740321', m.url
end
test "should parse facebook url with colon mark" do
m = create_media url: 'https://www.facebook.com/Classic.mou/posts/666508790193454:0'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_equal '136985363145802_666508790193454', d['uuid']
assert_match /Classic/, d['title']
assert_match /إليزابيث تايلو/, d['description']
assert_not_nil d['published_at']
assert_equal 'Classic.mou', d['username']
assert_equal 'Classic', d['author_name']
assert_match 'http://facebook.com/136985363145802', d['author_url']
assert_match /136985363145802/, d['author_picture']
assert_match /16473884_666508790193454_8112186335057907723/, d['picture']
assert_equal 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/666508790193454/?type=3', m.url
end
test "should parse Facebook post from media set" do
m = create_media url: 'https://www.facebook.com/media/set/?set=a.10154534110871407.1073742048.54212446406&type=3'
d = m.as_json
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/En el Museo Serralves de Oporto/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert d['media_count'] > 20
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/media/set?set=a.10154534110871407.1073742048.54212446406&type=3', m.url
end
test "should support facebook pattern with pg" do
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407'
d = m.as_json
assert_equal 'item', d['type']
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/Militante del Partido Popular/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/pages/category/Politician/Mariano-Rajoy-Brey-54212446406/photos/', m.url
end
test "should support facebook pattern with album" do
m = create_media url: 'https://www.facebook.com/album.php?fbid=10154534110871407&id=54212446406&aid=1073742048'
d = m.as_json
assert_equal '10154534110871407_10154534110871407', d['uuid']
assert_match(/En el Museo Serralves de Oporto/, d['text'])
assert_equal '10154534110871407', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert d['media_count'] > 20
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/media/set?set=a.10154534110871407', m.url
end
test "should get facebook data from original_url when url fails" do
Media.any_instance.stubs(:url).returns('https://www.facebook.com/Mariano-Rajoy-Brey-54212446406/photos')
Media.any_instance.stubs(:original_url).returns('https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407')
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos'
d = m.as_json
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/Militante del Partido Popular/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert_equal '10154534110871407', d['object_id']
Media.any_instance.unstub(:url)
Media.any_instance.unstub(:original_url)
end
test "should parse as html when API token is expired and notify Airbrake" do
fb_token = CONFIG['facebook_auth_token']
Airbrake.stubs(:configured?).returns(true)
Airbrake.stubs(:notify).once
CONFIG['facebook_auth_token'] = 'EAACMBapoawsBAP8ugWtoTpZBpI68HdM68qgVdLNc8R0F8HMBvTU1mOcZA4R91BsHZAZAvSfTktgBrdjqhYJq2Qet2RMsNZAu12J14NqsP1oyIt74vXlFOBkR7IyjRLLVDysoUploWZC1N76FMPf5Dzvz9Sl0EymSkZD'
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
data = m.as_json
assert_match /Nostalgia/, data['title']
CONFIG['facebook_auth_token'] = fb_token
data = m.as_json(force: 1)
assert_match /Nostalgia/, data['title']
Airbrake.unstub(:configured?)
Airbrake.unstub(:notify)
end
# Verifies the raw oembed payload returned by Facebook's oembed endpoint is
# cached under data['raw']['oembed'] with the expected provider fields.
test "should store data of post returned by oembed" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028416870556238'
oembed = m.as_json['raw']['oembed']
assert oembed.is_a? Hash
assert !oembed.empty?
assert_nil oembed['title']
assert_equal 'Teste', oembed['author_name']
assert_match 'https://www.facebook.com/teste637621352/', oembed['author_url']
assert_equal 'Facebook', oembed['provider_name']
assert_equal 'https://www.facebook.com', oembed['provider_url']
assert_equal 552, oembed['width']
assert oembed['height'].nil?
end
# A photo post should store raw oembed data and normalize its canonical URL.
test "should store oembed data of a facebook post" do
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
data = m.as_json
assert_equal 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501/942167619246718/?type=3', m.url
assert data['raw']['oembed'].is_a? Hash
assert_equal "https://www.facebook.com", data['raw']['oembed']['provider_url']
assert_equal "Facebook", data['raw']['oembed']['provider_name']
end
# Pages have no raw oembed from the endpoint; a synthesized data['oembed']
# hash is built from the parsed page attributes instead.
test "should store oembed data of a facebook page" do
m = create_media url: 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
data = m.as_json
assert_nil data['raw']['oembed']
assert_equal 'Meedan', data['oembed']['author_name']
assert_equal 'Meedan', data['oembed']['title']
end
# Page photo URL (a.<album>.<n>.<page> pattern): checks author, media count
# and a fixed publish date parsed from the live post.
test "should parse Facebook post from page photo" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
d = m.as_json
assert_match /New Quoted Pictures Everyday/, d['title']
assert_match /New Quoted Pictures Everyday added a new photo./, d['description']
assert_equal 'quoted.pictures', d['username']
assert_equal 'New Quoted Pictures Everyday', d['author_name']
assert_not_nil d['author_url']
assert_not_nil d['picture']
assert_equal 1, d['media_count']
assert_equal '08/09/2016', Time.parse(d['published_at']).strftime("%d/%m/%Y")
assert_match /Pictures/, d['text']
end
# URLs matching no known Facebook pattern (pb.<id> photo stream) should still
# be typed as a generic 'item' with author data scraped from the page.
test "should parse facebook url without identified pattern as item" do
m = create_media url: 'https://www.facebook.com/Bimbo.Memories/photos/pb.235404669918505.-2207520000.1481570271./1051597428299221/?type=3&theater'
d = m.as_json
assert_equal 'item', d['type']
assert_match /Bimbo Memories/, d['title']
assert_not_nil d['description']
assert_not_nil d['published_at']
assert_equal 'Bimbo Memories', d['author_name']
assert_equal 'Bimbo.Memories', d['username']
assert_match 'http://facebook.com/235404669918505', d['author_url']
assert_match /235404669918505/, d['author_picture']
assert_match /15400507_1051597428299221_6315842220063966332/, d['picture']
end
# Extremely long encoded ms.c album-photo URL: parser must extract the final
# object id and a single photo from the album context.
test "should parse Facebook photo post within an album url" do
m = create_media url: 'https://www.facebook.com/ESCAPE.Egypt/photos/ms.c.eJxNk8d1QzEMBDvyQw79N2ZyaeD7osMIwAZKLGTUViod1qU~;DCBNHcpl8gfMKeR8bz2gH6ABlHRuuHYM6AdywPkEsH~;gqAjxqLAKJtQGZFxw7CzIa6zdF8j1EZJjXRgTzAP43XBa4HfFa1REA2nXugScCi3wN7FZpF5BPtaVDEBqwPNR60O9Lsi0nbDrw3KyaPCVZfqAYiWmZO13YwvSbtygCWeKleh9KEVajW8FfZz32qcUrNgA5wfkA4Xfh004x46d9gdckQt2xR74biSOegwIcoB9OW~_oVIxKML0JWYC0XHvDkdZy0oY5bgjvBAPwdBpRuKE7kZDNGtnTLoCObBYqJJ4Ky5FF1kfh75Gnyl~;Qxqsv.bps.a.1204090389632094.1073742218.423930480981426/1204094906298309/?type=3&theater'
d = m.as_json
assert_equal '09/2016', Time.parse(d['published_at']).strftime('%m/%Y')
assert_equal 'item', d['type']
assert_match /Escape/, d['title']
assert_match /Escape(\.Egypt)? added a new photo./, d['description']
assert_match /423930480981426/, d['author_picture']
assert_equal 1, d['photos'].size
assert_match /^https:/, d['picture']
assert_equal '1204094906298309', d['object_id']
end
# Photo inside a pb.* album stream: full metadata including Arabic
# description text, and no error key on success.
test "should parse photo in a photo album" do
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/pb.456182634511888.-2207520000.1484079948./928269767303170/?type=3&theater'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_match /Nostalgia/, d['title']
assert_match /مين قالك تسكن فى حاراتنا/, d['description']
assert_not_nil d['published_at']
assert_equal 'nostalgia.y', d['username']
assert_equal 'Nostalgia', d['author_name']
assert_match 'http://facebook.com/456182634511888', d['author_url']
assert_match /456182634511888/, d['author_picture']
assert_match /15181134_928269767303170_7195169848911975270/, d['picture']
assert_nil d['error']
end
# Page photo URL: uuid is "<page_id>_<object_id>" and the post text comes
# from the photo caption.
test "should create Facebook post from page photo URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/photos/a.754851877912740.1073741826.749262715138323/896869113711015/?type=3'
d = m.as_json
assert_equal '749262715138323_896869113711015', d['uuid']
assert_equal 'This post should be fetched.', d['text']
assert_equal '749262715138323', d['user_uuid']
assert_equal 'Teste', d['author_name']
assert_equal 'teste637621352', d['username']
assert_equal 1, d['media_count']
assert_equal '896869113711015', d['object_id']
assert_equal '03/2015', Time.parse(d['published_at']).strftime("%m/%Y")
end
# Multi-photo page post: media_count reflects the number of attached photos.
test "should create Facebook post from page photos URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
d = m.as_json
assert_equal '749262715138323_1028795030518422', d['uuid']
assert_equal 'This is just a test with many photos.', d['text']
assert_equal '749262715138323', d['user_uuid']
assert_equal 'Teste', d['author_name']
assert_equal 2, d['media_count']
assert_equal '1028795030518422', d['object_id']
assert_equal '11/2015', Time.parse(d['published_at']).strftime("%m/%Y")
end
# Personal-profile post with multiple photos: user_uuid is the profile id.
test "should create Facebook post from user photos URL" do
m = create_media url: 'https://www.facebook.com/nanabhay/posts/10156130657385246?pnref=story'
d = m.as_json
assert_equal '735450245_10156130657385246', d['uuid']
assert_equal 'Such a great evening with friends last night. Sultan Sooud Al-Qassemi has an amazing collecting of modern Arab art. It was a visual tour of the history of the region over the last century.', d['text'].strip
assert_equal '735450245', d['user_uuid']
assert_equal 'Mohamed Nanabhay', d['author_name']
assert_equal 4, d['media_count']
assert_equal '10156130657385246', d['object_id']
assert_equal '27/10/2015', Time.parse(d['published_at']).strftime("%d/%m/%Y")
end
# photo.php?fbid=... style URL: fbid doubles as both user and object id here.
test "should parse Facebook post from user photo URL" do
m = create_media url: 'https://www.facebook.com/photo.php?fbid=10155150801660195&set=p.10155150801660195&type=1&theater'
d = m.as_json
assert_equal '10155150801660195_10155150801660195', d['uuid']
assert_equal '10155150801660195', d['user_uuid']
assert_equal 'David Marcus', d['author_name']
assert_equal 1, d['media_count']
assert_equal '10155150801660195', d['object_id']
assert_match /David Marcus/, d['title']
assert_match /10155150801660195/, d['author_picture']
assert_not_nil d['picture']
assert_match /always working on ways to make Messenger more useful/, d['text']
end
# Data-driven tests: fbposts.yml maps a post URL to its expected text; one
# test is generated per fixture entry at class-load time.
tests = YAML.load_file(File.join(Rails.root, 'test', 'data', 'fbposts.yml'))
tests.each do |url, text|
test "should get text from Facebook user post from URL '#{url}'" do
m = create_media url: url
# Collapse whitespace so the comparison is layout-insensitive.
assert_equal text, m.as_json['text'].gsub(/\s+/, ' ').strip
end
end
# Three posts with different photo layouts: 'picture' is always a single
# https URL and 'photos' an array, regardless of media_count.
test "should create Facebook post with picture and photos" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
d = m.as_json
assert_match /^https/, d['picture']
assert_kind_of Array, d['photos']
assert_equal 2, d['media_count']
assert_equal 1, d['photos'].size
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1035783969819528'
d = m.as_json
assert_not_nil d['picture']
assert_match /^https/, d['author_picture']
assert_kind_of Array, d['photos']
# media_count can be 0 while a preview photo is still present.
assert_equal 0, d['media_count']
assert_equal 1, d['photos'].size
m = create_media url: 'https://www.facebook.com/teste637621352/posts/2194142813983632'
d = m.as_json
assert_match /^https/, d['author_picture']
assert_match /^https/, d['picture']
assert_kind_of Array, d['photos']
assert_equal 2, d['media_count']
assert_equal 1, d['photos'].size
end
# RTL/Arabic author and text must round-trip intact through parsing.
test "should create Facebook post from Arabic user" do
m = create_media url: 'https://www.facebook.com/ahlam.alialshamsi/posts/108561999277346?pnref=story'
d = m.as_json
assert_equal '100003706393630_108561999277346', d['uuid']
assert_equal '100003706393630', d['user_uuid']
assert_equal 'Ahlam Ali Al Shāmsi', d['author_name']
assert_equal 0, d['media_count']
assert_equal '108561999277346', d['object_id']
assert_equal 'أنا مواد رافعة الآن الأموال اللازمة لمشروع مؤسسة خيرية، ودعم المحتاجين في غرب أفريقيا مساعدتي لبناء مكانا أفضل للأطفال في أفريقيا', d['text']
end
# URL normalization must be idempotent: parsing the normalized form again
# yields the same canonical URL (transitivity).
test "should have a transitive relation between normalized URLs" do
url = 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
m = create_media url: url
data = m.as_json
url = 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334/1096134023811396/?type=3'
assert_equal url, data['url']
m = create_media url: url
data = m.as_json
assert_equal url, data['url']
end
# Media.as_oembed with cached data: builds the oembed envelope (title,
# author, provider, dimensions, iframe html) from the parsed page.
test "should return item as oembed" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# Same envelope when no cached data is passed (nil) and the Media instance
# is supplied as the fifth argument so as_oembed parses on demand.
test "should return item as oembed when data is not on cache" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(nil, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# Cached data missing its 'raw' key must not break oembed generation.
test "should return item as oembed when data is on cache and raw key is missing" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
json_data = m.as_json
json_data.delete('raw')
data = Media.as_oembed(json_data, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# When Facebook exposes its own oembed endpoint for the post, requested
# width/height override the endpoint's, but the raw stored values keep the
# endpoint's original dimensions (verified via Pender::Store).
test "should return item as oembed when the page has oembed url" do
url = 'https://www.facebook.com/teste637621352/posts/1028416870556238'
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_nil data['title']
assert_equal 'Teste', data['author_name']
assert_match 'https://www.facebook.com/teste637621352', data['author_url']
assert_equal 'Facebook', data['provider_name']
assert_equal 'https://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
json = Pender::Store.read(Media.get_id(url), :json)
assert_equal 552, json[:raw][:oembed][:width]
assert_nil json[:raw][:oembed][:height]
end
# l.facebook.com/l.php redirect links are interstitials, not content:
# no embed html should be produced for them.
test "should not use Facebook embed if is a link to redirect" do
url = 'https://l.facebook.com/l.php?u=https://hindi.indiatvnews.com/paisa/business-1-07-cr-new-taxpayers-added-dropped-filers-down-at-25-22-lakh-in-fy18-630914&h=AT1WAU-mDHKigOgFNrUsxsS2doGO0_F5W9Yck7oYUx-IsYAHx8JqyHwO02-N0pX8UOlcplZO50px8mkTA1XNyKig8Z2CfX6t3Sh0bHtO9MYPtWqacCm6gOXs5lbC6VGMLjDALNXZ6vg&s=1'
m = create_media url: url
data = m.as_json
assert_equal 'Leaving Facebook', data['author_name']
assert_equal 'flx', data['username']
assert_equal '', data['html']
end
# Shared content: the sharing post exposes 'original_post' and inherits the
# original's image; the original itself has no 'original_post' key.
test "should get image from original post if is a shared content" do
image_name = '32456133_1538581556252460_5832184448275185664'
original_url = 'https://www.facebook.com/dcc1968/posts/1538584976252118'
m = create_media url: original_url.to_s
data = m.as_json
assert_nil data.dig('original_post')
assert_match image_name, data[:picture]
url = 'https://www.facebook.com/danielafeitosa/posts/1862242233833668'
m = create_media url: url.to_s
data = m.as_json
assert_match /facebook.com\/dcc1968/, data.dig('original_post')
assert_match image_name, data[:picture]
end
# Guards against infinite recursion: parsing the original post must not
# itself chase another 'original_post'.
test "should not get original post if it's already parsing the original post" do
m = create_media url: 'https://www.facebook.com/groups/1863694297275556/permalink/2193768444268138/'
data = m.as_json
original_post = data.dig('original_post')
assert_not_nil original_post
original = Media.new url: original_post
assert_nil original.as_json['original_post']
end
# external_id is extracted from the og:url meta tag (doc is stubbed so no
# network fetch is needed).
test "should have external id for post" do
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML("<meta property='og:url' content='https://www.facebook.com/ironmaiden/posts/10156071020577051'>"))
m = create_media url: 'https://www.facebook.com/ironmaiden/posts/10156071020577051'
data = m.as_json
assert_equal '10156071020577051', data['external_id']
Media.any_instance.unstub(:doc)
end
# /pages/category/... URLs resolve to a page title with ' on Facebook'.
test "should parse Facebook category page" do
m = create_media url: 'https://www.facebook.com/pages/category/Society---Culture-Website/PoporDezamagit/photos/'
data = m.as_json
assert_equal 'Popor dezamagit on Facebook', data[:title]
end
# URLs of deleted or nonexistent posts must surface a NOT_FOUND error code
# with an empty embed html instead of raising.
test "should add not found error and return empty html" do
urls = ['https://www.facebook.com/danielafeitosa/posts/2074906892567200', 'https://www.facebook.com/caiosba/posts/8457689347638947', 'https://www.facebook.com/photo.php?fbid=158203948564609&set=pb.100031250132368.-2207520000..&type=3&theater']
urls.each do |url|
m = create_media url: url
data = m.as_json
assert_equal '', data[:html]
# `.const_get` is a method call; `::` should be reserved for constant
# lookup (RuboCop Style/ColonMethodCall).
assert_equal LapisConstants::ErrorCodes.const_get('NOT_FOUND'), data[:error][:code]
assert_equal 'URL Not Found', data[:error][:message]
end
end
# Posts only visible when logged in must surface a LOGIN_REQUIRED error
# with an empty embed html.
test "should add login required error and return empty html" do
m = create_media url: 'https://www.facebook.com/caiosba/posts/2914211445293757'
data = m.as_json
assert_equal '', data[:html]
assert_equal 'Login required to see this profile', data[:error][:message]
# `.const_get` is a method call; `::` should be reserved for constant
# lookup (RuboCop Style/ColonMethodCall).
assert_equal LapisConstants::ErrorCodes.const_get('LOGIN_REQUIRED'), data[:error][:code]
end
# Canonical URLs containing Devanagari (non-ascii) characters in og:url must
# not raise during normalization; doc is stubbed to avoid a network fetch.
test "should not raise error when canonical URL on meta tags has non-ascii" do
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML('<meta property="og:title" content="राजनीति no Facebook Watch" /><meta property="og:url" content="https://www.facebook.com/राजनीति-105391010971335/videos/%E0%A4%AF%E0%A5%87-%E0%A4%B5%E0%A4%BF%E0%A4%A1%E0%A5%80%E0%A4%93-%E0%A4%B6%E0%A4%BE%E0%A4%AF%E0%A4%A6-%E0%A4%B0%E0%A4%BE%E0%A4%9C%E0%A4%B8%E0%A5%8D%E0%A4%A5%E0%A4%BE%E0%A4%A8-%E0%A4%95%E0%A5%8D%E0%A4%B7%E0%A5%87%E0%A4%A4%E0%A5%8D%E0%A4%B0-%E0%A4%95%E0%A5%87-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%97%E0%A4%BE%E0%A4%81%E0%A4%B5-%E0%A4%95%E0%A4%BE-%E0%A4%B9%E0%A5%88-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%A8%E0%A5%87-%E0%A4%AD%E0%A5%87%E0%A4%9C%E0%A4%BE-%E0%A4%B9%E0%A5%88-%E0%A4%AF%E0%A4%A6%E0%A4%BF-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%AC%E0%A4%A8%E0%A5%8D%E0%A4%A6%E0%A5%87/258392245354246/" />'))
assert_nothing_raised do
m = create_media url: 'https://www.facebook.com/राजनीति-105391010971335/videos/ये-विडीओ-शायद-राजस्थान-क्षेत्र-के-किसी-गाँव-का-है-किसी-ने-भेजा-है-यदि-किसी-बन्दे/258392245354246/'
data = m.as_json
assert_equal 'राजनीति no Facebook Watch on Facebook', data['title']
assert_nil data['error']
end
Media.any_instance.unstub(:doc)
end
end
# fix test
require File.join(File.expand_path(File.dirname(__FILE__)), '..', 'test_helper')
require 'cc_deville'
class FacebookItemTest < ActiveSupport::TestCase
# Two URL variants of the same photo must normalize to one canonical URL.
test "should get canonical URL parsed from facebook html" do
media1 = create_media url: 'https://www.facebook.com/photo.php?fbid=10155446238011949&set=a.10151842779956949&type=3&theater'
media2 = create_media url: 'https://www.facebook.com/photo.php?fbid=10155446238011949&set=a.10151842779956949&type=3'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
# Relative canonical links in the scraped HTML must resolve correctly.
test "should get canonical URL parsed from facebook html when it is relative" do
media1 = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
media2 = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
# Same normalization guarantee for page posts (tracking params stripped).
test "should get canonical URL parsed from facebook html when it is a page" do
media1 = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479?pnref=story.unseen-section'
media2 = create_media url: 'https://www.facebook.com/CyrineOfficialPage/posts/10154332542247479'
media1.as_json
media2.as_json
assert_equal media2.url, media1.url
end
# permalink.php?story_fbid=...&id=... resolves to the photo's canonical URL.
test "should get canonical URL from facebook object 2" do
media = Media.new(url: 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406')
media.as_json({ force: 1 })
assert_equal 'https://www.facebook.com/54212446406/photos/a.10154534110871407/10154534111016407/?type=3', media.url
end
# Legacy three-part album ids (a.<album>.<n>.<page>) collapse to the short
# canonical form, with or without the trailing slash.
test "should get canonical URL from facebook object 3" do
expected = 'https://www.facebook.com/54212446406/photos/a.10154534110871407/10154534111016407/?type=3'
variations = %w(
https://www.facebook.com/54212446406/photos/a.10154534110871407.1073742048.54212446406/10154534111016407/?type=3
https://www.facebook.com/54212446406/photos/a.10154534110871407.1073742048.54212446406/10154534111016407?type=3
)
variations.each do |url|
media = Media.new(url: url)
media.as_json({ force: 1 })
assert_equal expected, media.url
end
end
# pcb.* collage URLs: every variation should yield the same expected field
# values (string equality or regex match, driven by the expected hash).
test "should parse facebook url with a photo album" do
expected = {
url: 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/613639175480416/?type=3',
title: 'Classic',
username: 'Classic.mou',
author_name: 'Classic',
author_url: 'http://facebook.com/136985363145802',
author_picture: 'https://graph.facebook.com/136985363145802/picture',
picture: /613639175480416_2497518582358260577/,
description: /Classic added a new photo/
}.with_indifferent_access
variations = %w(
https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/?type=3&theater
https://www.facebook.com/Classic.mou/photos/pcb.613639338813733/613639175480416/
)
variations.each do |url|
media = Media.new(url: url)
data = media.as_json
expected.each do |key, value|
# assert_match accepts both string and regexp expectations.
assert_match value, data[key]
end
end
end
# m.facebook.com story.php live-video URL (with mangled ¬ params) still
# parses to the page's live post metadata.
test "should parse Facebook live post from mobile URL" do
m = create_media url: 'https://m.facebook.com/story.php?story_fbid=10154584426664820&id=355665009819%C2%ACif_t=live_video%C2%ACif_id=1476846578702256&ref=bookmarks'
data = m.as_json
assert_match /South China Morning Post/, data['title']
assert_match /SCMP #FacebookLive amid chaotic scenes in #HongKong Legco/, data['description']
assert_not_nil data['published_at']
assert_equal 'South China Morning Post', data['author_name']
assert_match 'http://facebook.com/355665009819', data['author_url']
assert_match /355665009819/, data['author_picture']
assert !data['picture'].blank?
end
# m.facebook.com photo URL parses like the desktop equivalent.
test "should create Facebook post from mobile URL" do
m = create_media url: 'https://m.facebook.com/KIKOLOUREIROofficial/photos/a.10150618138397252/10152555300292252/?type=3&theater'
d = m.as_json
assert_match /Bolívia/, d['text']
assert_equal 'Kiko Loureiro', d['author_name']
assert_equal 1, d['media_count']
assert_equal '20/11/2014', Time.parse(d['published_at']).strftime("%d/%m/%Y")
end
# Text-only post (no media): title, description and date still populated.
test "should parse Facebook pure text post url" do
m = create_media url: 'https://www.facebook.com/dina.samak/posts/10153679232246949?pnref=story.unseen-section'
d = m.as_json
assert_match /Dina Samak/, d['title']
assert_not_nil d['description']
assert_not_nil d['author_picture']
assert_not_nil Time.parse(d['published_at'])
end
# Live video URL keeps its original form (no normalization) and exposes
# the broadcast metadata.
test "should parse Facebook live post" do
m = create_media url: 'https://www.facebook.com/cbcnews/videos/10154783484119604/'
data = m.as_json
assert_equal 'https://www.facebook.com/cbcnews/videos/10154783484119604/', m.url
assert_match /CBC News/, data['title']
assert_match /Live now: This is the National for Monday, Oct. 31, 2016./, data['description']
assert_not_nil data['published_at']
assert_equal 'cbcnews', data['username']
assert_match 'http://facebook.com/5823419603', data['author_url']
assert_match /5823419603/, data['author_picture']
assert_match /^https/, data['picture']
assert_match /10154783812779604/, data['picture']
end
# Removed live post: title falls back to 'Not Identified', description and
# published_at are empty strings, but author data is still resolved.
test "should parse Facebook removed live post" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1538843716180215/'
data = m.as_json
assert_equal 'https://www.facebook.com/teste637621352/posts/1538843716180215', m.url
assert_match /Not Identified/, data['title']
assert_equal '', data['description']
assert_equal '', data['published_at']
assert_equal 'teste637621352', data['username']
assert_match 'http://facebook.com/749262715138323', data['author_url']
assert_match /749262715138323/, data['author_picture']
end
# Livemap URL variants: each should parse as a generic Facebook page.
# NOTE(review): mocha `expects(:base_url)` is set on a String literal and
# defaults to exactly-once while three medias are created — presumably
# base_url is only invoked once overall; confirm if this test flakes.
test "should parse Facebook livemap" do
variations = %w(
https://www.facebook.com/livemap/#@-12.991858482361014,-38.521747589110994,4z
https://www.facebook.com/live/map/#@37.777053833008,-122.41587829590001,4z
https://www.facebook.com/live/discover/map/#@37.777053833008,-122.41587829590001,4z
)
request = 'http://localhost'
request.expects(:base_url).returns('http://localhost')
variations.each do |url|
m = create_media url: url, request: request
data = m.as_json
assert_match /facebook\.com/, m.url
assert_match /Facebook/, data['title']
assert_not_nil data['published_at']
end
end
# Event permalink: Facebook may redirect to several equivalent URLs, so the
# canonical URL is asserted against a whitelist of known variations.
test "should parse Facebook event post" do
m = create_media url: 'https://www.facebook.com/events/364677040588691/permalink/376287682760960/?ref=1&action_history=null'
data = m.as_json
variations = %w(
https://www.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null
https://www.facebook.com/events/zawya/zawyas-tribute-to-mohamed-khan-%D9%85%D9%88%D8%B9%D8%AF-%D9%85%D8%B9-%D8%AE%D8%A7%D9%86/364677040588691/
https://web.facebook.com/events/364677040588691/permalink/376287682760960?ref=1&action_history=null&_rdc=1&_rdr
)
assert_includes variations, m.url
assert_not_nil data['published_at']
assert_match /#{data['user_uuid']}/, data['author_url']
assert_match /#{data['user_uuid']}/, data['author_picture']
assert_match /^https:/, data['picture']
assert_match /Zawya/, data['title']
assert_match 'Zawya', data['username']
end
# Page video via numeric page id in the path.
test "should parse Facebook video url from a page" do
m = create_media url: 'https://www.facebook.com/144585402276277/videos/1127489833985824'
d = m.as_json
assert_match /Trent Aric - Meteorologist/, d['title']
assert_match /MATTHEW YOU ARE DRUNK...GO HOME!/, d['description']
assert_equal 'item', d['type']
assert_not_nil d['picture']
assert_not_nil Time.parse(d['published_at'])
end
# Page video via vanity name in the path.
test "should parse Facebook video url from a page with another url pattern" do
m = create_media url: 'https://www.facebook.com/democrats/videos/10154268929856943'
d = m.as_json
assert_match /Democratic Party/, d['title']
assert_match /On National Voter Registration Day/, d['description']
assert_equal 'item', d['type']
assert_not_nil d['picture']
assert_not_nil Time.parse(d['published_at'])
end
# Profile video with the vb.<uid> prefix pattern.
test "should parse Facebook video url from a profile" do
m = create_media url: 'https://www.facebook.com/edwinscott143/videos/vb.737361619/10154242961741620/?type=2&theater'
d = m.as_json
assert_match /Eddie/, d['title']
assert_equal 'item', d['type']
assert_match /^http/, d['picture']
assert_match /10154242963196620/, d['picture']
assert_not_nil d['author_picture']
assert_not_nil Time.parse(d['published_at'])
end
# Video inside a page's video album (vb.<page_id> pattern).
test "should parse Facebook video on page album" do
m = create_media url: 'https://www.facebook.com/scmp/videos/vb.355665009819/10154584426664820/?type=2&theater'
d = m.as_json
assert_match /South China Morning Post/, d['title']
assert_match /SCMP #FacebookLive/, d['description']
assert_equal 'scmp', d['username']
assert_match /355665009819/, d['author_picture']
assert_match /10154584445939820/, d['picture']
assert_match 'http://facebook.com/355665009819', d['author_url']
assert_not_nil Time.parse(d['published_at'])
assert_match /South China Morning Post/, d['author_name']
end
# Animated GIF posts: the photos array should contain the giphy URL.
test "should parse Facebook gif photo url" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/posts/1095740107184121'
d = m.as_json
assert_match /New Quoted Pictures Everyday/, d['title']
assert_not_nil d['description']
assert_match /giphy.gif/, d['photos'].first
end
# permalink.php album photo: resolved to an item with the photo object id.
test "should parse album post with a permalink" do
m = create_media url: 'https://www.facebook.com/permalink.php?story_fbid=10154534111016407&id=54212446406'
d = m.as_json
assert_match /Mariano Rajoy Brey/, d['title']
assert_equal 'item', d['type']
assert_match /54212446406/, d['author_picture']
assert_match /14543767_10154534111016407_5167486558738906371/, d['picture']
assert_not_nil Time.parse(d['published_at'])
assert_equal '10154534111016407', d['object_id']
end
# Plain user post: full author metadata, canonical URL and no error.
test "should parse facebook user post" do
m = create_media url: 'https://www.facebook.com/dina.hawary/posts/10158416884740321'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_match /Dina El Hawary/, d['title']
assert_match /ربنا يزيدهن فوق القوة قوة/, d['description']
assert_not_nil d['published_at']
assert_equal 'Dina El Hawary', d['author_name']
assert_equal 'dina.hawary', d['username']
assert_match 'http://facebook.com/813705320', d['author_url']
assert_match /813705320/, d['author_picture']
assert_not_nil d['picture']
assert_nil d['error']
assert_equal 'https://www.facebook.com/dina.hawary/posts/10158416884740321', m.url
end
# Post id with a ':0' suffix: suffix must be handled and the URL normalized
# to the photo's canonical album form.
test "should parse facebook url with colon mark" do
m = create_media url: 'https://www.facebook.com/Classic.mou/posts/666508790193454:0'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_equal '136985363145802_666508790193454', d['uuid']
assert_match /Classic/, d['title']
assert_match /إليزابيث تايلو/, d['description']
assert_not_nil d['published_at']
assert_equal 'Classic.mou', d['username']
assert_equal 'Classic', d['author_name']
assert_match 'http://facebook.com/136985363145802', d['author_url']
assert_match /136985363145802/, d['author_picture']
assert_match /16473884_666508790193454_8112186335057907723/, d['picture']
assert_equal 'https://www.facebook.com/Classic.mou/photos/a.136991166478555/666508790193454/?type=3', m.url
end
# /media/set album URL: whole-album item, media_count spans the album.
test "should parse Facebook post from media set" do
m = create_media url: 'https://www.facebook.com/media/set/?set=a.10154534110871407.1073742048.54212446406&type=3'
d = m.as_json
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/En el Museo Serralves de Oporto/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert d['media_count'] > 20
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/media/set?set=a.10154534110871407.1073742048.54212446406&type=3', m.url
end
# /pg/<page>/photos/?tab=album URL: same album data, canonical URL resolves
# to the page's category path.
test "should support facebook pattern with pg" do
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407'
d = m.as_json
assert_equal 'item', d['type']
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/Militante del Partido Popular/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/pages/category/Politician/Mariano-Rajoy-Brey-54212446406/photos/', m.url
end
# Legacy album.php URL: normalized to the /media/set form; note the uuid
# here uses the album id for both halves.
test "should support facebook pattern with album" do
m = create_media url: 'https://www.facebook.com/album.php?fbid=10154534110871407&id=54212446406&aid=1073742048'
d = m.as_json
assert_equal '10154534110871407_10154534110871407', d['uuid']
assert_match(/En el Museo Serralves de Oporto/, d['text'])
assert_equal '10154534110871407', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert d['media_count'] > 20
assert_equal '10154534110871407', d['object_id']
assert_equal 'https://www.facebook.com/media/set?set=a.10154534110871407', m.url
end
# Stubs Media#url to an unparseable variant so the parser must fall back to
# original_url; asserts album metadata is still extracted from the fallback.
test "should get facebook data from original_url when url fails" do
Media.any_instance.stubs(:url).returns('https://www.facebook.com/Mariano-Rajoy-Brey-54212446406/photos')
Media.any_instance.stubs(:original_url).returns('https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos/?tab=album&album_id=10154534110871407')
m = create_media url: 'https://www.facebook.com/pg/Mariano-Rajoy-Brey-54212446406/photos'
d = m.as_json
assert_equal '54212446406_10154534110871407', d['uuid']
assert_match(/Militante del Partido Popular/, d['text'])
assert_equal '54212446406', d['user_uuid']
assert_equal 'Mariano Rajoy Brey', d['author_name']
assert_equal '10154534110871407', d['object_id']
# Always unstub so the stubs don't leak into other tests.
Media.any_instance.unstub(:url)
Media.any_instance.unstub(:original_url)
end
# Forces an invalid Graph API token: parsing should degrade to HTML scraping,
# notify Airbrake once, and recover after the real token is restored.
test "should parse as html when API token is expired and notify Airbrake" do
fb_token = CONFIG['facebook_auth_token']
Airbrake.stubs(:configured?).returns(true)
Airbrake.stubs(:notify).once
# Deliberately malformed/expired token value to trigger the API failure path.
CONFIG['facebook_auth_token'] = 'EAACMBapoawsBAP8ugWtoTpZBpI68HdM68qgVdLNc8R0F8HMBvTU1mOcZA4R91BsHZAZAvSfTktgBrdjqhYJq2Qet2RMsNZAu12J14NqsP1oyIt74vXlFOBkR7IyjRLLVDysoUploWZC1N76FMPf5Dzvz9Sl0EymSkZD'
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
data = m.as_json
assert_match /Nostalgia/, data['title']
# Restore the valid token and force a re-parse; result should be identical.
CONFIG['facebook_auth_token'] = fb_token
data = m.as_json(force: 1)
assert_match /Nostalgia/, data['title']
Airbrake.unstub(:configured?)
Airbrake.unstub(:notify)
end
# Verifies the raw oembed payload returned by Facebook's oembed endpoint is
# cached under data['raw']['oembed'] with the expected provider fields.
test "should store data of post returned by oembed" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028416870556238'
oembed = m.as_json['raw']['oembed']
assert oembed.is_a? Hash
assert !oembed.empty?
assert_nil oembed['title']
assert_equal 'Teste', oembed['author_name']
assert_match 'https://www.facebook.com/teste637621352/', oembed['author_url']
assert_equal 'Facebook', oembed['provider_name']
assert_equal 'https://www.facebook.com', oembed['provider_url']
assert_equal 552, oembed['width']
assert oembed['height'].nil?
end
# A photo post should store raw oembed data and normalize its canonical URL.
test "should store oembed data of a facebook post" do
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501.1073741829.456182634511888/942167619246718/?type=3&theater'
data = m.as_json
assert_equal 'https://www.facebook.com/nostalgia.y/photos/a.508939832569501/942167619246718/?type=3', m.url
assert data['raw']['oembed'].is_a? Hash
assert_equal "https://www.facebook.com", data['raw']['oembed']['provider_url']
assert_equal "Facebook", data['raw']['oembed']['provider_name']
end
# Pages have no raw oembed from the endpoint; a synthesized data['oembed']
# hash is built from the parsed page attributes instead.
test "should store oembed data of a facebook page" do
m = create_media url: 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
data = m.as_json
assert_nil data['raw']['oembed']
assert_equal 'Meedan', data['oembed']['author_name']
assert_equal 'Meedan', data['oembed']['title']
end
# Integration test: a photo posted by a page is parsed with the page as
# author, a media count of 1 and the original publish date preserved.
test "should parse Facebook post from page photo" do
m = create_media url: 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
d = m.as_json
assert_match /New Quoted Pictures Everyday/, d['title']
assert_match /New Quoted Pictures Everyday added a new photo./, d['description']
assert_equal 'quoted.pictures', d['username']
assert_equal 'New Quoted Pictures Everyday', d['author_name']
assert_not_nil d['author_url']
assert_not_nil d['picture']
assert_equal 1, d['media_count']
assert_equal '08/09/2016', Time.parse(d['published_at']).strftime("%d/%m/%Y")
assert_match /Pictures/, d['text']
end
# Integration test: a URL that matches no specific Facebook pattern is still
# parsed as a generic 'item' with author data resolved from the page id.
test "should parse facebook url without identified pattern as item" do
m = create_media url: 'https://www.facebook.com/Bimbo.Memories/photos/pb.235404669918505.-2207520000.1481570271./1051597428299221/?type=3&theater'
d = m.as_json
assert_equal 'item', d['type']
assert_match /Bimbo Memories/, d['title']
assert_not_nil d['description']
assert_not_nil d['published_at']
assert_equal 'Bimbo Memories', d['author_name']
assert_equal 'Bimbo.Memories', d['username']
assert_match 'http://facebook.com/235404669918505', d['author_url']
assert_match /235404669918505/, d['author_picture']
assert_match /15400507_1051597428299221_6315842220063966332/, d['picture']
end
# Integration test: an album-scoped photo URL (long ms.c encoded form) is
# parsed as a single-photo item with the album's object id extracted.
test "should parse Facebook photo post within an album url" do
m = create_media url: 'https://www.facebook.com/ESCAPE.Egypt/photos/ms.c.eJxNk8d1QzEMBDvyQw79N2ZyaeD7osMIwAZKLGTUViod1qU~;DCBNHcpl8gfMKeR8bz2gH6ABlHRuuHYM6AdywPkEsH~;gqAjxqLAKJtQGZFxw7CzIa6zdF8j1EZJjXRgTzAP43XBa4HfFa1REA2nXugScCi3wN7FZpF5BPtaVDEBqwPNR60O9Lsi0nbDrw3KyaPCVZfqAYiWmZO13YwvSbtygCWeKleh9KEVajW8FfZz32qcUrNgA5wfkA4Xfh004x46d9gdckQt2xR74biSOegwIcoB9OW~_oVIxKML0JWYC0XHvDkdZy0oY5bgjvBAPwdBpRuKE7kZDNGtnTLoCObBYqJJ4Ky5FF1kfh75Gnyl~;Qxqsv.bps.a.1204090389632094.1073742218.423930480981426/1204094906298309/?type=3&theater'
d = m.as_json
assert_equal '09/2016', Time.parse(d['published_at']).strftime('%m/%Y')
assert_equal 'item', d['type']
assert_match /Escape/, d['title']
assert_match /Escape(\.Egypt)? added a new photo./, d['description']
assert_match /423930480981426/, d['author_picture']
assert_equal 1, d['photos'].size
assert_match /^https:/, d['picture']
assert_equal '1204094906298309', d['object_id']
end
# Integration test: a pb.-style album photo URL resolves to an item with the
# photo's own picture, the page as author, and no parsing error.
test "should parse photo in a photo album" do
m = create_media url: 'https://www.facebook.com/nostalgia.y/photos/pb.456182634511888.-2207520000.1484079948./928269767303170/?type=3&theater'
d = m.as_json
assert_equal 'item', d['type']
assert_equal 'facebook', d['provider']
assert_match /Nostalgia/, d['title']
assert_match /مين قالك تسكن فى حاراتنا/, d['description']
assert_not_nil d['published_at']
assert_equal 'nostalgia.y', d['username']
assert_equal 'Nostalgia', d['author_name']
assert_match 'http://facebook.com/456182634511888', d['author_url']
assert_match /456182634511888/, d['author_picture']
assert_match /15181134_928269767303170_7195169848911975270/, d['picture']
assert_nil d['error']
end
# Integration test: a page photo URL produces a post whose uuid combines the
# page id and the photo's object id.
test "should create Facebook post from page photo URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/photos/a.754851877912740.1073741826.749262715138323/896869113711015/?type=3'
d = m.as_json
assert_equal '749262715138323_896869113711015', d['uuid']
assert_equal 'This post should be fetched.', d['text']
assert_equal '749262715138323', d['user_uuid']
assert_equal 'Teste', d['author_name']
assert_equal 'teste637621352', d['username']
assert_equal 1, d['media_count']
assert_equal '896869113711015', d['object_id']
assert_equal '03/2015', Time.parse(d['published_at']).strftime("%m/%Y")
end
# Integration test: a multi-photo page post reports the number of attached
# media (2 here) and keeps the post id as object id.
test "should create Facebook post from page photos URL" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
d = m.as_json
assert_equal '749262715138323_1028795030518422', d['uuid']
assert_equal 'This is just a test with many photos.', d['text']
assert_equal '749262715138323', d['user_uuid']
assert_equal 'Teste', d['author_name']
assert_equal 2, d['media_count']
assert_equal '1028795030518422', d['object_id']
assert_equal '11/2015', Time.parse(d['published_at']).strftime("%m/%Y")
end
# Integration test: a post by a personal profile (not a page) is parsed with
# the profile id as user_uuid and all four attached photos counted.
test "should create Facebook post from user photos URL" do
m = create_media url: 'https://www.facebook.com/nanabhay/posts/10156130657385246?pnref=story'
d = m.as_json
assert_equal '735450245_10156130657385246', d['uuid']
assert_equal 'Such a great evening with friends last night. Sultan Sooud Al-Qassemi has an amazing collecting of modern Arab art. It was a visual tour of the history of the region over the last century.', d['text'].strip
assert_equal '735450245', d['user_uuid']
assert_equal 'Mohamed Nanabhay', d['author_name']
assert_equal 4, d['media_count']
assert_equal '10156130657385246', d['object_id']
assert_equal '27/10/2015', Time.parse(d['published_at']).strftime("%d/%m/%Y")
end
# Integration test: a photo.php?fbid=... URL (user photo) is parsed; for this
# form the fbid serves as both user uuid and object id.
test "should parse Facebook post from user photo URL" do
m = create_media url: 'https://www.facebook.com/photo.php?fbid=10155150801660195&set=p.10155150801660195&type=1&theater'
d = m.as_json
assert_equal '10155150801660195_10155150801660195', d['uuid']
assert_equal '10155150801660195', d['user_uuid']
assert_equal 'David Marcus', d['author_name']
assert_equal 1, d['media_count']
assert_equal '10155150801660195', d['object_id']
assert_match /David Marcus/, d['title']
assert_match /10155150801660195/, d['author_picture']
assert_not_nil d['picture']
assert_match /always working on ways to make Messenger more useful/, d['text']
end
# Data-driven tests: test/data/fbposts.yml maps Facebook post URLs to their
# expected text; one test is generated per entry, comparing against the
# parsed text with runs of whitespace collapsed to single spaces.
tests = YAML.load_file(File.join(Rails.root, 'test', 'data', 'fbposts.yml'))
tests.each do |url, text|
test "should get text from Facebook user post from URL '#{url}'" do
m = create_media url: url
assert_equal text, m.as_json['text'].gsub(/\s+/, ' ').strip
end
end
# Integration test covering three posts: the parsed item always exposes a
# `photos` array with one entry, while `media_count` reflects the number of
# attachments Facebook reports (which may be 0 for some post types).
test "should create Facebook post with picture and photos" do
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1028795030518422'
d = m.as_json
assert_match /^https/, d['picture']
assert_kind_of Array, d['photos']
assert_equal 2, d['media_count']
assert_equal 1, d['photos'].size
m = create_media url: 'https://www.facebook.com/teste637621352/posts/1035783969819528'
d = m.as_json
assert_not_nil d['picture']
assert_match /^https/, d['author_picture']
assert_kind_of Array, d['photos']
assert_equal 0, d['media_count']
assert_equal 1, d['photos'].size
m = create_media url: 'https://www.facebook.com/teste637621352/posts/2194142813983632'
d = m.as_json
assert_match /^https/, d['author_picture']
assert_match /^https/, d['picture']
assert_kind_of Array, d['photos']
assert_equal 2, d['media_count']
assert_equal 1, d['photos'].size
end
# Integration test: RTL/Arabic author names and post text are parsed intact.
test "should create Facebook post from Arabic user" do
m = create_media url: 'https://www.facebook.com/ahlam.alialshamsi/posts/108561999277346?pnref=story'
d = m.as_json
assert_equal '100003706393630_108561999277346', d['uuid']
assert_equal '100003706393630', d['user_uuid']
assert_equal 'Ahlam Ali Al Shāmsi', d['author_name']
assert_equal 0, d['media_count']
assert_equal '108561999277346', d['object_id']
assert_equal 'أنا مواد رافعة الآن الأموال اللازمة لمشروع مؤسسة خيرية، ودعم المحتاجين في غرب أفريقيا مساعدتي لبناء مكانا أفضل للأطفال في أفريقيا', d['text']
end
# Normalization must be idempotent: parsing the raw URL yields the canonical
# form, and parsing the canonical form yields itself.
test "should have a transitive relation between normalized URLs" do
  raw_url = 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334.28784.128791873878954/1096134023811396/?type=3&theater'
  canonical_url = 'https://www.facebook.com/quoted.pictures/photos/a.128828073875334/1096134023811396/?type=3'
  assert_equal canonical_url, create_media(url: raw_url).as_json['url']
  assert_equal canonical_url, create_media(url: canonical_url).as_json['url']
end
# Integration test: Media.as_oembed renders an oembed hash from cached data,
# honoring the requested width/height and embedding the pender iframe.
test "should return item as oembed" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# Integration test: when no cached data is passed (nil), Media.as_oembed
# falls back to the media instance itself to build the same oembed output.
test "should return item as oembed when data is not on cache" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
data = Media.as_oembed(nil, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# Integration test: cached data missing its 'raw' key must still produce a
# complete oembed response (no dependence on raw oembed being present).
test "should return item as oembed when data is on cache and raw key is missing" do
url = 'https://www.facebook.com/pages/Meedan/105510962816034?fref=ts'
m = create_media url: url
json_data = m.as_json
json_data.delete('raw')
data = Media.as_oembed(json_data, "http://pender.org/medias.html?url=#{url}", 300, 150)
assert_equal 'Meedan', data['title']
assert_equal 'Meedan', data['author_name']
assert_match 'https://www.facebook.com/pages/Meedan/105510962816034', data['author_url']
assert_equal 'facebook', data['provider_name']
assert_equal 'http://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
assert_equal '<iframe src="http://pender.org/medias.html?url=https://www.facebook.com/pages/Meedan/105510962816034?fref=ts" width="300" height="150" scrolling="no" border="0" seamless>Not supported</iframe>', data['html']
assert_not_nil data['thumbnail_url']
end
# Integration test: when the page exposes its own oembed endpoint, the
# response uses Facebook's provider fields but the requested dimensions;
# the store keeps Facebook's original width (552) and nil height.
test "should return item as oembed when the page has oembed url" do
url = 'https://www.facebook.com/teste637621352/posts/1028416870556238'
m = create_media url: url
data = Media.as_oembed(m.as_json, "http://pender.org/medias.html?url=#{url}", 300, 150, m)
assert_nil data['title']
assert_equal 'Teste', data['author_name']
assert_match 'https://www.facebook.com/teste637621352', data['author_url']
assert_equal 'Facebook', data['provider_name']
assert_equal 'https://www.facebook.com', data['provider_url']
assert_equal 300, data['width']
assert_equal 150, data['height']
json = Pender::Store.read(Media.get_id(url), :json)
assert_equal 552, json[:raw][:oembed][:width]
assert_nil json[:raw][:oembed][:height]
end
# Integration test: l.facebook.com redirect links must not be embedded —
# Facebook serves its interstitial "Leaving Facebook" page for them.
test "should not use Facebook embed if is a link to redirect" do
url = 'https://l.facebook.com/l.php?u=https://hindi.indiatvnews.com/paisa/business-1-07-cr-new-taxpayers-added-dropped-filers-down-at-25-22-lakh-in-fy18-630914&h=AT1WAU-mDHKigOgFNrUsxsS2doGO0_F5W9Yck7oYUx-IsYAHx8JqyHwO02-N0pX8UOlcplZO50px8mkTA1XNyKig8Z2CfX6t3Sh0bHtO9MYPtWqacCm6gOXs5lbC6VGMLjDALNXZ6vg&s=1'
m = create_media url: url
data = m.as_json
assert_equal 'Leaving Facebook', data['author_name']
assert_equal 'flx', data['username']
assert_equal '', data['html']
end
test "should get image from original post if is a shared content" do
image_name = '32456133_1538581556252460_5832184448275185664'
original_url = 'https://www.facebook.com/dcc1968/posts/1538584976252118'
m = create_media url: original_url.to_s
data = m.as_json
assert_nil data.dig('original_post')
assert_match image_name, data[:picture]
url = 'https://www.facebook.com/danielafeitosa/posts/1862242233833668'
m = create_media url: url.to_s
data = m.as_json
assert_match /facebook.com\/dcc1968/, data.dig('original_post')
assert_match image_name, data[:picture]
end
# Integration test: parsing the original of a shared post must not recurse —
# the original's own 'original_post' stays nil, preventing infinite loops.
test "should not get original post if it's already parsing the original post" do
m = create_media url: 'https://www.facebook.com/groups/1863694297275556/permalink/2193768444268138/'
data = m.as_json
original_post = data.dig('original_post')
assert_not_nil original_post
original = Media.new url: original_post
assert_nil original.as_json['original_post']
end
# Unit-ish test: the post's numeric id is extracted as external_id from the
# og:url meta tag (document stubbed, so no network fetch is involved).
test "should have external id for post" do
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML("<meta property='og:url' content='https://www.facebook.com/ironmaiden/posts/10156071020577051'>"))
m = create_media url: 'https://www.facebook.com/ironmaiden/posts/10156071020577051'
data = m.as_json
assert_equal '10156071020577051', data['external_id']
Media.any_instance.unstub(:doc)
end
# A category-scoped page URL resolves to a title built from the page name.
test "should parse Facebook category page" do
  media = create_media url: 'https://www.facebook.com/pages/category/Society---Culture-Website/PoporDezamagit/photos/'
  parsed = media.as_json
  assert_equal 'Popor dezamagit on Facebook', parsed[:title]
end
# Unreachable or deleted posts must surface a NOT_FOUND error code with the
# standard message and render no embed HTML.
test "should add not found error and return empty html" do
  [
    'https://www.facebook.com/danielafeitosa/posts/2074906892567200',
    'https://www.facebook.com/caiosba/posts/8457689347638947',
    'https://www.facebook.com/photo.php?fbid=158203948564609&set=pb.100031250132368.-2207520000..&type=3&theater'
  ].each do |url|
    data = create_media(url: url).as_json
    assert_equal '', data[:html]
    assert_equal LapisConstants::ErrorCodes::const_get('NOT_FOUND'), data[:error][:code]
    assert_equal 'URL Not Found', data[:error][:message]
  end
end
# Integration test: a post behind a login wall yields a LOGIN_REQUIRED error
# and no embed HTML.
test "should add login required error and return empty html" do
m = create_media url: 'https://www.facebook.com/caiosba/posts/2914211445293757'
data = m.as_json
assert_equal '', data[:html]
assert_equal 'Login required to see this profile', data[:error][:message]
assert_equal LapisConstants::ErrorCodes::const_get('LOGIN_REQUIRED'), data[:error][:code]
end
# Regression test: a canonical og:url containing non-ASCII (Devanagari) and
# percent-encoded segments must not raise during parsing (doc is stubbed).
test "should not raise error when canonical URL on meta tags has non-ascii" do
Media.any_instance.stubs(:doc).returns(Nokogiri::HTML('<meta property="og:title" content="राजनीति no Facebook Watch" /><meta property="og:url" content="https://www.facebook.com/राजनीति-105391010971335/videos/%E0%A4%AF%E0%A5%87-%E0%A4%B5%E0%A4%BF%E0%A4%A1%E0%A5%80%E0%A4%93-%E0%A4%B6%E0%A4%BE%E0%A4%AF%E0%A4%A6-%E0%A4%B0%E0%A4%BE%E0%A4%9C%E0%A4%B8%E0%A5%8D%E0%A4%A5%E0%A4%BE%E0%A4%A8-%E0%A4%95%E0%A5%8D%E0%A4%B7%E0%A5%87%E0%A4%A4%E0%A5%8D%E0%A4%B0-%E0%A4%95%E0%A5%87-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%97%E0%A4%BE%E0%A4%81%E0%A4%B5-%E0%A4%95%E0%A4%BE-%E0%A4%B9%E0%A5%88-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%A8%E0%A5%87-%E0%A4%AD%E0%A5%87%E0%A4%9C%E0%A4%BE-%E0%A4%B9%E0%A5%88-%E0%A4%AF%E0%A4%A6%E0%A4%BF-%E0%A4%95%E0%A4%BF%E0%A4%B8%E0%A5%80-%E0%A4%AC%E0%A4%A8%E0%A5%8D%E0%A4%A6%E0%A5%87/258392245354246/" />'))
assert_nothing_raised do
m = create_media url: 'https://www.facebook.com/राजनीति-105391010971335/videos/ये-विडीओ-शायद-राजस्थान-क्षेत्र-के-किसी-गाँव-का-है-किसी-ने-भेजा-है-यदि-किसी-बन्दे/258392245354246/'
data = m.as_json
assert_equal 'राजनीति no Facebook Watch on Facebook', data['title']
assert_nil data['error']
end
Media.any_instance.unstub(:doc)
end
end
|
# CocoaPods spec for R.swift 0.12.0: distributes the prebuilt `rswift`
# binary from the GitHub release and pins R.swift.Library to the same minor.
Pod::Spec.new do |spec|
spec.name = "R.swift"
spec.version = "0.12.0"
spec.license = "MIT"
spec.summary = "Get strong typed, autocompleted resources like images, fonts and segues in Swift projects"
spec.description = <<-DESC
R.swift is a tool to get strong typed, autocompleted resources like images, fonts and segues in Swift projects.
* Never type string identifiers again
* Supports images, fonts, storyboards, nibs, segues, reuse identifiers and more
* Compile time checks and errors instead of runtime crashes
DESC
spec.homepage = "https://github.com/mac-cain13/R.swift"
spec.documentation_url = "https://github.com/mac-cain13/R.swift/tree/master/Documentation"
spec.screenshots = [ "https://raw.githubusercontent.com/mac-cain13/R.swift/master/Documentation/Images/DemoUseImage.gif",
"https://raw.githubusercontent.com/mac-cain13/R.swift/master/Documentation/Images/DemoRenameImage.gif" ]
spec.author = { "Mathijs Kadijk" => "mkadijk@gmail.com" }
spec.social_media_url = "https://twitter.com/mac_cain13"
spec.requires_arc = true
spec.source = { :http => "https://github.com/mac-cain13/R.swift/releases/download/v#{spec.version}/rswift-#{spec.version}.zip" }
spec.ios.deployment_target = '8.0'
spec.tvos.deployment_target = '9.0'
spec.dependency "R.swift.Library", "~> #{spec.version}"
# Keep the downloaded binary in the pod so build phases can invoke it.
spec.preserve_paths = "rswift"
end
Keep version number in sync with master
# CocoaPods spec for R.swift 0.13.0: same layout as 0.12.0 with only the
# version bumped; source URL and library dependency derive from spec.version.
Pod::Spec.new do |spec|
spec.name = "R.swift"
spec.version = "0.13.0"
spec.license = "MIT"
spec.summary = "Get strong typed, autocompleted resources like images, fonts and segues in Swift projects"
spec.description = <<-DESC
R.swift is a tool to get strong typed, autocompleted resources like images, fonts and segues in Swift projects.
* Never type string identifiers again
* Supports images, fonts, storyboards, nibs, segues, reuse identifiers and more
* Compile time checks and errors instead of runtime crashes
DESC
spec.homepage = "https://github.com/mac-cain13/R.swift"
spec.documentation_url = "https://github.com/mac-cain13/R.swift/tree/master/Documentation"
spec.screenshots = [ "https://raw.githubusercontent.com/mac-cain13/R.swift/master/Documentation/Images/DemoUseImage.gif",
"https://raw.githubusercontent.com/mac-cain13/R.swift/master/Documentation/Images/DemoRenameImage.gif" ]
spec.author = { "Mathijs Kadijk" => "mkadijk@gmail.com" }
spec.social_media_url = "https://twitter.com/mac_cain13"
spec.requires_arc = true
spec.source = { :http => "https://github.com/mac-cain13/R.swift/releases/download/v#{spec.version}/rswift-#{spec.version}.zip" }
spec.ios.deployment_target = '8.0'
spec.tvos.deployment_target = '9.0'
spec.dependency "R.swift.Library", "~> #{spec.version}"
# Keep the downloaded binary in the pod so build phases can invoke it.
spec.preserve_paths = "rswift"
end
|
# Homebrew formula for HBase 2.4.6 with bundled hadoop-lzo compression.
class Hbase < Formula
desc "Hadoop database: a distributed, scalable, big data store"
homepage "https://hbase.apache.org"
url "https://www.apache.org/dyn/closer.lua?path=hbase/2.4.6/hbase-2.4.6-bin.tar.gz"
mirror "https://archive.apache.org/dist/hbase/2.4.6/hbase-2.4.6-bin.tar.gz"
sha256 "536e5a3e72da29a4978a91075d4afe7478f56b4893470dd70ec0dcfd2dc2b939"
license "Apache-2.0"
bottle do
sha256 arm64_big_sur: "cd254857676c86bba7937f4eb8fa6832b917a5ecd60a1d3bfc22b6c8d3ff99c8"
sha256 big_sur: "3b6bb361f07fda40372b7de6b6e794aab6a62a247d94141bfb3e7a808f337ae3"
sha256 catalina: "792f97ac1dbc3c9ef96dde2ff505cefd01f406789898f0f43204395c046640db"
sha256 mojave: "999d2da18e6b9e27272e69ffded83cf0cb6a6a279bb7b775e1be58aeb66566a2"
end
# ant is only needed at build time to compile the hadoop-lzo resource below.
depends_on "ant" => :build
depends_on "lzo"
depends_on "openjdk@11"
# LZO compression codec, built from source and installed into libexec.
resource "hadoop-lzo" do
url "https://github.com/cloudera/hadoop-lzo/archive/0.4.14.tar.gz"
sha256 "aa8ddbb8b3f9e1c4b8cc3523486acdb7841cd97c002a9f2959c5b320c7bb0e6c"
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b89da3afad84bbf69deed0611e5dddaaa5d39325/hbase/build.xml.patch"
sha256 "d1d65330a4367db3e17ee4f4045641b335ed42449d9e6e42cc687e2a2e3fa5bc"
end
end
def install
java_home = Formula["openjdk@11"].opt_prefix
# Drop Windows launcher scripts.
rm_f Dir["bin/*.cmd", "conf/*.cmd"]
libexec.install %w[bin conf lib hbase-webapps]
# Some binaries have really generic names (like `test`) and most seem to be
# too special-purpose to be permanently available via PATH.
%w[hbase start-hbase.sh stop-hbase.sh].each do |script|
(bin/script).write_env_script "#{libexec}/bin/#{script}", JAVA_HOME: "${JAVA_HOME:-#{java_home}}"
end
resource("hadoop-lzo").stage do
# Fixed upstream: https://github.com/cloudera/hadoop-lzo/blob/HEAD/build.xml#L235
ENV["CLASSPATH"] = Dir["#{libexec}/lib/hadoop-common-*.jar"].first
ENV["CFLAGS"] = "-m64"
ENV["CXXFLAGS"] = "-m64"
ENV["CPPFLAGS"] = "-I#{Formula["openjdk@11"].include}"
system "ant", "compile-native", "tar"
(libexec/"lib").install Dir["build/hadoop-lzo-*/hadoop-lzo-*.jar"]
(libexec/"lib/native").install Dir["build/hadoop-lzo-*/lib/native/*"]
end
inreplace "#{libexec}/conf/hbase-env.sh" do |s|
# upstream bugs for ipv6 incompatibility:
# https://issues.apache.org/jira/browse/HADOOP-8568
# https://issues.apache.org/jira/browse/HADOOP-3619
s.gsub!(/^# export HBASE_OPTS$/,
"export HBASE_OPTS=\"-Djava.net.preferIPv4Stack=true -XX:+UseConcMarkSweepGC\"")
s.gsub!(/^# export JAVA_HOME=.*/,
"export JAVA_HOME=\"${JAVA_HOME:-#{java_home}}\"")
# Default `$HBASE_HOME/logs` is unsuitable as it would cause writes to the
# formula's prefix. Provide a better default but still allow override.
s.gsub!(/^# export HBASE_LOG_DIR=.*$/,
"export HBASE_LOG_DIR=\"${HBASE_LOG_DIR:-#{var}/log/hbase}\"")
end
# makes hbase usable out of the box
# upstream has been provided this patch
# https://issues.apache.org/jira/browse/HBASE-15426
inreplace "#{libexec}/conf/hbase-site.xml",
/<configuration>/,
<<~EOS
<configuration>
<property>
<name>hbase.rootdir</name>
<value>file://#{var}/hbase</value>
</property>
<property>
<name>hbase.zookeeper.property.clientPort</name>
<value>2181</value>
</property>
<property>
<name>hbase.zookeeper.property.dataDir</name>
<value>#{var}/zookeeper</value>
</property>
<property>
<name>hbase.zookeeper.dns.interface</name>
<value>lo0</value>
</property>
<property>
<name>hbase.regionserver.dns.interface</name>
<value>lo0</value>
</property>
<property>
<name>hbase.master.dns.interface</name>
<value>lo0</value>
</property>
EOS
end
# Create writable log/pid locations under var at install time.
def post_install
(var/"log/hbase").mkpath
(var/"run/hbase").mkpath
end
# Background service: runs the HBase master using the installed conf dir.
service do
run [opt_bin/"hbase", "--config", opt_libexec/"conf", "master", "start"]
keep_alive true
working_dir HOMEBREW_PREFIX
log_path var/"hbase/hbase.log"
error_log_path var/"hbase/hbase.err"
environment_variables HBASE_HOME: opt_libexec,
HBASE_IDENT_STRING: "root",
HBASE_LOG_DIR: var/"hbase",
HBASE_LOG_PREFIX: "hbase-root-master",
HBASE_LOGFILE: "hbase-root-master.log",
HBASE_MASTER_OPTS: " -XX:PermSize=128m -XX:MaxPermSize=128m",
HBASE_NICENESS: "0",
HBASE_OPTS: "-XX:+UseConcMarkSweepGC",
HBASE_PID_DIR: var/"run/hbase",
HBASE_REGIONSERVER_OPTS: " -XX:PermSize=128m -XX:MaxPermSize=128m",
HBASE_ROOT_LOGGER: "INFO,RFA",
HBASE_SECURITY_LOGGER: "INFO,RFAS"
end
# Boots HBase against a testpath config on a free port, then asserts the
# ZooKeeper client port answers the "stats" four-letter command.
test do
port = free_port
assert_match "HBase #{version}", shell_output("#{bin}/hbase version 2>&1")
cp_r (libexec/"conf"), testpath
inreplace (testpath/"conf/hbase-site.xml") do |s|
s.gsub!(/(hbase.rootdir.*)\n.*/, "\\1\n<value>file://#{testpath}/hbase</value>")
s.gsub!(/(hbase.zookeeper.property.dataDir.*)\n.*/, "\\1\n<value>#{testpath}/zookeeper</value>")
s.gsub!(/(hbase.zookeeper.property.clientPort.*)\n.*/, "\\1\n<value>#{port}</value>")
end
ENV["HBASE_LOG_DIR"] = testpath/"logs"
ENV["HBASE_CONF_DIR"] = testpath/"conf"
ENV["HBASE_PID_DIR"] = testpath/"pid"
system "#{bin}/start-hbase.sh"
sleep 10
begin
assert_match "Zookeeper", pipe_output("nc 127.0.0.1 #{port} 2>&1", "stats")
ensure
system "#{bin}/stop-hbase.sh"
end
end
end
hbase 2.4.10
Closes #96809.
Signed-off-by: Sean Molenaar <2b250e3fea88cfef248b497ad5fc17f7dc435154@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for HBase 2.4.10 with bundled hadoop-lzo compression.
class Hbase < Formula
desc "Hadoop database: a distributed, scalable, big data store"
homepage "https://hbase.apache.org"
url "https://www.apache.org/dyn/closer.lua?path=hbase/2.4.10/hbase-2.4.10-bin.tar.gz"
mirror "https://archive.apache.org/dist/hbase/2.4.10/hbase-2.4.10-bin.tar.gz"
sha256 "7ea25b264c9d934f6d4ea25362ea8ede38b1c527747f55e4aa1ec9a700082219"
# We bundle hadoop-lzo which is GPL-3.0-or-later
license all_of: ["Apache-2.0", "GPL-3.0-or-later"]
# NOTE(review): bottle hashes are unchanged from the previous version —
# presumably awaiting a BrewTestBot rebuild after this bump.
bottle do
sha256 arm64_big_sur: "cd254857676c86bba7937f4eb8fa6832b917a5ecd60a1d3bfc22b6c8d3ff99c8"
sha256 big_sur: "3b6bb361f07fda40372b7de6b6e794aab6a62a247d94141bfb3e7a808f337ae3"
sha256 catalina: "792f97ac1dbc3c9ef96dde2ff505cefd01f406789898f0f43204395c046640db"
sha256 mojave: "999d2da18e6b9e27272e69ffded83cf0cb6a6a279bb7b775e1be58aeb66566a2"
end
# ant is only needed at build time to compile the hadoop-lzo resource below.
depends_on "ant" => :build
depends_on "lzo"
depends_on "openjdk@11"
# LZO compression codec, built from source and installed into libexec.
resource "hadoop-lzo" do
url "https://github.com/cloudera/hadoop-lzo/archive/0.4.14.tar.gz"
sha256 "aa8ddbb8b3f9e1c4b8cc3523486acdb7841cd97c002a9f2959c5b320c7bb0e6c"
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b89da3afad84bbf69deed0611e5dddaaa5d39325/hbase/build.xml.patch"
sha256 "d1d65330a4367db3e17ee4f4045641b335ed42449d9e6e42cc687e2a2e3fa5bc"
end
# Fix -flat_namespace being used on Big Sur and later.
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/03cf8088210822aa2c1ab544ed58ea04c897d9c4/libtool/configure-pre-0.4.2.418-big_sur.diff"
sha256 "83af02f2aa2b746bb7225872cab29a253264be49db0ecebb12f841562d9a2923"
directory "src/native"
end
end
def install
java_home = Language::Java.java_home("11")
# Drop Windows launcher scripts.
rm_f Dir["bin/*.cmd", "conf/*.cmd"]
libexec.install %w[bin conf lib hbase-webapps]
# Some binaries have really generic names (like `test`) and most seem to be
# too special-purpose to be permanently available via PATH.
%w[hbase start-hbase.sh stop-hbase.sh].each do |script|
(bin/script).write_env_script libexec/"bin"/script, Language::Java.overridable_java_home_env("11")
end
resource("hadoop-lzo").stage do
# Fixed upstream: https://github.com/cloudera/hadoop-lzo/blob/HEAD/build.xml#L235
ENV["CLASSPATH"] = Dir["#{libexec}/lib/hadoop-common-*.jar"].first
ENV["CFLAGS"] = "-m64"
ENV["CXXFLAGS"] = "-m64"
ENV["CPPFLAGS"] = "-I#{Formula["openjdk@11"].include}"
system "ant", "compile-native", "tar"
(libexec/"lib").install Dir["build/hadoop-lzo-*/hadoop-lzo-*.jar"]
(libexec/"lib/native").install Dir["build/hadoop-lzo-*/lib/native/*"]
end
inreplace "#{libexec}/conf/hbase-env.sh" do |s|
# upstream bugs for ipv6 incompatibility:
# https://issues.apache.org/jira/browse/HADOOP-8568
# https://issues.apache.org/jira/browse/HADOOP-3619
s.gsub!(/^# export HBASE_OPTS$/,
"export HBASE_OPTS=\"-Djava.net.preferIPv4Stack=true -XX:+UseConcMarkSweepGC\"")
s.gsub!(/^# export JAVA_HOME=.*/,
"export JAVA_HOME=\"${JAVA_HOME:-#{java_home}}\"")
# Default `$HBASE_HOME/logs` is unsuitable as it would cause writes to the
# formula's prefix. Provide a better default but still allow override.
s.gsub!(/^# export HBASE_LOG_DIR=.*$/,
"export HBASE_LOG_DIR=\"${HBASE_LOG_DIR:-#{var}/log/hbase}\"")
end
# makes hbase usable out of the box
# upstream has been provided this patch
# https://issues.apache.org/jira/browse/HBASE-15426
inreplace "#{libexec}/conf/hbase-site.xml",
/<configuration>/,
<<~EOS
<configuration>
<property>
<name>hbase.rootdir</name>
<value>file://#{var}/hbase</value>
</property>
<property>
<name>hbase.zookeeper.property.clientPort</name>
<value>2181</value>
</property>
<property>
<name>hbase.zookeeper.property.dataDir</name>
<value>#{var}/zookeeper</value>
</property>
<property>
<name>hbase.zookeeper.dns.interface</name>
<value>lo0</value>
</property>
<property>
<name>hbase.regionserver.dns.interface</name>
<value>lo0</value>
</property>
<property>
<name>hbase.master.dns.interface</name>
<value>lo0</value>
</property>
EOS
end
# Create writable log/pid locations under var at install time.
def post_install
(var/"log/hbase").mkpath
(var/"run/hbase").mkpath
end
# Background service: runs the HBase master using the installed conf dir.
service do
run [opt_bin/"hbase", "--config", opt_libexec/"conf", "master", "start"]
keep_alive true
working_dir HOMEBREW_PREFIX
log_path var/"hbase/hbase.log"
error_log_path var/"hbase/hbase.err"
environment_variables HBASE_HOME: opt_libexec,
HBASE_IDENT_STRING: "root",
HBASE_LOG_DIR: var/"hbase",
HBASE_LOG_PREFIX: "hbase-root-master",
HBASE_LOGFILE: "hbase-root-master.log",
HBASE_MASTER_OPTS: " -XX:PermSize=128m -XX:MaxPermSize=128m",
HBASE_NICENESS: "0",
HBASE_OPTS: "-XX:+UseConcMarkSweepGC",
HBASE_PID_DIR: var/"run/hbase",
HBASE_REGIONSERVER_OPTS: " -XX:PermSize=128m -XX:MaxPermSize=128m",
HBASE_ROOT_LOGGER: "INFO,RFA",
HBASE_SECURITY_LOGGER: "INFO,RFAS"
end
# Boots HBase against a testpath config on a free port, then asserts the
# ZooKeeper client port answers the "stats" four-letter command.
test do
port = free_port
assert_match "HBase #{version}", shell_output("#{bin}/hbase version 2>&1")
cp_r (libexec/"conf"), testpath
inreplace (testpath/"conf/hbase-site.xml") do |s|
s.gsub!(/(hbase.rootdir.*)\n.*/, "\\1\n<value>file://#{testpath}/hbase</value>")
s.gsub!(/(hbase.zookeeper.property.dataDir.*)\n.*/, "\\1\n<value>#{testpath}/zookeeper</value>")
s.gsub!(/(hbase.zookeeper.property.clientPort.*)\n.*/, "\\1\n<value>#{port}</value>")
end
ENV["HBASE_LOG_DIR"] = testpath/"logs"
ENV["HBASE_CONF_DIR"] = testpath/"conf"
ENV["HBASE_PID_DIR"] = testpath/"pid"
system "#{bin}/start-hbase.sh"
sleep 10
begin
assert_match "Zookeeper", pipe_output("nc 127.0.0.1 #{port} 2>&1", "stats")
ensure
system "#{bin}/stop-hbase.sh"
end
end
end
|
require_relative '../test_helper'
class ProjectMediaTest < ActiveSupport::TestCase
# Per-test setup: queue Sidekiq jobs instead of executing them, then create
# the 'keep' bot and verification-status fixtures that many tests rely on.
def setup
require 'sidekiq/testing'
Sidekiq::Testing.fake!
super
create_team_bot login: 'keep', name: 'Keep'
create_verification_status_stuff
end
# Creation paths: anonymous creation, creation attributed to the current
# user, uniqueness of (team, media), and role changes mid-test.
test "should create project media" do
assert_difference 'ProjectMedia.count' do
create_project_media
end
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'admin'
m = create_valid_media
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
# Created inside a user/team context, the item records its creator.
assert_difference 'ProjectMedia.count' do
with_current_user_and_team(u, t) do
pm = create_project_media team: t, media: m
assert_equal u, pm.user
end
end
# should be unique
assert_no_difference 'ProjectMedia.count' do
assert_raises RuntimeError do
create_project_media team: t, media: m
end
end
# editor should assign any media
m2 = create_valid_media
# Cache is cleared so the role change below is picked up by permission checks.
Rails.cache.clear
tu.update_column(:role, 'editor')
pm = nil
assert_difference 'ProjectMedia.count' do
pm = create_project_media team: t, media: m2
end
m3 = create_valid_media user_id: u.id
assert_difference 'ProjectMedia.count' do
pm = create_project_media team: t, media: m3
pm.save!
end
User.unstub(:current)
Team.unstub(:current)
end
# A fresh item starts in the default status, labelled 'Unstarted'.
test "should get status label" do
pm = create_project_media
assert_equal 'Unstarted', pm.last_verification_status_label
end
# Custom verification statuses can carry a per-status `role` gate; both
# statuses here require 'editor', so a 'collaborator' must not be able to
# move the item from 'done' back to 'stop'.
test "should respect state transition roles" do
  t = create_team
  value = {
    label: 'Status',
    default: 'stop',
    active: 'done',
    statuses: [
      { id: 'stop', label: 'Stopped', role: 'editor', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
      { id: 'done', label: 'Done!', role: 'editor', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
    ]
  }
  t.send :set_media_verification_statuses, value
  t.save!
  pm = create_project_media team: t
  s = pm.last_status_obj
  s.status = 'done'
  s.save!
  u = create_user
  create_team_user team: t, user: u, role: 'collaborator'
  assert_equal 'done', pm.reload.status
  # (fixed argument spacing: was `with_current_user_and_team(u ,t)`)
  with_current_user_and_team(u, t) do
    a = Annotation.where(annotation_type: 'verification_status', annotated_type: 'ProjectMedia', annotated_id: pm.id).last.load
    f = a.get_field('verification_status_status')
    f.value = 'stop'
    # The collaborator lacks the 'editor' role required by the target status.
    assert_raises ActiveRecord::RecordInvalid do
      f.save!
    end
  end
end
# A ProjectMedia requires a media, but a project is optional.
# (fixed duplicated word in the test name: "not not necessarily")
test "should have a media but not necessarily a project" do
  assert_nothing_raised do
    create_project_media project: nil
  end
  assert_raise ActiveRecord::RecordInvalid do
    create_project_media media: nil
  end
end
# When no media is given, a Claim is built from `quote` or a Link from `url`
# (the Pender media-parsing service is stubbed via WebMock).
test "should create media if url or quote set" do
url = 'http://test.com'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count', 2 do
create_project_media media: nil, quote: 'Claim report'
create_project_media media: nil, url: url
end
end
# Two different input URLs that Pender normalizes to the same canonical URL
# must resolve to the same Media record.
test "should find media by normalized url" do
url = 'http://test.com'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media url: url
url2 = 'http://test2.com'
# Deliberately returns the SAME normalized URL for url2 as for url.
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm = create_project_media url: url2
assert_equal pm.media, m
end
# Creating an item with a URL already attached to a Media reuses that
# Media instead of creating a duplicate.
# (fixed typo in the test name: "exisitng")
test "should create with existing media if url exists" do
  m = create_valid_media
  pm = create_project_media media: nil, url: m.url
  assert_equal m, pm.media
end
# A collaborator is allowed to add a new item to the team.
test "should collaborator add a new media" do
  t = create_team
  u = create_user
  # Unused local `tu` removed; the membership record is still created.
  create_team_user team: t, user: u, role: 'collaborator'
  with_current_user_and_team(u, t) do
    assert_difference 'ProjectMedia.count' do
      create_project_media team: t, quote: 'Claim report'
    end
  end
end
# Both the owning user and a team editor can save the item without a
# permission error.
test "should update and destroy project media" do
  u = create_user
  t = create_team
  m = create_valid_media user_id: u.id
  create_team_user team: t, user: u
  pm = create_project_media team: t, media: m, user: u
  with_current_user_and_team(u, t) do
    pm.save!
  end
  u2 = create_user
  # Unused local `tu` removed; the membership record is still created.
  create_team_user team: t, user: u2, role: 'editor'
  with_current_user_and_team(u2, t) do
    pm.save!
  end
end
# A standalone item (one with no parent relationship) is its own
# relationship source.
test "queries for relationship source" do
  _user = create_user
  team = create_team
  item = create_project_media team: team
  assert_equal item.relationship_source, item
end
# The following block of tests exercises the media-type predicates on
# ProjectMedia (is_claim?, is_link?, is_uploaded_image?, is_image?,
# is_text?, is_blank?) by mutating the underlying media's type column.
test "checks truthfulness of is_claim?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Claim"
pm.media.save!
assert pm.is_claim?
end
test "checks truthfulness of is_link?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Link"
pm.media.save!
assert pm.is_link?
end
test "checks truthfulness of is_uploaded_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert pm.is_uploaded_image?
end
# is_image? is expected to be true for uploaded images.
test "checks truthfulness of is_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert pm.is_image?
end
# Links count as textual content.
test "checks truthfulness of is_text?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Link"
pm.media.save!
assert pm.is_text?
end
test "checks truthfulness of is_blank?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Blank"
pm.media.save!
assert pm.is_blank?
end
test "checks falsity of is_text?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert !pm.is_text?
end
# NOTE(review): unlike its siblings this test sets `pm.media_type` (a
# ProjectMedia attribute) rather than `pm.media.type`, and never saves —
# confirm whether that was intentional or should match the pattern above.
test "checks falsity of is_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media_type = "Link"
assert !pm.is_image?
end
test "non members should not read project media in private team" do
u = create_user
t = create_team
pm = create_project_media team: t
pu = create_user
pt = create_team private: true
create_team_user team: pt, user: pu
pu2 = create_user
create_team_user team: pt, user: pu2, status: 'requested'
ppm = create_project_media team: pt
ProjectMedia.find_if_can(pm.id)
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(u, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
with_current_user_and_team(pu, pt) do
ProjectMedia.find_if_can(ppm.id)
end
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(pu2, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
end
test "should notify Slack based on slack events" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'admin'
p = create_project team: t
p2 = create_project team: t
t.set_slack_notifications_enabled = 1
t.set_slack_webhook = 'https://hooks.slack.com/services/123'
slack_notifications = []
slack_notifications << {
"label": random_string,
"event_type": "item_added",
"values": ["#{p.id}"],
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "any_activity",
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "status_changed",
"values": ["in_progress"],
"slack_channel": "##{random_string}"
}
t.slack_notifications = slack_notifications.to_json
t.save!
with_current_user_and_team(u, t) do
m = create_valid_media
pm = create_project_media team: t, media: m
assert pm.sent_to_slack
m = create_claim_media
pm = create_project_media team: t, media: m
assert pm.sent_to_slack
pm = create_project_media project: p
assert pm.sent_to_slack
# status changes
s = pm.last_status_obj
s.status = 'in_progress'
s.save!
assert s.sent_to_slack
# move item
pm = create_project_media project: p2
pm.project_id = p.id
pm.save!
assert pm.sent_to_slack
end
end
# Each creation event should enqueue exactly one Slack notification job even
# when multiple rules (list-scoped item_added + any_activity) could match.
test "should not duplicate slack notification for custom slack list settings" do
Rails.stubs(:env).returns(:production)
t = create_team slug: 'test'
p = create_project team: t
t.set_slack_notifications_enabled = 1
t.set_slack_webhook = 'https://hooks.slack.com/services/123'
slack_notifications = []
slack_notifications << {
"label": random_string,
"event_type": "item_added",
"values": ["#{p.id}"],
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "any_activity",
"slack_channel": "##{random_string}"
}
t.slack_notifications = slack_notifications.to_json
t.save!
u = create_user
p = create_project team: t
Sidekiq::Testing.fake! do
with_current_user_and_team(u, t) do
create_team_user team: t, user: u, role: 'admin'
SlackNotificationWorker.drain
assert_equal 0, SlackNotificationWorker.jobs.size
create_project_media team: t
assert_equal 1, SlackNotificationWorker.jobs.size
SlackNotificationWorker.drain
assert_equal 0, SlackNotificationWorker.jobs.size
create_project_media project: p
assert_equal 1, SlackNotificationWorker.jobs.size
Rails.unstub(:env)
end
end
end
# Pusher (realtime) notification fires on creation...
test "should notify Pusher when project media is created" do
pm = create_project_media
assert pm.sent_to_pusher
t = create_team
p = create_project team: t
m = create_claim_media project_id: p.id
pm = create_project_media project: p, media: m
assert pm.sent_to_pusher
end
# ...and on destruction.
test "should notify Pusher when project media is destroyed" do
pm = create_project_media
pm.sent_to_pusher = false
pm.destroy!
assert pm.sent_to_pusher
end
# In production mode the Pusher notification is enqueued as a background
# job (two jobs per creation here) rather than sent inline.
test "should notify Pusher in background" do
Rails.stubs(:env).returns(:production)
t = create_team
p = create_project team: t
CheckPusher::Worker.drain
assert_equal 0, CheckPusher::Worker.jobs.size
create_project_media project: p
assert_equal 2, CheckPusher::Worker.jobs.size
CheckPusher::Worker.drain
assert_equal 0, CheckPusher::Worker.jobs.size
Rails.unstub(:env)
end
# Per-item `analysis` overrides are independent between two items sharing
# the same underlying Media, and the last write wins for each item.
test "should update project media embed data" do
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
p1 = create_project
p2 = create_project
pm1 = create_project_media project: p1, media: m
pm2 = create_project_media project: p2, media: m
# fetch data (without overridden)
data = pm1.media.metadata
assert_equal 'test media', data['title']
assert_equal 'add desc', data['description']
# Update media title and description for pm1
info = { title: 'Title A', content: 'Desc A' }
pm1.analysis = info
info = { title: 'Title AA', content: 'Desc AA' }
pm1.analysis = info
# Update media title and description for pm2
info = { title: 'Title B', content: 'Desc B' }
pm2.analysis = info
info = { title: 'Title BB', content: 'Desc BB' }
pm2.analysis = info
# fetch data for pm1
data = pm1.analysis
assert_equal 'Title AA', data['title']
assert_equal 'Desc AA', data['content']
# fetch data for pm2
data = pm2.analysis
assert_equal 'Title BB', data['title']
assert_equal 'Desc BB', data['content']
end
# annotations('comment') returns only comments attached to this item;
# c3 (annotated: nil) must be excluded.
test "should have annotations" do
pm = create_project_media
c1 = create_comment annotated: pm
c2 = create_comment annotated: pm
c3 = create_comment annotated: nil
assert_equal [c1.id, c2.id].sort, pm.reload.annotations('comment').map(&:id).sort
end
# The permissions JSON on an item exposes the same KEY set regardless of the
# current user's role (the boolean values differ, the keys do not).
test "should get permissions" do
  u = create_user
  t = create_team current_user: u
  tu = create_team_user team: t, user: u, role: 'admin'
  p = create_project team: t
  pm = create_project_media project: p, current_user: u
  perm_keys = [
    "read ProjectMedia", "update ProjectMedia", "destroy ProjectMedia", "create Comment",
    "create Tag", "create Task", "create Dynamic", "not_spam ProjectMedia", "restore ProjectMedia", "confirm ProjectMedia",
    "embed ProjectMedia", "lock Annotation", "update Status", "administer Content", "create Relationship",
    "create Source", "update Source", "create ClaimDescription"
  ].sort
  User.stubs(:current).returns(u)
  Team.stubs(:current).returns(t)
  # load permissions as admin
  assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
  # load as editor (three byte-identical repeats of this check were
  # collapsed into one — they added no coverage)
  tu.update_column(:role, 'editor')
  assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
  # load as collaborator
  tu.update_column(:role, 'collaborator')
  assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
  # load as authenticated user with no team membership
  tu.update_column(:team_id, nil)
  assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
  User.unstub(:current)
  Team.unstub(:current)
end
# An uploaded image gets its title from the sanitized file name.
test "should create embed for uploaded image" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
pm = ProjectMedia.new
pm.team_id = create_team.id
pm.file = File.new(File.join(Rails.root, 'test', 'data', 'rails.png'))
pm.disable_es_callbacks = true
pm.media_type = 'UploadedImage'
pm.save!
assert_equal media_filename('rails.png', false), pm.title
end
# Items created by the Smooch (tipline) bot get sequential auto-generated
# titles like "image-<team-slug>-<n>"; items created by a human keep the
# original file name.
test "should set automatic title for images videos and audios" do
m = create_uploaded_image file: 'rails.png'
v = create_uploaded_video file: 'rails.mp4'
a = create_uploaded_audio file: 'rails.mp3'
bot = create_team_bot name: 'Smooch', login: 'smooch', set_approved: true
u = create_user
team = create_team slug: 'workspace-slug'
create_team_user team: team, user: bot, role: 'admin'
create_team_user team: team, user: u, role: 'admin'
# test with smooch user
with_current_user_and_team(bot, team) do
pm = create_project_media team: team, media: m
count = Media.where(type: 'UploadedImage').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal "image-#{team.slug}-#{count}", pm.title
pm2 = create_project_media team: team, media: v
count = Media.where(type: 'UploadedVideo').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal "video-#{team.slug}-#{count}", pm2.title
pm3 = create_project_media team: team, media: a
count = Media.where(type: 'UploadedAudio').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal pm3.title, "audio-#{team.slug}-#{count}"
pm.destroy; pm2.destroy; pm3.destroy
end
# test with non smooch user
with_current_user_and_team(u, team) do
pm = create_project_media team: team, media: m
assert_equal pm.title, media_filename('rails.png', false)
pm2 = create_project_media team: team, media: v
assert_equal pm2.title, media_filename('rails.mp4', false)
pm3 = create_project_media team: team, media: a
assert_equal pm3.title, media_filename('rails.mp3', false)
end
end
# Passing unpermitted ActionController::Parameters directly to .create must
# raise instead of silently mass-assigning attributes.
test "should protect attributes from mass assignment" do
  params = ActionController::Parameters.new(project: create_project, user: create_user)
  assert_raise ActiveModel::ForbiddenAttributesError do
    ProjectMedia.create(params)
  end
end
# Team-level task templates are copied onto new items (jobs run inline).
test "should create auto tasks" do
t = create_team
create_team_task team_id: t.id
Sidekiq::Testing.inline! do
assert_difference 'Task.length', 1 do
pm1 = create_project_media team: t
end
end
end
# Auto-tasks are created even when the item is added by a collaborator.
test "should collaborator create auto tasks" do
t = create_team
create_team_task team_id: t.id
u = create_user
tu = create_team_user team: t, user: u, role: 'collaborator'
Sidekiq::Testing.inline! do
with_current_user_and_team(u, t) do
assert_difference 'Task.length' do
create_project_media team: t
end
end
end
end
# PaperTrail versioning: creation writes versions, destroy writes a
# 'destroy' version into the team's partition.
test "should have versions" do
t = create_team
m = create_valid_media team: t
u = create_user
create_team_user user: u, team: t, role: 'admin'
pm = nil
User.current = u
assert_difference 'PaperTrail::Version.count', 2 do
pm = create_project_media team: t, media: m, user: u, skip_autocreate_source: false
end
assert_equal 2, pm.versions.count
pm.destroy!
v = Version.from_partition(t.id).where(item_type: 'ProjectMedia', item_id: pm.id, event: 'destroy').last
assert_not_nil v
User.current = nil
end
# The activity log counts annotation events; destroying a tag adds an event
# while destroying a comment or flag does not change the count here.
test "should get log" do
m = create_valid_media
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
create_team_user user: u, team: t, role: 'admin'
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, user: u
c = create_comment annotated: pm
tg = create_tag annotated: pm
f = create_flag annotated: pm
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'In Progress'; s.save!
info = { title: 'Foo' }; pm.analysis = info; pm.save!
info = { title: 'Bar' }; pm.analysis = info; pm.save!
assert_equal [
"create_dynamic", "create_dynamicannotationfield", "create_projectmedia",
"create_projectmedia", "create_tag", "update_dynamicannotationfield"
].sort, pm.get_versions_log.map(&:event_type).sort
assert_equal 5, pm.get_versions_log_count
c.destroy
assert_equal 5, pm.get_versions_log_count
tg.destroy
assert_equal 6, pm.get_versions_log_count
f.destroy
assert_equal 6, pm.get_versions_log_count
end
end
# `project_was` resolves from previous_project_id and has a CheckSearch
# counterpart for the previous list.
test "should get previous project and previous project search object" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
assert_nil pm.project_was
pm.previous_project_id = p1.id
pm.save!
assert_equal p1, pm.project_was
assert_kind_of CheckSearch, pm.check_search_project_was
end
# Setting refresh_media re-fetches Pender data (refresh=1), bumps
# updated_at and increments refreshes_count on the same metadata annotation.
# NOTE(review): the `sleep 2` is only there so updated_at visibly advances;
# consider time-travel helpers to keep the suite fast.
test "should refresh Pender data" do
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = random_url
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"1"}}')
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"2"}}')
m = create_media url: url
pm = create_project_media media: m
t1 = pm.updated_at.to_i
em1 = pm.media.metadata_annotation
assert_not_nil em1
em1_data = JSON.parse(em1.get_field_value('metadata_value'))
assert_equal '1', em1_data['foo']
assert_equal 1, em1_data['refreshes_count']
sleep 2
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true
pm.save!
t2 = pm.reload.updated_at.to_i
assert t2 > t1
em2 = pm.media.metadata_annotation
assert_not_nil em2
em2_data = JSON.parse(em2.get_field_value('metadata_value'))
assert_equal '2', em2_data['foo']
assert_equal 2, em2_data['refreshes_count']
assert_equal em1, em2
end
# Refreshing a link creates the Keep archiver annotation the first time and
# resets its response (back to "{}") on subsequent refreshes.
test "should create or reset archive response when refresh media" do
create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
t = create_team
t.set_limits_keep = true
t.save!
l = create_link team: t
tb = BotUser.where(name: 'Keep').last
tb.set_settings = [{ name: 'archive_pender_archive_enabled', type: 'boolean' }]
tb.set_approved = true
tb.save!
tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
tbi.set_archive_pender_archive_enabled = true
tbi.save!
pm = create_project_media media: l, team: t
assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
pm.refresh_media = true
pm.skip_check_ability = true
pm.save!
end
end
a = pm.get_annotations('archiver').last.load
f = a.get_field('pender_archive_response')
f.value = '{"foo":"bar"}'
f.save!
v = a.reload.get_field('pender_archive_response').reload.value
assert_not_equal "{}", v
# A second refresh reuses the existing annotation and clears the response.
assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
pm.refresh_media = true
pm.skip_check_ability = true
pm.save!
end
end
v = a.reload.get_field('pender_archive_response').reload.value
assert_equal "{}", v
end
# Migration helper: map a user email to an id (nil when no such user).
test "should get user id for migration" do
pm = ProjectMedia.new
assert_nil pm.send(:user_id_callback, 'test@test.com')
u = create_user(email: 'test@test.com')
assert_equal u.id, pm.send(:user_id_callback, 'test@test.com')
end
# Migration helper: map an old project id through an explicit mapping hash.
test "should get project id for migration" do
p = create_project
mapping = Hash.new
pm = ProjectMedia.new
assert_nil pm.send(:project_id_callback, 1, mapping)
mapping[1] = p.id
assert_equal p.id, pm.send(:project_id_callback, 1, mapping)
end
# The set_annotation creation attribute attaches an arbitrary dynamic
# annotation (here a 'translation') at creation time.
test "should set annotation" do
ft = DynamicAnnotation::FieldType.where(field_type: 'text').last || create_field_type(field_type: 'text', label: 'Text')
lt = create_field_type(field_type: 'language', label: 'Language')
at = create_annotation_type annotation_type: 'translation', label: 'Translation'
create_field_instance annotation_type_object: at, name: 'translation_text', label: 'Translation Text', field_type_object: ft, optional: false
create_field_instance annotation_type_object: at, name: 'translation_note', label: 'Translation Note', field_type_object: ft, optional: true
create_field_instance annotation_type_object: at, name: 'translation_language', label: 'Translation Language', field_type_object: lt, optional: false
assert_equal 0, Annotation.where(annotation_type: 'translation').count
create_project_media set_annotation: { annotation_type: 'translation', set_fields: { 'translation_text' => 'Foo', 'translation_note' => 'Bar', 'translation_language' => 'pt' }.to_json }.to_json
assert_equal 1, Annotation.where(annotation_type: 'translation').count
end
# Every item exposes a CheckSearch object scoped to its team.
test "should have reference to search team object" do
  item = create_project_media
  assert_kind_of CheckSearch, item.check_search_team
end
# get_dynamic_annotation returns the item's annotation of the given type.
test "should get dynamic annotation by type" do
create_annotation_type annotation_type: 'foo'
create_annotation_type annotation_type: 'bar'
pm = create_project_media
d1 = create_dynamic_annotation annotation_type: 'foo', annotated: pm
d2 = create_dynamic_annotation annotation_type: 'bar', annotated: pm
assert_equal d1, pm.get_dynamic_annotation('foo')
assert_equal d2, pm.get_dynamic_annotation('bar')
end
# report_type reflects the underlying media class ('claim' vs 'link').
test "should get report type" do
c = create_claim_media
l = create_link
m = create_project_media media: c
assert_equal 'claim', m.report_type
m = create_project_media media: l
assert_equal 'link', m.report_type
end
# An admin can destroy another user's item (together with its annotations);
# Elasticsearch callbacks are disabled for speed.
test "should delete project media" do
t = create_team
u = create_user
u2 = create_user
tu = create_team_user team: t, user: u, role: 'admin'
tu = create_team_user team: t, user: u2
pm = create_project_media team: t, quote: 'Claim', user: u2
at = create_annotation_type annotation_type: 'test'
ft = create_field_type
fi = create_field_instance name: 'test', field_type_object: ft, annotation_type_object: at
a = create_dynamic_annotation annotator: u2, annotated: pm, annotation_type: 'test', set_fields: { test: 'Test' }.to_json
RequestStore.store[:disable_es_callbacks] = true
with_current_user_and_team(u, t) do
pm.disable_es_callbacks = true
pm.destroy
end
RequestStore.store[:disable_es_callbacks] = false
end
# embed_url(false) is the raw Pender embed URL; embed_url (default) goes
# through the short-URL host.
test "should have Pender embeddable URL" do
RequestStore[:request] = nil
t = create_team
pm = create_project_media team: t
stub_configs({ 'pender_url' => 'https://pender.fake' }) do
assert_equal CheckConfig.get('pender_url') + '/api/medias.html?url=' + pm.full_url.to_s, pm.embed_url(false)
end
stub_configs({ 'pender_url' => 'https://pender.fake' }) do
assert_match /#{CheckConfig.get('short_url_host')}/, pm.embed_url
end
end
# as_oembed exposes the media title.
test "should have oEmbed endpoint" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
pm = create_project_media media: m
assert_equal 'test media', pm.as_oembed[:title]
end
# The oEmbed URL has the same shape for public and private teams.
test "should have oEmbed URL" do
RequestStore[:request] = nil
t = create_team private: false
p = create_project team: t
pm = create_project_media project: p
stub_configs({ 'checkdesk_base_url' => 'https://checkmedia.org' }) do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
t = create_team private: true
p = create_project team: t
pm = create_project_media project: p
stub_configs({ 'checkdesk_base_url' => 'https://checkmedia.org' }) do
assert_equal "https://checkmedia.org/api/project_medias/#{pm.id}/oembed", pm.oembed_url
end
end
# oEmbed author_name comes from the creating user; blank when there is none.
test "should get author name for oEmbed" do
  author = create_user name: 'Foo Bar'
  item = create_project_media user: author
  assert_equal 'Foo Bar', item.author_name
  item.user = nil
  assert_equal '', item.author_name
end
# author_url is only populated for users created via a social (omniauth)
# profile; plain users and missing users yield ''.
test "should get author URL for oEmbed" do
url = 'http://twitter.com/test'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"profile"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
u = create_omniauth_user url: url, provider: 'twitter'
pm = create_project_media user: u
assert_equal url, pm.author_url
pm.user = create_user
assert_equal '', pm.author_url
pm.user = nil
assert_equal '', pm.author_url
end
# author_picture resolves to an absolute URL.
test "should get author picture for oEmbed" do
u = create_user
pm = create_project_media user: u
assert_match /^http/, pm.author_picture
end
# author_username falls back to '' without a user.
test "should get author username for oEmbed" do
u = create_user login: 'test'
pm = create_project_media user: u
assert_equal 'test', pm.author_username
pm.user = nil
assert_equal '', pm.author_username
end
# author_role reports the creator's team role, 'none' for non-members.
test "should get author role for oEmbed" do
t = create_team
u = create_user
create_team_user user: u, team: t, role: 'collaborator'
pm = create_project_media team: t, user: u
assert_equal 'collaborator', pm.author_role
pm.user = create_user
assert_equal 'none', pm.author_role
pm.user = nil
assert_equal 'none', pm.author_role
end
# source_url is the external link for links and a Check client URL for claims.
test "should get source URL for external link for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal url, pm.source_url
c = create_claim_media
pm = create_project_media media: c
assert_match CheckConfig.get('checkdesk_client'), pm.source_url
end
# A task with a response counts as completed; one without stays open.
test "should get completed tasks for oEmbed" do
at = create_annotation_type annotation_type: 'task_response'
create_field_instance annotation_type_object: at, name: 'response'
pm = create_project_media
assert_equal [], pm.completed_tasks
assert_equal 0, pm.completed_tasks_count
t1 = create_task annotated: pm
t1.response = { annotation_type: 'task_response', set_fields: { response: 'Test' }.to_json }.to_json
t1.save!
t2 = create_task annotated: pm
assert_equal [t1], pm.completed_tasks
assert_equal [t2], pm.open_tasks
assert_equal 1, pm.completed_tasks_count
end
# comments/comments_count reflect attached comment annotations.
test "should get comments for oEmbed" do
pm = create_project_media
assert_equal [], pm.comments
assert_equal 0, pm.comments_count
c = create_comment annotated: pm
assert_equal [c], pm.comments
assert_equal 1, pm.comments_count
end
# provider is the external service for links and 'Check' for claims.
test "should get provider for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal 'Twitter', pm.provider
c = create_claim_media
pm = create_project_media media: c
assert_equal 'Check', pm.provider
end
# published_at comes from Pender's published_at for links; nil for claims.
test "should get published time for oEmbed" do
url = 'http://twitter.com/test/123456'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal '25/01/1989', pm.published_at.strftime('%d/%m/%Y')
c = create_claim_media
pm = create_project_media media: c
assert_nil pm.published_at
end
# source_author prefers the external author for links and the creating
# Check user for claims.
test "should get source author for oEmbed" do
u = create_user name: 'Foo'
url = 'http://twitter.com/test/123456'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","author_name":"Bar"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l, user: u
assert_equal 'Bar', pm.source_author[:author_name]
c = create_claim_media
pm = create_project_media media: c, user: u
assert_equal 'Foo', pm.source_author[:author_name]
end
# Rendered oEmbed HTML (after publishing a text report) matches the golden
# fixture file, modulo whitespace and host normalization.
test "should render oEmbed HTML" do
Sidekiq::Testing.inline! do
pm = create_project_media
PenderClient::Request.stubs(:get_medias)
publish_report(pm, {}, nil, {
use_visual_card: false,
use_text_message: true,
use_disclaimer: false,
disclaimer: '',
title: 'Title',
text: '*This* _is_ a ~test~!',
published_article_url: 'http://foo.bar'
})
PenderClient::Request.unstub(:get_medias)
expected = File.read(File.join(Rails.root, 'test', 'data', "oembed-#{pm.default_project_media_status_type}.html")).gsub(/^\s+/m, '')
actual = ProjectMedia.find(pm.id).html.gsub(/.*<body/m, '<body').gsub(/^\s+/m, '').gsub(/https?:\/\/[^:]*:3000/, 'http://check')
assert_equal expected, actual
end
end
# oembed_metadata serializes to a string.
test "should have metadata for oEmbed" do
pm = create_project_media
assert_kind_of String, pm.oembed_metadata
end
# Annotating an item busts external caches (CcDeville) and re-renders via
# Pender; expectation counts pin the exact number of cache purges.
test "should clear caches when media is updated" do
create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
pm = create_project_media
create_dynamic_annotation annotation_type: 'embed_code', annotated: pm
u = create_user
ProjectMedia.any_instance.unstub(:clear_caches)
CcDeville.expects(:clear_cache_for_url).returns(nil).times(52)
PenderClient::Request.expects(:get_medias).returns(nil).times(16)
Sidekiq::Testing.inline! do
create_comment annotated: pm, user: u
create_task annotated: pm, user: u
end
CcDeville.unstub(:clear_cache_for_url)
PenderClient::Request.unstub(:get_medias)
end
# set_tasks_responses pre-fills an auto-task answer at creation time,
# keyed by a slug of the task label ('when' -> 'When?').
test "should respond to auto-tasks on creation" do
at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
t = create_team
p = create_project team: t
create_team_task team_id: t.id, label: 'When?'
Sidekiq::Testing.inline! do
assert_difference 'Task.length', 1 do
pm = create_project_media project: p, set_tasks_responses: { 'when' => 'Yesterday' }
task = pm.annotations('task').last
assert_equal 'Yesterday', task.first_response
end
end
end
# Items imported from Krzana (via Pender) should auto-answer team tasks whose
# `mapping` JSONPath matches the link's json+ld metadata. Covers the three
# mapping types (free_text, geolocation, datetime) plus the negative cases:
# empty json+ld, missing field, and empty value.
test "should auto-response for Krzana report" do
  at = create_annotation_type annotation_type: 'task_response_geolocation', label: 'Task Response Geolocation'
  geotype = create_field_type field_type: 'geojson', label: 'GeoJSON'
  create_field_instance annotation_type_object: at, name: 'response_geolocation', field_type_object: geotype
  at = create_annotation_type annotation_type: 'task_response_datetime', label: 'Task Response Date Time'
  datetime = create_field_type field_type: 'datetime', label: 'Date Time'
  create_field_instance annotation_type_object: at, name: 'response_datetime', field_type_object: datetime
  at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
  ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
  fi2 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Note', field_type_object: ft1
  t = create_team
  p = create_project team: t
  p2 = create_project team: t
  p3 = create_project team: t
  tt1 = create_team_task team_id: t.id, label: 'who?', task_type: 'free_text', mapping: { "type" => "free_text", "match" => "$.mentions[?(@['@type'] == 'Person')].name", "prefix" => "Suggested by Krzana: "}
  tt2 = create_team_task team_id: t.id, label: 'where?', task_type: 'geolocation', mapping: { "type" => "geolocation", "match" => "$.mentions[?(@['@type'] == 'Place')]", "prefix" => ""}
  tt3 = create_team_task team_id: t.id, label: 'when?', type: 'datetime', mapping: { "type" => "datetime", "match" => "dateCreated", "prefix" => ""}
  Sidekiq::Testing.inline! do
    pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
    # test empty json+ld
    url = 'http://test1.com'
    raw = {"json+ld": {}}
    response = {'type':'media','data': {'url': url, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
    pm = create_project_media project: p, url: url
    # NOTE(review): from here on `t` (the team above) is rebound to a Task, and
    # the select block param `t` shadows it — works, but fragile to edit.
    t = Task.where(annotation_type: 'task', annotated_id: pm.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with non exist value
    url1 = 'http://test11.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person" } ] } }
    response = {'type':'media','data': {'url': url1, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url1 } }).to_return(body: response)
    pm1 = create_project_media project: p, url: url1
    t = Task.where(annotation_type: 'task', annotated_id: pm1.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with empty value
    url12 = 'http://test12.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "" } ] } }
    response = {'type':'media','data': {'url': url12, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url12 } }).to_return(body: response)
    pm12 = create_project_media project: p, url: url12
    t = Task.where(annotation_type: 'task', annotated_id: pm12.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with single selection
    url2 = 'http://test2.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" } ] } }
    response = {'type':'media','data': {'url': url2, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
    pm2 = create_project_media project: p, url: url2
    t = Task.where(annotation_type: 'task', annotated_id: pm2.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_equal "Suggested by Krzana: first_name", t.first_response
    # test multiple selection (should get first one)
    url3 = 'http://test3.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" }, { "@type": "Person", "name": "last_name" } ] } }
    response = {'type':'media','data': {'url': url3, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url3 } }).to_return(body: response)
    pm3 = create_project_media project: p, url: url3
    t = Task.where(annotation_type: 'task', annotated_id: pm3.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_equal "Suggested by Krzana: first_name", t.first_response
    # test geolocation mapping
    url4 = 'http://test4.com'
    raw = { "json+ld": {
      "mentions": [ { "name": "Delimara Powerplant", "@type": "Place", "geo": { "latitude": 35.83020073454, "longitude": 14.55602645874 } } ]
    } }
    response = {'type':'media','data': {'url': url4, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url4 } }).to_return(body: response)
    pm4 = create_project_media project: p2, url: url4
    t = Task.where(annotation_type: 'task', annotated_id: pm4.id).select{ |t| t.team_task_id == tt2.id }.last
    # assert_not_nil t.first_response
    # test datetime mapping
    url5 = 'http://test5.com'
    raw = { "json+ld": { "dateCreated": "2017-08-30T14:22:28+00:00" } }
    response = {'type':'media','data': {'url': url5, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url5 } }).to_return(body: response)
    pm5 = create_project_media project: p3, url: url5
    t = Task.where(annotation_type: 'task', annotated_id: pm5.id).select{ |t| t.team_task_id == tt3.id }.last
    assert_not_nil t.first_response
  end
end
# When Pender answers HTTP 409 (conflict) for a URL, saving the item should
# raise instead of silently creating it.
test "should expose conflict error from Pender" do
  url = 'http://test.com'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"error","data":{"message":"Conflict","code":9}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response, status: 409)
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response, status: 409)
  t = create_team
  pm = ProjectMedia.new
  pm.team = t
  pm.url = url
  pm.media_type = 'Link'
  assert_raises RuntimeError do
    pm.save!
    # NOTE(review): this assertion is dead code — `save!` raises first, so the
    # line below never executes. Consider moving it outside the assert_raises
    # block (after capturing the raised error) so it is actually checked.
    assert_equal PenderClient::ErrorCodes::DUPLICATED, pm.media.pender_error_code
  end
end
# Creating an item inside a trashed (archived) project must fail validation.
test "should not create project media under archived project" do
  trashed_project = create_project archived: CheckArchivedFlags::FlagCodes::TRASHED
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media project_id: trashed_project.id
  end
end
# The archived flag defaults to NONE, and setting it to TRASHED persists
# across a reload.
test "should archive" do
  item = create_project_media
  assert_equal CheckArchivedFlags::FlagCodes::NONE, item.archived
  item.archived = CheckArchivedFlags::FlagCodes::TRASHED
  item.save!
  assert_equal CheckArchivedFlags::FlagCodes::TRASHED, item.reload.archived
end
# The first oEmbed rendering of an item should create exactly one
# 'embed_code' annotation; subsequent renderings must not create another.
test "should create annotation when is embedded for the first time" do
  create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
  pm = create_project_media
  assert_difference 'Annotation.where(annotation_type: "embed_code").count', 1 do
    pm.as_oembed
  end
  assert_no_difference 'Annotation.where(annotation_type: "embed_code").count' do
    pm.as_oembed
  end
end
# The private #mapping_value helper should return nil (not raise) when given
# an invalid mapping expression.
test "should not crash if mapping value is invalid" do
  assert_nothing_raised do
    item = ProjectMedia.new
    assert_nil item.send(:mapping_value, 'foo', 'bar')
  end
end
# Two different admins updating the same item's analysis in sequence should
# both succeed (no permission or locking failure for the second user).
test "should not crash if another user tries to update media" do
  u1 = create_user
  u2 = create_user
  t = create_team
  create_team_user team: t, user: u1, role: 'admin'
  create_team_user team: t, user: u2, role: 'admin'
  pm = nil
  with_current_user_and_team(u1, t) do
    pm = create_project_media team: t, user: u1
    pm = ProjectMedia.find(pm.id)
    info = { title: 'Title' }
    pm.analysis = info
    pm.save!
  end
  with_current_user_and_team(u2, t) do
    # Reload as the second user and repeat the same update.
    pm = ProjectMedia.find(pm.id)
    info = { title: 'Title' }
    pm.analysis = info
    pm.save!
  end
end
# The cached `description` falls back to the claim quote until a claim
# description is explicitly created, which then takes precedence.
test "should get claim description only if it has been set" do
  RequestStore.store[:skip_cached_field_update] = false
  c = create_claim_media quote: 'Test'
  pm = create_project_media media: c
  assert_equal 'Test', pm.reload.description
  create_claim_description project_media: pm, description: 'Test 2'
  assert_equal 'Test 2', pm.reload.description
end
# With the Keep bot installed and pender_archive enabled for the team, a Link
# item should get an 'archiver' annotation plus its pender_archive_response
# field when archive annotations are created.
test "should create pender_archive annotation for link" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  # Remove pre-existing bots so only this Keep installation is in effect.
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  p = create_project team: t
  pm = create_project_media media: l, team: t, project_id: p.id
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
end
# Archiving applies to links only: a Claim item must not receive an
# 'archiver' annotation even when Keep/pender_archive is enabled.
test "should not create pender_archive annotation when media is not a link" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  c = create_claim_media
  t = create_team
  t.set_limits_keep = true
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  p = create_project team: t
  pm = create_project_media media: c, project: p
  assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
end
# If the 'Pender Archive' annotation type was never registered, creating
# archive annotations must be a no-op for links too.
test "should not create pender_archive annotation when there is no annotation type" do
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  p = create_project team: t
  pm = create_project_media media: l, project: p
  assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
end
# The archiver should be able to source its data from Media#pender_embed
# (stubbed here to return an embed payload with a screenshot and archives).
test "should create pender_archive annotation using information from pender_embed" do
  Link.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json }.with_indifferent_access }))
  Media.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json }.with_indifferent_access }))
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  pm = create_project_media media: l, team: t
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
  # Always undo any_instance stubs so later tests are unaffected.
  Link.any_instance.unstub(:pender_embed)
  Media.any_instance.unstub(:pender_embed)
end
# When #pender_embed raises, the archiver should fall back to #pender_data
# (stubbed here) and still create the archiver annotation and field.
test "should create pender_archive annotation using information from pender_data" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  Link.any_instance.stubs(:pender_data).returns({ screenshot_taken: 1, 'archives' => {} })
  # Force the embed path to fail so the pender_data fallback is exercised.
  Link.any_instance.stubs(:pender_embed).raises(RuntimeError)
  pm = create_project_media media: l, team: t
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
  Link.any_instance.unstub(:pender_data)
  Link.any_instance.unstub(:pender_embed)
end
# Refreshing a media whose Pender payload now reports a different author_url
# should replace the media's Account (old one deleted) and propagate the new
# account to the item's Elasticsearch document.
test "should update media account when change author_url" do
  setup_elasticsearch
  u = create_user is_admin: true
  t = create_team
  create_team_user user: u, team: t
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  url = 'http://www.facebook.com/meedan/posts/123456'
  author_url = 'http://facebook.com/123456'
  author_normal_url = 'http://www.facebook.com/meedan'
  author2_url = 'http://facebook.com/789123'
  author2_normal_url = 'http://www.facebook.com/meedan2'
  # Initial fetch returns the first author; the refresh (refresh: '1')
  # returns the second author.
  data = { url: url, author_url: author_url, type: 'item' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  data = { url: url, author_url: author2_url, type: 'item' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
  data = { url: author_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'Foo', description: 'Bar', type: 'profile' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
  data = { url: author2_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'NewFoo', description: 'NewBar', type: 'profile' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: author2_url } }).to_return(body: response)
  m = create_media team: t, url: url, account: nil, account_id: nil
  a = m.account
  p = create_project team: t
  Sidekiq::Testing.inline! do
    pm = create_project_media media: m, project: p, disable_es_callbacks: false
    sleep 2
    pm = ProjectMedia.find(pm.id)
    with_current_user_and_team(u, t) do
      # Triggers the Pender refresh stubbed above.
      pm.refresh_media = true
      sleep 2
    end
    new_account = m.reload.account
    assert_not_equal a, new_account
    assert_nil Account.where(id: a.id).last
    result = $repository.find(get_es_id(pm))
    assert_equal 1, result['accounts'].size
    assert_equal result['accounts'].first['id'], new_account.id
  end
end
# ES 'parent_id' should point to the relationship source only for CONFIRMED
# relationships; default and suggested relationships leave the item as its
# own parent (note: the default relationship at s1->t1 has ES callbacks off).
test "should update elasticsearch parent_id field" do
  setup_elasticsearch
  t = create_team
  s1 = create_project_media team: t, disable_es_callbacks: false
  s2 = create_project_media team: t, disable_es_callbacks: false
  s3 = create_project_media team: t, disable_es_callbacks: false
  t1 = create_project_media team: t, disable_es_callbacks: false
  t2 = create_project_media team: t, disable_es_callbacks: false
  t3 = create_project_media team: t, disable_es_callbacks: false
  create_relationship source_id: s1.id, target_id: t1.id
  create_relationship source_id: s2.id, target_id: t2.id, relationship_type: Relationship.confirmed_type, disable_es_callbacks: false
  create_relationship source_id: s3.id, target_id: t3.id, relationship_type: Relationship.suggested_type, disable_es_callbacks: false
  sleep 2
  t1_es = $repository.find(get_es_id(t1))
  assert_equal t1.id, t1_es['parent_id']
  t2_es = $repository.find(get_es_id(t2))
  assert_equal s2.id, t2_es['parent_id']
  t3_es = $repository.find(get_es_id(t3))
  assert_equal t3.id, t3_es['parent_id']
end
# An item's source must belong to the same team as the item, both on update
# and on creation.
test "should validate media source" do
  t = create_team
  t2 = create_team
  s = create_source team: t
  s2 = create_source team: t2
  pm = nil
  assert_difference 'ProjectMedia.count', 2 do
    create_project_media team: t
    pm = create_project_media team: t, source_id: s.id
  end
  # Re-pointing the item to another team's source is invalid.
  assert_raises ActiveRecord::RecordInvalid do
    pm.source_id = s2.id
    pm.save!
  end
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media team: t, source_id: s2.id, skip_autocreate_source: false
  end
end
# When auto-creating sources, the item's source should be derived from the
# media's account — reusing the team-scoped source inside the same team, and
# creating a distinct source (linked to the same account) for another team.
test "should assign media source using account" do
  u = create_user
  t = create_team
  t2 = create_team
  create_team_user team: t, user: u, role: 'admin'
  create_team_user team: t2, user: u, role: 'admin'
  m = nil
  s = nil
  with_current_user_and_team(u, t) do
    m = create_valid_media
    s = m.account.sources.first
    assert_equal t.id, s.team_id
    pm = create_project_media media: m, team: t, skip_autocreate_source: false
    assert_equal s.id, pm.source_id
  end
  pm = create_project_media media: m, team: t2, skip_autocreate_source: false
  s2 = pm.source
  assert_not_nil pm.source_id
  assert_not_equal s.id, s2.id
  assert_equal t2.id, s2.team_id
  assert_equal m.account, s2.accounts.first
end
# Submitting a URL that Pender normalizes to one already in the team should
# still create a new ProjectMedia (the duplicate check is per-item elsewhere).
test "should create media when normalized URL exists" do
  ft = create_field_type field_type: 'image_path', label: 'Image Path'
  at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
  create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
  create_bot name: 'Check Bot'
  url = 'https://www.facebook.com/Ma3komMona/videos/695409680623722'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  t = create_team
  l = create_link team: t, url: url
  pm = create_project_media media: l
  # A different URL whose Pender-normalized form matches the link above.
  url = 'https://www.facebook.com/Ma3komMona/videos/vb.268809099950451/695409680623722/?type=3&theater'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"media","data":{"url":"https://www.facebook.com/Ma3komMona/videos/695409680623722","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  assert_difference 'ProjectMedia.count' do
    pm = ProjectMedia.new
    pm.url = url
    pm.media_type = 'Link'
    pm.team = t
    pm.save!
  end
end
# Moving an item to a terminal status should be allowed even when a required
# task is still unanswered (the status change must not raise).
test "should complete media if there are pending tasks" do
  pm = create_project_media
  s = pm.last_verification_status_obj
  create_task annotated: pm, required: true
  assert_equal 'undetermined', s.reload.get_field('verification_status_status').status
  assert_nothing_raised do
    s.status = 'verified'
    s.save!
  end
end
# The private #account_from_author_url helper should not raise for a valid
# source and URL.
# NOTE(review): relies on `@url` being set in the test setup — confirm the
# enclosing class's setup assigns it.
test "should get account from author URL" do
  s = create_source
  pm = create_project_media
  assert_nothing_raised do
    pm.send :account_from_author_url, @url, s
  end
end
# A locked verification status must not be auto-advanced when a task is
# created with status updates enabled.
test "should not move media to active status if status is locked" do
  pm = create_project_media
  assert_equal 'undetermined', pm.last_verification_status
  s = pm.last_verification_status_obj
  s.locked = true
  s.save!
  create_task annotated: pm, disable_update_status: false
  assert_equal 'undetermined', pm.reload.last_verification_status
end
# The serialized permissions of an item should include 'update Status' for a
# signed-in team member.
test "should have status permission" do
  member = create_user
  team = create_team
  project = create_project team: team
  item = create_project_media project: project
  with_current_user_and_team(member, team) do
    parsed = JSON.parse(item.permissions)
    assert parsed.has_key?('update Status')
  end
end
# An item stripped of all annotations should report a nil verification
# status object instead of raising.
test "should not crash if media does not have status" do
  item = create_project_media
  Annotation.delete_all
  assert_nothing_raised do
    assert_nil item.last_verification_status_obj
  end
end
# Relationship accessors: #targets lists children of a source item and
# #sources lists parents of a target item.
test "should have relationships and parent and children reports" do
  p = create_project
  s1 = create_project_media project: p
  s2 = create_project_media project: p
  t1 = create_project_media project: p
  t2 = create_project_media project: p
  create_project_media project: p
  create_relationship source_id: s1.id, target_id: t1.id
  create_relationship source_id: s2.id, target_id: t2.id
  assert_equal [t1], s1.targets
  assert_equal [t2], s2.targets
  assert_equal [s1], t1.sources
  assert_equal [s2], t2.sources
end
# #related_to resolves the item referenced by the virtual `related_to_id`
# attribute, and is nil when unset.
test "should return related" do
  item = create_project_media
  other = create_project_media
  assert_nil item.related_to
  item.related_to_id = other.id
  assert_equal other, item.related_to
end
# Virtual attributes such as related_to_id must survive YAML serialization
# (used e.g. for background-job payloads).
test "should include extra attributes in serialized object" do
  item = create_project_media
  item.related_to_id = 1
  serialized = YAML.dump(item)
  assert_match(/related_to_id/, serialized)
end
# When the Keep installation has pender_archive disabled, the item should
# report that the pender_archive annotation can be skipped.
test "should skip screenshot archiver" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = false
  tbi.save!
  pm = create_project_media project: create_project(team: t), media: l
  assert pm.should_skip_create_archive_annotation?('pender_archive')
end
# Destroying an item must not raise even if one of its versions points at a
# non-existent associated record (simulated by forcing a bogus associated_id).
test "should destroy project media when associated_id on version is not valid" do
  m = create_valid_media
  t = create_team
  p = create_project team: t
  u = create_user
  create_team_user user: u, team: t, role: 'admin'
  pm = nil
  with_current_user_and_team(u, t) do
    pm = create_project_media project: p, media: m, user: u
    pm.source_id = create_source(team_id: t.id).id
    pm.save
    assert_equal 3, pm.versions.count
  end
  version = pm.versions.last
  # Corrupt the version on purpose to exercise the destroy path.
  version.update_attribute('associated_id', 100)
  assert_nothing_raised do
    pm.destroy
  end
end
# https://errbit.test.meedan.com/apps/581a76278583c6341d000b72/problems/5ca644ecf023ba001260e71d
# https://errbit.test.meedan.com/apps/581a76278583c6341d000b72/problems/5ca4faa1f023ba001260dbae
# Regression test: claims containing Devanagari text and malformed bytes
# (\u0000 sequences from WhatsApp forwards) must still create items.
test "should create claim with Indian characters" do
  str1 = "_Buy Redmi Note 5 Pro Mobile at *2999 Rs* (95�\u0000off) in Flash Sale._\r\n\r\n*Grab this offer now, Deal valid only for First 1,000 Customers. Visit here to Buy-* http://sndeals.win/"
  str2 = "*प्रधानमंत्री छात्रवृति योजना 2019*\n\n*Scholarship Form for 10th or 12th Open Now*\n\n*Scholarship Amount*\n1.50-60�\u0000- Rs. 5000/-\n2.60-80�\u0000- Rs. 10000/-\n3.Above 80�\u0000- Rs. 25000/-\n\n*सभी 10th और 12th के बच्चो व उनके अभिभावकों को ये SMS भेजे ताकि सभी बच्चे इस योजना का लाभ ले सके*\n\n*Click Here for Apply:*\nhttps://bit.ly/2l71tWl"
  [str1, str2].each do |str|
    assert_difference 'ProjectMedia.count' do
      m = create_claim_media quote: str
      create_project_media media: m
    end
  end
end
# A URL Pender flags as unsafe (error code 12) must make item creation raise.
test "should not create project media with unsafe URL" do
  WebMock.disable_net_connect! allow: [CheckConfig.get('storage_endpoint')]
  url = 'http://unsafe.com/'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"error","data":{"code":12}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
  assert_raises RuntimeError do
    pm = create_project_media media: nil, url: url
    # NOTE(review): dead code — the line above raises, so this assertion never
    # runs. Capture the error or check pm.media outside the assert_raises block.
    assert_equal 12, pm.media.pender_error_code
  end
end
# Media metadata (from Pender) and item analysis are independent: setting
# analysis on the item must not overwrite the link's own title/description.
test "should get metadata" do
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  url = 'https://twitter.com/test/statuses/123456'
  response = { 'type' => 'media', 'data' => { 'url' => url, 'type' => 'item', 'title' => 'Media Title', 'description' => 'Media Description' } }.to_json
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  l = create_link url: url
  pm = create_project_media media: l
  assert_equal 'Media Title', l.metadata['title']
  assert_equal 'Media Description', l.metadata['description']
  assert_equal 'Media Title', pm.media.metadata['title']
  assert_equal 'Media Description', pm.media.metadata['description']
  pm.analysis = { title: 'Project Media Title', content: 'Project Media Description' }
  pm.save!
  l = Media.find(l.id)
  pm = ProjectMedia.find(pm.id)
  assert_equal 'Media Title', l.metadata['title']
  assert_equal 'Media Description', l.metadata['description']
  assert_equal 'Project Media Title', pm.analysis['title']
  assert_equal 'Project Media Description', pm.analysis['content']
end
# `demand` is a cached field (smooch request count, aggregated across
# confirmed relationships). Reads must hit the cache (0 SQL queries), the
# value must be indexed in Elasticsearch for sorting, and destroying a
# relationship must re-split the aggregated counts.
test "should cache and sort by demand" do
  setup_elasticsearch
  RequestStore.store[:skip_cached_field_update] = false
  team = create_team
  p = create_project team: team
  create_annotation_type_and_fields('Smooch', { 'Data' => ['JSON', false] })
  pm = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
  ms_pm = get_es_id(pm)
  # assert_queries(0, '=') proves the value comes from cache, not SQL.
  assert_queries(0, '=') { assert_equal(0, pm.demand) }
  create_dynamic_annotation annotation_type: 'smooch', annotated: pm
  assert_queries(0, '=') { assert_equal(1, pm.demand) }
  pm2 = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
  ms_pm2 = get_es_id(pm2)
  assert_queries(0, '=') { assert_equal(0, pm2.demand) }
  2.times { create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2) }
  assert_queries(0, '=') { assert_equal(2, pm2.demand) }
  # test sorting
  result = $repository.find(ms_pm)
  assert_equal result['demand'], 1
  result = $repository.find(ms_pm2)
  assert_equal result['demand'], 2
  result = CheckSearch.new({projects: [p.id], sort: 'demand'}.to_json, nil, team.id)
  assert_equal [pm2.id, pm.id], result.medias.map(&:id)
  result = CheckSearch.new({projects: [p.id], sort: 'demand', sort_type: 'asc'}.to_json, nil, team.id)
  assert_equal [pm.id, pm2.id], result.medias.map(&:id)
  # A confirmed relationship pools demand across source and targets.
  r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
  assert_equal 1, pm.reload.requests_count
  assert_equal 2, pm2.reload.requests_count
  assert_queries(0, '=') { assert_equal(3, pm.demand) }
  assert_queries(0, '=') { assert_equal(3, pm2.demand) }
  pm3 = create_project_media team: team, project_id: p.id
  ms_pm3 = get_es_id(pm3)
  assert_queries(0, '=') { assert_equal(0, pm3.demand) }
  2.times { create_dynamic_annotation(annotation_type: 'smooch', annotated: pm3) }
  assert_queries(0, '=') { assert_equal(2, pm3.demand) }
  create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
  assert_queries(0, '=') { assert_equal(5, pm.demand) }
  assert_queries(0, '=') { assert_equal(5, pm2.demand) }
  assert_queries(0, '=') { assert_equal(5, pm3.demand) }
  create_dynamic_annotation annotation_type: 'smooch', annotated: pm3
  assert_queries(0, '=') { assert_equal(6, pm.demand) }
  assert_queries(0, '=') { assert_equal(6, pm2.demand) }
  assert_queries(0, '=') { assert_equal(6, pm3.demand) }
  # Removing a relationship splits the pooled demand again.
  r.destroy!
  assert_queries(0, '=') { assert_equal(4, pm.demand) }
  assert_queries(0, '=') { assert_equal(2, pm2.demand) }
  assert_queries(0, '=') { assert_equal(4, pm3.demand) }
  # demand(true) forces a recompute, so queries are expected (> 0).
  assert_queries(0, '>') { assert_equal(4, pm.demand(true)) }
  assert_queries(0, '>') { assert_equal(2, pm2.demand(true)) }
  assert_queries(0, '>') { assert_equal(4, pm3.demand(true)) }
end
# `linked_items_count` is a cached field counting confirmed targets of a
# source item; reads must not issue SQL, and destroying a relationship must
# decrement the cache. The forced recompute (true) is allowed to query.
test "should cache number of linked items" do
  RequestStore.store[:skip_cached_field_update] = false
  t = create_team
  pm = create_project_media team: t
  assert_queries(0, '=') { assert_equal(0, pm.linked_items_count) }
  pm2 = create_project_media team: t
  assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
  create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
  assert_queries(0, '=') { assert_equal(1, pm.linked_items_count) }
  assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
  pm3 = create_project_media team: t
  assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
  r = create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
  assert_queries(0, '=') { assert_equal(2, pm.linked_items_count) }
  assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
  assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
  r.destroy!
  assert_queries(0, '=') { assert_equal(1, pm.linked_items_count) }
  assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
  assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
  assert_queries(0, '>') { assert_equal(1, pm.linked_items_count(true)) }
end
# `last_seen` is a cached field: the timestamp of the newest smooch request
# on the item or any confirmed target, reverting when a relationship is
# destroyed.
# NOTE(review): the test name says "number of requests" but the body
# exercises `last_seen` — consider renaming for clarity.
test "should cache number of requests" do
  RequestStore.store[:skip_cached_field_update] = false
  team = create_team
  pm = create_project_media team: team
  t = t0 = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm).created_at.to_i
  assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
  # sleep guarantees strictly increasing timestamps between events.
  sleep 1
  pm2 = create_project_media team: team
  r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
  t = pm2.created_at.to_i
  assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
  sleep 1
  t = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2).created_at.to_i
  assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
  r.destroy!
  assert_queries(0, '=') { assert_equal(t0, pm.last_seen) }
  assert_queries(0, '>') { assert_equal(t0, pm.last_seen(true)) }
end
# `status` is a cached field: reads cost zero SQL queries, updates through
# the verification-status annotation refresh the cache, and status(true)
# forces a recompute (queries allowed).
test "should cache status" do
  RequestStore.store[:skip_cached_field_update] = false
  pm = create_project_media
  assert pm.respond_to?(:status)
  assert_queries 0, '=' do
    assert_equal 'undetermined', pm.status
  end
  s = pm.last_verification_status_obj
  s.status = 'verified'
  s.save!
  assert_queries 0, '=' do
    assert_equal 'verified', pm.status
  end
  assert_queries(0, '>') do
    assert_equal 'verified', pm.status(true)
  end
end
# Cached `title` precedence: claim description overrides the media quote and
# is NOT displaced by a later fact-check title.
test "should cache title" do
  RequestStore.store[:skip_cached_field_update] = false
  pm = create_project_media quote: 'Title 0'
  assert_equal 'Title 0', pm.title
  cd = create_claim_description project_media: pm, description: 'Title 1'
  assert_queries 0, '=' do
    assert_equal 'Title 1', pm.title
  end
  create_fact_check claim_description: cd, title: 'Title 2'
  assert_queries 0, '=' do
    assert_equal 'Title 1', pm.title
  end
  assert_queries(0, '>') do
    assert_equal 'Title 1', pm.reload.title(true)
  end
end
# For items imported via the FETCH channel (Blank media), title and
# description should come from the attached fact-check rather than the media.
test "should cache title for imported items" do
  RequestStore.store[:skip_cached_field_update] = false
  t = create_team
  u = create_user
  create_team_user team: t, user: u, role: 'admin'
  with_current_user_and_team(u, t) do
    pm = ProjectMedia.create!(
      media: Blank.create!,
      team: t,
      user: u,
      channel: { main: CheckChannels::ChannelCodes::FETCH }
    )
    cd = ClaimDescription.new
    cd.skip_check_ability = true
    cd.project_media = pm
    cd.description = '-'
    cd.user = u
    cd.save!
    fc_summary = 'fc_summary'
    fc_title = 'fc_title'
    fc = FactCheck.new
    fc.claim_description = cd
    fc.title = fc_title
    fc.summary = fc_summary
    fc.user = u
    fc.skip_report_update = true
    fc.save!
    assert_equal fc_title, pm.title
    assert_equal fc_summary, pm.description
  end
end
# Cached `description` precedence mirrors the title test: claim description
# overrides the quote and is not displaced by a later fact-check summary.
test "should cache description" do
  RequestStore.store[:skip_cached_field_update] = false
  pm = create_project_media quote: 'Description 0'
  assert_equal 'Description 0', pm.description
  cd = create_claim_description description: 'Description 1', project_media: pm
  assert_queries 0, '=' do
    assert_equal 'Description 1', pm.description
  end
  create_fact_check claim_description: cd, summary: 'Description 2'
  assert_queries 0, '=' do
    assert_equal 'Description 1', pm.description
  end
  assert_queries(0, '>') do
    assert_equal 'Description 1', pm.reload.description(true)
  end
end
# Sortable cached fields (linked_items_count, last_seen) must stay in sync
# between the SQL column (read_attribute) and the Elasticsearch document as
# annotations and relationships are created and destroyed.
test "should index sortable fields" do
  RequestStore.store[:skip_cached_field_update] = false
  # sortable fields are [linked_items_count, last_seen and share_count]
  setup_elasticsearch
  create_annotation_type_and_fields('Smooch', { 'Data' => ['JSON', false] })
  team = create_team
  p = create_project team: team
  pm = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
  result = $repository.find(get_es_id(pm))
  assert_equal 0, result['linked_items_count']
  assert_equal pm.created_at.to_i, result['last_seen']
  assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
  t = t0 = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm).created_at.to_i
  result = $repository.find(get_es_id(pm))
  assert_equal t, result['last_seen']
  assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
  pm2 = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
  r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
  t = pm2.created_at.to_i
  result = $repository.find(get_es_id(pm))
  result2 = $repository.find(get_es_id(pm2))
  assert_equal 1, result['linked_items_count']
  assert_equal 0, result2['linked_items_count']
  assert_equal t, result['last_seen']
  assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
  t = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2).created_at.to_i
  result = $repository.find(get_es_id(pm))
  assert_equal t, result['last_seen']
  assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
  # Destroying the relationship reverts last_seen to the source's own request.
  r.destroy!
  result = $repository.find(get_es_id(pm))
  assert_equal t0, result['last_seen']
  assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
  result = $repository.find(get_es_id(pm))
  result2 = $repository.find(get_es_id(pm2))
  assert_equal 0, result['linked_items_count']
  assert_equal 0, result2['linked_items_count']
end
# In-memory team reassignment is not persisted: the instance reflects the
# new team, but a fresh find still returns the original one.
test "should get team" do
  original_team = create_team
  item = create_project_media team: original_team
  assert_equal original_team, item.reload.team
  other_team = create_team
  item.team = other_team
  assert_equal other_team, item.team
  assert_equal original_team, ProjectMedia.find(item.id).team
end
# CheckSearch defaults: trashed items and confirmed-relationship targets are
# excluded unless show_similar is set; project filters and eslimit apply.
test "should query media" do
  setup_elasticsearch
  t = create_team
  p = create_project team: t
  p1 = create_project team: t
  p2 = create_project team: t
  pm = create_project_media team: t, project_id: p.id, disable_es_callbacks: false
  create_project_media team: t, project_id: p1.id, disable_es_callbacks: false
  create_project_media team: t, archived: CheckArchivedFlags::FlagCodes::TRASHED, project_id: p.id, disable_es_callbacks: false
  pm = create_project_media team: t, project_id: p1.id, disable_es_callbacks: false
  create_relationship source_id: pm.id, target_id: create_project_media(team: t, project_id: p.id, disable_es_callbacks: false).id, relationship_type: Relationship.confirmed_type
  sleep 2
  assert_equal 3, CheckSearch.new({ team_id: t.id }.to_json, nil, t.id).medias.size
  assert_equal 4, CheckSearch.new({ show_similar: true, team_id: t.id }.to_json, nil, t.id).medias.size
  assert_equal 2, CheckSearch.new({ team_id: t.id, projects: [p1.id] }.to_json, nil, t.id).medias.size
  assert_equal 0, CheckSearch.new({ team_id: t.id, projects: [p2.id] }.to_json, nil, t.id).medias.size
  assert_equal 1, CheckSearch.new({ team_id: t.id, projects: [p1.id], eslimit: 1 }.to_json, nil, t.id).medias.size
end
# Fires 15 concurrent partial updates at the same Elasticsearch document
# (retry_on_conflict: 0, so version conflicts surface as exceptions) and
# verifies they are retried rather than failing the whole run.
test "should handle indexing conflicts" do
  require File.join(Rails.root, 'lib', 'middleware_sidekiq_server_retry')
  Sidekiq::Testing.server_middleware do |chain|
    chain.add ::Middleware::Sidekiq::Server::Retry
  end
  class ElasticSearchTestWorker
    include Sidekiq::Worker
    attr_accessor :retry_count
    sidekiq_options retry: 5
    sidekiq_retries_exhausted do |_msg, e|
      raise e
    end
    # Performs one partial update; on a conflict, retries up to 5 times on
    # the same worker instance before re-raising.
    def perform(id)
      begin
        client = $repository.client
        client.update index: CheckElasticSearchModel.get_index_alias, id: id, retry_on_conflict: 0, body: { doc: { updated_at: Time.now + rand(50).to_i } }
      rescue StandardError => e
        # FIX: the original `retry_count = retry_count.to_i + 1` created a new
        # local variable on every call (always 1), so the retry limit was never
        # reached and a persistent error recursed without bound. Using the
        # attr_accessor via `self.` persists the counter across retries.
        # Also rescue StandardError instead of Exception so SignalException /
        # SystemExit are not swallowed.
        self.retry_count = retry_count.to_i + 1
        if retry_count < 5
          perform(id)
        else
          raise e
        end
      end
    end
  end
  setup_elasticsearch
  threads = []
  pm = create_project_media media: nil, quote: 'test', disable_es_callbacks: false
  id = get_es_id(pm)
  15.times do |_|
    threads << Thread.start do
      Sidekiq::Testing.inline! do
        ElasticSearchTestWorker.perform_async(id)
      end
    end
  end
  threads.map(&:join)
end
test "should localize status" do
  I18n.locale = :pt
  pm = create_project_media
  # Core status labels come from the bundled pt translations.
  assert_equal 'Não Iniciado', pm.status_i18n(nil, { locale: 'pt' })
  t = create_team slug: 'test'
  value = {
    label: 'Field label',
    active: 'test',
    default: 'undetermined',
    statuses: [
      { id: 'undetermined', locales: { en: { label: 'Undetermined', description: '' } }, style: { color: 'blue' } },
      { id: 'test', locales: { en: { label: 'Test', description: '' }, pt: { label: 'Teste', description: '' } }, style: { color: 'red' } }
    ]
  }
  t.set_media_verification_statuses(value)
  t.save!
  p = create_project team: t
  pm = create_project_media project: p
  # The 'undetermined' custom status defines no pt locale above, so the
  # en label is returned even when pt is requested.
  assert_equal 'Undetermined', pm.status_i18n(nil, { locale: 'pt' })
  # When a custom translation key exists for the status, it wins.
  I18n.stubs(:exists?).with('custom_message_status_test_test').returns(true)
  I18n.stubs(:t).returns('')
  I18n.stubs(:t).with(:custom_message_status_test_test, { locale: 'pt' }).returns('Teste')
  assert_equal 'Teste', pm.status_i18n('test', { locale: 'pt' })
  I18n.unstub(:t)
  I18n.unstub(:exists?)
  # Restore the default locale for subsequent tests.
  I18n.locale = :en
end
test "should not throw exception for trashed item if request does not come from a client" do
  # BUG FIX: the original passed `project: p`, but `p` was never defined
  # in this test, so it resolved to Kernel#p (returning nil). Create the
  # item without a project explicitly instead.
  pm = create_project_media
  pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
  pm.save!
  User.current = nil
  # With no current user (request not from a client) annotating a trashed
  # item is tolerated...
  assert_nothing_raised do
    create_comment annotated: pm
  end
  u = create_user(is_admin: true)
  User.current = u
  # ...but with a current user the validation rejects it.
  assert_raises ActiveRecord::RecordInvalid do
    create_comment annotated: pm
  end
  User.current = nil
end
test "should set initial custom status of orphan item" do
  # An item created without any project should still receive the team's
  # default custom verification status.
  team = create_team
  statuses = {
    label: 'Status',
    default: 'stop',
    active: 'done',
    statuses: [
      { id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
      { id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
    ]
  }
  team.send :set_media_verification_statuses, statuses
  team.save!
  orphan = create_project_media project: nil, team: team
  assert_equal 'stop', orphan.last_status
end
test "should change custom status of orphan item" do
  # Changing the custom status of a project-less item must not raise.
  team = create_team
  statuses = {
    label: 'Status',
    default: 'stop',
    active: 'done',
    statuses: [
      { id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
      { id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
    ]
  }
  team.send :set_media_verification_statuses, statuses
  team.save!
  orphan = create_project_media project: nil, team: team
  assert_nothing_raised do
    status = orphan.last_status_obj
    status.status = 'done'
    status.save!
  end
end
test "should clear caches when report is updated" do
  # clear_caches is stubbed out globally for the suite; re-enable it here
  # so publishing a report actually reaches CcDeville.
  ProjectMedia.any_instance.unstub(:clear_caches)
  Sidekiq::Testing.inline! do
    # Expect the cache-busting endpoint to be hit for the item's URLs.
    CcDeville.stubs(:clear_cache_for_url).times(6)
    pm = create_project_media
    pm.skip_clear_cache = false
    RequestStore.store[:skip_clear_cache] = false
    PenderClient::Request.stubs(:get_medias)
    publish_report(pm)
  end
  CcDeville.unstub(:clear_cache_for_url)
  PenderClient::Request.unstub(:get_medias)
  # Restore the global stub for the rest of the suite.
  ProjectMedia.any_instance.stubs(:clear_caches)
end
test "should generate short URL when getting embed URL for the first time" do
  item = create_project_media
  # First call mints a shortened URL record...
  assert_difference 'Shortener::ShortenedUrl.count' do
    assert_match /^http/, item.embed_url
  end
  # ...subsequent calls reuse the same record.
  assert_no_difference 'Shortener::ShortenedUrl.count' do
    assert_match /^http/, item.embed_url
  end
end
test "should validate duplicate based on team" do
  # Duplicate detection is scoped per team: the same media may exist once
  # per team, regardless of which list (project) it sits in.
  t = create_team
  p = create_project team: t
  t2 = create_team
  create_project team: t2 # previously assigned to an unused local `p2`
  # Create media in different team with no list
  m = create_valid_media
  create_project_media team: t, media: m
  assert_nothing_raised do
    create_project_media team: t2, url: m.url
  end
  # Try to add same item to list
  assert_raises RuntimeError do
    create_project_media team: t, url: m.url
  end
  # Create item in a list then try to add it via all items(with no list)
  m2 = create_valid_media
  create_project_media team: t, project_id: p.id, media: m2
  assert_raises RuntimeError do
    create_project_media team: t, url: m2.url
  end
  # Add same item to list in different team
  assert_nothing_raised do
    create_project_media team: t2, url: m2.url
  end
  # create item in a list then try to add it to all items in different team
  m3 = create_valid_media
  create_project_media team: t, project_id: p.id, media: m3
  assert_nothing_raised do
    create_project_media team: t2, url: m3.url
  end
end
test "should restore and confirm item if not super admin" do
  setup_elasticsearch
  t = create_team
  p = create_project team: t
  p3 = create_project team: t
  u = create_user
  # Team admin but explicitly NOT a super admin.
  create_team_user user: u, team: t, role: 'admin', is_admin: false
  Sidekiq::Testing.inline! do
    # test restore
    pm = create_project_media project: p, disable_es_callbacks: false, archived: CheckArchivedFlags::FlagCodes::TRASHED
    sleep 1
    result = $repository.find(get_es_id(pm))['project_id']
    assert_equal p.id, result
    assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.archived
    # Restore from trash while moving to another folder; both changes
    # should persist in the database and the ES index.
    with_current_user_and_team(u, t) do
      pm.archived = CheckArchivedFlags::FlagCodes::NONE
      pm.disable_es_callbacks = false
      pm.project_id = p3.id
      pm.save!
    end
    pm = pm.reload
    assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived
    assert_equal p3.id, pm.project_id
    sleep 1
    result = $repository.find(get_es_id(pm))['project_id']
    assert_equal p3.id, result
    # test confirm
    pm = create_project_media project: p, disable_es_callbacks: false, archived: CheckArchivedFlags::FlagCodes::UNCONFIRMED
    sleep 1
    assert_equal p.id, pm.project_id
    result = $repository.find(get_es_id(pm))['project_id']
    assert_equal p.id, result
    assert_equal CheckArchivedFlags::FlagCodes::UNCONFIRMED, pm.archived
    # Confirm the unconfirmed item with the same move, same expectations.
    with_current_user_and_team(u, t) do
      pm.archived = CheckArchivedFlags::FlagCodes::NONE
      pm.disable_es_callbacks = false
      pm.project_id = p3.id
      pm.save!
    end
    pm = pm.reload
    assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived
    assert_equal p3.id, pm.project_id
    sleep 1
    result = $repository.find(get_es_id(pm))['project_id']
    assert_equal p3.id, result
  end
end
test "should set media type for links" do
  # set_media_type derives the cached media_type from the backing media.
  link = create_link
  item = create_project_media url: link.url
  item.send :set_media_type
  assert_equal 'Link', item.media_type
end
test "should create link and account using team pender key" do
  t = create_team
  p = create_project(team: t)
  Team.stubs(:current).returns(t)
  # First URL pair resolves with the default pender key...
  url1 = random_url
  author_url1 = random_url
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => url1, "type" => "item", "title" => "Default token", "author_url" => author_url1}})
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Default token", "author_name" => 'Author with default token'}})
  # ...second URL pair only resolves with the team-specific key.
  url2 = random_url
  author_url2 = random_url
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url2 }, 'specific_token').returns({"type" => "media","data" => {"url" => url2, "type" => "item", "title" => "Specific token", "author_url" => author_url2}})
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url2 }, 'specific_token').returns({"type" => "media","data" => {"url" => author_url2, "type" => "profile", "title" => "Specific token", "author_name" => 'Author with specific token'}})
  pm = ProjectMedia.create url: url1
  assert_equal 'Default token', ProjectMedia.find(pm.id).media.metadata['title']
  assert_equal 'Author with default token', ProjectMedia.find(pm.id).media.account.metadata['author_name']
  # After the team configures its own pender key, both the link and its
  # author account must be fetched with that key.
  t.set_pender_key = 'specific_token'; t.save!
  pm = ProjectMedia.create! url: url2
  assert_equal 'Specific token', ProjectMedia.find(pm.id).media.metadata['title']
  assert_equal 'Author with specific token', ProjectMedia.find(pm.id).media.account.metadata['author_name']
  Team.unstub(:current)
  PenderClient::Request.unstub(:get_medias)
end
test "should refresh using team pender key" do
  t = create_team
  l = create_link
  Team.stubs(:current).returns(t)
  pm = create_project_media media: l, project: create_project(team: t)
  author_url1 = random_url
  # refresh: '1' forces pender to re-parse; stub both the default-key and
  # the team-specific-key responses.
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: l.url, refresh: '1' }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => l.url, "type" => "item", "title" => "Default token", "author_url" => author_url1}})
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Default token", "author_name" => 'Author with default token'}})
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: l.url, refresh: '1' }, 'specific_token').returns({"type" => "media","data" => {"url" => l.url, "type" => "item", "title" => "Specific token", "author_url" => author_url1}})
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, 'specific_token').returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Author with specific token", "author_name" => 'Author with specific token'}})
  assert pm.media.metadata['title'].blank?
  pm.refresh_media = true
  pm.save!
  assert_equal 'Default token', ProjectMedia.find(pm.id).media.metadata['title']
  # Once the team sets its own pender key, refreshing uses it instead.
  t.set_pender_key = 'specific_token'; t.save!
  pm = ProjectMedia.find(pm.id)
  pm.refresh_media = true; pm.save!
  assert_equal 'Specific token', ProjectMedia.find(pm.id).media.metadata['title']
  Team.unstub(:current)
  PenderClient::Request.unstub(:get_medias)
end
test "should not replace one project media by another if not from the same team" do
  # replace_by is restricted to items belonging to the same team.
  blank_item = create_project_media team: create_team, media: Blank.create!
  replacement = create_project_media team: create_team
  assert_raises RuntimeError do
    blank_item.replace_by(replacement)
  end
end
test "should not replace one project media by another if media is not blank" do
  # Only Blank-media items may be replaced.
  team = create_team
  existing = create_project_media team: team
  replacement = create_project_media team: team
  assert_raises RuntimeError do
    existing.replace_by(replacement)
  end
end
test "should replace a blank project media by another project media" do
  setup_elasticsearch
  t = create_team
  u = create_user
  create_team_user team: t, user: u, role: 'admin'
  with_current_user_and_team(u, t) do
    RequestStore.store[:skip_clear_cache] = true
    # A blank (imported) item with its own published report and status...
    old = create_project_media team: t, media: Blank.create!, channel: { main: CheckChannels::ChannelCodes::FETCH }, disable_es_callbacks: false
    old_r = publish_report(old)
    old_s = old.last_status_obj
    # ...replaced by a real item that also has a report and status.
    new = create_project_media team: t, media: create_uploaded_video, disable_es_callbacks: false
    new_r = publish_report(new)
    new_s = new.last_status_obj
    old.replace_by(new)
    # The blank item is destroyed, and the replacement keeps the blank
    # item's report/status annotations (its own are discarded).
    assert_nil ProjectMedia.find_by_id(old.id)
    assert_nil Annotation.find_by_id(new_s.id)
    assert_nil Annotation.find_by_id(new_r.id)
    assert_equal old_r, new.get_dynamic_annotation('report_design')
    assert_equal old_s, new.get_dynamic_annotation('verification_status')
    new = new.reload
    # Creator label and channel are inherited from the blank item.
    assert_equal 'Import', new.creator_name
    data = { "main" => CheckChannels::ChannelCodes::FETCH }
    assert_equal data, new.channel
    # Verify ES
    result = $repository.find(get_es_id(new))
    assert_equal [CheckChannels::ChannelCodes::FETCH], result['channel']
  end
end
test "should create metrics annotation after create a project media" do
  create_annotation_type_and_fields('Metrics', { 'Data' => ['JSON', false] })
  url = 'https://twitter.com/meedan/status/1321600654750613505'
  # Pender returns social-media engagement metrics with the parsed item...
  response = {"type" => "media","data" => {"url" => url, "type" => "item", "metrics" => {"facebook"=> {"reaction_count" => 2, "comment_count" => 5, "share_count" => 10, "comment_plugin_count" => 0 }}}}
  PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url }, CheckConfig.get('pender_key')).returns(response)
  pm = create_project_media media: nil, url: url
  # ...which should be stored verbatim in a 'metrics' annotation.
  assert_equal response['data']['metrics'], JSON.parse(pm.get_annotations('metrics').last.load.get_field_value('metrics_data'))
  PenderClient::Request.unstub(:get_medias)
end
test "should cache metadata value" do
  at = create_annotation_type annotation_type: 'task_response'
  create_field_instance annotation_type_object: at, name: 'response_test'
  t = create_team
  tt = create_team_task fieldset: 'metadata', team_id: t.id
  pm = create_project_media team: t
  m = pm.get_annotations('task').last.load
  value = random_string
  m.response = { annotation_type: 'task_response', set_fields: { response_test: value }.to_json }.to_json
  m.save!
  # Answering the task populates the cached field: reading it afterwards
  # issues no SQL queries.
  assert_queries(0, '=') do
    assert_equal value, pm.send("task_value_#{tt.id}")
  end
  assert_not_nil Rails.cache.read("project_media:task_value:#{pm.id}:#{tt.id}")
  assert_not_nil pm.reload.task_value(tt.id)
  # Destroying the answer must invalidate the cache entry too.
  d = m.reload.first_response_obj
  d.destroy!
  assert_nil Rails.cache.read("project_media:task_value:#{pm.id}:#{tt.id}")
  assert_nil pm.reload.task_value(tt.id)
end
test "should return item columns values" do
  # list_columns_values must serve configured metadata columns from cache,
  # issuing at most one query (for the team tasks).
  RequestStore.store[:skip_cached_field_update] = false
  at = create_annotation_type annotation_type: 'task_response'
  create_field_instance annotation_type_object: at, name: 'response_test'
  t = create_team
  tt1 = create_team_task fieldset: 'metadata', team_id: t.id
  tt2 = create_team_task fieldset: 'metadata', team_id: t.id
  t.list_columns = ["task_value_#{tt1.id}", "task_value_#{tt2.id}"]
  t.save!
  pm = create_project_media team: t.reload
  # Block parameter renamed from `t` to `task`: it previously shadowed the
  # team local defined above.
  m = pm.get_annotations('task').map(&:load).select{ |task| task.team_task_id == tt1.id }.last
  m.response = { annotation_type: 'task_response', set_fields: { response_test: 'Foo Value' }.to_json }.to_json
  m.save!
  m = pm.get_annotations('task').map(&:load).select{ |task| task.team_task_id == tt2.id }.last
  m.response = { annotation_type: 'task_response', set_fields: { response_test: 'Bar Value' }.to_json }.to_json
  m.save!
  pm.team
  # The only SQL query should be to get the team tasks
  assert_queries(1, '=') do
    values = pm.list_columns_values
    assert_equal 2, values.size
    assert_equal 'Foo Value', values["task_value_#{tt1.id}"]
    assert_equal 'Bar Value', values["task_value_#{tt2.id}"]
  end
  pm2 = create_project_media
  pm2.team
  pm2.media
  # The only SQL query should be to get the team tasks
  assert_queries(1, '=') do
    assert_equal 8, pm2.list_columns_values.keys.size
  end
end
test "should return error if method does not exist" do
  # Unknown dynamic attribute lookups must raise, not silently return nil.
  item = create_project_media
  assert_raises(NoMethodError) { item.send(random_string) }
end
test "should cache published value" do
  RequestStore.store[:skip_cached_field_update] = false
  pm = create_project_media
  pm2 = create_project_media team: pm.team
  create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
  # Cached report_status reads issue no SQL queries.
  assert_queries(0, '=') { assert_equal 'unpublished', pm.report_status }
  assert_queries(0, '=') { assert_equal 'unpublished', pm2.report_status }
  r = publish_report(pm)
  pm = ProjectMedia.find(pm.id)
  # Publishing the parent's report propagates to the confirmed child.
  assert_queries(0, '=') { assert_equal 'published', pm.report_status }
  assert_queries(0, '=') { assert_equal 'published', pm2.report_status }
  r = Dynamic.find(r.id)
  r.set_fields = { state: 'paused' }.to_json
  r.action = 'pause'
  r.save!
  pm = ProjectMedia.find(pm.id)
  assert_queries(0, '=') { assert_equal 'paused', pm.report_status }
  assert_queries(0, '=') { assert_equal 'paused', pm2.report_status }
  Rails.cache.clear
  # Cold cache: recomputation may query, but the value is unchanged.
  assert_queries(0, '>') { assert_equal 'paused', pm.report_status }
  pm3 = create_project_media team: pm.team
  assert_queries(0, '=') { assert_equal 'unpublished', pm3.report_status }
  # Creating/destroying a confirmed relationship updates the target's
  # cached status accordingly.
  r = create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
  assert_queries(0, '=') { assert_equal 'paused', pm3.report_status }
  r.destroy!
  assert_queries(0, '=') { assert_equal 'unpublished', pm3.report_status }
end
test "should cache tags list" do
  # tags_as_sentence is a cached field: reads must be query-free and the
  # cache must track tag creation and deletion.
  RequestStore.store[:skip_cached_field_update] = false
  item = create_project_media
  assert_queries(0, '=') { assert_equal '', item.tags_as_sentence }
  first_tag = create_tag tag: 'foo', annotated: item
  item = ProjectMedia.find(item.id)
  assert_queries(0, '=') { assert_equal 'foo', item.tags_as_sentence }
  create_tag tag: 'bar', annotated: item
  item = ProjectMedia.find(item.id)
  assert_queries(0, '=') { assert_equal 'foo, bar', item.tags_as_sentence }
  first_tag.destroy!
  item = ProjectMedia.find(item.id)
  assert_queries(0, '=') { assert_equal 'bar', item.tags_as_sentence }
  # With a cold cache the value is recomputed (queries allowed).
  Rails.cache.clear
  assert_queries(0, '>') { assert_equal 'bar', item.tags_as_sentence }
end
test "should cache media published at" do
  RequestStore.store[:skip_cached_field_update] = false
  url = 'http://twitter.com/test/123456'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  pm = create_project_media media: nil, url: url
  # 601720200 == 1989-01-25 08:30:00 UTC as a Unix timestamp.
  assert_queries(0, '=') { assert_equal 601720200, pm.media_published_at }
  # Refreshing the media re-fetches from pender and updates the cache
  # (new timestamp is one minute later).
  response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:31:00"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
  pm = ProjectMedia.find(pm.id)
  pm.refresh_media = true
  pm.save!
  pm = ProjectMedia.find(pm.id)
  assert_queries(0, '=') { assert_equal 601720260, pm.media_published_at }
end
test "should cache number of related items" do
  # related_count is cached on both sides of a relationship and must be
  # kept in sync as relationships come and go.
  RequestStore.store[:skip_cached_field_update] = false
  team = create_team
  parent = create_project_media team: team
  child = create_project_media team: team
  assert_queries(0, '=') { assert_equal 0, parent.related_count }
  assert_queries(0, '=') { assert_equal 0, child.related_count }
  relationship = create_relationship source_id: parent.id, target_id: child.id
  assert_queries(0, '=') { assert_equal 1, parent.related_count }
  assert_queries(0, '=') { assert_equal 1, child.related_count }
  relationship.destroy!
  assert_queries(0, '=') { assert_equal 0, parent.related_count }
  assert_queries(0, '=') { assert_equal 0, child.related_count }
end
test "should cache type of media" do
  RequestStore.store[:skip_cached_field_update] = false
  setup_elasticsearch
  pm = create_project_media
  # Warm cache: no queries needed.
  assert_queries(0, '=') { assert_equal 'Link', pm.type_of_media }
  Rails.cache.clear
  # Cold cache: exactly one query recomputes it, then it is cached again.
  assert_queries(1, '=') { assert_equal 'Link', pm.type_of_media }
  assert_queries(0, '=') { assert_equal 'Link', pm.type_of_media }
  sleep 1
  # ES stores the numeric index of the media type, not the string.
  es = $repository.find(get_es_id(pm))
  assert_equal Media.types.index(pm.type_of_media), es['type_of_media']
end
test "should cache project title" do
  RequestStore.store[:skip_cached_field_update] = false
  t = create_team
  p1 = create_project title: 'Foo', team: t
  p2 = create_project title: 'Bar', team: t
  pm = create_project_media team: t
  default_folder = t.default_folder
  # With no explicit project the item sits in the team's default folder.
  assert_queries(0, '=') { assert_equal default_folder.title, pm.folder }
  pm.project_id = p1.id
  pm.save!
  assert_queries(0, '=') { assert_equal 'Foo', pm.folder }
  # Renaming the project must refresh the cached folder title.
  p1.title = 'Test'
  p1.save!
  assert_queries(0, '=') { assert_equal 'Test', pm.folder }
  pm.project_id = p2.id
  pm.save!
  assert_queries(0, '=') { assert_equal 'Bar', pm.folder }
  assert_equal p2.id, pm.reload.project_id
  Sidekiq::Testing.inline! do
    # Destroying the project moves the item back to the default folder
    # and updates the cached title.
    p2.destroy!
    assert_equal t.default_folder.id, pm.reload.project_id
    assert_queries(0, '=') { assert_equal default_folder.title, pm.folder }
  end
end
test "should get original title for uploaded files" do
  RequestStore.store[:skip_cached_field_update] = false
  item = create_project_media media: create_uploaded_image
  create_claim_description project_media: item, description: 'Custom Title'
  # The claim description overrides the display title...
  assert_equal 'Custom Title', item.reload.title
  # ...while the upload's filename stays reachable as the original title.
  assert_equal media_filename('rails.png'), item.reload.original_title
end
test "should move secondary item to same main item project" do
  RequestStore.store[:skip_cached_field_update] = false
  t = create_team
  p = create_project team: t
  p2 = create_project team: t
  pm = create_project_media project: p
  pm2 = create_project_media project: p
  pm3 = create_project_media project: p
  assert_equal p.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm.id}:folder")
  create_relationship source_id: pm.id, target_id: pm2.id
  create_relationship source_id: pm.id, target_id: pm3.id
  # Moving the main item should drag its secondary (related) items along.
  pm.project_id = p2.id
  pm.save!
  assert_equal p2.id, pm2.reload.project_id
  assert_equal p2.id, pm3.reload.project_id
  # verify cached folder value
  assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm.id}:folder")
  assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm2.id}:folder")
  assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm3.id}:folder")
end
test "should get report information" do
  item = create_project_media
  report_fields = {
    title: 'Report text title',
    text: 'Report text content',
    headline: 'Visual card title',
    description: 'Visual card content'
  }
  publish_report(item, {}, nil, report_fields)
  item = ProjectMedia.find(item.id).reload
  # Each published report field is exposed through its own reader.
  assert_equal 'Report text title', item.report_text_title
  assert_equal 'Report text content', item.report_text_content
  assert_equal 'Visual card title', item.report_visual_card_title
  assert_equal 'Visual card content', item.report_visual_card_content
end
test "should get extracted text" do
  # extracted_text always returns a String, even with no OCR data.
  item = create_project_media
  assert_kind_of String, item.extracted_text
end
test "should validate archived value" do
  # A known flag code is accepted...
  assert_difference 'ProjectMedia.count' do
    create_project_media archived: CheckArchivedFlags::FlagCodes::SPAM
  end
  # ...while a non-numeric structure is rejected.
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media archived: { main: 90 }
  end
end
test "should validate channel value" do
  # validate channel create (should be in allowed values)
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media channel: { main: 90 }
  end
  # String codes are rejected too — only integers from the allowed set.
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media channel: { main: '90' }
  end
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media channel: { others: [90] }
  end
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media channel: { main: CheckChannels::ChannelCodes::MANUAL, others: [90] }
  end
  pm = nil
  assert_difference 'ProjectMedia.count' do
    pm = create_project_media channel: { main: CheckChannels::ChannelCodes::WHATSAPP }
  end
  # validate channel update (should not update existing value)
  assert_raises ActiveRecord::RecordInvalid do
    pm.channel = { main: CheckChannels::ChannelCodes::MESSENGER }
    pm.save!
  end
  assert_raises ActiveRecord::RecordInvalid do
    pm.channel = { others: [90] }
    pm.save!
  end
  assert_nothing_raised do
    # NOTE(review): `others` here is an array containing a hash
    # ([{ main: ... }]) rather than the plain integer codes used elsewhere
    # in this test — confirm this is the intended shape for the happy path.
    pm.channel = { main: CheckChannels::ChannelCodes::WHATSAPP, others: [main: CheckChannels::ChannelCodes::MESSENGER]}
    pm.save!
  end
  # Set channel with default value MANUAL
  pm2 = create_project_media
  data = { "main" => CheckChannels::ChannelCodes::MANUAL }
  assert_equal data, pm2.channel
  # Set channel with API if ApiKey exists
  a = create_api_key
  ApiKey.current = a
  pm3 = create_project_media channel: nil
  data = { "main" => CheckChannels::ChannelCodes::API }
  assert_equal data, pm3.channel
  ApiKey.current = nil
end
test "should not create duplicated media with for the same uploaded file" do
  team = create_team
  team2 = create_team
  # Same check for each uploaded media type and its fixture file.
  {
    UploadedVideo: 'rails.mp4',
    UploadedImage: 'rails.png',
    UploadedAudio: 'rails.mp3'
  }.each_pair do |media_type, filename|
    # first time the video is added creates a new media
    medias_count = media_type.to_s.constantize.count
    assert_difference 'ProjectMedia.count', 1 do
      pm = ProjectMedia.new media_type: media_type.to_s, team: team
      File.open(File.join(Rails.root, 'test', 'data', filename)) do |f|
        pm.file = f
        pm.save!
      end
    end
    assert_equal medias_count + 1, media_type.to_s.constantize.count
    # second time the video is added should not create new media
    medias_count = media_type.to_s.constantize.count
    assert_difference 'ProjectMedia.count', 1 do
      pm = ProjectMedia.new media_type: media_type.to_s, team: team2
      File.open(File.join(Rails.root, 'test', 'data', filename)) do |f|
        pm.file = f
        pm.save!
      end
    end
    # Media count unchanged: the second upload reuses the existing media
    # record (deduplicated by file), even across teams.
    assert_equal medias_count, media_type.to_s.constantize.count
  end
end
test "should run callbacks for bulk-update status" do
  ProjectMedia.stubs(:clear_caches).returns(nil)
  setup_elasticsearch
  t = create_team
  u = create_user
  create_team_user team: t, user: u, role: 'admin'
  p = create_project team: t
  # Team rule: whenever an item's status becomes 'verified', move it to
  # project `p` — used below to prove rules fire on bulk updates.
  rules = []
  rules << {
    "name": random_string,
    "project_ids": "",
    "rules": {
      "operator": "and",
      "groups": [
        {
          "operator": "and",
          "conditions": [
            {
              "rule_definition": "status_is",
              "rule_value": "verified"
            }
          ]
        }
      ]
    },
    "actions": [
      {
        "action_definition": "move_to_project",
        "action_value": p.id.to_s
      }
    ]
  }
  t.rules = rules.to_json
  t.save!
  with_current_user_and_team(u, t) do
    # pm has a published report, so its status must NOT be bulk-updated.
    pm = create_project_media team: t, disable_es_callbacks: false
    publish_report(pm)
    pm_status = pm.last_status
    pm2 = create_project_media team: t, disable_es_callbacks: false
    pm3 = create_project_media team: t, disable_es_callbacks: false
    sleep 2
    ids = [pm.id, pm2.id, pm3.id]
    updates = { action: 'update_status', params: { status: 'verified' }.to_json }
    Sidekiq::Testing.inline! do
      ProjectMedia.bulk_update(ids, updates, t)
      sleep 2
      # Verify nothing happens for published reports
      assert_equal pm_status, pm.reload.last_status
      result = $repository.find(get_es_id(pm))
      assert_equal pm_status, result['verification_status']
      # Verify rules callback
      assert_equal t.default_folder.id, pm.reload.project_id
      assert_equal p.id, pm2.reload.project_id
      assert_equal p.id, pm3.reload.project_id
      # Verify ES index
      result = $repository.find(get_es_id(pm2))
      assert_equal 'verified', result['verification_status']
      result = $repository.find(get_es_id(pm3))
      assert_equal 'verified', result['verification_status']
    end
  end
  ProjectMedia.unstub(:clear_caches)
end
test "should cache picture and creator name" do
  RequestStore.store[:skip_cached_field_update] = false
  author = create_user
  item = create_project_media channel: { main: CheckChannels::ChannelCodes::MANUAL }, user: author
  # picture: cached read is query-free; forced recompute may hit the DB.
  assert_queries(0, '=') { assert_equal('', item.picture) }
  assert_queries(0, '>') { assert_equal('', item.picture(true)) }
  # creator name behaves the same way.
  assert_queries(0, '=') { assert_equal(author.name, item.creator_name) }
  assert_queries(0, '>') { assert_equal(author.name, item.creator_name(true)) }
end
test "should get creator name based on channel" do
  RequestStore.store[:skip_cached_field_update] = false
  u = create_user
  pm = create_project_media user: u
  assert_equal pm.creator_name, u.name
  # Tipline/Fetch channels report fixed creator labels, not the user name.
  pm2 = create_project_media user: u, channel: { main: CheckChannels::ChannelCodes::WHATSAPP }
  assert_equal pm2.creator_name, 'Tipline'
  pm3 = create_project_media user: u, channel: { main: CheckChannels::ChannelCodes::FETCH }
  assert_equal pm3.creator_name, 'Import'
  # update cache based on user update
  u.name = 'update name'
  u.save!
  assert_equal pm.creator_name, 'update name'
  assert_equal pm.creator_name(true), 'update name'
  assert_equal pm2.creator_name, 'Tipline'
  assert_equal pm2.creator_name(true), 'Tipline'
  assert_equal pm3.creator_name, 'Import'
  assert_equal pm3.creator_name(true), 'Import'
  # Deleting the user falls back to 'Anonymous' for the manual item only;
  # channel-based labels are unaffected.
  User.delete_check_user(u)
  assert_equal pm.creator_name, 'Anonymous'
  assert_equal pm.reload.creator_name(true), 'Anonymous'
  assert_equal pm2.creator_name, 'Tipline'
  assert_equal pm2.creator_name(true), 'Tipline'
  assert_equal pm3.creator_name, 'Import'
  assert_equal pm3.creator_name(true), 'Import'
end
test "should create blank item" do
  # Creating a Blank-typed item persists both the ProjectMedia and its
  # backing Blank media record.
  assert_difference ['ProjectMedia.count', 'Blank.count'] do
    ProjectMedia.create! media_type: 'Blank', team: create_team
  end
end
test "should convert old hash" do
  t = create_team
  pm = create_project_media team: t
  # Simulate legacy settings stored as ActionController::Parameters
  # instead of a plain hash; custom_statuses must cope without raising.
  Team.any_instance.stubs(:settings).returns(ActionController::Parameters.new({ media_verification_statuses: { statuses: [] } }))
  assert_nothing_raised do
    pm.custom_statuses
  end
  Team.any_instance.unstub(:settings)
end
test "should assign item to default project if project not set" do
  # Items created without an explicit project land in the default folder.
  team = create_team
  item = create_project_media team: team
  assert_equal team.default_folder, item.project
end
test "should detach similar items when trash parent item" do
  # Trashing a parent cascades to confirmed children but detaches
  # suggested ones, which stay untrashed in their project.
  setup_elasticsearch
  RequestStore.store[:skip_delete_for_ever] = true
  t = create_team
  p = create_project team: t
  pm = create_project_media project: p
  pm1_c = create_project_media project: p
  pm1_s = create_project_media project: p
  pm2_s = create_project_media project: p
  # (unused locals `default_folder` and `r` from the original removed)
  create_relationship source: pm, target: pm1_c, relationship_type: Relationship.confirmed_type
  r2 = create_relationship source: pm, target: pm1_s, relationship_type: Relationship.suggested_type
  r3 = create_relationship source: pm, target: pm2_s, relationship_type: Relationship.suggested_type
  # Only the two suggested relationships are destroyed.
  assert_difference 'Relationship.count', -2 do
    pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
    pm.save!
  end
  assert_raises ActiveRecord::RecordNotFound do
    r2.reload
  end
  assert_raises ActiveRecord::RecordNotFound do
    r3.reload
  end
  pm1_s = pm1_s.reload; pm2_s.reload
  assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm1_c.reload.archived
  assert_equal CheckArchivedFlags::FlagCodes::NONE, pm1_s.archived
  assert_equal CheckArchivedFlags::FlagCodes::NONE, pm2_s.archived
  assert_equal p.id, pm1_s.project_id
  assert_equal p.id, pm2_s.project_id
  # Verify ES
  # BUG FIX: these checks used `result['...'] = value` (assignments),
  # which asserted nothing; they are now real assertions.
  # NOTE(review): if ES indexing lags, a short sleep may be needed here.
  result = $repository.find(get_es_id(pm1_c))
  assert_equal CheckArchivedFlags::FlagCodes::TRASHED, result['archived']
  result = $repository.find(get_es_id(pm1_s))
  assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
  assert_equal p.id, result['project_id']
  result = $repository.find(get_es_id(pm2_s))
  assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
  assert_equal p.id, result['project_id']
end
test "should detach similar items when spam parent item" do
  # Marking a parent as spam cascades to confirmed children but detaches
  # suggested ones, which stay unflagged in their project.
  setup_elasticsearch
  RequestStore.store[:skip_delete_for_ever] = true
  t = create_team
  p = create_project team: t
  pm = create_project_media project: p
  pm1_c = create_project_media project: p
  pm1_s = create_project_media project: p
  pm2_s = create_project_media project: p
  # (unused locals `default_folder` and `r` from the original removed)
  create_relationship source: pm, target: pm1_c, relationship_type: Relationship.confirmed_type
  r2 = create_relationship source: pm, target: pm1_s, relationship_type: Relationship.suggested_type
  r3 = create_relationship source: pm, target: pm2_s, relationship_type: Relationship.suggested_type
  # Only the two suggested relationships are destroyed.
  assert_difference 'Relationship.count', -2 do
    pm.archived = CheckArchivedFlags::FlagCodes::SPAM
    pm.save!
  end
  assert_raises ActiveRecord::RecordNotFound do
    r2.reload
  end
  assert_raises ActiveRecord::RecordNotFound do
    r3.reload
  end
  pm1_s = pm1_s.reload; pm2_s.reload
  assert_equal CheckArchivedFlags::FlagCodes::SPAM, pm1_c.reload.archived
  assert_equal CheckArchivedFlags::FlagCodes::NONE, pm1_s.archived
  assert_equal CheckArchivedFlags::FlagCodes::NONE, pm2_s.archived
  assert_equal p.id, pm1_s.project_id
  assert_equal p.id, pm2_s.project_id
  # Verify ES
  # BUG FIX: these checks used `result['...'] = value` (assignments),
  # which asserted nothing; they are now real assertions.
  # NOTE(review): if ES indexing lags, a short sleep may be needed here.
  result = $repository.find(get_es_id(pm1_c))
  assert_equal CheckArchivedFlags::FlagCodes::SPAM, result['archived']
  result = $repository.find(get_es_id(pm1_s))
  assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
  assert_equal p.id, result['project_id']
  result = $repository.find(get_es_id(pm2_s))
  assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
  assert_equal p.id, result['project_id']
end
test "should get cluster size" do
  # An item starts outside any cluster; size tracks cluster membership.
  item = create_project_media
  assert_nil item.reload.cluster
  cluster = create_cluster
  cluster.project_medias << item
  assert_equal 1, item.reload.cluster.size
  cluster.project_medias << create_project_media
  assert_equal 2, item.reload.cluster.size
end
test "should get cluster teams" do
  RequestStore.store[:skip_cached_field_update] = false
  setup_elasticsearch
  t1 = create_team
  t2 = create_team
  pm1 = create_project_media team: t1
  assert_nil pm1.cluster
  c = create_cluster project_media: pm1
  c.project_medias << pm1
  # team_names maps team id => team name for all items in the cluster.
  assert_equal [t1.name], pm1.cluster.team_names.values
  assert_equal [t1.id], pm1.cluster.team_names.keys
  sleep 2
  id = get_es_id(pm1)
  es = $repository.find(id)
  assert_equal [t1.id], es['cluster_teams']
  # Adding an item from another team extends both the cached map and the
  # indexed cluster_teams field.
  pm2 = create_project_media team: t2
  c.project_medias << pm2
  sleep 2
  assert_equal [t1.name, t2.name].sort, pm1.cluster.team_names.values.sort
  assert_equal [t1.id, t2.id].sort, pm1.cluster.team_names.keys.sort
  es = $repository.find(id)
  assert_equal [t1.id, t2.id], es['cluster_teams']
end
test "should cache sources list" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
s_a = create_source team: t, name: 'source_a'
s_b = create_source team: t, name: 'source_b'
s_c = create_source team: t, name: 'source_c'
s_d = create_source team: t, name: 'source_d'
pm = create_project_media team: t, source: s_a, skip_autocreate_source: false
t1 = create_project_media team: t, source: s_b, skip_autocreate_source: false
t2 = create_project_media team: t, source: s_c, skip_autocreate_source: false
t3 = create_project_media team: t, source: s_d, skip_autocreate_source: false
result = {}
# Verify cache item source
result[s_a.id] = s_a.name
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify cache source for similar items
r1 = create_relationship source_id: pm.id, target_id: t1.id, relationship_type: Relationship.confirmed_type
r2 = create_relationship source_id: pm.id, target_id: t2.id, relationship_type: Relationship.confirmed_type
r3 = create_relationship source_id: pm.id, target_id: t3.id, relationship_type: Relationship.suggested_type
result[s_b.id] = s_b.name
result[s_c.id] = s_c.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify main source is a first element
assert_equal pm.source_id, JSON.parse(pm.sources_as_sentence).keys.first.to_i
# Verify update source names after destroy similar item
r1.destroy
result.delete(s_b.id)
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update item source
new_s1 = create_source team: t, name: 'new_source_1'
pm.source = new_s1; pm.save!
result.delete(s_a.id)
result[new_s1.id] = new_s1.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.keys.sort.map(&:to_s), JSON.parse(pm.sources_as_sentence).keys.sort }
# Verify update source for similar item
result_similar = {}
result_similar[s_c.id] = s_c.name
assert_queries(0, '=') { assert_equal result_similar.to_json, t2.sources_as_sentence }
new_s2 = create_source team: t, name: 'new_source_2'
t2.source = new_s2; t2.save!
t2 = ProjectMedia.find(t2.id)
result_similar.delete(s_c.id)
result_similar[new_s2.id] = new_s2.name
assert_queries(0, '=') { assert_equal result_similar.to_json, t2.sources_as_sentence }
result.delete(s_c.id)
result[new_s2.id] = new_s2.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update source name
new_s2.name = 'update source'; new_s2.save!
result[new_s2.id] = 'update source'
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update relation
r3.relationship_type = Relationship.confirmed_type; r3.save!
result[s_d.id] = s_d.name
pm = ProjectMedia.find(pm.id)
result_keys = result.keys.map(&:to_i).sort
sources_keys = JSON.parse(pm.sources_as_sentence).keys.map(&:to_i).sort
assert_queries(0, '=') { assert_equal result_keys, sources_keys }
Rails.cache.clear
assert_queries(0, '>') { assert_equal result_keys, JSON.parse(pm.sources_as_sentence).keys.map(&:to_i).sort }
end
test "should have web form channel" do
pm = create_project_media channel: { main: CheckChannels::ChannelCodes::WEB_FORM }
assert_equal 'Web Form', pm.reload.get_creator_name
end
test "should respond to file upload auto-task on creation" do
url = random_url
WebMock.stub_request(:get, url).to_return(body: File.read(File.join(Rails.root, 'test', 'data', 'rails.png')))
at = create_annotation_type annotation_type: 'task_response_file_upload', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
fi1 = create_field_instance annotation_type_object: at, name: 'response_file_upload', label: 'Response', field_type_object: ft1
t = create_team
create_team_task team_id: t.id, label: 'Upload a file', task_type: 'file_upload'
Sidekiq::Testing.inline! do
assert_difference 'Task.length', 1 do
pm = create_project_media team: t, set_tasks_responses: { 'upload_a_file' => url }
task = pm.annotations('task').last
assert task.existing_files.size > 0
end
end
end
test "should get shared database creator" do
pm = create_project_media channel: { main: CheckChannels::ChannelCodes::SHARED_DATABASE }
assert_equal 'Shared Database', pm.creator_name
end
test "should delete claims and fact-checks when item is deleted" do
pm = create_project_media
cd = create_claim_description project_media: pm
fc = create_fact_check claim_description: cd
assert_difference 'ProjectMedia.count', -1 do
assert_difference 'ClaimDescription.count', -1 do
assert_difference 'FactCheck.count', -1 do
pm.destroy!
end
end
end
end
test "should get claim description and fact-check data" do
pm = create_project_media
assert_nil pm.claim_description_content
assert_nil pm.claim_description_context
cd = create_claim_description project_media: pm, description: 'Foo', context: 'Bar'
fc = create_fact_check claim_description: cd
assert_equal 'Foo', pm.claim_description_content
assert_equal 'Bar', pm.claim_description_context
assert_not_nil pm.fact_check_published_on
end
test "should cache if item is suggested or confirmed" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
main = create_project_media team: t
pm = create_project_media team: t
assert !pm.is_suggested
assert !pm.is_confirmed
r = create_relationship source_id: main.id, target_id: pm.id, relationship_type: Relationship.suggested_type
assert pm.is_suggested
assert !pm.is_confirmed
r.relationship_type = Relationship.confirmed_type
r.save!
assert !pm.is_suggested
assert pm.is_confirmed
r.destroy!
assert !pm.is_suggested
assert !pm.is_confirmed
end
test "should delete for ever trashed items" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
pm = create_project_media team: t
# Check that cached field exists (pick a key to verify the key deleted after destroy item)
cache_key = "check_cached_field:ProjectMedia:#{pm.id}:folder"
assert Rails.cache.exist?(cache_key)
Sidekiq::Testing.fake! do
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
end
assert_not_nil ProjectMedia.find_by_id(pm.id)
Sidekiq::Worker.drain_all
assert_nil ProjectMedia.find_by_id(pm.id)
assert_not Rails.cache.exist?(cache_key)
# Restore item from trash before apply delete for ever
pm = create_project_media team: t
Sidekiq::Testing.fake! do
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
end
assert_not_nil ProjectMedia.find_by_id(pm.id)
pm.archived = CheckArchivedFlags::FlagCodes::NONE
pm.save!
Sidekiq::Worker.drain_all
assert_not_nil ProjectMedia.find_by_id(pm.id)
end
test "should delete for ever spam items" do
t = create_team
pm_s = create_project_media team: t
pm_t1 = create_project_media team: t
pm_t2 = create_project_media team: t
pm_t3 = create_project_media team: t
r1 = create_relationship source_id: pm_s.id, target_id: pm_t1.id, relationship_type: Relationship.default_type
r2 = create_relationship source_id: pm_s.id, target_id: pm_t2.id, relationship_type: Relationship.confirmed_type
r3 = create_relationship source_id: pm_s.id, target_id: pm_t3.id, relationship_type: Relationship.suggested_type
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::SPAM
pm_s.save!
end
assert_not_nil ProjectMedia.find_by_id(pm_s.id)
assert_equal 4, ProjectMedia.where(id: [pm_s.id, pm_t1.id, pm_t2.id, pm_t3.id]).count
assert_equal 3, Relationship.where(id: [r1.id, r2.id, r3.id]).count
Sidekiq::Worker.drain_all
assert_equal CheckArchivedFlags::FlagCodes::SPAM, pm_s.reload.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm_t3.reload.archived
assert_equal 0, Relationship.where(id: [r1.id, r2.id, r3.id]).count
assert_nil ProjectMedia.find_by_id(pm_t1.id)
assert_nil ProjectMedia.find_by_id(pm_t2.id)
# Restore item from spam before apply delete for ever
pm_s = create_project_media team: t
pm_t = create_project_media team: t
r = create_relationship source_id: pm_s.id, target_id: pm_t.id, relationship_type: Relationship.confirmed_type
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm_s.save!
end
assert_equal 2, ProjectMedia.where(id: [pm_s.id, pm_t.id]).count
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::NONE
pm_s.save!
end
Sidekiq::Worker.drain_all
assert_equal 2, ProjectMedia.where(id: [pm_s.id, pm_t.id], archived: CheckArchivedFlags::FlagCodes::NONE).count
assert_not_nil Relationship.where(id: r.id).last
end
test "should return cached values for feed data" do
pm = create_project_media
assert_kind_of Hash, pm.feed_columns_values
end
test "should set a custom title" do
m = create_uploaded_image
pm = create_project_media set_title: 'Foo', media: m
assert_equal 'Foo', pm.title
end
test "should bulk remove tags" do
setup_elasticsearch
RequestStore.store[:skip_cached_field_update] = false
t = create_team
u = create_user
create_team_user team: t, user: u, role: 'admin'
with_current_user_and_team(u, t) do
pm = create_project_media team: t
pm2 = create_project_media team: t
pm3 = create_project_media team: t
sports = create_tag_text team_id: t.id, text: 'sports'
news = create_tag_text team_id: t.id, text: 'news'
economic = create_tag_text team_id: t.id, text: 'economic'
# Tag pm
pm_t1 = create_tag annotated: pm, tag: sports.id, disable_es_callbacks: false
pm_t2 = create_tag annotated: pm, tag: news.id, disable_es_callbacks: false
pm_t3 = create_tag annotated: pm, tag: economic.id, disable_es_callbacks: false
# Tag pm2
pm2_t1 = create_tag annotated: pm2, tag: sports.id, disable_es_callbacks: false
pm2_t2 = create_tag annotated: pm2, tag: news.id, disable_es_callbacks: false
# Tag pm3
pm3_t1 = create_tag annotated: pm3, tag: sports.id, disable_es_callbacks: false
sleep 2
assert_equal 3, sports.reload.tags_count
assert_equal 2, news.reload.tags_count
assert_equal 1, economic.reload.tags_count
assert_equal [pm_t1, pm2_t1, pm3_t1].sort, sports.reload.tags.to_a.sort
assert_equal [pm_t2, pm2_t2].sort, news.reload.tags.to_a.sort
assert_equal [pm_t3], economic.reload.tags.to_a
assert_equal 'sports, news, economic', pm.tags_as_sentence
assert_equal 'sports, news', pm2.tags_as_sentence
assert_equal 'sports', pm3.tags_as_sentence
result = $repository.find(get_es_id(pm))
assert_equal 3, result['tags_as_sentence']
assert_equal [pm_t1.id, pm_t2.id, pm_t3.id], result['tags'].collect{|t| t['id']}.sort
result = $repository.find(get_es_id(pm2))
assert_equal 2, result['tags_as_sentence']
assert_equal [pm2_t1.id, pm2_t2.id], result['tags'].collect{|t| t['id']}.sort
result = $repository.find(get_es_id(pm3))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm3_t1.id], result['tags'].collect{|t| t['id']}
# apply bulk-remove
ids = [pm.id, pm2.id, pm3.id]
updates = { action: 'remove_tags', params: { tags_text: "#{sports.id}, #{economic.id}" }.to_json }
ProjectMedia.bulk_update(ids, updates, t)
sleep 2
assert_equal 0, sports.reload.tags_count
assert_equal 2, news.reload.tags_count
assert_equal 0, economic.reload.tags_count
assert_empty sports.reload.tags.to_a
assert_equal [pm_t2, pm2_t2].sort, news.reload.tags.to_a.sort
assert_empty economic.reload.tags.to_a
assert_equal 'news', pm.tags_as_sentence
assert_equal 'news', pm2.tags_as_sentence
assert_empty pm3.tags_as_sentence
result = $repository.find(get_es_id(pm))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm_t2.id], result['tags'].collect{|t| t['id']}
result = $repository.find(get_es_id(pm2))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm2_t2.id], result['tags'].collect{|t| t['id']}
result = $repository.find(get_es_id(pm3))
assert_equal 0, result['tags_as_sentence']
assert_empty result['tags'].collect{|t| t['id']}
end
end
end
Add missing tests (#1271)
require_relative '../test_helper'
class ProjectMediaTest < ActiveSupport::TestCase
def setup
require 'sidekiq/testing'
Sidekiq::Testing.fake!
super
create_team_bot login: 'keep', name: 'Keep'
create_verification_status_stuff
end
test "should create project media" do
assert_difference 'ProjectMedia.count' do
create_project_media
end
u = create_user
t = create_team
tu = create_team_user team: t, user: u, role: 'admin'
m = create_valid_media
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
assert_difference 'ProjectMedia.count' do
with_current_user_and_team(u, t) do
pm = create_project_media team: t, media: m
assert_equal u, pm.user
end
end
# should be unique
assert_no_difference 'ProjectMedia.count' do
assert_raises RuntimeError do
create_project_media team: t, media: m
end
end
# editor should assign any media
m2 = create_valid_media
Rails.cache.clear
tu.update_column(:role, 'editor')
pm = nil
assert_difference 'ProjectMedia.count' do
pm = create_project_media team: t, media: m2
end
m3 = create_valid_media user_id: u.id
assert_difference 'ProjectMedia.count' do
pm = create_project_media team: t, media: m3
pm.save!
end
User.unstub(:current)
Team.unstub(:current)
end
test "should get status label" do
pm = create_project_media
assert_equal 'Unstarted', pm.last_verification_status_label
end
test "should respect state transition roles" do
t = create_team
value = {
label: 'Status',
default: 'stop',
active: 'done',
statuses: [
{ id: 'stop', label: 'Stopped', role: 'editor', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
{ id: 'done', label: 'Done!', role: 'editor', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
]
}
t.send :set_media_verification_statuses, value
t.save!
pm = create_project_media team: t
s = pm.last_status_obj
s.status = 'done'
s.save!
u = create_user
create_team_user team: t, user: u, role: 'collaborator'
assert_equal 'done', pm.reload.status
with_current_user_and_team(u ,t) do
a = Annotation.where(annotation_type: 'verification_status', annotated_type: 'ProjectMedia', annotated_id: pm.id).last.load
f = a.get_field('verification_status_status')
f.value = 'stop'
assert_raises ActiveRecord::RecordInvalid do
f.save!
end
end
end
test "should have a media not not necessarily a project" do
assert_nothing_raised do
create_project_media project: nil
end
assert_raise ActiveRecord::RecordInvalid do
create_project_media media: nil
end
end
test "should create media if url or quote set" do
url = 'http://test.com'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
assert_difference 'ProjectMedia.count', 2 do
create_project_media media: nil, quote: 'Claim report'
create_project_media media: nil, url: url
end
end
test "should find media by normalized url" do
url = 'http://test.com'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media url: url
url2 = 'http://test2.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
pm = create_project_media url: url2
assert_equal pm.media, m
end
test "should create with exisitng media if url exists" do
m = create_valid_media
pm = create_project_media media: nil, url: m.url
assert_equal m, pm.media
end
test "should collaborator add a new media" do
t = create_team
u = create_user
tu = create_team_user team: t, user: u, role: 'collaborator'
with_current_user_and_team(u, t) do
assert_difference 'ProjectMedia.count' do
create_project_media team: t, quote: 'Claim report'
end
end
end
test "should update and destroy project media" do
u = create_user
t = create_team
m = create_valid_media user_id: u.id
create_team_user team: t, user: u
pm = create_project_media team: t, media: m, user: u
with_current_user_and_team(u, t) do
pm.save!
end
u2 = create_user
tu = create_team_user team: t, user: u2, role: 'editor'
with_current_user_and_team(u2, t) do
pm.save!
end
end
test "queries for relationship source" do
u = create_user
t = create_team
pm = create_project_media team: t
assert_equal pm.relationship_source, pm
end
test "checks truthfulness of is_claim?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Claim"
pm.media.save!
assert pm.is_claim?
end
test "checks truthfulness of is_link?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Link"
pm.media.save!
assert pm.is_link?
end
test "checks truthfulness of is_uploaded_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert pm.is_uploaded_image?
end
test "checks truthfulness of is_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert pm.is_image?
end
test "checks truthfulness of is_text?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Link"
pm.media.save!
assert pm.is_text?
end
test "checks truthfulness of is_blank?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "Blank"
pm.media.save!
assert pm.is_blank?
end
test "checks falsity of is_text?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media.type = "UploadedImage"
pm.media.save!
assert !pm.is_text?
end
test "checks falsity of is_image?" do
u = create_user
t = create_team
pm = create_project_media team: t
pm.media_type = "Link"
assert !pm.is_image?
end
test "non members should not read project media in private team" do
u = create_user
t = create_team
pm = create_project_media team: t
pu = create_user
pt = create_team private: true
create_team_user team: pt, user: pu
pu2 = create_user
create_team_user team: pt, user: pu2, status: 'requested'
ppm = create_project_media team: pt
ProjectMedia.find_if_can(pm.id)
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(u, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
with_current_user_and_team(pu, pt) do
ProjectMedia.find_if_can(ppm.id)
end
assert_raise CheckPermissions::AccessDenied do
with_current_user_and_team(pu2, pt) do
ProjectMedia.find_if_can(ppm.id)
end
end
end
test "should notify Slack based on slack events" do
t = create_team slug: 'test'
u = create_user
tu = create_team_user team: t, user: u, role: 'admin'
p = create_project team: t
p2 = create_project team: t
t.set_slack_notifications_enabled = 1
t.set_slack_webhook = 'https://hooks.slack.com/services/123'
slack_notifications = []
slack_notifications << {
"label": random_string,
"event_type": "item_added",
"values": ["#{p.id}"],
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "any_activity",
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "status_changed",
"values": ["in_progress"],
"slack_channel": "##{random_string}"
}
t.slack_notifications = slack_notifications.to_json
t.save!
with_current_user_and_team(u, t) do
m = create_valid_media
pm = create_project_media team: t, media: m
assert pm.sent_to_slack
m = create_claim_media
pm = create_project_media team: t, media: m
assert pm.sent_to_slack
pm = create_project_media project: p
assert pm.sent_to_slack
# status changes
s = pm.last_status_obj
s.status = 'in_progress'
s.save!
assert s.sent_to_slack
# move item
pm = create_project_media project: p2
pm.project_id = p.id
pm.save!
assert pm.sent_to_slack
end
end
test "should not duplicate slack notification for custom slack list settings" do
Rails.stubs(:env).returns(:production)
t = create_team slug: 'test'
p = create_project team: t
t.set_slack_notifications_enabled = 1
t.set_slack_webhook = 'https://hooks.slack.com/services/123'
slack_notifications = []
slack_notifications << {
"label": random_string,
"event_type": "item_added",
"values": ["#{p.id}"],
"slack_channel": "##{random_string}"
}
slack_notifications << {
"label": random_string,
"event_type": "any_activity",
"slack_channel": "##{random_string}"
}
t.slack_notifications = slack_notifications.to_json
t.save!
u = create_user
p = create_project team: t
Sidekiq::Testing.fake! do
with_current_user_and_team(u, t) do
create_team_user team: t, user: u, role: 'admin'
SlackNotificationWorker.drain
assert_equal 0, SlackNotificationWorker.jobs.size
create_project_media team: t
assert_equal 1, SlackNotificationWorker.jobs.size
SlackNotificationWorker.drain
assert_equal 0, SlackNotificationWorker.jobs.size
create_project_media project: p
assert_equal 1, SlackNotificationWorker.jobs.size
Rails.unstub(:env)
end
end
end
test "should notify Pusher when project media is created" do
pm = create_project_media
assert pm.sent_to_pusher
t = create_team
p = create_project team: t
m = create_claim_media project_id: p.id
pm = create_project_media project: p, media: m
assert pm.sent_to_pusher
end
test "should notify Pusher when project media is destroyed" do
pm = create_project_media
pm.sent_to_pusher = false
pm.destroy!
assert pm.sent_to_pusher
end
test "should notify Pusher in background" do
Rails.stubs(:env).returns(:production)
t = create_team
p = create_project team: t
CheckPusher::Worker.drain
assert_equal 0, CheckPusher::Worker.jobs.size
create_project_media project: p
assert_equal 2, CheckPusher::Worker.jobs.size
CheckPusher::Worker.drain
assert_equal 0, CheckPusher::Worker.jobs.size
Rails.unstub(:env)
end
test "should update project media embed data" do
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = 'http://test.com'
response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
m = create_media(account: create_valid_account, url: url)
p1 = create_project
p2 = create_project
pm1 = create_project_media project: p1, media: m
pm2 = create_project_media project: p2, media: m
# fetch data (without overridden)
data = pm1.media.metadata
assert_equal 'test media', data['title']
assert_equal 'add desc', data['description']
# Update media title and description for pm1
info = { title: 'Title A', content: 'Desc A' }
pm1.analysis = info
info = { title: 'Title AA', content: 'Desc AA' }
pm1.analysis = info
# Update media title and description for pm2
info = { title: 'Title B', content: 'Desc B' }
pm2.analysis = info
info = { title: 'Title BB', content: 'Desc BB' }
pm2.analysis = info
# fetch data for pm1
data = pm1.analysis
assert_equal 'Title AA', data['title']
assert_equal 'Desc AA', data['content']
# fetch data for pm2
data = pm2.analysis
assert_equal 'Title BB', data['title']
assert_equal 'Desc BB', data['content']
end
test "should have annotations" do
pm = create_project_media
c1 = create_comment annotated: pm
c2 = create_comment annotated: pm
c3 = create_comment annotated: nil
assert_equal [c1.id, c2.id].sort, pm.reload.annotations('comment').map(&:id).sort
end
test "should get permissions" do
u = create_user
t = create_team current_user: u
tu = create_team_user team: t, user: u, role: 'admin'
p = create_project team: t
pm = create_project_media project: p, current_user: u
perm_keys = [
"read ProjectMedia", "update ProjectMedia", "destroy ProjectMedia", "create Comment",
"create Tag", "create Task", "create Dynamic", "not_spam ProjectMedia", "restore ProjectMedia", "confirm ProjectMedia",
"embed ProjectMedia", "lock Annotation","update Status", "administer Content", "create Relationship",
"create Source", "update Source", "create ClaimDescription"
].sort
User.stubs(:current).returns(u)
Team.stubs(:current).returns(t)
# load permissions as owner
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as editor
tu.update_column(:role, 'editor')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as collaborator
tu.update_column(:role, 'collaborator')
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
# load as authenticated
tu.update_column(:team_id, nil)
assert_equal perm_keys, JSON.parse(pm.permissions).keys.sort
User.unstub(:current)
Team.unstub(:current)
end
test "should create embed for uploaded image" do
ft = create_field_type field_type: 'image_path', label: 'Image Path'
at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
create_bot name: 'Check Bot'
pm = ProjectMedia.new
pm.team_id = create_team.id
pm.file = File.new(File.join(Rails.root, 'test', 'data', 'rails.png'))
pm.disable_es_callbacks = true
pm.media_type = 'UploadedImage'
pm.save!
assert_equal media_filename('rails.png', false), pm.title
end
test "should set automatic title for images videos and audios" do
m = create_uploaded_image file: 'rails.png'
v = create_uploaded_video file: 'rails.mp4'
a = create_uploaded_audio file: 'rails.mp3'
bot = create_team_bot name: 'Smooch', login: 'smooch', set_approved: true
u = create_user
team = create_team slug: 'workspace-slug'
create_team_user team: team, user: bot, role: 'admin'
create_team_user team: team, user: u, role: 'admin'
# test with smooch user
with_current_user_and_team(bot, team) do
pm = create_project_media team: team, media: m
count = Media.where(type: 'UploadedImage').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal "image-#{team.slug}-#{count}", pm.title
pm2 = create_project_media team: team, media: v
count = Media.where(type: 'UploadedVideo').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal "video-#{team.slug}-#{count}", pm2.title
pm3 = create_project_media team: team, media: a
count = Media.where(type: 'UploadedAudio').joins("INNER JOIN project_medias pm ON medias.id = pm.media_id")
.where("pm.team_id = ?", team&.id).count
assert_equal pm3.title, "audio-#{team.slug}-#{count}"
pm.destroy; pm2.destroy; pm3.destroy
end
# test with non smooch user
with_current_user_and_team(u, team) do
pm = create_project_media team: team, media: m
assert_equal pm.title, media_filename('rails.png', false)
pm2 = create_project_media team: team, media: v
assert_equal pm2.title, media_filename('rails.mp4', false)
pm3 = create_project_media team: team, media: a
assert_equal pm3.title, media_filename('rails.mp3', false)
end
end
test "should protect attributes from mass assignment" do
raw_params = { project: create_project, user: create_user }
params = ActionController::Parameters.new(raw_params)
assert_raise ActiveModel::ForbiddenAttributesError do
ProjectMedia.create(params)
end
end
test "should create auto tasks" do
t = create_team
create_team_task team_id: t.id
Sidekiq::Testing.inline! do
assert_difference 'Task.length', 1 do
pm1 = create_project_media team: t
end
end
end
test "should collaborator create auto tasks" do
t = create_team
create_team_task team_id: t.id
u = create_user
tu = create_team_user team: t, user: u, role: 'collaborator'
Sidekiq::Testing.inline! do
with_current_user_and_team(u, t) do
assert_difference 'Task.length' do
create_project_media team: t
end
end
end
end
test "should have versions" do
t = create_team
m = create_valid_media team: t
u = create_user
create_team_user user: u, team: t, role: 'admin'
pm = nil
User.current = u
assert_difference 'PaperTrail::Version.count', 2 do
pm = create_project_media team: t, media: m, user: u, skip_autocreate_source: false
end
assert_equal 2, pm.versions.count
pm.destroy!
v = Version.from_partition(t.id).where(item_type: 'ProjectMedia', item_id: pm.id, event: 'destroy').last
assert_not_nil v
User.current = nil
end
test "should get log" do
m = create_valid_media
u = create_user
t = create_team
p = create_project team: t
p2 = create_project team: t
create_team_user user: u, team: t, role: 'admin'
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, user: u
c = create_comment annotated: pm
tg = create_tag annotated: pm
f = create_flag annotated: pm
s = pm.annotations.where(annotation_type: 'verification_status').last.load
s.status = 'In Progress'; s.save!
info = { title: 'Foo' }; pm.analysis = info; pm.save!
info = { title: 'Bar' }; pm.analysis = info; pm.save!
assert_equal [
"create_dynamic", "create_dynamicannotationfield", "create_projectmedia",
"create_projectmedia", "create_tag", "update_dynamicannotationfield"
].sort, pm.get_versions_log.map(&:event_type).sort
assert_equal 5, pm.get_versions_log_count
c.destroy
assert_equal 5, pm.get_versions_log_count
tg.destroy
assert_equal 6, pm.get_versions_log_count
f.destroy
assert_equal 6, pm.get_versions_log_count
end
end
test "should get previous project and previous project search object" do
p1 = create_project
p2 = create_project
pm = create_project_media project: p1
assert_nil pm.project_was
pm.previous_project_id = p1.id
pm.save!
assert_equal p1, pm.project_was
assert_kind_of CheckSearch, pm.check_search_project_was
end
test "should refresh Pender data" do
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = random_url
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"1"}}')
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: '{"type":"media","data":{"url":"' + url + '","type":"item","foo":"2"}}')
m = create_media url: url
pm = create_project_media media: m
t1 = pm.updated_at.to_i
em1 = pm.media.metadata_annotation
assert_not_nil em1
em1_data = JSON.parse(em1.get_field_value('metadata_value'))
assert_equal '1', em1_data['foo']
assert_equal 1, em1_data['refreshes_count']
sleep 2
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true
pm.save!
t2 = pm.reload.updated_at.to_i
assert t2 > t1
em2 = pm.media.metadata_annotation
assert_not_nil em2
em2_data = JSON.parse(em2.get_field_value('metadata_value'))
assert_equal '2', em2_data['foo']
assert_equal 2, em2_data['refreshes_count']
assert_equal em1, em2
end
# When an item is refreshed, the Keep bot's archive response should be
# created on first refresh and reset (back to "{}") on subsequent ones.
test "should create or reset archive response when refresh media" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  t = create_team
  t.set_limits_keep = true
  t.save!
  l = create_link team: t
  # Enable the Keep bot with the Pender archive setting for this team.
  tb = BotUser.where(name: 'Keep').last
  tb.set_settings = [{ name: 'archive_pender_archive_enabled', type: 'boolean' }]
  tb.set_approved = true
  tb.save!
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  pm = create_project_media media: l, team: t
  # First refresh: a new "archiver" annotation and its response field are created.
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.refresh_media = true
      pm.skip_check_ability = true
      pm.save!
    end
  end
  # Simulate an archive response having been filled in by the archiver.
  a = pm.get_annotations('archiver').last.load
  f = a.get_field('pender_archive_response')
  f.value = '{"foo":"bar"}'
  f.save!
  v = a.reload.get_field('pender_archive_response').reload.value
  assert_not_equal "{}", v
  # Second refresh: no new records, but the existing response is reset.
  assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.refresh_media = true
      pm.skip_check_ability = true
      pm.save!
    end
  end
  v = a.reload.get_field('pender_archive_response').reload.value
  assert_equal "{}", v
end
# user_id_callback resolves an e-mail address to an existing user's id.
test "should get user id for migration" do
  item = ProjectMedia.new
  # No user with this address yet, so the callback resolves to nil.
  assert_nil item.send(:user_id_callback, 'test@test.com')
  user = create_user(email: 'test@test.com')
  # Once the user exists, the callback maps the e-mail to its id.
  assert_equal user.id, item.send(:user_id_callback, 'test@test.com')
end
# project_id_callback maps a legacy project id through a migration mapping hash.
test "should get project id for migration" do
  p = create_project
  mapping = {} # hash literal instead of Hash.new — same semantics, idiomatic Ruby
  pm = ProjectMedia.new
  # Unknown key: the callback returns nil.
  assert_nil pm.send(:project_id_callback, 1, mapping)
  mapping[1] = p.id
  # Known key: the callback returns the mapped project id.
  assert_equal p.id, pm.send(:project_id_callback, 1, mapping)
end
# Passing set_annotation on creation should persist the given dynamic annotation.
test "should set annotation" do
  text_type = DynamicAnnotation::FieldType.where(field_type: 'text').last || create_field_type(field_type: 'text', label: 'Text')
  language_type = create_field_type(field_type: 'language', label: 'Language')
  translation = create_annotation_type annotation_type: 'translation', label: 'Translation'
  create_field_instance annotation_type_object: translation, name: 'translation_text', label: 'Translation Text', field_type_object: text_type, optional: false
  create_field_instance annotation_type_object: translation, name: 'translation_note', label: 'Translation Note', field_type_object: text_type, optional: true
  create_field_instance annotation_type_object: translation, name: 'translation_language', label: 'Translation Language', field_type_object: language_type, optional: false
  assert_equal 0, Annotation.where(annotation_type: 'translation').count
  create_project_media set_annotation: { annotation_type: 'translation', set_fields: { 'translation_text' => 'Foo', 'translation_note' => 'Bar', 'translation_language' => 'pt' }.to_json }.to_json
  # Exactly one translation annotation was created alongside the item.
  assert_equal 1, Annotation.where(annotation_type: 'translation').count
end
# Every item exposes a team-scoped CheckSearch object.
test "should have reference to search team object" do
  item = create_project_media
  assert_kind_of CheckSearch, item.check_search_team
end
# get_dynamic_annotation fetches the annotation matching the given type.
test "should get dynamic annotation by type" do
  create_annotation_type annotation_type: 'foo'
  create_annotation_type annotation_type: 'bar'
  item = create_project_media
  foo_annotation = create_dynamic_annotation annotation_type: 'foo', annotated: item
  bar_annotation = create_dynamic_annotation annotation_type: 'bar', annotated: item
  assert_equal foo_annotation, item.get_dynamic_annotation('foo')
  assert_equal bar_annotation, item.get_dynamic_annotation('bar')
end
# report_type reflects the underlying media class (claim vs. link).
test "should get report type" do
  claim = create_claim_media
  link = create_link
  assert_equal 'claim', create_project_media(media: claim).report_type
  assert_equal 'link', create_project_media(media: link).report_type
end
# An admin should be able to destroy another member's annotated item.
test "should delete project media" do
  t = create_team
  u = create_user
  u2 = create_user
  create_team_user team: t, user: u, role: 'admin'
  create_team_user team: t, user: u2
  pm = create_project_media team: t, quote: 'Claim', user: u2
  at = create_annotation_type annotation_type: 'test'
  ft = create_field_type
  create_field_instance name: 'test', field_type_object: ft, annotation_type_object: at
  create_dynamic_annotation annotator: u2, annotated: pm, annotation_type: 'test', set_fields: { test: 'Test' }.to_json
  RequestStore.store[:disable_es_callbacks] = true
  begin
    with_current_user_and_team(u, t) do
      pm.disable_es_callbacks = true
      pm.destroy
    end
  ensure
    # Always restore the global flag so a failure above cannot leak
    # disabled ES callbacks into subsequent tests.
    RequestStore.store[:disable_es_callbacks] = false
  end
end
# embed_url(false) returns the raw Pender embed endpoint for the item's
# full URL; embed_url (default) routes through the short-URL host.
test "should have Pender embeddable URL" do
  RequestStore[:request] = nil
  t = create_team
  pm = create_project_media team: t
  stub_configs({ 'pender_url' => 'https://pender.fake' }) do
    assert_equal CheckConfig.get('pender_url') + '/api/medias.html?url=' + pm.full_url.to_s, pm.embed_url(false)
  end
  stub_configs({ 'pender_url' => 'https://pender.fake' }) do
    assert_match /#{CheckConfig.get('short_url_host')}/, pm.embed_url
  end
end
# as_oembed should surface the media title parsed from Pender's response.
test "should have oEmbed endpoint" do
  create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  url = 'http://test.com'
  # Stub Pender to return a normalized URL plus title/description metadata.
  response = '{"type":"media","data":{"url":"' + url + '/normalized","type":"item", "title": "test media", "description":"add desc"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  m = create_media(account: create_valid_account, url: url)
  pm = create_project_media media: m
  assert_equal 'test media', pm.as_oembed[:title]
end
# The oEmbed endpoint URL is exposed for items in both public and private teams.
test "should have oEmbed URL" do
  RequestStore[:request] = nil
  [false, true].each do |is_private|
    team = create_team private: is_private
    project = create_project team: team
    item = create_project_media project: project
    stub_configs({ 'checkdesk_base_url' => 'https://checkmedia.org' }) do
      assert_equal "https://checkmedia.org/api/project_medias/#{item.id}/oembed", item.oembed_url
    end
  end
end
# author_name falls back to an empty string when the item has no user.
test "should get author name for oEmbed" do
  author = create_user name: 'Foo Bar'
  item = create_project_media user: author
  assert_equal 'Foo Bar', item.author_name
  item.user = nil
  assert_equal '', item.author_name
end
# author_url is exposed only for social-login users; otherwise it is blank.
test "should get author URL for oEmbed" do
  url = 'http://twitter.com/test'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  body = '{"type":"media","data":{"url":"' + url + '","type":"profile"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: body)
  twitter_user = create_omniauth_user url: url, provider: 'twitter'
  item = create_project_media user: twitter_user
  # A Twitter-backed user exposes its profile URL.
  assert_equal url, item.author_url
  # A plain user, or no user at all, yields an empty author URL.
  item.user = create_user
  assert_equal '', item.author_url
  item.user = nil
  assert_equal '', item.author_url
end
# The author picture must be an absolute http(s) URL.
test "should get author picture for oEmbed" do
  item = create_project_media user: create_user
  assert_match /^http/, item.author_picture
end
# author_username is the user's login, or blank when there is no user.
test "should get author username for oEmbed" do
  member = create_user login: 'test'
  item = create_project_media user: member
  assert_equal 'test', item.author_username
  item.user = nil
  assert_equal '', item.author_username
end
# author_role reflects the user's team role; outsiders and nil map to 'none'.
test "should get author role for oEmbed" do
  team = create_team
  member = create_user
  create_team_user user: member, team: team, role: 'collaborator'
  item = create_project_media team: team, user: member
  assert_equal 'collaborator', item.author_role
  item.user = create_user
  assert_equal 'none', item.author_role
  item.user = nil
  assert_equal 'none', item.author_role
end
# Links expose their original URL; claims point back into the Check client.
test "should get source URL for external link for oEmbed" do
  url = 'http://twitter.com/test/123456'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  body = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: body)
  link_item = create_project_media media: create_link(url: url)
  assert_equal url, link_item.source_url
  claim_item = create_project_media media: create_claim_media
  # Claims have no external URL, so the Check client URL is used instead.
  assert_match CheckConfig.get('checkdesk_client'), claim_item.source_url
end
# completed_tasks/open_tasks partition an item's tasks by answered state.
test "should get completed tasks for oEmbed" do
  response_type = create_annotation_type annotation_type: 'task_response'
  create_field_instance annotation_type_object: response_type, name: 'response'
  item = create_project_media
  # No tasks yet.
  assert_equal [], item.completed_tasks
  assert_equal 0, item.completed_tasks_count
  answered_task = create_task annotated: item
  answered_task.response = { annotation_type: 'task_response', set_fields: { response: 'Test' }.to_json }.to_json
  answered_task.save!
  pending_task = create_task annotated: item
  # Only the answered task counts as completed; the other stays open.
  assert_equal [answered_task], item.completed_tasks
  assert_equal [pending_task], item.open_tasks
  assert_equal 1, item.completed_tasks_count
end
# comments/comments_count expose the item's comment annotations.
test "should get comments for oEmbed" do
  item = create_project_media
  assert_equal [], item.comments
  assert_equal 0, item.comments_count
  note = create_comment annotated: item
  assert_equal [note], item.comments
  assert_equal 1, item.comments_count
end
# Links report their upstream provider; claims are provided by Check itself.
test "should get provider for oEmbed" do
  url = 'http://twitter.com/test/123456'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  body = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: body)
  link_item = create_project_media media: create_link(url: url)
  assert_equal 'Twitter', link_item.provider
  claim_item = create_project_media media: create_claim_media
  assert_equal 'Check', claim_item.provider
end
# published_at comes from the parsed page metadata; claims have none.
test "should get published time for oEmbed" do
  url = 'http://twitter.com/test/123456'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  body = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: body)
  link_item = create_project_media media: create_link(url: url)
  assert_equal '25/01/1989', link_item.published_at.strftime('%d/%m/%Y')
  claim_item = create_project_media media: create_claim_media
  assert_nil claim_item.published_at
end
# For links the source author comes from page metadata; for claims, from
# the Check user who created the item.
test "should get source author for oEmbed" do
  reporter = create_user name: 'Foo'
  url = 'http://twitter.com/test/123456'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  body = '{"type":"media","data":{"url":"' + url + '","type":"item","author_name":"Bar"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: body)
  link_item = create_project_media media: create_link(url: url), user: reporter
  assert_equal 'Bar', link_item.source_author[:author_name]
  claim_item = create_project_media media: create_claim_media, user: reporter
  assert_equal 'Foo', claim_item.source_author[:author_name]
end
# Renders the published report as oEmbed HTML and compares it against the
# checked-in fixture for the item's default status. Runs Sidekiq inline so
# the report-publishing jobs complete synchronously.
test "should render oEmbed HTML" do
  Sidekiq::Testing.inline! do
    pm = create_project_media
    PenderClient::Request.stubs(:get_medias)
    publish_report(pm, {}, nil, {
      use_visual_card: false,
      use_text_message: true,
      use_disclaimer: false,
      disclaimer: '',
      title: 'Title',
      text: '*This* _is_ a ~test~!',
      published_article_url: 'http://foo.bar'
    })
    PenderClient::Request.unstub(:get_medias)
    # Normalize indentation, the document head, and host:port differences
    # before comparing the rendered HTML with the fixture.
    expected = File.read(File.join(Rails.root, 'test', 'data', "oembed-#{pm.default_project_media_status_type}.html")).gsub(/^\s+/m, '')
    actual = ProjectMedia.find(pm.id).html.gsub(/.*<body/m, '<body').gsub(/^\s+/m, '').gsub(/https?:\/\/[^:]*:3000/, 'http://check')
    assert_equal expected, actual
  end
end
# oembed_metadata serializes to a string.
test "should have metadata for oEmbed" do
  item = create_project_media
  assert_kind_of String, item.oembed_metadata
end
# Adding annotations to an item should trigger cache purges (via CcDeville)
# and Pender embed refreshes for all of the item's cached URLs.
test "should clear caches when media is updated" do
  create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
  pm = create_project_media
  create_dynamic_annotation annotation_type: 'embed_code', annotated: pm
  u = create_user
  ProjectMedia.any_instance.unstub(:clear_caches)
  # Expected call counts cover both annotations below across every cached
  # URL variant; update them if the set of cached URLs changes.
  CcDeville.expects(:clear_cache_for_url).returns(nil).times(52)
  PenderClient::Request.expects(:get_medias).returns(nil).times(16)
  Sidekiq::Testing.inline! do
    create_comment annotated: pm, user: u
    create_task annotated: pm, user: u
  end
  CcDeville.unstub(:clear_cache_for_url)
  PenderClient::Request.unstub(:get_medias)
end
# Creating an item with set_tasks_responses should auto-answer the matching
# team task (matched by slug, e.g. 'when' for the label 'When?').
test "should respond to auto-tasks on creation" do
  at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
  ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
  fi1 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Response', field_type_object: ft1
  fi2 = create_field_instance annotation_type_object: at, name: 'note_free_text', label: 'Note', field_type_object: ft1
  t = create_team
  p = create_project team: t
  create_team_task team_id: t.id, label: 'When?'
  Sidekiq::Testing.inline! do
    assert_difference 'Task.length', 1 do
      pm = create_project_media project: p, set_tasks_responses: { 'when' => 'Yesterday' }
      task = pm.annotations('task').last
      # The task created from the team task is answered automatically.
      assert_equal 'Yesterday', task.first_response
    end
  end
end
# Team tasks with a "mapping" should be auto-answered from the json+ld data
# returned by Pender (Krzana feeds): free text, geolocation and datetime
# mappings are each exercised, including empty/missing-value cases.
# NOTE(review): the local `t` is reused both for the team and for each Task
# lookup below — works, but shadowing makes the test harder to read.
test "should auto-response for Krzana report" do
  at = create_annotation_type annotation_type: 'task_response_geolocation', label: 'Task Response Geolocation'
  geotype = create_field_type field_type: 'geojson', label: 'GeoJSON'
  create_field_instance annotation_type_object: at, name: 'response_geolocation', field_type_object: geotype
  at = create_annotation_type annotation_type: 'task_response_datetime', label: 'Task Response Date Time'
  datetime = create_field_type field_type: 'datetime', label: 'Date Time'
  create_field_instance annotation_type_object: at, name: 'response_datetime', field_type_object: datetime
  at = create_annotation_type annotation_type: 'task_response_free_text', label: 'Task'
  ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
  fi2 = create_field_instance annotation_type_object: at, name: 'response_free_text', label: 'Note', field_type_object: ft1
  t = create_team
  p = create_project team: t
  p2 = create_project team: t
  p3 = create_project team: t
  # Three team tasks, one per mapping type (free_text, geolocation, datetime).
  tt1 = create_team_task team_id: t.id, label: 'who?', task_type: 'free_text', mapping: { "type" => "free_text", "match" => "$.mentions[?(@['@type'] == 'Person')].name", "prefix" => "Suggested by Krzana: "}
  tt2 = create_team_task team_id: t.id, label: 'where?', task_type: 'geolocation', mapping: { "type" => "geolocation", "match" => "$.mentions[?(@['@type'] == 'Place')]", "prefix" => ""}
  tt3 = create_team_task team_id: t.id, label: 'when?', type: 'datetime', mapping: { "type" => "datetime", "match" => "dateCreated", "prefix" => ""}
  Sidekiq::Testing.inline! do
    pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
    # test empty json+ld
    url = 'http://test1.com'
    raw = {"json+ld": {}}
    response = {'type':'media','data': {'url': url, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
    pm = create_project_media project: p, url: url
    t = Task.where(annotation_type: 'task', annotated_id: pm.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with non exist value
    url1 = 'http://test11.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person" } ] } }
    response = {'type':'media','data': {'url': url1, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url1 } }).to_return(body: response)
    pm1 = create_project_media project: p, url: url1
    t = Task.where(annotation_type: 'task', annotated_id: pm1.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with empty value
    url12 = 'http://test12.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "" } ] } }
    response = {'type':'media','data': {'url': url12, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url12 } }).to_return(body: response)
    pm12 = create_project_media project: p, url: url12
    t = Task.where(annotation_type: 'task', annotated_id: pm12.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_nil t.first_response
    # test with single selection
    url2 = 'http://test2.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" } ] } }
    response = {'type':'media','data': {'url': url2, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url2 } }).to_return(body: response)
    pm2 = create_project_media project: p, url: url2
    t = Task.where(annotation_type: 'task', annotated_id: pm2.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_equal "Suggested by Krzana: first_name", t.first_response
    # test multiple selection (should get first one)
    url3 = 'http://test3.com'
    raw = { "json+ld": { "mentions": [ { "@type": "Person", "name": "first_name" }, { "@type": "Person", "name": "last_name" } ] } }
    response = {'type':'media','data': {'url': url3, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url3 } }).to_return(body: response)
    pm3 = create_project_media project: p, url: url3
    t = Task.where(annotation_type: 'task', annotated_id: pm3.id).select{ |t| t.team_task_id == tt1.id }.last
    assert_equal "Suggested by Krzana: first_name", t.first_response
    # test geolocation mapping
    url4 = 'http://test4.com'
    raw = { "json+ld": {
      "mentions": [ { "name": "Delimara Powerplant", "@type": "Place", "geo": { "latitude": 35.83020073454, "longitude": 14.55602645874 } } ]
    } }
    response = {'type':'media','data': {'url': url4, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url4 } }).to_return(body: response)
    pm4 = create_project_media project: p2, url: url4
    t = Task.where(annotation_type: 'task', annotated_id: pm4.id).select{ |t| t.team_task_id == tt2.id }.last
    # assert_not_nil t.first_response
    # test datetime mapping
    url5 = 'http://test5.com'
    raw = { "json+ld": { "dateCreated": "2017-08-30T14:22:28+00:00" } }
    response = {'type':'media','data': {'url': url5, 'type': 'item', 'raw': raw}}.to_json
    WebMock.stub_request(:get, pender_url).with({ query: { url: url5 } }).to_return(body: response)
    pm5 = create_project_media project: p3, url: url5
    t = Task.where(annotation_type: 'task', annotated_id: pm5.id).select{ |t| t.team_task_id == tt3.id }.last
    assert_not_nil t.first_response
  end
end
# When Pender answers HTTP 409 (Conflict) for a URL, saving the item raises.
test "should expose conflict error from Pender" do
  url = 'http://test.com'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"error","data":{"message":"Conflict","code":9}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response, status: 409)
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response, status: 409)
  t = create_team
  pm = ProjectMedia.new
  pm.team = t
  pm.url = url
  pm.media_type = 'Link'
  assert_raises RuntimeError do
    pm.save!
    # NOTE(review): this assertion is dead code — save! raises above, so
    # execution never reaches this line. It should likely be moved outside
    # the assert_raises block; confirm pm.media is populated after the
    # failed save before doing so.
    assert_equal PenderClient::ErrorCodes::DUPLICATED, pm.media.pender_error_code
  end
end
# Validation must reject new items whose project is in the trash.
test "should not create project media under archived project" do
  trashed_project = create_project archived: CheckArchivedFlags::FlagCodes::TRASHED
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media project_id: trashed_project.id
  end
end
# Archiving an item persists the TRASHED flag.
test "should archive" do
  pm = create_project_media
  # assert_equal takes the expected value first; the original had the
  # arguments reversed, which produces misleading failure messages.
  assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived
  pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
  pm.save!
  assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.reload.archived
end
# The embed_code annotation is created on first oEmbed render and reused after.
test "should create annotation when is embedded for the first time" do
  create_annotation_type_and_fields('Embed Code', { 'Copied' => ['Boolean', false] })
  item = create_project_media
  assert_difference 'Annotation.where(annotation_type: "embed_code").count', 1 do
    item.as_oembed
  end
  assert_no_difference 'Annotation.where(annotation_type: "embed_code").count' do
    item.as_oembed
  end
end
# An unparseable mapping should quietly resolve to nil rather than raising.
test "should not crash if mapping value is invalid" do
  assert_nothing_raised do
    assert_nil ProjectMedia.new.send(:mapping_value, 'foo', 'bar')
  end
end
# Two different admins updating the same item's analysis must both succeed.
test "should not crash if another user tries to update media" do
  author = create_user
  editor = create_user
  t = create_team
  create_team_user team: t, user: author, role: 'admin'
  create_team_user team: t, user: editor, role: 'admin'
  pm = nil
  with_current_user_and_team(author, t) do
    pm = create_project_media team: t, user: author
    pm = ProjectMedia.find(pm.id)
    pm.analysis = { title: 'Title' }
    pm.save!
  end
  with_current_user_and_team(editor, t) do
    pm = ProjectMedia.find(pm.id)
    pm.analysis = { title: 'Title' }
    pm.save!
  end
end
# description falls back to the claim quote until a claim description is set.
test "should get claim description only if it has been set" do
  RequestStore.store[:skip_cached_field_update] = false
  claim = create_claim_media quote: 'Test'
  item = create_project_media media: claim
  assert_equal 'Test', item.reload.description
  create_claim_description project_media: item, description: 'Test 2'
  assert_equal 'Test 2', item.reload.description
end
# With the Keep bot installed and Pender archiving enabled, a link item
# should get an "archiver" annotation with a pender_archive_response field.
test "should create pender_archive annotation for link" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  # Start from a clean slate so the freshly created Keep bot is the only one.
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  p = create_project team: t
  pm = create_project_media media: l, team: t, project_id: p.id
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
end
# Archiving only applies to links: a claim item must not get an "archiver"
# annotation even with the Keep bot fully enabled.
test "should not create pender_archive annotation when media is not a link" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  c = create_claim_media
  t = create_team
  t.set_limits_keep = true
  t.save!
  # Start from a clean slate so the freshly created Keep bot is the only one.
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  p = create_project team: t
  pm = create_project_media media: c, project: p
  assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
end
# Without the 'Pender Archive' annotation type registered, archiving is a no-op.
test "should not create pender_archive annotation when there is no annotation type" do
  link = create_link
  team = create_team
  team.set_limits_keep = true
  team.save!
  project = create_project team: team
  item = create_project_media media: link, project: project
  assert_no_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_no_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      item.create_all_archive_annotations
    end
  end
end
# The archiver annotation can be built from the data returned by
# pender_embed (stubbed here on both Link and Media).
test "should create pender_archive annotation using information from pender_embed" do
  Link.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json }.with_indifferent_access }))
  Media.any_instance.stubs(:pender_embed).returns(OpenStruct.new({ data: { embed: { screenshot_taken: 1, 'archives' => {} }.to_json }.with_indifferent_access }))
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  pm = create_project_media media: l, team: t
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
  # Always remove the any_instance stubs so they cannot leak into other tests.
  Link.any_instance.unstub(:pender_embed)
  Media.any_instance.unstub(:pender_embed)
end
# If pender_embed raises, the archiver annotation should fall back to the
# information available in pender_data.
test "should create pender_archive annotation using information from pender_data" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.set_limits_keep = true
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  tbi.set_archive_pender_archive_enabled = true
  tbi.save!
  # Force the fallback path: pender_embed fails, pender_data succeeds.
  Link.any_instance.stubs(:pender_data).returns({ screenshot_taken: 1, 'archives' => {} })
  Link.any_instance.stubs(:pender_embed).raises(RuntimeError)
  pm = create_project_media media: l, team: t
  assert_difference 'Dynamic.where(annotation_type: "archiver").count' do
    assert_difference 'DynamicAnnotation::Field.where(annotation_type: "archiver", field_name: "pender_archive_response").count' do
      pm.create_all_archive_annotations
    end
  end
  # Always remove the any_instance stubs so they cannot leak into other tests.
  Link.any_instance.unstub(:pender_data)
  Link.any_instance.unstub(:pender_embed)
end
# Refreshing a media whose author_url changed should replace its account
# (deleting the orphaned one) and reindex the new account in ElasticSearch.
test "should update media account when change author_url" do
  setup_elasticsearch
  u = create_user is_admin: true
  t = create_team
  create_team_user user: u, team: t
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  url = 'http://www.facebook.com/meedan/posts/123456'
  author_url = 'http://facebook.com/123456'
  author_normal_url = 'http://www.facebook.com/meedan'
  author2_url = 'http://facebook.com/789123'
  author2_normal_url = 'http://www.facebook.com/meedan2'
  # Initial fetch returns the first author; the refresh (refresh: '1')
  # returns a different author URL, which should swap the account.
  data = { url: url, author_url: author_url, type: 'item' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  data = { url: url, author_url: author2_url, type: 'item' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
  data = { url: author_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'Foo', description: 'Bar', type: 'profile' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: author_url } }).to_return(body: response)
  data = { url: author2_normal_url, provider: 'facebook', picture: 'http://fb/p.png', title: 'NewFoo', description: 'NewBar', type: 'profile' }
  response = '{"type":"media","data":' + data.to_json + '}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: author2_url } }).to_return(body: response)
  m = create_media team: t, url: url, account: nil, account_id: nil
  a = m.account
  p = create_project team: t
  Sidekiq::Testing.inline! do
    pm = create_project_media media: m, project: p, disable_es_callbacks: false
    sleep 2
    pm = ProjectMedia.find(pm.id)
    with_current_user_and_team(u, t) do
      pm.refresh_media = true
      sleep 2
    end
    # The old account is gone; the media now points to the new one.
    new_account = m.reload.account
    assert_not_equal a, new_account
    assert_nil Account.where(id: a.id).last
    # ES reflects only the new account.
    result = $repository.find(get_es_id(pm))
    assert_equal 1, result['accounts'].size
    assert_equal result['accounts'].first['id'], new_account.id
  end
end
# parent_id in ElasticSearch should point at the confirmed parent item;
# suggested targets and items whose relationship was not indexed keep their own id.
test "should update elasticsearch parent_id field" do
  setup_elasticsearch
  t = create_team
  s1 = create_project_media team: t, disable_es_callbacks: false
  s2 = create_project_media team: t, disable_es_callbacks: false
  s3 = create_project_media team: t, disable_es_callbacks: false
  t1 = create_project_media team: t, disable_es_callbacks: false
  t2 = create_project_media team: t, disable_es_callbacks: false
  t3 = create_project_media team: t, disable_es_callbacks: false
  # First relationship is created without ES callbacks, so it is not indexed.
  create_relationship source_id: s1.id, target_id: t1.id
  create_relationship source_id: s2.id, target_id: t2.id, relationship_type: Relationship.confirmed_type, disable_es_callbacks: false
  create_relationship source_id: s3.id, target_id: t3.id, relationship_type: Relationship.suggested_type, disable_es_callbacks: false
  sleep 2
  t1_es = $repository.find(get_es_id(t1))
  assert_equal t1.id, t1_es['parent_id']
  # Only the confirmed relationship rewrites parent_id to the source item.
  t2_es = $repository.find(get_es_id(t2))
  assert_equal s2.id, t2_es['parent_id']
  t3_es = $repository.find(get_es_id(t3))
  assert_equal t3.id, t3_es['parent_id']
end
# An item's source must belong to the item's own team.
test "should validate media source" do
  team = create_team
  other_team = create_team
  own_source = create_source team: team
  foreign_source = create_source team: other_team
  item = nil
  assert_difference 'ProjectMedia.count', 2 do
    create_project_media team: team
    item = create_project_media team: team, source_id: own_source.id
  end
  # A source from another team is rejected on update...
  assert_raises ActiveRecord::RecordInvalid do
    item.source_id = foreign_source.id
    item.save!
  end
  # ...and on creation.
  assert_raises ActiveRecord::RecordInvalid do
    create_project_media team: team, source_id: foreign_source.id, skip_autocreate_source: false
  end
end
# When an account-backed media is added to a team, the item's source comes
# from the account — and each team gets its own source for the same account.
test "should assign media source using account" do
  u = create_user
  t = create_team
  t2 = create_team
  create_team_user team: t, user: u, role: 'admin'
  create_team_user team: t2, user: u, role: 'admin'
  m = nil
  s = nil
  with_current_user_and_team(u, t) do
    m = create_valid_media
    s = m.account.sources.first
    # The account's source is scoped to the first team.
    assert_equal t.id, s.team_id
    pm = create_project_media media: m, team: t, skip_autocreate_source: false
    assert_equal s.id, pm.source_id
  end
  # Adding the same media to a second team creates a distinct source there.
  pm = create_project_media media: m, team: t2, skip_autocreate_source: false
  s2 = pm.source
  assert_not_nil pm.source_id
  assert_not_equal s.id, s2.id
  assert_equal t2.id, s2.team_id
  assert_equal m.account, s2.accounts.first
end
# Two different URLs that Pender normalizes to the same canonical URL should
# still produce two separate items (no duplicate-URL rejection here).
test "should create media when normalized URL exists" do
  ft = create_field_type field_type: 'image_path', label: 'Image Path'
  at = create_annotation_type annotation_type: 'reverse_image', label: 'Reverse Image'
  create_field_instance annotation_type_object: at, name: 'reverse_image_path', label: 'Reverse Image', field_type_object: ft, optional: false
  create_bot name: 'Check Bot'
  url = 'https://www.facebook.com/Ma3komMona/videos/695409680623722'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"media","data":{"url":"' + url + '","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  t = create_team
  l = create_link team: t, url: url
  pm = create_project_media media: l
  # A second, different URL whose normalized form matches the first one.
  url = 'https://www.facebook.com/Ma3komMona/videos/vb.268809099950451/695409680623722/?type=3&theater'
  pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
  response = '{"type":"media","data":{"url":"https://www.facebook.com/Ma3komMona/videos/695409680623722","type":"item"}}'
  WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
  assert_difference 'ProjectMedia.count' do
    pm = ProjectMedia.new
    pm.url = url
    pm.media_type = 'Link'
    pm.team = t
    pm.save!
  end
end
# Moving an item to a final status must not raise even with a required task open.
test "should complete media if there are pending tasks" do
  item = create_project_media
  status = item.last_verification_status_obj
  create_task annotated: item, required: true
  assert_equal 'undetermined', status.reload.get_field('verification_status_status').status
  assert_nothing_raised do
    status.status = 'verified'
    status.save!
  end
end
# Resolving an account from the author URL should never raise.
test "should get account from author URL" do
  source = create_source
  item = create_project_media
  assert_nothing_raised do
    item.send :account_from_author_url, @url, source
  end
end
# A locked status must stay put even when task creation would normally bump it.
test "should not move media to active status if status is locked" do
  item = create_project_media
  assert_equal 'undetermined', item.last_verification_status
  status = item.last_verification_status_obj
  status.locked = true
  status.save!
  create_task annotated: item, disable_update_status: false
  assert_equal 'undetermined', item.reload.last_verification_status
end
# The serialized permissions expose the status-update capability key.
test "should have status permission" do
  member = create_user
  team = create_team
  project = create_project team: team
  item = create_project_media project: project
  with_current_user_and_team(member, team) do
    assert JSON.parse(item.permissions).has_key?('update Status')
  end
end
# With all annotations removed there is simply no status object — no crash.
test "should not crash if media does not have status" do
  item = create_project_media
  Annotation.delete_all
  assert_nothing_raised do
    assert_nil item.last_verification_status_obj
  end
end
# Relationships expose targets from the parent side and sources from the child side.
test "should have relationships and parent and children reports" do
  project = create_project
  parent_a = create_project_media project: project
  parent_b = create_project_media project: project
  child_a = create_project_media project: project
  child_b = create_project_media project: project
  create_project_media project: project
  create_relationship source_id: parent_a.id, target_id: child_a.id
  create_relationship source_id: parent_b.id, target_id: child_b.id
  assert_equal [child_a], parent_a.targets
  assert_equal [child_b], parent_b.targets
  assert_equal [parent_a], child_a.sources
  assert_equal [parent_b], child_b.sources
end
# related_to resolves related_to_id to the actual item (nil when unset).
test "should return related" do
  item = create_project_media
  other = create_project_media
  assert_nil item.related_to
  item.related_to_id = other.id
  assert_equal other, item.related_to
end
# related_to_id is a virtual attribute and must survive YAML serialization.
test "should include extra attributes in serialized object" do
  item = create_project_media
  item.related_to_id = 1
  assert_match /related_to_id/, YAML::dump(item)
end
# When the team bot installation has Pender archiving disabled, the
# screenshot archiver should be skipped for the item.
test "should skip screenshot archiver" do
  create_annotation_type_and_fields('Pender Archive', { 'Response' => ['JSON', false] })
  l = create_link
  t = create_team
  t.save!
  BotUser.delete_all
  tb = create_team_bot login: 'keep', set_settings: [{ name: 'archive_pender_archive_enabled', type: 'boolean' }], set_approved: true
  tbi = create_team_bot_installation user_id: tb.id, team_id: t.id
  # Explicitly disabled — this is what makes the archiver skippable.
  tbi.set_archive_pender_archive_enabled = false
  tbi.save!
  pm = create_project_media project: create_project(team: t), media: l
  assert pm.should_skip_create_archive_annotation?('pender_archive')
end
# Destroying an item must not raise even if one of its versions points
# at a non-existent associated record (dangling associated_id).
test "should destroy project media when associated_id on version is not valid" do
m = create_valid_media
t = create_team
p = create_project team: t
u = create_user
create_team_user user: u, team: t, role: 'admin'
pm = nil
with_current_user_and_team(u, t) do
pm = create_project_media project: p, media: m, user: u
pm.source_id = create_source(team_id: t.id).id
pm.save
assert_equal 3, pm.versions.count
end
# Corrupt the last version on purpose to simulate a dangling reference.
version = pm.versions.last
version.update_attribute('associated_id', 100)
assert_nothing_raised do
pm.destroy
end
end
# https://errbit.test.meedan.com/apps/581a76278583c6341d000b72/problems/5ca644ecf023ba001260e71d
# https://errbit.test.meedan.com/apps/581a76278583c6341d000b72/problems/5ca4faa1f023ba001260dbae
# Regression test: claims containing Devanagari text and invalid/NUL
# byte sequences must still create items without raising.
test "should create claim with Indian characters" do
str1 = "_Buy Redmi Note 5 Pro Mobile at *2999 Rs* (95�\u0000off) in Flash Sale._\r\n\r\n*Grab this offer now, Deal valid only for First 1,000 Customers. Visit here to Buy-* http://sndeals.win/"
str2 = "*प्रधानमंत्री छात्रवृति योजना 2019*\n\n*Scholarship Form for 10th or 12th Open Now*\n\n*Scholarship Amount*\n1.50-60�\u0000- Rs. 5000/-\n2.60-80�\u0000- Rs. 10000/-\n3.Above 80�\u0000- Rs. 25000/-\n\n*सभी 10th और 12th के बच्चो व उनके अभिभावकों को ये SMS भेजे ताकि सभी बच्चे इस योजना का लाभ ले सके*\n\n*Click Here for Apply:*\nhttps://bit.ly/2l71tWl"
[str1, str2].each do |str|
assert_difference 'ProjectMedia.count' do
m = create_claim_media quote: str
create_project_media media: m
end
end
end
# When Pender answers with an error payload (code 12, "unsafe URL"),
# creating an item from that URL must raise instead of storing bad media.
test "should not create project media with unsafe URL" do
WebMock.disable_net_connect! allow: [CheckConfig.get('storage_endpoint')]
url = 'http://unsafe.com/'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"error","data":{"code":12}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
assert_raises RuntimeError do
# The creation itself raises; earlier this block also contained an
# assertion after this call, which was dead code and was removed.
create_project_media media: nil, url: url
end
end
# Media metadata comes from Pender; item-level analysis overrides are
# stored separately and must not overwrite the media's own metadata.
test "should get metadata" do
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
url = 'https://twitter.com/test/statuses/123456'
response = { 'type' => 'media', 'data' => { 'url' => url, 'type' => 'item', 'title' => 'Media Title', 'description' => 'Media Description' } }.to_json
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
l = create_link url: url
pm = create_project_media media: l
assert_equal 'Media Title', l.metadata['title']
assert_equal 'Media Description', l.metadata['description']
assert_equal 'Media Title', pm.media.metadata['title']
assert_equal 'Media Description', pm.media.metadata['description']
# Setting analysis on the item must not touch the underlying media.
pm.analysis = { title: 'Project Media Title', content: 'Project Media Description' }
pm.save!
l = Media.find(l.id)
pm = ProjectMedia.find(pm.id)
assert_equal 'Media Title', l.metadata['title']
assert_equal 'Media Description', l.metadata['description']
assert_equal 'Project Media Title', pm.analysis['title']
assert_equal 'Project Media Description', pm.analysis['content']
end
# `demand` (number of smooch requests, summed across confirmed
# relationships) must be answered from cache (0 SQL queries), kept in
# sync with ElasticSearch for sorting, and recomputed on force-refresh.
test "should cache and sort by demand" do
setup_elasticsearch
RequestStore.store[:skip_cached_field_update] = false
team = create_team
p = create_project team: team
create_annotation_type_and_fields('Smooch', { 'Data' => ['JSON', false] })
pm = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
ms_pm = get_es_id(pm)
# assert_queries(0, '=') asserts the value is served without SQL.
assert_queries(0, '=') { assert_equal(0, pm.demand) }
create_dynamic_annotation annotation_type: 'smooch', annotated: pm
assert_queries(0, '=') { assert_equal(1, pm.demand) }
pm2 = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
ms_pm2 = get_es_id(pm2)
assert_queries(0, '=') { assert_equal(0, pm2.demand) }
2.times { create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2) }
assert_queries(0, '=') { assert_equal(2, pm2.demand) }
# test sorting
result = $repository.find(ms_pm)
assert_equal result['demand'], 1
result = $repository.find(ms_pm2)
assert_equal result['demand'], 2
result = CheckSearch.new({projects: [p.id], sort: 'demand'}.to_json, nil, team.id)
assert_equal [pm2.id, pm.id], result.medias.map(&:id)
result = CheckSearch.new({projects: [p.id], sort: 'demand', sort_type: 'asc'}.to_json, nil, team.id)
assert_equal [pm.id, pm2.id], result.medias.map(&:id)
# Confirmed relationships pool demand across source and targets.
r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
assert_equal 1, pm.reload.requests_count
assert_equal 2, pm2.reload.requests_count
assert_queries(0, '=') { assert_equal(3, pm.demand) }
assert_queries(0, '=') { assert_equal(3, pm2.demand) }
pm3 = create_project_media team: team, project_id: p.id
ms_pm3 = get_es_id(pm3)
assert_queries(0, '=') { assert_equal(0, pm3.demand) }
2.times { create_dynamic_annotation(annotation_type: 'smooch', annotated: pm3) }
assert_queries(0, '=') { assert_equal(2, pm3.demand) }
create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
assert_queries(0, '=') { assert_equal(5, pm.demand) }
assert_queries(0, '=') { assert_equal(5, pm2.demand) }
assert_queries(0, '=') { assert_equal(5, pm3.demand) }
create_dynamic_annotation annotation_type: 'smooch', annotated: pm3
assert_queries(0, '=') { assert_equal(6, pm.demand) }
assert_queries(0, '=') { assert_equal(6, pm2.demand) }
assert_queries(0, '=') { assert_equal(6, pm3.demand) }
# Destroying a relationship removes the pooled contribution.
r.destroy!
assert_queries(0, '=') { assert_equal(4, pm.demand) }
assert_queries(0, '=') { assert_equal(2, pm2.demand) }
assert_queries(0, '=') { assert_equal(4, pm3.demand) }
# demand(true) forces recomputation, so SQL queries are expected here.
assert_queries(0, '>') { assert_equal(4, pm.demand(true)) }
assert_queries(0, '>') { assert_equal(2, pm2.demand(true)) }
assert_queries(0, '>') { assert_equal(4, pm3.demand(true)) }
end
# `linked_items_count` counts confirmed targets of a source item, is
# served from cache (0 queries), updates on relationship create/destroy,
# and recomputes with SQL only when forced via `(true)`.
test "should cache number of linked items" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
pm = create_project_media team: t
assert_queries(0, '=') { assert_equal(0, pm.linked_items_count) }
pm2 = create_project_media team: t
assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
assert_queries(0, '=') { assert_equal(1, pm.linked_items_count) }
assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
pm3 = create_project_media team: t
assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
r = create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
assert_queries(0, '=') { assert_equal(2, pm.linked_items_count) }
assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
r.destroy!
assert_queries(0, '=') { assert_equal(1, pm.linked_items_count) }
assert_queries(0, '=') { assert_equal(0, pm2.linked_items_count) }
assert_queries(0, '=') { assert_equal(0, pm3.linked_items_count) }
assert_queries(0, '>') { assert_equal(1, pm.linked_items_count(true)) }
end
# Despite its name, this exercises the cached `last_seen` field: it
# follows the newest smooch request or linked item across confirmed
# relationships, and reverts when the relationship is destroyed.
# NOTE(review): sleeps guarantee distinct second-resolution timestamps.
test "should cache number of requests" do
RequestStore.store[:skip_cached_field_update] = false
team = create_team
pm = create_project_media team: team
t = t0 = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm).created_at.to_i
assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
sleep 1
pm2 = create_project_media team: team
r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
t = pm2.created_at.to_i
assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
sleep 1
t = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2).created_at.to_i
assert_queries(0, '=') { assert_equal(t, pm.last_seen) }
r.destroy!
assert_queries(0, '=') { assert_equal(t0, pm.last_seen) }
assert_queries(0, '>') { assert_equal(t0, pm.last_seen(true)) }
end
# The `status` cached field reflects verification-status changes without
# SQL queries; `status(true)` forces a DB-backed recomputation.
test "should cache status" do
RequestStore.store[:skip_cached_field_update] = false
pm = create_project_media
assert pm.respond_to?(:status)
assert_queries 0, '=' do
assert_equal 'undetermined', pm.status
end
s = pm.last_verification_status_obj
s.status = 'verified'
s.save!
assert_queries 0, '=' do
assert_equal 'verified', pm.status
end
assert_queries(0, '>') do
assert_equal 'verified', pm.status(true)
end
end
# Cached `title` prefers the claim description over the original quote,
# and a later fact-check title does not displace it.
test "should cache title" do
RequestStore.store[:skip_cached_field_update] = false
pm = create_project_media quote: 'Title 0'
assert_equal 'Title 0', pm.title
cd = create_claim_description project_media: pm, description: 'Title 1'
assert_queries 0, '=' do
assert_equal 'Title 1', pm.title
end
create_fact_check claim_description: cd, title: 'Title 2'
assert_queries 0, '=' do
assert_equal 'Title 1', pm.title
end
assert_queries(0, '>') do
assert_equal 'Title 1', pm.reload.title(true)
end
end
# Items imported via the FETCH channel use a Blank media, so their
# title/description must come from the attached fact-check instead.
test "should cache title for imported items" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
u = create_user
create_team_user team: t, user: u, role: 'admin'
with_current_user_and_team(u, t) do
pm = ProjectMedia.create!(
media: Blank.create!,
team: t,
user: u,
channel: { main: CheckChannels::ChannelCodes::FETCH }
)
# Build the claim description manually to bypass permission checks.
cd = ClaimDescription.new
cd.skip_check_ability = true
cd.project_media = pm
cd.description = '-'
cd.user = u
cd.save!
fc_summary = 'fc_summary'
fc_title = 'fc_title'
fc = FactCheck.new
fc.claim_description = cd
fc.title = fc_title
fc.summary = fc_summary
fc.user = u
fc.skip_report_update = true
fc.save!
assert_equal fc_title, pm.title
assert_equal fc_summary, pm.description
end
end
# Cached `description` mirrors the title behavior: the claim description
# wins over the quote, and a fact-check summary does not displace it.
test "should cache description" do
RequestStore.store[:skip_cached_field_update] = false
pm = create_project_media quote: 'Description 0'
assert_equal 'Description 0', pm.description
cd = create_claim_description description: 'Description 1', project_media: pm
assert_queries 0, '=' do
assert_equal 'Description 1', pm.description
end
create_fact_check claim_description: cd, summary: 'Description 2'
assert_queries 0, '=' do
assert_equal 'Description 1', pm.description
end
assert_queries(0, '>') do
assert_equal 'Description 1', pm.reload.description(true)
end
end
# The ES document's sortable fields (linked_items_count, last_seen)
# must stay in sync with the cached DB attributes through request
# creation, relationship creation, and relationship destruction.
test "should index sortable fields" do
RequestStore.store[:skip_cached_field_update] = false
# sortable fields are [linked_items_count, last_seen and share_count]
setup_elasticsearch
create_annotation_type_and_fields('Smooch', { 'Data' => ['JSON', false] })
team = create_team
p = create_project team: team
pm = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
result = $repository.find(get_es_id(pm))
assert_equal 0, result['linked_items_count']
assert_equal pm.created_at.to_i, result['last_seen']
assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
t = t0 = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm).created_at.to_i
result = $repository.find(get_es_id(pm))
assert_equal t, result['last_seen']
assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
pm2 = create_project_media team: team, project_id: p.id, disable_es_callbacks: false
r = create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
t = pm2.created_at.to_i
result = $repository.find(get_es_id(pm))
result2 = $repository.find(get_es_id(pm2))
assert_equal 1, result['linked_items_count']
assert_equal 0, result2['linked_items_count']
assert_equal t, result['last_seen']
assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
t = create_dynamic_annotation(annotation_type: 'smooch', annotated: pm2).created_at.to_i
result = $repository.find(get_es_id(pm))
assert_equal t, result['last_seen']
assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
# Removing the relationship rolls last_seen back to the source's own
# latest request timestamp (t0).
r.destroy!
result = $repository.find(get_es_id(pm))
assert_equal t0, result['last_seen']
assert_equal pm.reload.last_seen, pm.read_attribute(:last_seen)
result = $repository.find(get_es_id(pm))
result2 = $repository.find(get_es_id(pm2))
assert_equal 0, result['linked_items_count']
assert_equal 0, result2['linked_items_count']
end
# Assigning `team` on an in-memory instance must not persist: a fresh
# find from the database still returns the original team.
test "should get team" do
  original_team = create_team
  pm = create_project_media team: original_team
  assert_equal original_team, pm.reload.team
  other_team = create_team
  pm.team = other_team
  assert_equal other_team, pm.team
  assert_equal original_team, ProjectMedia.find(pm.id).team
end
# CheckSearch defaults: trashed items and similarity targets are hidden
# unless requested; project filters and eslimit narrow the result set.
# NOTE(review): the first `pm` assignment is overwritten below and unused.
test "should query media" do
setup_elasticsearch
t = create_team
p = create_project team: t
p1 = create_project team: t
p2 = create_project team: t
pm = create_project_media team: t, project_id: p.id, disable_es_callbacks: false
create_project_media team: t, project_id: p1.id, disable_es_callbacks: false
create_project_media team: t, archived: CheckArchivedFlags::FlagCodes::TRASHED, project_id: p.id, disable_es_callbacks: false
pm = create_project_media team: t, project_id: p1.id, disable_es_callbacks: false
create_relationship source_id: pm.id, target_id: create_project_media(team: t, project_id: p.id, disable_es_callbacks: false).id, relationship_type: Relationship.confirmed_type
# Give ElasticSearch time to index before querying.
sleep 2
assert_equal 3, CheckSearch.new({ team_id: t.id }.to_json, nil, t.id).medias.size
assert_equal 4, CheckSearch.new({ show_similar: true, team_id: t.id }.to_json, nil, t.id).medias.size
assert_equal 2, CheckSearch.new({ team_id: t.id, projects: [p1.id] }.to_json, nil, t.id).medias.size
assert_equal 0, CheckSearch.new({ team_id: t.id, projects: [p2.id] }.to_json, nil, t.id).medias.size
assert_equal 1, CheckSearch.new({ team_id: t.id, projects: [p1.id], eslimit: 1 }.to_json, nil, t.id).medias.size
end
# Fires 15 concurrent ES document updates with retry_on_conflict: 0 and
# verifies the worker-side retry logic recovers from version conflicts.
test "should handle indexing conflicts" do
require File.join(Rails.root, 'lib', 'middleware_sidekiq_server_retry')
Sidekiq::Testing.server_middleware do |chain|
chain.add ::Middleware::Sidekiq::Server::Retry
end
class ElasticSearchTestWorker
include Sidekiq::Worker
attr_accessor :retry_count
sidekiq_options retry: 5
sidekiq_retries_exhausted do |_msg, e|
raise e
end
def perform(id)
begin
client = $repository.client
client.update index: CheckElasticSearchModel.get_index_alias, id: id, retry_on_conflict: 0, body: { doc: { updated_at: Time.now + rand(50).to_i } }
rescue Exception => e
# BUG FIX: the original assigned a bare `retry_count` local, which
# shadowed the attr_accessor; the counter reset on every recursion,
# `retry_count < 5` was always true, and a persistent error would
# recurse without bound. Writing through `self.` persists the count
# on the instance so the limit is actually enforced.
self.retry_count = retry_count.to_i + 1
if retry_count < 5
perform(id)
else
raise e
end
end
end
end
setup_elasticsearch
threads = []
pm = create_project_media media: nil, quote: 'test', disable_es_callbacks: false
id = get_es_id(pm)
15.times do
threads << Thread.start do
Sidekiq::Testing.inline! do
ElasticSearchTestWorker.perform_async(id)
end
end
end
threads.map(&:join)
end
# Status labels fall back from the requested locale to the label the
# team defined, and honor custom i18n keys when they exist.
# NOTE: stub order matters — the blanket I18n.t stub must precede the
# specific custom_message stub so the latter wins for that key.
test "should localize status" do
I18n.locale = :pt
pm = create_project_media
assert_equal 'Não Iniciado', pm.status_i18n(nil, { locale: 'pt' })
t = create_team slug: 'test'
value = {
label: 'Field label',
active: 'test',
default: 'undetermined',
statuses: [
{ id: 'undetermined', locales: { en: { label: 'Undetermined', description: '' } }, style: { color: 'blue' } },
{ id: 'test', locales: { en: { label: 'Test', description: '' }, pt: { label: 'Teste', description: '' } }, style: { color: 'red' } }
]
}
t.set_media_verification_statuses(value)
t.save!
p = create_project team: t
pm = create_project_media project: p
# No pt locale defined for 'undetermined', so the en label is used.
assert_equal 'Undetermined', pm.status_i18n(nil, { locale: 'pt' })
I18n.stubs(:exists?).with('custom_message_status_test_test').returns(true)
I18n.stubs(:t).returns('')
I18n.stubs(:t).with(:custom_message_status_test_test, { locale: 'pt' }).returns('Teste')
assert_equal 'Teste', pm.status_i18n('test', { locale: 'pt' })
I18n.unstub(:t)
I18n.unstub(:exists?)
I18n.locale = :en
end
# Annotating a trashed item must succeed for system requests (no current
# user) but be rejected when a client user is set.
test "should not throw exception for trashed item if request does not come from a client" do
# BUG FIX: the original passed `project: p` with no local `p` defined —
# the bare name resolved to Kernel#p, which returns nil. Passing nil
# explicitly preserves the effective behavior without the accident.
pm = create_project_media project: nil
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
User.current = nil
assert_nothing_raised do
create_comment annotated: pm
end
u = create_user(is_admin: true)
User.current = u
assert_raises ActiveRecord::RecordInvalid do
create_comment annotated: pm
end
User.current = nil
end
# An item with no project ("orphan") must still receive the team's
# custom default status ('stop') on creation.
test "should set initial custom status of orphan item" do
  team = create_team
  custom_statuses = {
    label: 'Status',
    default: 'stop',
    active: 'done',
    statuses: [
      { id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
      { id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
    ]
  }
  team.send :set_media_verification_statuses, custom_statuses
  team.save!
  orphan = create_project_media project: nil, team: team
  assert_equal 'stop', orphan.last_status
end
# An orphan item (no project) must allow transitioning between custom
# statuses without raising.
test "should change custom status of orphan item" do
t = create_team
value = {
label: 'Status',
default: 'stop',
active: 'done',
statuses: [
{ id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } },
{ id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } }
]
}
t.send :set_media_verification_statuses, value
t.save!
pm = create_project_media project: nil, team: t
assert_nothing_raised do
s = pm.last_status_obj
s.status = 'done'
s.save!
end
end
# Publishing a report should trigger CcDeville cache purges; the stub
# expectation (times(6)) verifies clear_cache_for_url is invoked.
test "should clear caches when report is updated" do
ProjectMedia.any_instance.unstub(:clear_caches)
Sidekiq::Testing.inline! do
CcDeville.stubs(:clear_cache_for_url).times(6)
pm = create_project_media
# Cache clearing is globally disabled in tests; re-enable it here.
pm.skip_clear_cache = false
RequestStore.store[:skip_clear_cache] = false
PenderClient::Request.stubs(:get_medias)
publish_report(pm)
end
CcDeville.unstub(:clear_cache_for_url)
PenderClient::Request.unstub(:get_medias)
ProjectMedia.any_instance.stubs(:clear_caches)
end
# The first embed_url call creates a ShortenedUrl record; subsequent
# calls reuse it instead of minting a new one.
test "should generate short URL when getting embed URL for the first time" do
pm = create_project_media
assert_difference 'Shortener::ShortenedUrl.count' do
assert_match /^http/, pm.embed_url
end
assert_no_difference 'Shortener::ShortenedUrl.count' do
assert_match /^http/, pm.embed_url
end
end
# Duplicate detection is scoped per team: the same media URL may exist
# in different teams but not twice in one team, regardless of whether
# either copy lives in a list (project) or in "all items".
test "should validate duplicate based on team" do
t = create_team
p = create_project team: t
t2 = create_team
p2 = create_project team: t2
# Create media in different team with no list
m = create_valid_media
create_project_media team: t, media: m
assert_nothing_raised do
create_project_media team: t2, url: m.url
end
# Try to add same item to list
assert_raises RuntimeError do
create_project_media team: t, url: m.url
end
# Create item in a list then try to add it via all items(with no list)
m2 = create_valid_media
create_project_media team:t, project_id: p.id, media: m2
assert_raises RuntimeError do
create_project_media team: t, url: m2.url
end
# Add same item to list in different team
assert_nothing_raised do
create_project_media team: t2, url: m2.url
end
# create item in a list then try to add it to all items in different team
m3 = create_valid_media
create_project_media team: t, project_id: p.id, media: m3
assert_nothing_raised do
create_project_media team: t2, url: m3.url
end
end
# A team admin (not a super admin) must be able to restore a trashed
# item and confirm an unconfirmed one, moving it to another list, with
# the project_id change propagated to the ElasticSearch document.
test "should restore and confirm item if not super admin" do
setup_elasticsearch
t = create_team
p = create_project team: t
p3 = create_project team: t
u = create_user
create_team_user user: u, team: t, role: 'admin', is_admin: false
Sidekiq::Testing.inline! do
# test restore
pm = create_project_media project: p, disable_es_callbacks: false, archived: CheckArchivedFlags::FlagCodes::TRASHED
sleep 1
result = $repository.find(get_es_id(pm))['project_id']
assert_equal p.id, result
assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.archived
with_current_user_and_team(u, t) do
pm.archived = CheckArchivedFlags::FlagCodes::NONE
pm.disable_es_callbacks = false
pm.project_id = p3.id
pm.save!
end
pm = pm.reload
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived
assert_equal p3.id, pm.project_id
sleep 1
result = $repository.find(get_es_id(pm))['project_id']
assert_equal p3.id, result
# test confirm
pm = create_project_media project: p, disable_es_callbacks: false, archived: CheckArchivedFlags::FlagCodes::UNCONFIRMED
sleep 1
assert_equal p.id, pm.project_id
result = $repository.find(get_es_id(pm))['project_id']
assert_equal p.id, result
assert_equal CheckArchivedFlags::FlagCodes::UNCONFIRMED, pm.archived
with_current_user_and_team(u, t) do
pm.archived = CheckArchivedFlags::FlagCodes::NONE
pm.disable_es_callbacks = false
pm.project_id = p3.id
pm.save!
end
pm = pm.reload
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived
assert_equal p3.id, pm.project_id
sleep 1
result = $repository.find(get_es_id(pm))['project_id']
assert_equal p3.id, result
end
end
# The private set_media_type hook classifies a URL-backed item as 'Link'.
test "should set media type for links" do
  link = create_link
  item = create_project_media url: link.url
  item.send :set_media_type
  assert_equal 'Link', item.media_type
end
# Pender requests use the default API key unless the team configured its
# own (set_pender_key), which then applies to both link and author
# profile lookups.
test "should create link and account using team pender key" do
t = create_team
p = create_project(team: t)
Team.stubs(:current).returns(t)
url1 = random_url
author_url1 = random_url
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => url1, "type" => "item", "title" => "Default token", "author_url" => author_url1}})
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Default token", "author_name" => 'Author with default token'}})
url2 = random_url
author_url2 = random_url
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url2 }, 'specific_token').returns({"type" => "media","data" => {"url" => url2, "type" => "item", "title" => "Specific token", "author_url" => author_url2}})
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url2 }, 'specific_token').returns({"type" => "media","data" => {"url" => author_url2, "type" => "profile", "title" => "Specific token", "author_name" => 'Author with specific token'}})
pm = ProjectMedia.create url: url1
assert_equal 'Default token', ProjectMedia.find(pm.id).media.metadata['title']
assert_equal 'Author with default token', ProjectMedia.find(pm.id).media.account.metadata['author_name']
# Once the team sets its own key, requests must carry it instead.
t.set_pender_key = 'specific_token'; t.save!
pm = ProjectMedia.create! url: url2
assert_equal 'Specific token', ProjectMedia.find(pm.id).media.metadata['title']
assert_equal 'Author with specific token', ProjectMedia.find(pm.id).media.account.metadata['author_name']
Team.unstub(:current)
PenderClient::Request.unstub(:get_medias)
end
# Refreshing media (refresh_media = true) must hit Pender with
# refresh: '1', using the team-specific key once it is configured.
test "should refresh using team pender key" do
t = create_team
l = create_link
Team.stubs(:current).returns(t)
pm = create_project_media media: l, project: create_project(team: t)
author_url1 = random_url
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: l.url, refresh: '1' }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => l.url, "type" => "item", "title" => "Default token", "author_url" => author_url1}})
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, CheckConfig.get('pender_key')).returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Default token", "author_name" => 'Author with default token'}})
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: l.url, refresh: '1' }, 'specific_token').returns({"type" => "media","data" => {"url" => l.url, "type" => "item", "title" => "Specific token", "author_url" => author_url1}})
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: author_url1 }, 'specific_token').returns({"type" => "media","data" => {"url" => author_url1, "type" => "profile", "title" => "Author with specific token", "author_name" => 'Author with specific token'}})
assert pm.media.metadata['title'].blank?
pm.refresh_media = true
pm.save!
assert_equal 'Default token', ProjectMedia.find(pm.id).media.metadata['title']
t.set_pender_key = 'specific_token'; t.save!
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true; pm.save!
assert_equal 'Specific token', ProjectMedia.find(pm.id).media.metadata['title']
Team.unstub(:current)
PenderClient::Request.unstub(:get_medias)
end
# replace_by is restricted to items within a single team; merging
# across teams must raise.
test "should not replace one project media by another if not from the same team" do
  blank_item = create_project_media team: create_team, media: Blank.create!
  replacement = create_project_media team: create_team
  assert_raises RuntimeError do
    blank_item.replace_by(replacement)
  end
end
# replace_by only applies to Blank-media placeholders; replacing an
# item with real media must raise.
test "should not replace one project media by another if media is not blank" do
  team = create_team
  existing = create_project_media team: team
  replacement = create_project_media team: team
  assert_raises RuntimeError do
    existing.replace_by(replacement)
  end
end
# replace_by on a Blank (imported) item merges it into the target: the
# old report/status survive, the new item's are deleted, tags and
# comments are merged, creator/channel come from the imported item, and
# the ElasticSearch document reflects the merged state.
# NOTE(review): block params named `t` below shadow the outer team local.
test "should replace a blank project media by another project media" do
setup_elasticsearch
t = create_team
u = create_user
u2 = create_user
create_team_user team: t, user: u2
at = create_annotation_type annotation_type: 'task_response_single_choice', label: 'Task'
ft1 = create_field_type field_type: 'single_choice', label: 'Single Choice'
fi1 = create_field_instance annotation_type_object: at, name: 'response_task', label: 'Response', field_type_object: ft1
tag_a = create_tag_text team_id: t.id
tag_b = create_tag_text team_id: t.id
tag_c = create_tag_text team_id: t.id
tt = create_team_task team_id: t.id, task_type: 'single_choice', options: [{ label: 'Foo'}, { label: 'Faa' }]
tt2 = create_team_task team_id: t.id, task_type: 'single_choice', options: [{ label: 'Optiona a'}, { label: 'Option b' }]
create_team_user team: t, user: u, role: 'admin'
with_current_user_and_team(u, t) do
RequestStore.store[:skip_clear_cache] = true
old = create_project_media team: t, media: Blank.create!, channel: { main: CheckChannels::ChannelCodes::FETCH }, disable_es_callbacks: false
old_r = publish_report(old)
old_s = old.last_status_obj
new = create_project_media team: t, media: create_uploaded_video, disable_es_callbacks: false
new_r = publish_report(new)
new_s = new.last_status_obj
old_tag_a = create_tag tag: tag_a.id, annotated: old
old_tag_b = create_tag tag: tag_b.id, annotated: old
new_tag_a = create_tag tag: tag_a.id, annotated: new
new_tag_c = create_tag tag: tag_c.id, annotated: new
# add task response
new_tt = new.annotations('task').select{|t| t.team_task_id == tt.id}.last
new_tt.response = { annotation_type: 'task_response_single_choice', set_fields: { response_task: 'Foo' }.to_json }.to_json
new_tt.save!
new_tt2 = new.annotations('task').select{|t| t.team_task_id == tt2.id}.last
# add comments
old_c = create_comment annotated: old
new_c = create_comment annotated: new
# assign to
s = new.last_verification_status_obj
s = Dynamic.find(s.id)
s.assigned_to_ids = u2.id.to_s
s.save!
old.replace_by(new)
# The old item is gone; the new item's own report/status were replaced
# by the old (published) ones.
assert_nil ProjectMedia.find_by_id(old.id)
assert_nil Annotation.find_by_id(new_s.id)
assert_nil Annotation.find_by_id(new_r.id)
assert_equal old_r, new.get_dynamic_annotation('report_design')
assert_equal old_s, new.get_dynamic_annotation('verification_status')
new = new.reload
assert_equal 'Import', new.creator_name
data = { "main" => CheckChannels::ChannelCodes::FETCH }
assert_equal data, new.channel
assert_equal 3, new.annotations('tag').count
assert_equal 2, new.annotations('comment').count
# Verify ES
result = $repository.find(get_es_id(new))
assert_equal [CheckChannels::ChannelCodes::FETCH], result['channel']
assert_equal [old_c.id, new_c.id], result['comments'].collect{ |c| c['id'] }.sort
assert_equal [new_tag_a.id, new_tag_c.id, old_tag_b.id].sort, result['tags'].collect{ |tag| tag['id'] }.sort
assert_equal [new_tt.id, new_tt2.id].sort, result['task_responses'].collect{ |task| task['id'] }.sort
assert_equal [u2.id], result['assigned_user_ids']
end
end
# Creating an item whose Pender payload includes social metrics should
# persist them as a 'metrics' annotation on the item.
test "should create metrics annotation after create a project media" do
create_annotation_type_and_fields('Metrics', { 'Data' => ['JSON', false] })
url = 'https://twitter.com/meedan/status/1321600654750613505'
response = {"type" => "media","data" => {"url" => url, "type" => "item", "metrics" => {"facebook"=> {"reaction_count" => 2, "comment_count" => 5, "share_count" => 10, "comment_plugin_count" => 0 }}}}
PenderClient::Request.stubs(:get_medias).with(CheckConfig.get('pender_url_private'), { url: url }, CheckConfig.get('pender_key')).returns(response)
pm = create_project_media media: nil, url: url
assert_equal response['data']['metrics'], JSON.parse(pm.get_annotations('metrics').last.load.get_field_value('metrics_data'))
PenderClient::Request.unstub(:get_medias)
end
# Metadata task answers are cached under
# project_media:task_value:<pm_id>:<team_task_id>; answering fills the
# cache (0-query reads) and destroying the response invalidates it.
test "should cache metadata value" do
at = create_annotation_type annotation_type: 'task_response'
create_field_instance annotation_type_object: at, name: 'response_test'
t = create_team
tt = create_team_task fieldset: 'metadata', team_id: t.id
pm = create_project_media team: t
m = pm.get_annotations('task').last.load
value = random_string
m.response = { annotation_type: 'task_response', set_fields: { response_test: value }.to_json }.to_json
m.save!
assert_queries(0, '=') do
assert_equal value, pm.send("task_value_#{tt.id}")
end
assert_not_nil Rails.cache.read("project_media:task_value:#{pm.id}:#{tt.id}")
assert_not_nil pm.reload.task_value(tt.id)
d = m.reload.first_response_obj
d.destroy!
assert_nil Rails.cache.read("project_media:task_value:#{pm.id}:#{tt.id}")
assert_nil pm.reload.task_value(tt.id)
end
# list_columns_values must serve the team's configured list columns from
# cached data, issuing exactly one SQL query (fetching team tasks).
# NOTE(review): block params named `t` below shadow the outer team local.
test "should return item columns values" do
RequestStore.store[:skip_cached_field_update] = false
at = create_annotation_type annotation_type: 'task_response'
create_field_instance annotation_type_object: at, name: 'response_test'
t = create_team
tt1 = create_team_task fieldset: 'metadata', team_id: t.id
tt2 = create_team_task fieldset: 'metadata', team_id: t.id
t.list_columns = ["task_value_#{tt1.id}", "task_value_#{tt2.id}"]
t.save!
pm = create_project_media team: t.reload
m = pm.get_annotations('task').map(&:load).select{ |t| t.team_task_id == tt1.id }.last
m.response = { annotation_type: 'task_response', set_fields: { response_test: 'Foo Value' }.to_json }.to_json
m.save!
m = pm.get_annotations('task').map(&:load).select{ |t| t.team_task_id == tt2.id }.last
m.response = { annotation_type: 'task_response', set_fields: { response_test: 'Bar Value' }.to_json }.to_json
m.save!
# Warm the association so it does not count against the query budget.
pm.team
# The only SQL query should be to get the team tasks
assert_queries(1, '=') do
values = pm.list_columns_values
assert_equal 2, values.size
assert_equal 'Foo Value', values["task_value_#{tt1.id}"]
assert_equal 'Bar Value', values["task_value_#{tt2.id}"]
end
pm2 = create_project_media
pm2.team
pm2.media
# The only SQL query should be to get the team tasks
assert_queries(1, '=') do
assert_equal 8, pm2.list_columns_values.keys.size
end
end
# Unknown dynamic accessors must surface as NoMethodError rather than
# being silently swallowed by method_missing.
test "should return error if method does not exist" do
  item = create_project_media
  assert_raises NoMethodError do
    item.send(random_string)
  end
end
# Cached report_status follows the source's report through confirmed
# relationships: targets mirror the source (published/paused) and fall
# back to their own status when the relationship is removed.
test "should cache published value" do
RequestStore.store[:skip_cached_field_update] = false
pm = create_project_media
pm2 = create_project_media team: pm.team
create_relationship source_id: pm.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
assert_queries(0, '=') { assert_equal 'unpublished', pm.report_status }
assert_queries(0, '=') { assert_equal 'unpublished', pm2.report_status }
r = publish_report(pm)
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 'published', pm.report_status }
assert_queries(0, '=') { assert_equal 'published', pm2.report_status }
r = Dynamic.find(r.id)
r.set_fields = { state: 'paused' }.to_json
r.action = 'pause'
r.save!
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 'paused', pm.report_status }
assert_queries(0, '=') { assert_equal 'paused', pm2.report_status }
# After a cold cache, reading recomputes (queries expected).
Rails.cache.clear
assert_queries(0, '>') { assert_equal 'paused', pm.report_status }
pm3 = create_project_media team: pm.team
assert_queries(0, '=') { assert_equal 'unpublished', pm3.report_status }
r = create_relationship source_id: pm.id, target_id: pm3.id, relationship_type: Relationship.confirmed_type
assert_queries(0, '=') { assert_equal 'paused', pm3.report_status }
r.destroy!
assert_queries(0, '=') { assert_equal 'unpublished', pm3.report_status }
end
# tags_as_sentence is a cached, comma-joined tag list that tracks tag
# creation and deletion, and survives a cold-cache recomputation.
test "should cache tags list" do
RequestStore.store[:skip_cached_field_update] = false
pm = create_project_media
assert_queries(0, '=') { assert_equal '', pm.tags_as_sentence }
t = create_tag tag: 'foo', annotated: pm
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 'foo', pm.tags_as_sentence }
create_tag tag: 'bar', annotated: pm
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 'foo, bar', pm.tags_as_sentence }
t.destroy!
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 'bar', pm.tags_as_sentence }
Rails.cache.clear
assert_queries(0, '>') { assert_equal 'bar', pm.tags_as_sentence }
end
test "should cache media published at" do
RequestStore.store[:skip_cached_field_update] = false
url = 'http://twitter.com/test/123456'
pender_url = CheckConfig.get('pender_url_private') + '/api/medias'
response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:30:00"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url } }).to_return(body: response)
pm = create_project_media media: nil, url: url
assert_queries(0, '=') { assert_equal 601720200, pm.media_published_at }
response = '{"type":"media","data":{"url":"' + url + '","type":"item","published_at":"1989-01-25 08:31:00"}}'
WebMock.stub_request(:get, pender_url).with({ query: { url: url, refresh: '1' } }).to_return(body: response)
pm = ProjectMedia.find(pm.id)
pm.refresh_media = true
pm.save!
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal 601720260, pm.media_published_at }
end
# `related_count` is cached on both ends of a relationship and updated
# when the relationship is created or destroyed.
test "should cache number of related items" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
pm1 = create_project_media team: t
pm2 = create_project_media team: t
assert_queries(0, '=') { assert_equal 0, pm1.related_count }
assert_queries(0, '=') { assert_equal 0, pm2.related_count }
r = create_relationship source_id: pm1.id, target_id: pm2.id
assert_queries(0, '=') { assert_equal 1, pm1.related_count }
assert_queries(0, '=') { assert_equal 1, pm2.related_count }
r.destroy!
assert_queries(0, '=') { assert_equal 0, pm1.related_count }
assert_queries(0, '=') { assert_equal 0, pm2.related_count }
end
# `type_of_media` is cached (1 query on cold cache, 0 afterwards) and
# mirrored into the Elasticsearch doc as an integer index.
test "should cache type of media" do
RequestStore.store[:skip_cached_field_update] = false
setup_elasticsearch
pm = create_project_media
assert_queries(0, '=') { assert_equal 'Link', pm.type_of_media }
Rails.cache.clear
assert_queries(1, '=') { assert_equal 'Link', pm.type_of_media }
assert_queries(0, '=') { assert_equal 'Link', pm.type_of_media }
# Give the ES callback time to index before reading the document.
sleep 1
es = $repository.find(get_es_id(pm))
assert_equal Media.types.index(pm.type_of_media), es['type_of_media']
end
# `folder` caches the title of the item's project (folder). The cache
# must follow project reassignment, project renames, and project
# deletion (which moves the item back to the team's default folder).
test "should cache project title" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
p1 = create_project title: 'Foo', team: t
p2 = create_project title: 'Bar', team: t
pm = create_project_media team: t
default_folder = t.default_folder
assert_queries(0, '=') { assert_equal default_folder.title, pm.folder }
pm.project_id = p1.id
pm.save!
assert_queries(0, '=') { assert_equal 'Foo', pm.folder }
p1.title = 'Test'
p1.save!
assert_queries(0, '=') { assert_equal 'Test', pm.folder }
pm.project_id = p2.id
pm.save!
assert_queries(0, '=') { assert_equal 'Bar', pm.folder }
assert_equal p2.id, pm.reload.project_id
# Project destruction is processed by background jobs; run them inline.
Sidekiq::Testing.inline! do
p2.destroy!
assert_equal t.default_folder.id, pm.reload.project_id
assert_queries(0, '=') { assert_equal default_folder.title, pm.folder }
end
end
# A claim description overrides the display title of an uploaded file,
# but the original upload filename stays reachable via #original_title.
test "should get original title for uploaded files" do
RequestStore.store[:skip_cached_field_update] = false
item = create_project_media media: create_uploaded_image
create_claim_description project_media: item, description: 'Custom Title'
assert_equal 'Custom Title', item.reload.title
assert_equal media_filename('rails.png'), item.reload.original_title
end
# Moving a main item to another folder must drag its related (secondary)
# items along and keep the cached `folder` value in sync for all of them.
test "should move secondary item to same main item project" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
p = create_project team: t
p2 = create_project team: t
pm = create_project_media project: p
pm2 = create_project_media project: p
pm3 = create_project_media project: p
assert_equal p.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm.id}:folder")
create_relationship source_id: pm.id, target_id: pm2.id
create_relationship source_id: pm.id, target_id: pm3.id
pm.project_id = p2.id
pm.save!
assert_equal p2.id, pm2.reload.project_id
assert_equal p2.id, pm3.reload.project_id
# verify cached folder value
assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm.id}:folder")
assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm2.id}:folder")
assert_equal p2.title, Rails.cache.read("check_cached_field:ProjectMedia:#{pm3.id}:folder")
end
# Published report fields must be exposed through the ProjectMedia
# convenience readers (report_text_* / report_visual_card_*).
test "should get report information" do
item = create_project_media
report_data = {
title: 'Report text title',
text: 'Report text content',
headline: 'Visual card title',
description: 'Visual card content'
}
publish_report(item, {}, nil, report_data)
item = ProjectMedia.find(item.id).reload
assert_equal 'Report text title', item.report_text_title
assert_equal 'Report text content', item.report_text_content
assert_equal 'Visual card title', item.report_visual_card_title
assert_equal 'Visual card content', item.report_visual_card_content
end
# extracted_text always returns a String (possibly empty).
test "should get extracted text" do
item = create_project_media
assert_kind_of String, item.extracted_text
end
# `archived` accepts only defined CheckArchivedFlags codes.
test "should validate archived value" do
assert_difference 'ProjectMedia.count' do
create_project_media archived: CheckArchivedFlags::FlagCodes::SPAM
end
assert_raises ActiveRecord::RecordInvalid do
create_project_media archived: { main: 90 }
end
end
# `channel` must be a hash with a valid `main` code and valid `others`
# codes; `main` is immutable after creation; defaults depend on context
# (MANUAL normally, API when an ApiKey is current).
test "should validate channel value" do
# validate channel create (should be in allowed values)
assert_raises ActiveRecord::RecordInvalid do
create_project_media channel: { main: 90 }
end
assert_raises ActiveRecord::RecordInvalid do
create_project_media channel: { main: '90' }
end
assert_raises ActiveRecord::RecordInvalid do
create_project_media channel: { others: [90] }
end
assert_raises ActiveRecord::RecordInvalid do
create_project_media channel: { main: CheckChannels::ChannelCodes::MANUAL, others: [90] }
end
pm = nil
assert_difference 'ProjectMedia.count' do
pm = create_project_media channel: { main: CheckChannels::ChannelCodes::WHATSAPP }
end
# validate channel update (should not update existing value)
assert_raises ActiveRecord::RecordInvalid do
pm.channel = { main: CheckChannels::ChannelCodes::MESSENGER }
pm.save!
end
assert_raises ActiveRecord::RecordInvalid do
pm.channel = { others: [90] }
pm.save!
end
# Keeping `main` unchanged while updating `others` is allowed.
assert_nothing_raised do
pm.channel = { main: CheckChannels::ChannelCodes::WHATSAPP, others: [main: CheckChannels::ChannelCodes::MESSENGER]}
pm.save!
end
# Set channel with default value MANUAL
pm2 = create_project_media
data = { "main" => CheckChannels::ChannelCodes::MANUAL }
assert_equal data, pm2.channel
# Set channel with API if ApiKey exists
a = create_api_key
ApiKey.current = a
pm3 = create_project_media channel: nil
data = { "main" => CheckChannels::ChannelCodes::API }
assert_equal data, pm3.channel
ApiKey.current = nil
end
# Uploading the same file twice (even across teams) must reuse the same
# media record while still creating a new ProjectMedia per team.
test "should not create duplicated media with for the same uploaded file" do
team = create_team
team2 = create_team
{
UploadedVideo: 'rails.mp4',
UploadedImage: 'rails.png',
UploadedAudio: 'rails.mp3'
}.each_pair do |media_type, filename|
# first time the video is added creates a new media
medias_count = media_type.to_s.constantize.count
assert_difference 'ProjectMedia.count', 1 do
pm = ProjectMedia.new media_type: media_type.to_s, team: team
File.open(File.join(Rails.root, 'test', 'data', filename)) do |f|
pm.file = f
pm.save!
end
end
assert_equal medias_count + 1, media_type.to_s.constantize.count
# second time the video is added should not create new media
medias_count = media_type.to_s.constantize.count
assert_difference 'ProjectMedia.count', 1 do
pm = ProjectMedia.new media_type: media_type.to_s, team: team2
File.open(File.join(Rails.root, 'test', 'data', filename)) do |f|
pm.file = f
pm.save!
end
end
assert_equal medias_count, media_type.to_s.constantize.count
end
end
# Bulk status updates must trigger team rules (move_to_project), skip
# items with published reports, and keep the ES index in sync.
test "should run callbacks for bulk-update status" do
ProjectMedia.stubs(:clear_caches).returns(nil)
setup_elasticsearch
t = create_team
u = create_user
create_team_user team: t, user: u, role: 'admin'
p = create_project team: t
# Rule: when status becomes 'verified', move the item to project `p`.
rules = []
rules << {
"name": random_string,
"project_ids": "",
"rules": {
"operator": "and",
"groups": [
{
"operator": "and",
"conditions": [
{
"rule_definition": "status_is",
"rule_value": "verified"
}
]
}
]
},
"actions": [
{
"action_definition": "move_to_project",
"action_value": p.id.to_s
}
]
}
t.rules = rules.to_json
t.save!
with_current_user_and_team(u, t) do
pm = create_project_media team: t, disable_es_callbacks: false
publish_report(pm)
pm_status = pm.last_status
pm2 = create_project_media team: t, disable_es_callbacks: false
pm3 = create_project_media team: t, disable_es_callbacks: false
sleep 2
ids = [pm.id, pm2.id, pm3.id]
updates = { action: 'update_status', params: { status: 'verified' }.to_json }
Sidekiq::Testing.inline! do
ProjectMedia.bulk_update(ids, updates, t)
sleep 2
# Verify nothing happens for published reports
assert_equal pm_status, pm.reload.last_status
result = $repository.find(get_es_id(pm))
assert_equal pm_status, result['verification_status']
# Verify rules callback
assert_equal t.default_folder.id, pm.reload.project_id
assert_equal p.id, pm2.reload.project_id
assert_equal p.id, pm3.reload.project_id
# Verify ES index
result = $repository.find(get_es_id(pm2))
assert_equal 'verified', result['verification_status']
result = $repository.find(get_es_id(pm3))
assert_equal 'verified', result['verification_status']
end
end
ProjectMedia.unstub(:clear_caches)
end
# `picture` and `creator_name` are cached fields; passing `true` forces
# recomputation (which may issue queries).
test "should cache picture and creator name" do
RequestStore.store[:skip_cached_field_update] = false
u = create_user
pm = create_project_media channel: { main: CheckChannels::ChannelCodes::MANUAL }, user: u
# picture
assert_queries(0, '=') { assert_equal('', pm.picture) }
assert_queries(0, '>') { assert_equal('', pm.picture(true)) }
# creator name
assert_queries(0, '=') { assert_equal(u.name, pm.creator_name) }
assert_queries(0, '>') { assert_equal(u.name, pm.creator_name(true)) }
end
# Creator name depends on the channel: the user's name for manual items,
# fixed labels for tipline/import items; deleted users show 'Anonymous'.
test "should get creator name based on channel" do
RequestStore.store[:skip_cached_field_update] = false
u = create_user
pm = create_project_media user: u
assert_equal pm.creator_name, u.name
pm2 = create_project_media user: u, channel: { main: CheckChannels::ChannelCodes::WHATSAPP }
assert_equal pm2.creator_name, 'Tipline'
pm3 = create_project_media user: u, channel: { main: CheckChannels::ChannelCodes::FETCH }
assert_equal pm3.creator_name, 'Import'
# update cache based on user update
u.name = 'update name'
u.save!
assert_equal pm.creator_name, 'update name'
assert_equal pm.creator_name(true), 'update name'
assert_equal pm2.creator_name, 'Tipline'
assert_equal pm2.creator_name(true), 'Tipline'
assert_equal pm3.creator_name, 'Import'
assert_equal pm3.creator_name(true), 'Import'
User.delete_check_user(u)
assert_equal pm.creator_name, 'Anonymous'
assert_equal pm.reload.creator_name(true), 'Anonymous'
assert_equal pm2.creator_name, 'Tipline'
assert_equal pm2.creator_name(true), 'Tipline'
assert_equal pm3.creator_name, 'Import'
assert_equal pm3.creator_name(true), 'Import'
end
# A ProjectMedia of media_type 'Blank' must also create the backing
# Blank media record (both counters increment by one).
test "should create blank item" do
assert_difference 'Blank.count' do
assert_difference 'ProjectMedia.count' do
ProjectMedia.create! media_type: 'Blank', team: create_team
end
end
end
# Legacy team settings stored as ActionController::Parameters must not
# break custom_statuses.
test "should convert old hash" do
team = create_team
item = create_project_media team: team
Team.any_instance.stubs(:settings).returns(ActionController::Parameters.new({ media_verification_statuses: { statuses: [] } }))
assert_nothing_raised do
item.custom_statuses
end
Team.any_instance.unstub(:settings)
end
# Items created without an explicit project land in the team's default folder.
test "should assign item to default project if project not set" do
team = create_team
item = create_project_media team: team
assert_equal item.project, team.default_folder
end
# Trashing a parent item must cascade TRASHED to confirmed children,
# while suggested children are detached (their relationships destroyed)
# and left untouched in their original folder.
test "should detach similar items when trash parent item" do
setup_elasticsearch
RequestStore.store[:skip_delete_for_ever] = true
t = create_team
default_folder = t.default_folder
p = create_project team: t
pm = create_project_media project: p
pm1_c = create_project_media project: p
pm1_s = create_project_media project: p
pm2_s = create_project_media project: p
r = create_relationship source: pm, target: pm1_c, relationship_type: Relationship.confirmed_type
r2 = create_relationship source: pm, target: pm1_s, relationship_type: Relationship.suggested_type
r3 = create_relationship source: pm, target: pm2_s, relationship_type: Relationship.suggested_type
# Only the two suggested relationships should be destroyed.
assert_difference 'Relationship.count', -2 do
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
end
assert_raises ActiveRecord::RecordNotFound do
r2.reload
end
assert_raises ActiveRecord::RecordNotFound do
r3.reload
end
pm1_s = pm1_s.reload; pm2_s.reload
assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm1_c.reload.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm1_s.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm2_s.archived
assert_equal p.id, pm1_s.project_id
assert_equal p.id, pm2_s.project_id
# Verify ES. BUG FIX: these lines previously used assignments
# (result['archived'] = ...), which asserted nothing; they are now real
# assertions. Wait for the index to catch up, as other tests here do.
sleep 2
result = $repository.find(get_es_id(pm1_c))
assert_equal CheckArchivedFlags::FlagCodes::TRASHED, result['archived']
result = $repository.find(get_es_id(pm1_s))
assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
assert_equal p.id, result['project_id']
result = $repository.find(get_es_id(pm2_s))
assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
assert_equal p.id, result['project_id']
end
# Marking a parent item as SPAM must cascade SPAM to confirmed children,
# while suggested children are detached (their relationships destroyed)
# and left untouched in their original folder.
test "should detach similar items when spam parent item" do
setup_elasticsearch
RequestStore.store[:skip_delete_for_ever] = true
t = create_team
default_folder = t.default_folder
p = create_project team: t
pm = create_project_media project: p
pm1_c = create_project_media project: p
pm1_s = create_project_media project: p
pm2_s = create_project_media project: p
r = create_relationship source: pm, target: pm1_c, relationship_type: Relationship.confirmed_type
r2 = create_relationship source: pm, target: pm1_s, relationship_type: Relationship.suggested_type
r3 = create_relationship source: pm, target: pm2_s, relationship_type: Relationship.suggested_type
# Only the two suggested relationships should be destroyed.
assert_difference 'Relationship.count', -2 do
pm.archived = CheckArchivedFlags::FlagCodes::SPAM
pm.save!
end
assert_raises ActiveRecord::RecordNotFound do
r2.reload
end
assert_raises ActiveRecord::RecordNotFound do
r3.reload
end
pm1_s = pm1_s.reload; pm2_s.reload
assert_equal CheckArchivedFlags::FlagCodes::SPAM, pm1_c.reload.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm1_s.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm2_s.archived
assert_equal p.id, pm1_s.project_id
assert_equal p.id, pm2_s.project_id
# Verify ES. BUG FIX: these lines previously used assignments
# (result['archived'] = ...), which asserted nothing; they are now real
# assertions. Wait for the index to catch up, as other tests here do.
sleep 2
result = $repository.find(get_es_id(pm1_c))
assert_equal CheckArchivedFlags::FlagCodes::SPAM, result['archived']
result = $repository.find(get_es_id(pm1_s))
assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
assert_equal p.id, result['project_id']
result = $repository.find(get_es_id(pm2_s))
assert_equal CheckArchivedFlags::FlagCodes::NONE, result['archived']
assert_equal p.id, result['project_id']
end
# Cluster size reflects the number of items attached to the cluster.
test "should get cluster size" do
pm = create_project_media
assert_nil pm.reload.cluster
c = create_cluster
c.project_medias << pm
assert_equal 1, pm.reload.cluster.size
c.project_medias << create_project_media
assert_equal 2, pm.reload.cluster.size
end
# `team_names` on a cluster maps team id => team name for every team
# with an item in the cluster, and the ids are mirrored to ES as
# `cluster_teams`.
test "should get cluster teams" do
RequestStore.store[:skip_cached_field_update] = false
setup_elasticsearch
t1 = create_team
t2 = create_team
pm1 = create_project_media team: t1
assert_nil pm1.cluster
c = create_cluster project_media: pm1
c.project_medias << pm1
assert_equal [t1.name], pm1.cluster.team_names.values
assert_equal [t1.id], pm1.cluster.team_names.keys
sleep 2
id = get_es_id(pm1)
es = $repository.find(id)
assert_equal [t1.id], es['cluster_teams']
pm2 = create_project_media team: t2
c.project_medias << pm2
sleep 2
assert_equal [t1.name, t2.name].sort, pm1.cluster.team_names.values.sort
assert_equal [t1.id, t2.id].sort, pm1.cluster.team_names.keys.sort
es = $repository.find(id)
assert_equal [t1.id, t2.id], es['cluster_teams']
end
# `sources_as_sentence` caches a JSON map of source id => name for the
# item and its confirmed (not suggested) similar items; the main item's
# source comes first. The cache must follow relationship changes, source
# reassignment, and source renames.
test "should cache sources list" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
s_a = create_source team: t, name: 'source_a'
s_b = create_source team: t, name: 'source_b'
s_c = create_source team: t, name: 'source_c'
s_d = create_source team: t, name: 'source_d'
pm = create_project_media team: t, source: s_a, skip_autocreate_source: false
t1 = create_project_media team: t, source: s_b, skip_autocreate_source: false
t2 = create_project_media team: t, source: s_c, skip_autocreate_source: false
t3 = create_project_media team: t, source: s_d, skip_autocreate_source: false
result = {}
# Verify cache item source
result[s_a.id] = s_a.name
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify cache source for similar items
r1 = create_relationship source_id: pm.id, target_id: t1.id, relationship_type: Relationship.confirmed_type
r2 = create_relationship source_id: pm.id, target_id: t2.id, relationship_type: Relationship.confirmed_type
r3 = create_relationship source_id: pm.id, target_id: t3.id, relationship_type: Relationship.suggested_type
result[s_b.id] = s_b.name
result[s_c.id] = s_c.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify main source is a first element
assert_equal pm.source_id, JSON.parse(pm.sources_as_sentence).keys.first.to_i
# Verify update source names after destroy similar item
r1.destroy
result.delete(s_b.id)
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update item source
new_s1 = create_source team: t, name: 'new_source_1'
pm.source = new_s1; pm.save!
result.delete(s_a.id)
result[new_s1.id] = new_s1.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.keys.sort.map(&:to_s), JSON.parse(pm.sources_as_sentence).keys.sort }
# Verify update source for similar item
result_similar = {}
result_similar[s_c.id] = s_c.name
assert_queries(0, '=') { assert_equal result_similar.to_json, t2.sources_as_sentence }
new_s2 = create_source team: t, name: 'new_source_2'
t2.source = new_s2; t2.save!
t2 = ProjectMedia.find(t2.id)
result_similar.delete(s_c.id)
result_similar[new_s2.id] = new_s2.name
assert_queries(0, '=') { assert_equal result_similar.to_json, t2.sources_as_sentence }
result.delete(s_c.id)
result[new_s2.id] = new_s2.name
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update source name
new_s2.name = 'update source'; new_s2.save!
result[new_s2.id] = 'update source'
pm = ProjectMedia.find(pm.id)
assert_queries(0, '=') { assert_equal result.to_json, pm.sources_as_sentence }
# Verify update relation
r3.relationship_type = Relationship.confirmed_type; r3.save!
result[s_d.id] = s_d.name
pm = ProjectMedia.find(pm.id)
result_keys = result.keys.map(&:to_i).sort
sources_keys = JSON.parse(pm.sources_as_sentence).keys.map(&:to_i).sort
assert_queries(0, '=') { assert_equal result_keys, sources_keys }
Rails.cache.clear
assert_queries(0, '>') { assert_equal result_keys, JSON.parse(pm.sources_as_sentence).keys.map(&:to_i).sort }
end
# Items created through the web form report 'Web Form' as their creator.
test "should have web form channel" do
item = create_project_media channel: { main: CheckChannels::ChannelCodes::WEB_FORM }
assert_equal 'Web Form', item.reload.get_creator_name
end
# Passing `set_tasks_responses` with a URL for a file_upload team task
# should download and attach the file as the task's response.
test "should respond to file upload auto-task on creation" do
url = random_url
WebMock.stub_request(:get, url).to_return(body: File.read(File.join(Rails.root, 'test', 'data', 'rails.png')))
at = create_annotation_type annotation_type: 'task_response_file_upload', label: 'Task'
ft1 = create_field_type field_type: 'text_field', label: 'Text Field'
fi1 = create_field_instance annotation_type_object: at, name: 'response_file_upload', label: 'Response', field_type_object: ft1
t = create_team
create_team_task team_id: t.id, label: 'Upload a file', task_type: 'file_upload'
Sidekiq::Testing.inline! do
assert_difference 'Task.length', 1 do
pm = create_project_media team: t, set_tasks_responses: { 'upload_a_file' => url }
task = pm.annotations('task').last
assert task.existing_files.size > 0
end
end
end
# Items imported via the shared database report a fixed creator label.
test "should get shared database creator" do
item = create_project_media channel: { main: CheckChannels::ChannelCodes::SHARED_DATABASE }
assert_equal 'Shared Database', item.creator_name
end
# Destroying an item must cascade-delete its claim description and
# the fact-check attached to that claim.
test "should delete claims and fact-checks when item is deleted" do
item = create_project_media
claim = create_claim_description project_media: item
create_fact_check claim_description: claim
assert_difference 'ProjectMedia.count', -1 do
assert_difference 'ClaimDescription.count', -1 do
assert_difference 'FactCheck.count', -1 do
item.destroy!
end
end
end
end
# Claim description fields are readable straight from the item, and a
# fact-check sets fact_check_published_on.
test "should get claim description and fact-check data" do
item = create_project_media
assert_nil item.claim_description_content
assert_nil item.claim_description_context
claim = create_claim_description project_media: item, description: 'Foo', context: 'Bar'
create_fact_check claim_description: claim
assert_equal 'Foo', item.claim_description_content
assert_equal 'Bar', item.claim_description_context
assert_not_nil item.fact_check_published_on
end
# `is_suggested` / `is_confirmed` track the item's relationship type to
# a main item and must flip as the relationship changes or is destroyed.
test "should cache if item is suggested or confirmed" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
main = create_project_media team: t
pm = create_project_media team: t
assert !pm.is_suggested
assert !pm.is_confirmed
r = create_relationship source_id: main.id, target_id: pm.id, relationship_type: Relationship.suggested_type
assert pm.is_suggested
assert !pm.is_confirmed
r.relationship_type = Relationship.confirmed_type
r.save!
assert !pm.is_suggested
assert pm.is_confirmed
r.destroy!
assert !pm.is_suggested
assert !pm.is_confirmed
end
# Trashed items are hard-deleted by a delayed Sidekiq job (cached fields
# cleaned up too); restoring an item from the trash before the job runs
# must cancel the deletion.
test "should delete for ever trashed items" do
RequestStore.store[:skip_cached_field_update] = false
t = create_team
pm = create_project_media team: t
# Check that cached field exists (pick a key to verify the key deleted after destroy item)
cache_key = "check_cached_field:ProjectMedia:#{pm.id}:folder"
assert Rails.cache.exist?(cache_key)
Sidekiq::Testing.fake! do
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
end
# Job is queued but not yet run: the item still exists.
assert_not_nil ProjectMedia.find_by_id(pm.id)
Sidekiq::Worker.drain_all
assert_nil ProjectMedia.find_by_id(pm.id)
assert_not Rails.cache.exist?(cache_key)
# Restore item from trash before apply delete for ever
pm = create_project_media team: t
Sidekiq::Testing.fake! do
pm.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm.save!
end
assert_not_nil ProjectMedia.find_by_id(pm.id)
pm.archived = CheckArchivedFlags::FlagCodes::NONE
pm.save!
Sidekiq::Worker.drain_all
assert_not_nil ProjectMedia.find_by_id(pm.id)
end
# Spam items: default/confirmed children are hard-deleted with the
# parent (relationships removed), suggested children survive detached;
# restoring before the job runs cancels the deletion.
test "should delete for ever spam items" do
t = create_team
pm_s = create_project_media team: t
pm_t1 = create_project_media team: t
pm_t2 = create_project_media team: t
pm_t3 = create_project_media team: t
r1 = create_relationship source_id: pm_s.id, target_id: pm_t1.id, relationship_type: Relationship.default_type
r2 = create_relationship source_id: pm_s.id, target_id: pm_t2.id, relationship_type: Relationship.confirmed_type
r3 = create_relationship source_id: pm_s.id, target_id: pm_t3.id, relationship_type: Relationship.suggested_type
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::SPAM
pm_s.save!
end
# Nothing deleted until the queued job actually runs.
assert_not_nil ProjectMedia.find_by_id(pm_s.id)
assert_equal 4, ProjectMedia.where(id: [pm_s.id, pm_t1.id, pm_t2.id, pm_t3.id]).count
assert_equal 3, Relationship.where(id: [r1.id, r2.id, r3.id]).count
Sidekiq::Worker.drain_all
assert_equal CheckArchivedFlags::FlagCodes::SPAM, pm_s.reload.archived
assert_equal CheckArchivedFlags::FlagCodes::NONE, pm_t3.reload.archived
assert_equal 0, Relationship.where(id: [r1.id, r2.id, r3.id]).count
assert_nil ProjectMedia.find_by_id(pm_t1.id)
assert_nil ProjectMedia.find_by_id(pm_t2.id)
# Restore item from spam before apply delete for ever
pm_s = create_project_media team: t
pm_t = create_project_media team: t
r = create_relationship source_id: pm_s.id, target_id: pm_t.id, relationship_type: Relationship.confirmed_type
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::TRASHED
pm_s.save!
end
assert_equal 2, ProjectMedia.where(id: [pm_s.id, pm_t.id]).count
Sidekiq::Testing.fake! do
pm_s.archived = CheckArchivedFlags::FlagCodes::NONE
pm_s.save!
end
Sidekiq::Worker.drain_all
assert_equal 2, ProjectMedia.where(id: [pm_s.id, pm_t.id], archived: CheckArchivedFlags::FlagCodes::NONE).count
assert_not_nil Relationship.where(id: r.id).last
end
# feed_columns_values always returns a Hash of cached column values.
test "should return cached values for feed data" do
item = create_project_media
assert_kind_of Hash, item.feed_columns_values
end
# An explicit set_title overrides the media-derived title.
test "should set a custom title" do
image = create_uploaded_image
item = create_project_media set_title: 'Foo', media: image
assert_equal 'Foo', item.title
end
# Bulk tag removal must update tag counts, tags_as_sentence caches and
# the ES documents for every affected item.
test "should bulk remove tags" do
setup_elasticsearch
RequestStore.store[:skip_cached_field_update] = false
t = create_team
u = create_user
create_team_user team: t, user: u, role: 'admin'
with_current_user_and_team(u, t) do
pm = create_project_media team: t
pm2 = create_project_media team: t
pm3 = create_project_media team: t
sports = create_tag_text team_id: t.id, text: 'sports'
news = create_tag_text team_id: t.id, text: 'news'
economic = create_tag_text team_id: t.id, text: 'economic'
# Tag pm
pm_t1 = create_tag annotated: pm, tag: sports.id, disable_es_callbacks: false
pm_t2 = create_tag annotated: pm, tag: news.id, disable_es_callbacks: false
pm_t3 = create_tag annotated: pm, tag: economic.id, disable_es_callbacks: false
# Tag pm2
pm2_t1 = create_tag annotated: pm2, tag: sports.id, disable_es_callbacks: false
pm2_t2 = create_tag annotated: pm2, tag: news.id, disable_es_callbacks: false
# Tag pm3
pm3_t1 = create_tag annotated: pm3, tag: sports.id, disable_es_callbacks: false
sleep 2
assert_equal 3, sports.reload.tags_count
assert_equal 2, news.reload.tags_count
assert_equal 1, economic.reload.tags_count
assert_equal [pm_t1, pm2_t1, pm3_t1].sort, sports.reload.tags.to_a.sort
assert_equal [pm_t2, pm2_t2].sort, news.reload.tags.to_a.sort
assert_equal [pm_t3], economic.reload.tags.to_a
assert_equal 'sports, news, economic', pm.tags_as_sentence
assert_equal 'sports, news', pm2.tags_as_sentence
assert_equal 'sports', pm3.tags_as_sentence
result = $repository.find(get_es_id(pm))
assert_equal 3, result['tags_as_sentence']
assert_equal [pm_t1.id, pm_t2.id, pm_t3.id], result['tags'].collect{|t| t['id']}.sort
result = $repository.find(get_es_id(pm2))
assert_equal 2, result['tags_as_sentence']
assert_equal [pm2_t1.id, pm2_t2.id], result['tags'].collect{|t| t['id']}.sort
result = $repository.find(get_es_id(pm3))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm3_t1.id], result['tags'].collect{|t| t['id']}
# apply bulk-remove
ids = [pm.id, pm2.id, pm3.id]
updates = { action: 'remove_tags', params: { tags_text: "#{sports.id}, #{economic.id}" }.to_json }
ProjectMedia.bulk_update(ids, updates, t)
sleep 2
assert_equal 0, sports.reload.tags_count
assert_equal 2, news.reload.tags_count
assert_equal 0, economic.reload.tags_count
assert_empty sports.reload.tags.to_a
assert_equal [pm_t2, pm2_t2].sort, news.reload.tags.to_a.sort
assert_empty economic.reload.tags.to_a
assert_equal 'news', pm.tags_as_sentence
assert_equal 'news', pm2.tags_as_sentence
assert_empty pm3.tags_as_sentence
result = $repository.find(get_es_id(pm))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm_t2.id], result['tags'].collect{|t| t['id']}
result = $repository.find(get_es_id(pm2))
assert_equal 1, result['tags_as_sentence']
assert_equal [pm2_t2.id], result['tags'].collect{|t| t['id']}
result = $repository.find(get_es_id(pm3))
assert_equal 0, result['tags_as_sentence']
assert_empty result['tags'].collect{|t| t['id']}
end
end
end
|
# Homebrew formula for HElib (homomorphic encryption library).
class Helib < Formula
desc "Implementation of homomorphic encryption"
homepage "https://github.com/homenc/HElib"
url "https://github.com/homenc/HElib/archive/v2.0.0.tar.gz"
sha256 "4e371807fe052ca27dce708ea302495a8dae8d1196e16e86df424fb5b0e40524"
license "Apache-2.0"
bottle do
# Modernized from the deprecated `cellar :any` + hash-rocket form to the
# one-line `sha256 cellar:` syntax used by other formulae in this file.
sha256 cellar: :any, arm64_big_sur: "ec945f50a4fb75b7a4192d0aaa42c962892d09f2d2483dd69585ef6f92ed2c38"
sha256 cellar: :any, big_sur: "25cbd96cd9585d9e5be0fb45074e86c142abe8466c5d1f6dec5f128dfe3c71b5"
sha256 cellar: :any, catalina: "3ed8276b4065f2ca26b2289f350ddbeebb458545df7cee0de12acf6e7d1eb70d"
sha256 cellar: :any, mojave: "a634c79d901656f8ba5d340241c4ff00e8811ae184542c1c609fc26186b2dc9e"
end
depends_on "cmake" => :build
depends_on "bats-core" => :test
depends_on "ntl"
def install
# Out-of-tree CMake build; BUILD_SHARED produces the dylib linked in test.
mkdir "build" do
system "cmake", "-DBUILD_SHARED=ON", "..", *std_cmake_args
system "make", "install"
end
pkgshare.install "examples"
end
test do
# Compile a bundled example against the installed library, then run the
# upstream bats test suite against the produced binary.
cp pkgshare/"examples/BGV_country_db_lookup/BGV_country_db_lookup.cpp", testpath/"test.cpp"
mkdir "build"
system ENV.cxx, "-std=c++17", "-L#{lib}", "-L#{Formula["ntl"].opt_lib}",
"-lhelib", "-lntl", "test.cpp", "-o", "build/BGV_country_db_lookup"
cp_r pkgshare/"examples/tests", testpath
system "bats", "."
end
end
helib: update 2.0.0 bottle.
# Homebrew formula for HElib (homomorphic encryption library) — rebuilt bottle.
class Helib < Formula
desc "Implementation of homomorphic encryption"
homepage "https://github.com/homenc/HElib"
url "https://github.com/homenc/HElib/archive/v2.0.0.tar.gz"
sha256 "4e371807fe052ca27dce708ea302495a8dae8d1196e16e86df424fb5b0e40524"
license "Apache-2.0"
bottle do
# Modernized from the deprecated `cellar :any` + hash-rocket form to the
# one-line `sha256 cellar:` syntax used by other formulae in this file.
sha256 cellar: :any, arm64_big_sur: "9ce026c3f27f43a2a83add1321b376fd5a5f058f3c727487b974e6a52ed4219f"
sha256 cellar: :any, big_sur: "be618fac7a91399ea6639c6854d79409d03b602c81252d20fd3c58ad8783af60"
sha256 cellar: :any, catalina: "f1d09887bf3f3ec3d99d69f3b88bade395061e0663252fc688cee9ed7ec0a583"
sha256 cellar: :any, mojave: "0bbf1b2dbe1998ae2d9c27b14bc73ab81fc90c2b910320adb3ec416b92603fc0"
end
depends_on "cmake" => :build
depends_on "bats-core" => :test
depends_on "ntl"
def install
# Out-of-tree CMake build; BUILD_SHARED produces the dylib linked in test.
mkdir "build" do
system "cmake", "-DBUILD_SHARED=ON", "..", *std_cmake_args
system "make", "install"
end
pkgshare.install "examples"
end
test do
# Compile a bundled example against the installed library, then run the
# upstream bats test suite against the produced binary.
cp pkgshare/"examples/BGV_country_db_lookup/BGV_country_db_lookup.cpp", testpath/"test.cpp"
mkdir "build"
system ENV.cxx, "-std=c++17", "-L#{lib}", "-L#{Formula["ntl"].opt_lib}",
"-lhelib", "-lntl", "test.cpp", "-o", "build/BGV_country_db_lookup"
cp_r pkgshare/"examples/tests", testpath
system "bats", "."
end
end
|
# Homebrew formula for HLint 3.3 (Haskell linter).
class Hlint < Formula
desc "Haskell source code suggestions"
homepage "https://github.com/ndmitchell/hlint"
url "https://hackage.haskell.org/package/hlint-3.3/hlint-3.3.tar.gz"
sha256 "6dad2afb040f9fa49daee924443c7cd14cf43836ec59ff277622da19075775b1"
license "BSD-3-Clause"
head "https://github.com/ndmitchell/hlint.git"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "824195c4e13ef2cf8f9a95a791ca7159bb1a576fe4e28768dff7db7e5d1e5986"
sha256 cellar: :any_skip_relocation, catalina: "af3671cbbd2a989cc6fe386e1040c6284acfdffbbf474bd5063ad9f5b9984895"
sha256 cellar: :any_skip_relocation, mojave: "dadf6ed60bdcf3837ebd280bd7a06b95f1e58de6c2f896923eab17f6599c847c"
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
uses_from_macos "ncurses"
def install
system "cabal", "v2-update"
system "cabal", "v2-install", *std_cabal_v2_args
man1.install "data/hlint.1"
end
test do
# Clean file: hlint should report no hints.
(testpath/"test.hs").write <<~EOS
main = do putStrLn "Hello World"
EOS
assert_match "No hints", shell_output("#{bin}/hlint test.hs")
# File with a redundant return: hlint exits 1 and names the hint.
(testpath/"test1.hs").write <<~EOS
main = do foo x; return 3; bar z
EOS
assert_match "Redundant return", shell_output("#{bin}/hlint test1.hs", 1)
end
end
hlint 3.3.1
Closes #75987.
Signed-off-by: Carlo Cabrera <3ffc397d0e4bded29cb84b56167de54c01e3a55b@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for HLint 3.3.1 (Haskell linter).
class Hlint < Formula
desc "Haskell source code suggestions"
homepage "https://github.com/ndmitchell/hlint"
url "https://hackage.haskell.org/package/hlint-3.3.1/hlint-3.3.1.tar.gz"
sha256 "0c3e09b42eeb8e42fedb310107919f5171cab4195d01b884653cc0b76eb9828a"
license "BSD-3-Clause"
head "https://github.com/ndmitchell/hlint.git"
bottle do
# NOTE(review): these bottle hashes are identical to the 3.3 formula —
# presumably stale pending a BrewTestBot rebuild for 3.3.1; confirm.
sha256 cellar: :any_skip_relocation, big_sur: "824195c4e13ef2cf8f9a95a791ca7159bb1a576fe4e28768dff7db7e5d1e5986"
sha256 cellar: :any_skip_relocation, catalina: "af3671cbbd2a989cc6fe386e1040c6284acfdffbbf474bd5063ad9f5b9984895"
sha256 cellar: :any_skip_relocation, mojave: "dadf6ed60bdcf3837ebd280bd7a06b95f1e58de6c2f896923eab17f6599c847c"
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
uses_from_macos "ncurses"
def install
system "cabal", "v2-update"
system "cabal", "v2-install", *std_cabal_v2_args
man1.install "data/hlint.1"
end
test do
# Clean file: hlint should report no hints.
(testpath/"test.hs").write <<~EOS
main = do putStrLn "Hello World"
EOS
assert_match "No hints", shell_output("#{bin}/hlint test.hs")
# File with a redundant return: hlint exits 1 and names the hint.
(testpath/"test1.hs").write <<~EOS
main = do foo x; return 3; bar z
EOS
assert_match "Redundant return", shell_output("#{bin}/hlint test1.hs", 1)
end
end
|
hmmer 3.1b2 (new formula)
Closes #22675.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Homebrew formula: HMMER, tools for profile hidden Markov models used in
# biological sequence analysis.
class Hmmer < Formula
  desc "Build profile HMMs and scan against sequence databases"
  homepage "http://hmmer.org/"
  url "http://eddylab.org/software/hmmer3/3.1b2/hmmer-3.1b2.tar.gz"
  sha256 "dd16edf4385c1df072c9e2f58c16ee1872d855a018a2ee6894205277017b5536"
  revision 2

  def install
    system "./configure", "--prefix=#{prefix}"
    # Fix error: install: hmmalign: No such file or directory
    system "make"
    system "make", "install"
    # Ship the PDF user guide plus the tutorial data files; the test block
    # below relies on the tutorial's minifam database being present.
    doc.install "Userguide.pdf", "tutorial"
  end

  test do
    # hmmstat prints summary statistics for each profile in a database;
    # PF00069.17 is an accession contained in the bundled minifam file.
    output = shell_output("#{bin}/hmmstat #{doc}/tutorial/minifam")
    assert_match "PF00069.17", output
  end
end
|
# Homebrew formula: hping3, a TCP/IP packet assembler/analyzer. Upstream is
# unmaintained; this builds the last 2005 release with macOS build patches.
class Hping < Formula
  desc "Command-line oriented TCP/IP packet assembler/analyzer"
  homepage "http://www.hping.org/"
  url "http://www.hping.org/hping3-20051105.tar.gz"
  # Upstream names the tarball by date; expose it as a dotted version.
  version "3.20051105"
  sha256 "f5a671a62a11dc8114fa98eade19542ed1c3aa3c832b0e572ca0eb1a5a4faee8"

  # The first-party download page (http://www.hping.org/download.php) has
  # removed links to any archive files, with a notice that Hping is no longer
  # actively developed. There won't be any new releases and we can't check for
  # any in this state, so it's appropriate to skip this. If the GitHub repo
  # (https://github.com/antirez/hping) starts creating releases, then it would
  # be appropriate to update this livecheckable but there are no releases at
  # the time of writing this.
  livecheck do
    skip "No longer actively developed"
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "93622fae40adc4978f6578951ac5c7695701733abba5e543e2205355043df0f7"
    sha256 cellar: :any_skip_relocation, big_sur: "0dc61d108f8af1261dc84674f8840bb079f1e51fdbfb50dca5284d522049e5a8"
    sha256 cellar: :any_skip_relocation, catalina: "3cf96bb2d2dcc407aadab3bb2691937e2adc96008df65314b889914621ade865"
    sha256 cellar: :any_skip_relocation, mojave: "dd0b27a1e3b858378a184dd2cca506bbed420d103a75bb98545a649890142ab9"
    sha256 cellar: :any_skip_relocation, high_sierra: "088413e9a62fe7b67627f41b91d762c2b639ca6b5598175002616ceb234fe93a"
    sha256 cellar: :any_skip_relocation, sierra: "e6b7a8ef4527b282da33e75fc9484dee752f365b34498849fd251146948f0e80"
    sha256 cellar: :any_skip_relocation, el_capitan: "9644e041cb830ebd121c224cef3727e20c5bf4dcca918fd91135d74e10eb2123"
    sha256 cellar: :any_skip_relocation, yosemite: "95f5fabc90cdd2e8ca9b10189f69a2057019b35ac1f6cb2e7e4afa7bea1221d5"
  end

  # Inline patch embedded after __END__ (removes a conflicting strlcpy
  # prototype), followed by macOS build fixes hosted in
  # Homebrew/formula-patches, each applied with -p0.
  patch :DATA

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-libpcap_stuff.c.diff"
    sha256 "56d3af80a6385bf93257080233e971726283d6555cc244ebe886ea21133e83ad"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-ars.c.diff"
    sha256 "02138051414e48b9f057a2dd8134c01ccd374aff65593833a799a5aaa36193c4"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-sendip.c.diff"
    sha256 "e7befff6dd546cdb38b59d9e6d3ef4a4dc09c79af2982f4609b2ea5dadf1a360"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-Makefile.in.diff"
    sha256 "18ceb30104bdb906b540bb5f6316678ce85fb55f5c086d2d74417416de3792f8"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-bytesex.h.diff"
    sha256 "7bad5e8b4b5441f72f85d50fa3461857a398b87e2d0cb63bb30985c9457be21d"
  end

  def install
    # Compile fails with tcl support; TCL on macOS is 32-bit only
    system "./configure", "--no-tcl"
    # Target folders need to exist before installing
    sbin.mkpath
    man8.mkpath
    # Install locations are passed straight to make rather than via
    # ./configure; COMPILE_TIME forwards Homebrew's cflags.
    system "make", "CC=#{ENV.cc}",
           "COMPILE_TIME=#{ENV.cflags}",
           "INSTALL_PATH=#{prefix}",
           "INSTALL_MANPATH=#{man}",
           "install"
  end
end
__END__
diff --git a/gethostname.c b/gethostname.c
index 3d0ea58..a8a9699 100644
--- a/gethostname.c
+++ b/gethostname.c
@@ -18,8 +18,6 @@
#include <arpa/inet.h>
#include <string.h>
-size_t strlcpy(char *dst, const char *src, size_t siz);
-
char *get_hostname(char* addr)
{
static char answer[1024];
hping: update 3.20051105 bottle.
# Homebrew formula: hping3, a TCP/IP packet assembler/analyzer. Upstream is
# unmaintained; this builds the last 2005 release with macOS build patches.
class Hping < Formula
  desc "Command-line oriented TCP/IP packet assembler/analyzer"
  homepage "http://www.hping.org/"
  url "http://www.hping.org/hping3-20051105.tar.gz"
  # Upstream names the tarball by date; expose it as a dotted version.
  version "3.20051105"
  sha256 "f5a671a62a11dc8114fa98eade19542ed1c3aa3c832b0e572ca0eb1a5a4faee8"

  # The first-party download page (http://www.hping.org/download.php) has
  # removed links to any archive files, with a notice that Hping is no longer
  # actively developed. There won't be any new releases and we can't check for
  # any in this state, so it's appropriate to skip this. If the GitHub repo
  # (https://github.com/antirez/hping) starts creating releases, then it would
  # be appropriate to update this livecheckable but there are no releases at
  # the time of writing this.
  livecheck do
    skip "No longer actively developed"
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "b2befbdc5f1ae3a5ed4f402bc0b0271dc557e05e82853f56da94d15f467c624d"
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "93622fae40adc4978f6578951ac5c7695701733abba5e543e2205355043df0f7"
    sha256 cellar: :any_skip_relocation, monterey: "87aba50699a65dc41e05d3a5ff68836a2482983b3ec854db0f897cfea712573f"
    sha256 cellar: :any_skip_relocation, big_sur: "0dc61d108f8af1261dc84674f8840bb079f1e51fdbfb50dca5284d522049e5a8"
    sha256 cellar: :any_skip_relocation, catalina: "3cf96bb2d2dcc407aadab3bb2691937e2adc96008df65314b889914621ade865"
    sha256 cellar: :any_skip_relocation, mojave: "dd0b27a1e3b858378a184dd2cca506bbed420d103a75bb98545a649890142ab9"
    sha256 cellar: :any_skip_relocation, high_sierra: "088413e9a62fe7b67627f41b91d762c2b639ca6b5598175002616ceb234fe93a"
    sha256 cellar: :any_skip_relocation, sierra: "e6b7a8ef4527b282da33e75fc9484dee752f365b34498849fd251146948f0e80"
    sha256 cellar: :any_skip_relocation, el_capitan: "9644e041cb830ebd121c224cef3727e20c5bf4dcca918fd91135d74e10eb2123"
    sha256 cellar: :any_skip_relocation, yosemite: "95f5fabc90cdd2e8ca9b10189f69a2057019b35ac1f6cb2e7e4afa7bea1221d5"
  end

  # Inline patch embedded after __END__ (removes a conflicting strlcpy
  # prototype), followed by macOS build fixes hosted in
  # Homebrew/formula-patches, each applied with -p0.
  patch :DATA

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-libpcap_stuff.c.diff"
    sha256 "56d3af80a6385bf93257080233e971726283d6555cc244ebe886ea21133e83ad"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-ars.c.diff"
    sha256 "02138051414e48b9f057a2dd8134c01ccd374aff65593833a799a5aaa36193c4"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-sendip.c.diff"
    sha256 "e7befff6dd546cdb38b59d9e6d3ef4a4dc09c79af2982f4609b2ea5dadf1a360"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-Makefile.in.diff"
    sha256 "18ceb30104bdb906b540bb5f6316678ce85fb55f5c086d2d74417416de3792f8"
  end

  patch :p0 do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/fc1d446f/hping/patch-bytesex.h.diff"
    sha256 "7bad5e8b4b5441f72f85d50fa3461857a398b87e2d0cb63bb30985c9457be21d"
  end

  def install
    # Compile fails with tcl support; TCL on macOS is 32-bit only
    system "./configure", "--no-tcl"
    # Target folders need to exist before installing
    sbin.mkpath
    man8.mkpath
    # Install locations are passed straight to make rather than via
    # ./configure; COMPILE_TIME forwards Homebrew's cflags.
    system "make", "CC=#{ENV.cc}",
           "COMPILE_TIME=#{ENV.cflags}",
           "INSTALL_PATH=#{prefix}",
           "INSTALL_MANPATH=#{man}",
           "install"
  end
end
__END__
diff --git a/gethostname.c b/gethostname.c
index 3d0ea58..a8a9699 100644
--- a/gethostname.c
+++ b/gethostname.c
@@ -18,8 +18,6 @@
#include <arpa/inet.h>
#include <string.h>
-size_t strlcpy(char *dst, const char *src, size_t siz);
-
char *get_hostname(char* addr)
{
static char answer[1024];
|
require 'test_helper'
# Test double for a dogstatsd client. Every metric call is recorded into
# `messages` as a flat array: [method_name, *arguments].
class DummyStatsd
  attr_reader :messages

  def initialize
    @messages = []
  end

  # The real client batches calls inside a block; the double just yields
  # itself so batched and unbatched calls are recorded identically.
  def batch
    yield self
  end

  # Generate one recorder method per statsd operation.
  %i[increment decrement count gauge histogram timing set event].each do |meth|
    define_method(meth) { |*call_args| @messages << [meth, call_args].flatten }
  end
end
# Unit tests for Fluent::DogstatsdOutput.
#
# Each test builds a BufferedOutputTestDriver whose statsd client has been
# replaced with a DummyStatsd recorder (see create_driver), emits events,
# runs the driver, and asserts on the recorded [method, key, opts] tuples.
class DogstatsdOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
    # Loaded here rather than at file top so Fluent::Test is set up first.
    require 'fluent/plugin/out_dogstatsd'
  end

  def teardown
  end

  # host/port from the config block must reach the plugin instance.
  def test_configure
    d = create_driver(<<-EOC)
type dogstatsd
host HOST
port 12345
    EOC
    assert_equal('HOST', d.instance.host)
    assert_equal(12345, d.instance.port)
  end

  # One statsd call per emitted record, dispatched on the 'type' field.
  def test_write
    d = create_driver
    d.emit({'type' => 'increment', 'key' => 'hello.world1'}, Time.now.to_i)
    d.emit({'type' => 'increment', 'key' => 'hello.world2'}, Time.now.to_i)
    d.emit({'type' => 'decrement', 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'count', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'gauge', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'histogram', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'timing', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'set', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'event', 'title' => 'Deploy', 'text' => 'Revision', 'key' => 'hello.world'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world1', {}],
      [:increment, 'hello.world2', {}],
      [:decrement, 'hello.world', {}],
      [:count, 'hello.world', 10, {}],
      [:gauge, 'hello.world', 10, {}],
      [:histogram, 'hello.world', 10, {}],
      [:timing, 'hello.world', 10, {}],
      [:set, 'hello.world', 10, {}],
      [:event, 'Deploy', 'Revision', {}],
    ])
  end

  # With flat_tag, extra record fields become statsd tags.
  def test_flat_tag
    d = create_driver(<<-EOC)
#{default_config}
flat_tag true
    EOC
    d.emit({'type' => 'increment', 'key' => 'hello.world', 'tagKey' => 'tagValue'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world', {tags: ["tagKey:tagValue"]}],
    ])
  end

  # metric_type in the config supplies the type when records omit it.
  def test_metric_type
    d = create_driver(<<-EOC)
#{default_config}
metric_type decrement
    EOC
    d.emit({'key' => 'hello.world', 'tags' => {'tagKey' => 'tagValue'}}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:decrement, 'hello.world', {tags: ["tagKey:tagValue"]}],
    ])
  end

  # use_tag_as_key: the fluentd tag ('dogstatsd.tag') becomes the metric key.
  def test_use_tag_as_key
    d = create_driver(<<-EOC)
#{default_config}
use_tag_as_key true
    EOC
    d.emit({'type' => 'increment'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'dogstatsd.tag', {}],
    ])
  end

  # use_tag_as_key_if_missing: tag is used only when the record has no key.
  def test_use_tag_as_key_fallback
    d = create_driver(<<-EOC)
#{default_config}
use_tag_as_key_if_missing true
    EOC
    d.emit({'type' => 'increment'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'dogstatsd.tag', {}],
    ])
  end

  # A 'tags' hash on the record is converted to "key:value" statsd tags.
  def test_tags
    d = create_driver
    d.emit({'type' => 'increment', 'key' => 'hello.world', 'tags' => {'key' => 'value'}}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world', {tags: ["key:value"]}],
    ])
  end

  # Fix: these two tests previously emitted records without a 'key' while
  # not enabling use_tag_as_key, so the expected metric name never appeared
  # and the tests failed. Emit an explicit key instead.
  def test_sample_rate_config
    d = create_driver(<<-EOC)
#{default_config}
sample_rate .5
    EOC
    d.emit({'type' => 'increment', 'key' => 'tag'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'tag', {sample_rate: 0.5}],
    ])
  end

  # Per-record 'sample_rate' overrides/behaves like the config option.
  def test_sample_rate
    d = create_driver
    d.emit({'type' => 'increment', 'sample_rate' => 0.5, 'key' => 'tag'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'tag', {sample_rate: 0.5}],
    ])
  end

  private

  def default_config
    <<-EOC
type dogstatsd
    EOC
  end

  # Build a buffered test driver for the plugin and swap its statsd client
  # for a DummyStatsd so calls can be inspected.
  def create_driver(conf = default_config)
    Fluent::Test::BufferedOutputTestDriver.new(Fluent::DogstatsdOutput, 'dogstatsd.tag').configure(conf).tap do |d|
      d.instance.statsd = DummyStatsd.new
    end
  end
end
Fix failing sample tests
require 'test_helper'
# Recording stand-in for a dogstatsd client used by the output tests.
# Calls are captured in order as flat [operation, *args] arrays.
class DummyStatsd
  attr_reader :messages

  def initialize
    @messages = []
  end

  # Batching in the real client wraps calls in a block; here we simply
  # hand the block this same recorder.
  def batch
    yield(self)
  end

  recorded_ops = %w[increment decrement count gauge histogram timing set event]
  recorded_ops.each do |op|
    op_sym = op.to_sym
    define_method(op_sym) do |*params|
      @messages << [op_sym, params].flatten
    end
  end
end
# Unit tests for Fluent::DogstatsdOutput.
#
# Each test builds a BufferedOutputTestDriver whose statsd client has been
# replaced with a DummyStatsd recorder (see create_driver), emits events,
# runs the driver, and asserts on the recorded [method, key, opts] tuples.
class DogstatsdOutputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
    # Loaded here rather than at file top so Fluent::Test is set up first.
    require 'fluent/plugin/out_dogstatsd'
  end

  def teardown
  end

  # host/port from the config block must reach the plugin instance.
  def test_configure
    d = create_driver(<<-EOC)
type dogstatsd
host HOST
port 12345
    EOC
    assert_equal('HOST', d.instance.host)
    assert_equal(12345, d.instance.port)
  end

  # One statsd call per emitted record, dispatched on the 'type' field.
  def test_write
    d = create_driver
    d.emit({'type' => 'increment', 'key' => 'hello.world1'}, Time.now.to_i)
    d.emit({'type' => 'increment', 'key' => 'hello.world2'}, Time.now.to_i)
    d.emit({'type' => 'decrement', 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'count', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'gauge', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'histogram', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'timing', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'set', 'value' => 10, 'key' => 'hello.world'}, Time.now.to_i)
    d.emit({'type' => 'event', 'title' => 'Deploy', 'text' => 'Revision', 'key' => 'hello.world'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world1', {}],
      [:increment, 'hello.world2', {}],
      [:decrement, 'hello.world', {}],
      [:count, 'hello.world', 10, {}],
      [:gauge, 'hello.world', 10, {}],
      [:histogram, 'hello.world', 10, {}],
      [:timing, 'hello.world', 10, {}],
      [:set, 'hello.world', 10, {}],
      [:event, 'Deploy', 'Revision', {}],
    ])
  end

  # With flat_tag, extra record fields become statsd tags.
  def test_flat_tag
    d = create_driver(<<-EOC)
#{default_config}
flat_tag true
    EOC
    d.emit({'type' => 'increment', 'key' => 'hello.world', 'tagKey' => 'tagValue'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world', {tags: ["tagKey:tagValue"]}],
    ])
  end

  # metric_type in the config supplies the type when records omit it.
  def test_metric_type
    d = create_driver(<<-EOC)
#{default_config}
metric_type decrement
    EOC
    d.emit({'key' => 'hello.world', 'tags' => {'tagKey' => 'tagValue'}}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:decrement, 'hello.world', {tags: ["tagKey:tagValue"]}],
    ])
  end

  # use_tag_as_key: the fluentd tag ('dogstatsd.tag') becomes the metric key.
  def test_use_tag_as_key
    d = create_driver(<<-EOC)
#{default_config}
use_tag_as_key true
    EOC
    d.emit({'type' => 'increment'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'dogstatsd.tag', {}],
    ])
  end

  # use_tag_as_key_if_missing: tag is used only when the record has no key.
  def test_use_tag_as_key_fallback
    d = create_driver(<<-EOC)
#{default_config}
use_tag_as_key_if_missing true
    EOC
    d.emit({'type' => 'increment'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'dogstatsd.tag', {}],
    ])
  end

  # A 'tags' hash on the record is converted to "key:value" statsd tags.
  def test_tags
    d = create_driver
    d.emit({'type' => 'increment', 'key' => 'hello.world', 'tags' => {'key' => 'value'}}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'hello.world', {tags: ["key:value"]}],
    ])
  end

  # sample_rate from the config is forwarded on every call. Note: an
  # explicit 'key' is emitted so the expectation does not depend on
  # tag-as-key behavior (these tests failed before that change).
  def test_sample_rate_config
    d = create_driver(<<-EOC)
#{default_config}
sample_rate .5
    EOC
    d.emit({'type' => 'increment', 'key' => 'tag'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'tag', {sample_rate: 0.5}],
    ])
  end

  # Per-record 'sample_rate' behaves like the config option.
  def test_sample_rate
    d = create_driver
    d.emit({'type' => 'increment', 'sample_rate' => 0.5, 'key' => 'tag'}, Time.now.to_i)
    d.run
    assert_equal(d.instance.statsd.messages, [
      [:increment, 'tag', {sample_rate: 0.5}],
    ])
  end

  private

  def default_config
    <<-EOC
type dogstatsd
    EOC
  end

  # Build a buffered test driver for the plugin and swap its statsd client
  # for a DummyStatsd so calls can be inspected.
  def create_driver(conf = default_config)
    Fluent::Test::BufferedOutputTestDriver.new(Fluent::DogstatsdOutput, 'dogstatsd.tag').configure(conf).tap do |d|
      d.instance.statsd = DummyStatsd.new
    end
  end
end
|
# Homebrew formula: Apache HTTP server 2.4, built against Homebrew's
# apr/apr-util/openssl/pcre/nghttp2/brotli and laid out Slackware-FHS style
# so configuration lives under etc/ and data under var/.
class Httpd < Formula
  desc "Apache HTTP server"
  homepage "https://httpd.apache.org/"
  url "https://www.apache.org/dyn/closer.cgi?path=httpd/httpd-2.4.37.tar.bz2"
  sha256 "3498dc5c6772fac2eb7307dc7963122ffe243b5e806e0be4fb51974ff759d726"

  bottle do
    sha256 "9056baef0b95a004147020b23099102d88bf477ce0eac8ea6f28734213000f71" => :mojave
    sha256 "c4799839ef633e76339b9a595d7bafcc83a5747affec314ee921d1987d07297c" => :high_sierra
    sha256 "98a4c47ff83d3992d7b832bac17d67350130ebc1b293c8d632e6aa14dbd82fb8" => :sierra
  end

  depends_on "apr"
  depends_on "apr-util"
  depends_on "brotli"
  depends_on "nghttp2"
  depends_on "openssl"
  depends_on "pcre"

  def install
    # fixup prefix references in favour of opt_prefix references
    inreplace "Makefile.in",
              '#@@ServerRoot@@#$(prefix)#', '#@@ServerRoot@@'"##{opt_prefix}#"
    inreplace "docs/conf/extra/httpd-autoindex.conf.in",
              "@exp_iconsdir@", "#{opt_pkgshare}/icons"
    inreplace "docs/conf/extra/httpd-multilang-errordoc.conf.in",
              "@exp_errordir@", "#{opt_pkgshare}/error"

    # fix default user/group when running as root
    inreplace "docs/conf/httpd.conf.in", /(User|Group) daemon/, "\\1 _www"

    # use Slackware-FHS layout as it's closest to what we want.
    # these values cannot be passed directly to configure, unfortunately.
    inreplace "config.layout" do |s|
      s.gsub! "${datadir}/htdocs", "${datadir}"
      s.gsub! "${htdocsdir}/manual", "#{pkgshare}/manual"
      s.gsub! "${datadir}/error", "#{pkgshare}/error"
      s.gsub! "${datadir}/icons", "#{pkgshare}/icons"
    end

    # Ports 8080/8443 are used so httpd can run unprivileged (no sudo).
    system "./configure", "--enable-layout=Slackware-FHS",
                          "--prefix=#{prefix}",
                          "--sbindir=#{bin}",
                          "--mandir=#{man}",
                          "--sysconfdir=#{etc}/httpd",
                          "--datadir=#{var}/www",
                          "--localstatedir=#{var}",
                          "--enable-mpms-shared=all",
                          "--enable-mods-shared=all",
                          "--enable-authnz-fcgi",
                          "--enable-cgi",
                          "--enable-pie",
                          "--enable-suexec",
                          "--with-suexec-bin=#{opt_bin}/suexec",
                          "--with-suexec-caller=_www",
                          "--with-port=8080",
                          "--with-sslport=8443",
                          "--with-apr=#{Formula["apr"].opt_prefix}",
                          "--with-apr-util=#{Formula["apr-util"].opt_prefix}",
                          "--with-brotli=#{Formula["brotli"].opt_prefix}",
                          "--with-mpm=prefork",
                          "--with-nghttp2=#{Formula["nghttp2"].opt_prefix}",
                          "--with-ssl=#{Formula["openssl"].opt_prefix}",
                          "--with-pcre=#{Formula["pcre"].opt_prefix}",
                          "--disable-lua",
                          "--disable-luajit"
    system "make"
    system "make", "install"

    # suexec does not install without root
    bin.install "support/suexec"

    # remove non-executable files in bin dir (for brew audit)
    rm bin/"envvars"
    rm bin/"envvars-std"

    # avoid using Cellar paths
    inreplace %W[
      #{include}/httpd/ap_config_layout.h
      #{lib}/httpd/build/config_vars.mk
    ] do |s|
      s.gsub! "#{lib}/httpd/modules", "#{HOMEBREW_PREFIX}/lib/httpd/modules"
    end

    inreplace %W[
      #{bin}/apachectl
      #{bin}/apxs
      #{include}/httpd/ap_config_auto.h
      #{include}/httpd/ap_config_layout.h
      #{lib}/httpd/build/config_vars.mk
      #{lib}/httpd/build/config.nice
    ] do |s|
      s.gsub! prefix, opt_prefix
    end

    inreplace "#{lib}/httpd/build/config_vars.mk" do |s|
      pcre = Formula["pcre"]
      s.gsub! pcre.prefix.realpath, pcre.opt_prefix
      s.gsub! "${prefix}/lib/httpd/modules",
              "#{HOMEBREW_PREFIX}/lib/httpd/modules"
    end
  end

  def post_install
    (var/"cache/httpd").mkpath
    (var/"www").mkpath
  end

  def caveats
    <<~EOS
      DocumentRoot is #{var}/www.
      The default ports have been set in #{etc}/httpd/httpd.conf to 8080 and in
      #{etc}/httpd/extra/httpd-ssl.conf to 8443 so that httpd can run without sudo.
    EOS
  end

  plist_options :manual => "apachectl start"

  def plist; <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/httpd</string>
        <string>-D</string>
        <string>FOREGROUND</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
    </dict>
    </plist>
  EOS
  end

  test do
    begin
      require "socket"

      # Grab a free port by binding to 0, then release it for httpd.
      server = TCPServer.new(0)
      port = server.addr[1]
      server.close

      expected_output = "Hello world!"
      (testpath/"index.html").write expected_output
      (testpath/"httpd.conf").write <<~EOS
        Listen #{port}
        ServerName localhost:#{port}
        DocumentRoot "#{testpath}"
        ErrorLog "#{testpath}/httpd-error.log"
        PidFile "#{testpath}/httpd.pid"
        LoadModule authz_core_module #{lib}/httpd/modules/mod_authz_core.so
        LoadModule unixd_module #{lib}/httpd/modules/mod_unixd.so
        LoadModule dir_module #{lib}/httpd/modules/mod_dir.so
        LoadModule mpm_prefork_module #{lib}/httpd/modules/mod_mpm_prefork.so
      EOS

      # Run httpd in the foreground (-X) with the minimal config above.
      pid = fork do
        exec bin/"httpd", "-X", "-f", "#{testpath}/httpd.conf"
      end
      sleep 3

      assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
    ensure
      # Guard against `pid` being nil: if setup raised before the fork,
      # Process.kill("TERM", nil) would raise TypeError and mask the
      # original failure.
      if pid
        Process.kill("TERM", pid)
        Process.wait(pid)
      end
    end
  end
end
httpd: update 2.4.37 bottle.
# Homebrew formula: Apache HTTP server 2.4, built against Homebrew's
# apr/apr-util/openssl/pcre/nghttp2/brotli and laid out Slackware-FHS style
# so configuration lives under etc/ and data under var/.
class Httpd < Formula
  desc "Apache HTTP server"
  homepage "https://httpd.apache.org/"
  url "https://www.apache.org/dyn/closer.cgi?path=httpd/httpd-2.4.37.tar.bz2"
  sha256 "3498dc5c6772fac2eb7307dc7963122ffe243b5e806e0be4fb51974ff759d726"

  bottle do
    sha256 "ebdc934511011232b124591665679a583b412768f711c698105f73e7049872df" => :mojave
    sha256 "b18fa371fe65ee49728f0c894c1b450125fe298b4b0201e0442c012855d94b32" => :high_sierra
    sha256 "371da89cc70df87d592050dccf534b4b898c065a78d44830154cbb4f924c7846" => :sierra
  end

  depends_on "apr"
  depends_on "apr-util"
  depends_on "brotli"
  depends_on "nghttp2"
  depends_on "openssl"
  depends_on "pcre"

  def install
    # fixup prefix references in favour of opt_prefix references
    inreplace "Makefile.in",
              '#@@ServerRoot@@#$(prefix)#', '#@@ServerRoot@@'"##{opt_prefix}#"
    inreplace "docs/conf/extra/httpd-autoindex.conf.in",
              "@exp_iconsdir@", "#{opt_pkgshare}/icons"
    inreplace "docs/conf/extra/httpd-multilang-errordoc.conf.in",
              "@exp_errordir@", "#{opt_pkgshare}/error"

    # fix default user/group when running as root
    inreplace "docs/conf/httpd.conf.in", /(User|Group) daemon/, "\\1 _www"

    # use Slackware-FHS layout as it's closest to what we want.
    # these values cannot be passed directly to configure, unfortunately.
    inreplace "config.layout" do |s|
      s.gsub! "${datadir}/htdocs", "${datadir}"
      s.gsub! "${htdocsdir}/manual", "#{pkgshare}/manual"
      s.gsub! "${datadir}/error", "#{pkgshare}/error"
      s.gsub! "${datadir}/icons", "#{pkgshare}/icons"
    end

    # Ports 8080/8443 are used so httpd can run unprivileged (no sudo).
    system "./configure", "--enable-layout=Slackware-FHS",
                          "--prefix=#{prefix}",
                          "--sbindir=#{bin}",
                          "--mandir=#{man}",
                          "--sysconfdir=#{etc}/httpd",
                          "--datadir=#{var}/www",
                          "--localstatedir=#{var}",
                          "--enable-mpms-shared=all",
                          "--enable-mods-shared=all",
                          "--enable-authnz-fcgi",
                          "--enable-cgi",
                          "--enable-pie",
                          "--enable-suexec",
                          "--with-suexec-bin=#{opt_bin}/suexec",
                          "--with-suexec-caller=_www",
                          "--with-port=8080",
                          "--with-sslport=8443",
                          "--with-apr=#{Formula["apr"].opt_prefix}",
                          "--with-apr-util=#{Formula["apr-util"].opt_prefix}",
                          "--with-brotli=#{Formula["brotli"].opt_prefix}",
                          "--with-mpm=prefork",
                          "--with-nghttp2=#{Formula["nghttp2"].opt_prefix}",
                          "--with-ssl=#{Formula["openssl"].opt_prefix}",
                          "--with-pcre=#{Formula["pcre"].opt_prefix}",
                          "--disable-lua",
                          "--disable-luajit"
    system "make"
    system "make", "install"

    # suexec does not install without root
    bin.install "support/suexec"

    # remove non-executable files in bin dir (for brew audit)
    rm bin/"envvars"
    rm bin/"envvars-std"

    # avoid using Cellar paths
    inreplace %W[
      #{include}/httpd/ap_config_layout.h
      #{lib}/httpd/build/config_vars.mk
    ] do |s|
      s.gsub! "#{lib}/httpd/modules", "#{HOMEBREW_PREFIX}/lib/httpd/modules"
    end

    inreplace %W[
      #{bin}/apachectl
      #{bin}/apxs
      #{include}/httpd/ap_config_auto.h
      #{include}/httpd/ap_config_layout.h
      #{lib}/httpd/build/config_vars.mk
      #{lib}/httpd/build/config.nice
    ] do |s|
      s.gsub! prefix, opt_prefix
    end

    inreplace "#{lib}/httpd/build/config_vars.mk" do |s|
      pcre = Formula["pcre"]
      s.gsub! pcre.prefix.realpath, pcre.opt_prefix
      s.gsub! "${prefix}/lib/httpd/modules",
              "#{HOMEBREW_PREFIX}/lib/httpd/modules"
    end
  end

  def post_install
    (var/"cache/httpd").mkpath
    (var/"www").mkpath
  end

  def caveats
    <<~EOS
      DocumentRoot is #{var}/www.
      The default ports have been set in #{etc}/httpd/httpd.conf to 8080 and in
      #{etc}/httpd/extra/httpd-ssl.conf to 8443 so that httpd can run without sudo.
    EOS
  end

  plist_options :manual => "apachectl start"

  def plist; <<~EOS
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
    <dict>
      <key>Label</key>
      <string>#{plist_name}</string>
      <key>ProgramArguments</key>
      <array>
        <string>#{opt_bin}/httpd</string>
        <string>-D</string>
        <string>FOREGROUND</string>
      </array>
      <key>RunAtLoad</key>
      <true/>
    </dict>
    </plist>
  EOS
  end

  test do
    begin
      require "socket"

      # Grab a free port by binding to 0, then release it for httpd.
      server = TCPServer.new(0)
      port = server.addr[1]
      server.close

      expected_output = "Hello world!"
      (testpath/"index.html").write expected_output
      (testpath/"httpd.conf").write <<~EOS
        Listen #{port}
        ServerName localhost:#{port}
        DocumentRoot "#{testpath}"
        ErrorLog "#{testpath}/httpd-error.log"
        PidFile "#{testpath}/httpd.pid"
        LoadModule authz_core_module #{lib}/httpd/modules/mod_authz_core.so
        LoadModule unixd_module #{lib}/httpd/modules/mod_unixd.so
        LoadModule dir_module #{lib}/httpd/modules/mod_dir.so
        LoadModule mpm_prefork_module #{lib}/httpd/modules/mod_mpm_prefork.so
      EOS

      # Run httpd in the foreground (-X) with the minimal config above.
      pid = fork do
        exec bin/"httpd", "-X", "-f", "#{testpath}/httpd.conf"
      end
      sleep 3

      assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
    ensure
      # Guard against `pid` being nil: if setup raised before the fork,
      # Process.kill("TERM", nil) would raise TypeError and mask the
      # original failure.
      if pid
        Process.kill("TERM", pid)
        Process.wait(pid)
      end
    end
  end
end
|
# Homebrew formula: hwloc (Portable Hardware Locality), a library for
# discovering the hierarchical topology of CPUs, caches and memory.
class Hwloc < Formula
  desc "Portable abstraction of the hierarchical topology of modern architectures"
  homepage "https://www.open-mpi.org/projects/hwloc/"
  url "https://www.open-mpi.org/software/hwloc/v2.4/downloads/hwloc-2.4.1.tar.bz2"
  sha256 "392421e69f26120c8ab95d151fe989f2b4b69dab3c7735741c4e0a6d7de5de63"
  license "BSD-3-Clause"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "c2e4c957e5d881fbbf136ce97b14c7fd90a4bd9ed0eba7c786c9cac3e0429736"
    sha256 cellar: :any, big_sur: "ec7b2827de6ecaf19af60fe5c0b17cfca16a04adba68a12cd77b861d83c0311e"
    sha256 cellar: :any, catalina: "2891b4a4c672422f8a9c45083ec2ac39aeafc1cbdbc9d0446718f783a326d330"
    sha256 cellar: :any, mojave: "0b8cd8f304cedc64e8e2c47fc37b67e129c6cbb67d945d0147403259ad289f29"
  end

  # Building from git requires regenerating the autotools files first.
  head do
    url "https://github.com/open-mpi/hwloc.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  depends_on "pkg-config" => :build

  uses_from_macos "libxml2"

  def install
    system "./autogen.sh" if build.head?
    # Graphical backends (cairo, X11) are disabled to keep the dependency
    # tree small; both shared and static libraries are built.
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--enable-shared",
                          "--enable-static",
                          "--prefix=#{prefix}",
                          "--disable-cairo",
                          "--without-x"
    system "make", "install"
    # Ship the upstream test sources so the `test do` block can compile
    # one against the installed library.
    pkgshare.install "tests"
    # remove homebrew shims directory references
    rm Dir[pkgshare/"tests/**/Makefile"]
  end

  test do
    # Compile and run one of the bundled upstream tests against libhwloc.
    system ENV.cc, pkgshare/"tests/hwloc/hwloc_groups.c", "-I#{include}",
           "-L#{lib}", "-lhwloc", "-o", "test"
    system "./test"
  end
end
hwloc: update 2.4.1 bottle.
# Homebrew formula: hwloc (Portable Hardware Locality), a library for
# discovering the hierarchical topology of CPUs, caches and memory.
class Hwloc < Formula
  desc "Portable abstraction of the hierarchical topology of modern architectures"
  homepage "https://www.open-mpi.org/projects/hwloc/"
  url "https://www.open-mpi.org/software/hwloc/v2.4/downloads/hwloc-2.4.1.tar.bz2"
  sha256 "392421e69f26120c8ab95d151fe989f2b4b69dab3c7735741c4e0a6d7de5de63"
  license "BSD-3-Clause"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "c9fee522e4e68aef27a0f89e4eaacc1040571d53d784109821ccfbc492666b3d"
    sha256 cellar: :any, big_sur: "2128d93734fe007235ae4dc6222d02c4d3f7e1faf06529cbf2a7926d6819f64a"
    sha256 cellar: :any, catalina: "472c55e16e5ad6b615f41eb323565533a31e7c8cc05add78106a856be31cf3cf"
    sha256 cellar: :any, mojave: "747f91410adfd6735acc68a230088b3ddb39024e508f376cc64752d6db3c8dff"
  end

  # Building from git requires regenerating the autotools files first.
  head do
    url "https://github.com/open-mpi/hwloc.git"
    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  depends_on "pkg-config" => :build

  uses_from_macos "libxml2"

  def install
    system "./autogen.sh" if build.head?
    # Graphical backends (cairo, X11) are disabled to keep the dependency
    # tree small; both shared and static libraries are built.
    system "./configure", "--disable-debug",
                          "--disable-dependency-tracking",
                          "--enable-shared",
                          "--enable-static",
                          "--prefix=#{prefix}",
                          "--disable-cairo",
                          "--without-x"
    system "make", "install"
    # Ship the upstream test sources so the `test do` block can compile
    # one against the installed library.
    pkgshare.install "tests"
    # remove homebrew shims directory references
    rm Dir[pkgshare/"tests/**/Makefile"]
  end

  test do
    # Compile and run one of the bundled upstream tests against libhwloc.
    system ENV.cc, pkgshare/"tests/hwloc/hwloc_groups.c", "-I#{include}",
           "-L#{lib}", "-lhwloc", "-o", "test"
    system "./test"
  end
end
|
require "language/haskell"

# Homebrew formula: the Idris dependently-typed language, built as a cabal
# package via the Language::Haskell::Cabal helper mixin.
# NOTE(review): this snapshot uses deprecated `sha1` checksum stanzas;
# modern Homebrew requires sha256 — fine here only as a historical revision.
class Idris < Formula
  include Language::Haskell::Cabal

  homepage "http://www.idris-lang.org"
  url "https://github.com/idris-lang/Idris-dev/archive/v0.9.17.tar.gz"
  sha1 "d51d68227b3e3d3967769749314d3a75755a68ef"
  head "https://github.com/idris-lang/Idris-dev.git"

  bottle do
    sha1 "c25ba4b91264c187485111c5b8c18670c7f0441b" => :yosemite
    sha1 "df1773cb1800d6c629db9ba194666faf0019de31" => :mavericks
    sha1 "bbbe93cbd829bb02a9cdbb680805470f29702bbb" => :mountain_lion
  end

  depends_on "ghc" => :build
  depends_on "cabal-install" => :build
  depends_on "gmp"
  # FFI support is optional but on by default; it additionally needs
  # pkg-config at build time.
  depends_on "libffi" => :recommended
  depends_on "pkg-config" => :build if build.with? "libffi"

  def install
    flags = []
    flags << "-f FFI" if build.with? "libffi"
    # The release flag is only valid for tagged tarballs, not HEAD builds.
    flags << "-f release" if build.stable?
    install_cabal_package flags
  end

  test do
    # Compile and run a trivial program.
    (testpath/"hello.idr").write <<-EOS.undent
      module Main
      main : IO ()
      main = putStrLn "Hello, Homebrew!"
    EOS
    shell_output "#{bin}/idris #{testpath}/hello.idr -o #{testpath}/hello"
    result = shell_output "#{testpath}/hello"
    assert_match /Hello, Homebrew!/, result
    # When built with FFI, also exercise the C-FFI interpreter path.
    if build.with? "libffi"
      cmd = "#{bin}/idris --exec 'putStrLn {ffi=FFI_C} \"Hello, interpreter!\"'"
      result = shell_output cmd
      assert_match /Hello, interpreter!/, result
    end
  end
end
idris: update 0.9.17 bottle.
require "language/haskell"

# Homebrew formula: the Idris dependently-typed language, built as a cabal
# package via the Language::Haskell::Cabal helper mixin.
# NOTE(review): the source `sha1` stanza is deprecated in modern Homebrew
# (the bottle block here already uses sha256) — fine as a historical revision.
class Idris < Formula
  include Language::Haskell::Cabal

  homepage "http://www.idris-lang.org"
  url "https://github.com/idris-lang/Idris-dev/archive/v0.9.17.tar.gz"
  sha1 "d51d68227b3e3d3967769749314d3a75755a68ef"
  head "https://github.com/idris-lang/Idris-dev.git"

  bottle do
    sha256 "bb4ac9869a6dc76d6b4a8ecb4e6edc2ae476872432f71509134d1c47e51abdee" => :yosemite
    sha256 "69b0bbf45713c1819696bffd870c2f74fa7ff3e8b5d68dc1b96e194579ce3f13" => :mavericks
    sha256 "bb2d159e3626c95e2f23c20b1e3020151a6ab928b7606fb6790b701360735769" => :mountain_lion
  end

  depends_on "ghc" => :build
  depends_on "cabal-install" => :build
  depends_on "gmp"
  # FFI support is optional but on by default; it additionally needs
  # pkg-config at build time.
  depends_on "libffi" => :recommended
  depends_on "pkg-config" => :build if build.with? "libffi"

  def install
    flags = []
    flags << "-f FFI" if build.with? "libffi"
    # The release flag is only valid for tagged tarballs, not HEAD builds.
    flags << "-f release" if build.stable?
    install_cabal_package flags
  end

  test do
    # Compile and run a trivial program.
    (testpath/"hello.idr").write <<-EOS.undent
      module Main
      main : IO ()
      main = putStrLn "Hello, Homebrew!"
    EOS
    shell_output "#{bin}/idris #{testpath}/hello.idr -o #{testpath}/hello"
    result = shell_output "#{testpath}/hello"
    assert_match /Hello, Homebrew!/, result
    # When built with FFI, also exercise the C-FFI interpreter path.
    if build.with? "libffi"
      cmd = "#{bin}/idris --exec 'putStrLn {ffi=FFI_C} \"Hello, interpreter!\"'"
      result = shell_output cmd
      assert_match /Hello, interpreter!/, result
    end
  end
end
|
# Homebrew formula: Facebook's Infer static analyzer. The build bootstraps
# an OCaml toolchain via opam and compiles a bundled clang, so it is long.
class Infer < Formula
  desc "Static analyzer for Java, C, C++, and Objective-C"
  homepage "https://fbinfer.com/"
  url "https://github.com/facebook/infer/archive/v1.1.0.tar.gz"
  sha256 "201c7797668a4b498fe108fcc13031b72d9dbf04dab0dc65dd6bd3f30e1f89ee"
  license "MIT"
  head "https://github.com/facebook/infer.git"

  # Watch upstream git tags that look like plain version numbers.
  livecheck do
    url :stable
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    sha256 cellar: :any, big_sur: "e3f2d774f27d1daac2b41ed5cb2bcf1b180f9b6d6440ae5ddfb8d1c001c4413a"
    sha256 cellar: :any, catalina: "2dcd6c8c088ee88b21f3740a770fd3f73850815aa1f9270d814bfdd4095d2fc4"
    sha256 cellar: :any, mojave: "b1e1ea3fd12e96a325ca3a5618032a0f9289caae1704afcab131b87a2104ad84"
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "cmake" => :build
  depends_on "libtool" => :build
  depends_on "ninja" => :build
  depends_on "opam" => :build
  # JDK 11 is needed both to build the Java analyzer and to run the test.
  depends_on "openjdk@11" => [:build, :test]
  depends_on "pkg-config" => :build
  depends_on "python@3.9" => :build
  depends_on "gmp"
  depends_on "mpfr"
  depends_on "sqlite"

  uses_from_macos "m4" => :build
  uses_from_macos "unzip" => :build
  uses_from_macos "ncurses"
  uses_from_macos "xz"
  uses_from_macos "zlib"

  on_linux do
    depends_on "patchelf" => :build
  end

  def install
    # needed to build clang
    ENV.permit_arch_flags
    # Apple's libstdc++ is too old to build LLVM
    ENV.libcxx if ENV.compiler == :clang
    # Use JDK11
    ENV["JAVA_HOME"] = Formula["openjdk@11"].opt_prefix

    # Keep the opam root inside the build directory so nothing leaks into
    # the user's ~/.opam.
    opamroot = buildpath/"opamroot"
    opamroot.mkpath
    ENV["OPAMROOT"] = opamroot
    ENV["OPAMYES"] = "1"
    ENV["OPAMVERBOSE"] = "1"
    on_linux do
      ENV["PATCHELF"] = Formula["patchelf"].opt_bin/"patchelf"
    end
    system "opam", "init", "--no-setup", "--disable-sandboxing"

    # do not attempt to use the clang in facebook-clang-plugins/ as it hasn't been built yet
    ENV["INFER_CONFIGURE_OPTS"] = "--prefix=#{prefix} --without-fcp-clang"

    # Let's try build clang faster
    ENV["JOBS"] = ENV.make_jobs.to_s

    # Release build
    touch ".release"

    system "./build-infer.sh", "all", "--yes"
    system "make", "install-with-libs"
  end

  test do
    ENV["JAVA_HOME"] = Formula["openjdk@11"].opt_prefix

    # C sources: one with a guaranteed null dereference, one guarded.
    (testpath/"FailingTest.c").write <<~EOS
      #include <stdio.h>
      int main() {
      int *s = NULL;
      *s = 42;
      return 0;
      }
    EOS
    (testpath/"PassingTest.c").write <<~EOS
      #include <stdio.h>
      int main() {
      int *s = NULL;
      if (s != NULL) {
      *s = 42;
      }
      return 0;
      }
    EOS

    no_issues_output = "\n  No issues found  \n"
    # In these heredocs a leading "\ " escape keeps a leading space that
    # <<~'s indentation stripping would otherwise remove.
    failing_c_output = <<~EOS
      FailingTest.c:5: error: Null Dereference
      \ pointer `s` last assigned on line 4 could be null and is dereferenced at line 5, column 3.
      \ 3. int main() {
      \ 4. int *s = NULL;
      \ 5. *s = 42;
      \ ^
      \ 6. return 0;
      \ 7. }
      Found 1 issue
      \ Issue Type(ISSUED_TYPE_ID): #
      \ Null Dereference(NULL_DEREFERENCE): 1
    EOS
    # --fail-on-issue makes infer exit with status 2 when issues are found
    # (second argument to shell_output).
    assert_equal failing_c_output.to_s,
                 shell_output("#{bin}/infer --fail-on-issue -P -- clang -c FailingTest.c", 2)
    assert_equal no_issues_output.to_s,
                 shell_output("#{bin}/infer --fail-on-issue -P -- clang -c PassingTest.c")

    # Java sources: a possible NPE and its null-checked counterpart.
    (testpath/"FailingTest.java").write <<~EOS
      class FailingTest {
      String mayReturnNull(int i) {
      if (i > 0) {
      return "Hello, Infer!";
      }
      return null;
      }
      int mayCauseNPE() {
      String s = mayReturnNull(0);
      return s.length();
      }
      }
    EOS
    (testpath/"PassingTest.java").write <<~EOS
      class PassingTest {
      String mayReturnNull(int i) {
      if (i > 0) {
      return "Hello, Infer!";
      }
      return null;
      }
      int mayCauseNPE() {
      String s = mayReturnNull(0);
      return s == null ? 0 : s.length();
      }
      }
    EOS
    failing_java_output = <<~EOS
      FailingTest.java:12: error: Null Dereference
      \ object `s` last assigned on line 11 could be null and is dereferenced at line 12.
      \ 10. int mayCauseNPE() {
      \ 11. String s = mayReturnNull(0);
      \ 12. > return s.length();
      \ 13. }
      \ 14. }
      Found 1 issue
      \ Issue Type(ISSUED_TYPE_ID): #
      \ Null Dereference(NULL_DEREFERENCE): 1
    EOS
    assert_equal failing_java_output.to_s,
                 shell_output("#{bin}/infer --fail-on-issue -P -- javac FailingTest.java", 2)
    assert_equal no_issues_output.to_s,
                 shell_output("#{bin}/infer --fail-on-issue -P -- javac PassingTest.java")
  end
end
infer: fix Linux build and usage with macOS 11.3+ SDK
Closes #83649.
Signed-off-by: Daniel Nachun <67d4b1adb270d50ecb7ec053ff144a69f3054d28@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Facebook Infer: builds the OCaml analyzer (via opam) together with a
# bundled clang, then installs both under the prefix.
class Infer < Formula
desc "Static analyzer for Java, C, C++, and Objective-C"
homepage "https://fbinfer.com/"
license "MIT"
revision 1
head "https://github.com/facebook/infer.git", branch: "master"
# Stable builds carry two upstream patches (see comments inline); both
# are expected to be dropped at the next release.
stable do
url "https://github.com/facebook/infer/archive/v1.1.0.tar.gz"
sha256 "201c7797668a4b498fe108fcc13031b72d9dbf04dab0dc65dd6bd3f30e1f89ee"
# Fix FileUtils.cpp:44:57: error: invalid initialization of reference of type 'const string& ...
# Remove in the next release.
patch do
url "https://github.com/facebook/infer/commit/c90ec0683456e0f03135e7c059a1233351440736.patch?full_index=1"
sha256 "516585352727c5372c4d4582ed9a64bc12e7a9eb59386aa3cec9908f0cfc86a8"
end
# Apply patch for finding correct C++ header from Apple SDKs.
# Remove in the next release.
patch do
url "https://github.com/facebook/infer/commit/ec976d3be4e78dbbb019b3be941066f74e826880.patch?full_index=1"
sha256 "4f299566c88dd5b6761d36fcb090d238c216d3721dde9037c725dac255be9d3b"
end
end
# Track upstream tags of the form "vX.Y[.Z]".
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, big_sur: "e3f2d774f27d1daac2b41ed5cb2bcf1b180f9b6d6440ae5ddfb8d1c001c4413a"
sha256 cellar: :any, catalina: "2dcd6c8c088ee88b21f3740a770fd3f73850815aa1f9270d814bfdd4095d2fc4"
sha256 cellar: :any, mojave: "b1e1ea3fd12e96a325ca3a5618032a0f9289caae1704afcab131b87a2104ad84"
end
# Build-time toolchain: opam drives the OCaml build; cmake/ninja build the bundled clang.
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "cmake" => :build
depends_on "libtool" => :build
depends_on "ninja" => :build
depends_on "opam" => :build
# JDK 11 is needed both at build time and for the Java analysis test below.
depends_on "openjdk@11" => [:build, :test]
depends_on "pkg-config" => :build
depends_on "python@3.9" => :build
# Runtime library dependencies.
depends_on "gmp"
depends_on "mpfr"
depends_on "sqlite"
# Add `llvm` for lld due to CMake bug where CC=clang doesn't fallback to ld.
# This causes error: /bin/sh: 1: CMAKE_LINKER-NOTFOUND: not found
# CMake PR ref: https://gitlab.kitware.com/cmake/cmake/-/merge_requests/6457
uses_from_macos "llvm" => :build # TODO: remove when `cmake` is fixed
uses_from_macos "m4" => :build
uses_from_macos "unzip" => :build
uses_from_macos "libedit"
uses_from_macos "libffi"
uses_from_macos "libxml2"
uses_from_macos "ncurses"
uses_from_macos "xz"
uses_from_macos "zlib"
on_linux do
depends_on "patchelf" => :build
depends_on "elfutils" # openmp requires <gelf.h>
end
# Builds Infer and installs it with its support libraries.
def install
# needed to build clang
ENV.permit_arch_flags
# Apple's libstdc++ is too old to build LLVM
ENV.libcxx if ENV.compiler == :clang
# Use JDK11
ENV["JAVA_HOME"] = Formula["openjdk@11"].opt_prefix
# Keep the opam root inside the build tree so nothing leaks into $HOME.
opamroot = buildpath/"opamroot"
opamroot.mkpath
ENV["OPAMROOT"] = opamroot
# Run opam non-interactively and with verbose logging.
ENV["OPAMYES"] = "1"
ENV["OPAMVERBOSE"] = "1"
on_linux do
ENV["PATCHELF"] = Formula["patchelf"].opt_bin/"patchelf"
end
# NOTE(review): opam sandboxing is disabled here, presumably because it
# conflicts with Homebrew's own build sandbox — confirm before changing.
system "opam", "init", "--no-setup", "--disable-sandboxing"
# do not attempt to use the clang in facebook-clang-plugins/ as it hasn't been built yet
ENV["INFER_CONFIGURE_OPTS"] = "--prefix=#{prefix} --without-fcp-clang"
# Let's try build clang faster
ENV["JOBS"] = ENV.make_jobs.to_s
# Release build
touch ".release"
# Disable handling external dependencies as opam is not aware of Homebrew on Linux.
# Error: Package conflict! * Missing dependency: - conf-autoconf
on_linux { inreplace "build-infer.sh", "infer \"$INFER_ROOT\" $locked", "\\0 --no-depexts" }
system "./build-infer.sh", "all", "--yes"
system "make", "install-with-libs"
end
# Smoke-tests Infer on both C (clang) and Java (javac): a null
# dereference that must be reported, and a guarded variant that must
# come back clean. Expected report text is matched verbatim.
test do
ENV["JAVA_HOME"] = Formula["openjdk@11"].opt_prefix
# Make the JDK's javac visible on PATH for the Java test cases below.
ENV.append_path "PATH", Formula["openjdk@11"].opt_bin
(testpath/"FailingTest.c").write <<~EOS
#include <stdio.h>
int main() {
int *s = NULL;
*s = 42;
return 0;
}
EOS
(testpath/"PassingTest.c").write <<~EOS
#include <stdio.h>
int main() {
int *s = NULL;
if (s != NULL) {
*s = 42;
}
return 0;
}
EOS
no_issues_output = "\n No issues found \n"
failing_c_output = <<~EOS
FailingTest.c:5: error: Null Dereference
\ pointer `s` last assigned on line 4 could be null and is dereferenced at line 5, column 3.
\ 3. int main() {
\ 4. int *s = NULL;
\ 5. *s = 42;
\ ^
\ 6. return 0;
\ 7. }
Found 1 issue
\ Issue Type(ISSUED_TYPE_ID): #
\ Null Dereference(NULL_DEREFERENCE): 1
EOS
# Exit status 2 is expected when --fail-on-issue finds a report.
assert_equal failing_c_output.to_s,
shell_output("#{bin}/infer --fail-on-issue -P -- clang -c FailingTest.c", 2)
assert_equal no_issues_output.to_s,
shell_output("#{bin}/infer --fail-on-issue -P -- clang -c PassingTest.c")
(testpath/"FailingTest.java").write <<~EOS
class FailingTest {
String mayReturnNull(int i) {
if (i > 0) {
return "Hello, Infer!";
}
return null;
}
int mayCauseNPE() {
String s = mayReturnNull(0);
return s.length();
}
}
EOS
(testpath/"PassingTest.java").write <<~EOS
class PassingTest {
String mayReturnNull(int i) {
if (i > 0) {
return "Hello, Infer!";
}
return null;
}
int mayCauseNPE() {
String s = mayReturnNull(0);
return s == null ? 0 : s.length();
}
}
EOS
failing_java_output = <<~EOS
FailingTest.java:12: error: Null Dereference
\ object `s` last assigned on line 11 could be null and is dereferenced at line 12.
\ 10. int mayCauseNPE() {
\ 11. String s = mayReturnNull(0);
\ 12. > return s.length();
\ 13. }
\ 14. }
Found 1 issue
\ Issue Type(ISSUED_TYPE_ID): #
\ Null Dereference(NULL_DEREFERENCE): 1
EOS
assert_equal failing_java_output.to_s,
shell_output("#{bin}/infer --fail-on-issue -P -- javac FailingTest.java", 2)
assert_equal no_issues_output.to_s,
shell_output("#{bin}/infer --fail-on-issue -P -- javac PassingTest.java")
end
end
|
# Irssi 1.1.0: terminal IRC client built with autotools.
class Irssi < Formula
desc "Modular IRC client"
homepage "https://irssi.org/"
url "https://github.com/irssi/irssi/releases/download/1.1.0/irssi-1.1.0.tar.xz"
sha256 "109bab173d046391212b330de1f86611ed72af247e6886c18bee73265c2d5f02"
bottle do
sha256 "6e0726a3a7b1518ec5aa3ef01586a0a94d6b85e83500f55aff56a407ff71634c" => :high_sierra
sha256 "4ca64721a3d8a3d11fa32ec0a17a75159a65d2eb7e3ec36cf5cac2398609db2c" => :sierra
sha256 "5813b30eb66687e618ff78e7270b71b1b6d22541034f12d0e5619c9b3d968578" => :el_capitan
end
# HEAD builds come from git and need autotools to generate ./configure
# (hence autogen.sh in install below).
head do
url "https://github.com/irssi/irssi.git"
depends_on "automake" => :build
depends_on "autoconf" => :build
depends_on "libtool" => :build
depends_on "lynx" => :build
end
option "with-dante", "Build with SOCKS support"
option "without-perl", "Build without perl support"
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "openssl" => :recommended
depends_on "dante" => :optional
# Configures and installs irssi; Perl, SOCKS, and SSL support follow
# the options declared above.
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--sysconfdir=#{etc}
--with-bot
--with-proxy
--enable-true-color
--with-socks=#{build.with?("dante") ? "yes" : "no"}
--with-ncurses=#{MacOS.sdk_path}/usr
]
if build.with? "perl"
args << "--with-perl=yes"
args << "--with-perl-lib=#{lib}/perl5/site_perl"
else
args << "--with-perl=no"
end
args << "--disable-ssl" if build.without? "openssl"
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
# "make" and "make install" must be done separately on some systems
system "make"
system "make", "install"
end
# Launches irssi and immediately issues /quit; only verifies the binary
# starts and consumes input.
test do
IO.popen("#{bin}/irssi --connect=irc.freenode.net", "w") do |pipe|
pipe.puts "/quit\n"
pipe.close_write
end
end
end
irssi 1.1.1
Closes #24181.
Signed-off-by: ilovezfs <fbd54dbbcf9e596abad4ccdc4dfc17f80ebeaee2@icloud.com>
# Irssi 1.1.1: terminal IRC client built with autotools.
class Irssi < Formula
desc "Modular IRC client"
homepage "https://irssi.org/"
url "https://github.com/irssi/irssi/releases/download/1.1.1/irssi-1.1.1.tar.xz"
sha256 "784807e7a1ba25212347f03e4287cff9d0659f076edfb2c6b20928021d75a1bf"
bottle do
sha256 "6e0726a3a7b1518ec5aa3ef01586a0a94d6b85e83500f55aff56a407ff71634c" => :high_sierra
sha256 "4ca64721a3d8a3d11fa32ec0a17a75159a65d2eb7e3ec36cf5cac2398609db2c" => :sierra
sha256 "5813b30eb66687e618ff78e7270b71b1b6d22541034f12d0e5619c9b3d968578" => :el_capitan
end
# HEAD builds come from git and need autotools to generate ./configure
# (hence autogen.sh in install below).
head do
url "https://github.com/irssi/irssi.git"
depends_on "automake" => :build
depends_on "autoconf" => :build
depends_on "libtool" => :build
depends_on "lynx" => :build
end
option "with-dante", "Build with SOCKS support"
option "without-perl", "Build without perl support"
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "openssl" => :recommended
depends_on "dante" => :optional
# Configures and installs irssi; Perl, SOCKS, and SSL support follow
# the options declared above.
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--sysconfdir=#{etc}
--with-bot
--with-proxy
--enable-true-color
--with-socks=#{build.with?("dante") ? "yes" : "no"}
--with-ncurses=#{MacOS.sdk_path}/usr
]
if build.with? "perl"
args << "--with-perl=yes"
args << "--with-perl-lib=#{lib}/perl5/site_perl"
else
args << "--with-perl=no"
end
args << "--disable-ssl" if build.without? "openssl"
if build.head?
system "./autogen.sh", *args
else
system "./configure", *args
end
# "make" and "make install" must be done separately on some systems
system "make"
system "make", "install"
end
# Launches irssi and immediately issues /quit; only verifies the binary
# starts and consumes input.
test do
IO.popen("#{bin}/irssi --connect=irc.freenode.net", "w") do |pipe|
pipe.puts "/quit\n"
pipe.close_write
end
end
end
|
require 'formula'
# Irssi 0.8.15 (legacy formula): builds with embedded Perl support,
# patched via the DATA section after __END__.
class Irssi < Formula
  homepage 'http://irssi.org/'
  url 'http://irssi.org/files/irssi-0.8.15.tar.bz2'
  md5 '1dcb3f511b88df94b0c996f36668c7da'

  depends_on 'pkg-config' => :build
  depends_on 'glib'

  # Don't strip, to allow dynamic loading of modules
  skip_clean 'bin'

  # Inline patch read from DATA (after __END__); it strips -arch flags
  # from Perl's ccopts/ldopts so the embedded-Perl build links cleanly.
  def patches
    # Fix Perl path
    DATA
  end

  def install
    # NOTE(review): appends a blank ARCHFLAGS entry; the original comment
    # here was just "wtf?" — intent is to neutralize stray -arch flags,
    # but confirm before removing.
    ENV.append 'ARCHFLAGS', ' '
    system "./configure", "--prefix=#{prefix}",
                          "--with-perl=yes",
                          "--with-perl-lib=#{lib}/perl5/site_perl",
                          "--with-bot",
                          "--with-proxy",
                          "--enable-ssl",
                          "--enable-ipv6"
    # Pass the target as a separate argument (argument-list form) instead
    # of the shell-interpreted string "make install".
    system "make", "install"
  end
end
__END__
--- a/configure 2009-12-03 19:35:07.000000000 -0800
+++ b/configure 2009-12-03 19:35:33.000000000 -0800
@@ -27419,7 +27419,7 @@
if test -z "$perlpath"; then
perl_check_error="perl binary not found"
else
- PERL_CFLAGS=`$perlpath -MExtUtils::Embed -e ccopts 2>/dev/null`
+ PERL_CFLAGS=`$perlpath -MExtUtils::Embed -e ccopts 2>/dev/null | $SED -e 's/-arch [^ ]\{1,\}//g'`
fi
if test "x$ac_cv_c_compiler_gnu" = "xyes" -a -z "`echo $host_os|grep 'bsd\|linux'`"; then
@@ -27437,7 +27437,7 @@
$as_echo "not found, building without Perl" >&6; }
want_perl=no
else
- PERL_LDFLAGS=`$perlpath -MExtUtils::Embed -e ldopts 2>/dev/null`
+ PERL_LDFLAGS=`$perlpath -MExtUtils::Embed -e ldopts 2>/dev/null | $SED -e 's/-arch [^ ]\{1,\}//g'`
if test "x$DYNLIB_MODULES" = "xno" -a "$want_perl" != "static"; then
want_perl=static
irssi fixes
- Fix paths in the man page
- Install the configuration file to HOMEBREW_PREFIX/etc
- Enable the SOCKS library
Signed-off-by: Jack Nagel <43386ce32af96f5c56f2a88e458cb94cebee3751@gmail.com>
require 'formula'
# Irssi 0.8.15 (legacy formula, fixed paths variant): installs config to
# etc, enables SOCKS, and patches the man page via DATA after __END__.
class Irssi < Formula
  homepage 'http://irssi.org/'
  url 'http://irssi.org/files/irssi-0.8.15.tar.bz2'
  md5 '1dcb3f511b88df94b0c996f36668c7da'

  depends_on 'pkg-config' => :build
  depends_on 'glib'

  # Don't strip, to allow dynamic loading of modules
  skip_clean 'bin'

  # Inline patch read from DATA (after __END__): strips -arch flags from
  # Perl's ccopts/ldopts and rewrites hard-coded paths in the man page.
  def patches
    # Fix Perl path and path in man page
    DATA
  end

  def install
    # NOTE(review): appends a blank ARCHFLAGS entry; the original comment
    # here was just "wtf?" — intent is to neutralize stray -arch flags,
    # but confirm before removing.
    ENV.append 'ARCHFLAGS', ' '
    system "./configure", "--prefix=#{prefix}",
                          "--sysconfdir=#{etc}",
                          "--with-perl=yes",
                          "--with-perl-lib=#{lib}/perl5/site_perl",
                          "--with-bot",
                          "--with-proxy",
                          "--enable-ssl",
                          "--enable-ipv6",
                          "--with-socks"
    # Pass the target as a separate argument (argument-list form) instead
    # of the shell-interpreted string "make install".
    system "make", "install"
  end
end
__END__
--- a/configure 2009-12-03 19:35:07.000000000 -0800
+++ b/configure 2009-12-03 19:35:33.000000000 -0800
@@ -27419,7 +27419,7 @@
if test -z "$perlpath"; then
perl_check_error="perl binary not found"
else
- PERL_CFLAGS=`$perlpath -MExtUtils::Embed -e ccopts 2>/dev/null`
+ PERL_CFLAGS=`$perlpath -MExtUtils::Embed -e ccopts 2>/dev/null | $SED -e 's/-arch [^ ]\{1,\}//g'`
fi
if test "x$ac_cv_c_compiler_gnu" = "xyes" -a -z "`echo $host_os|grep 'bsd\|linux'`"; then
@@ -27437,7 +27437,7 @@
$as_echo "not found, building without Perl" >&6; }
want_perl=no
else
- PERL_LDFLAGS=`$perlpath -MExtUtils::Embed -e ldopts 2>/dev/null`
+ PERL_LDFLAGS=`$perlpath -MExtUtils::Embed -e ldopts 2>/dev/null | $SED -e 's/-arch [^ ]\{1,\}//g'`
if test "x$DYNLIB_MODULES" = "xno" -a "$want_perl" != "static"; then
want_perl=static
diff --git a/docs/irssi.1 b/docs/irssi.1
index 62c2844..482cd96 100644
--- a/docs/irssi.1
+++ b/docs/irssi.1
@@ -65,10 +65,10 @@ display brief usage message.
.SH SEE ALSO
.B Irssi
has been supplied with a huge amount of documentation. Check /help or look
-at the files contained by /usr/share/doc/irssi*
+at the files contained by HOMEBREW_PREFIX/share/doc/irssi*
.SH FILES
.TP
-.I /etc/irssi.conf
+.I HOMEBREW_PREFIX/etc/irssi.conf
Global configuration file
.TP
.I ~/.irssi/config
@@ -83,13 +83,13 @@ Default irssi theme
.I ~/.irssi/away.log
Logged messages in away status
.TP
-.I /usr/share/irssi/help/
+.I HOMEBREW_PREFIX/share/irssi/help/
Directory including many help files
.TP
-.I /usr/share/irssi/scripts/
+.I HOMEBREW_PREFIX/share/irssi/scripts/
Global scripts directory
.TP
-.I /usr/share/irssi/themes/
+.I HOMEBREW_PREFIX/share/irssi/themes/
Global themes directory
.TP
.I ~/.irssi/scripts/
|
# Jetty 9.4.25: repackages the upstream binary distribution and wraps
# its helper scripts so JETTY_HOME points into libexec.
class Jetty < Formula
  desc "Java servlet engine and webserver"
  homepage "https://www.eclipse.org/jetty/"
  url "https://search.maven.org/remotecontent?filepath=org/eclipse/jetty/jetty-distribution/9.4.25.v20191220/jetty-distribution-9.4.25.v20191220.tar.gz"
  version "9.4.25.v20191220"
  sha256 "2af42f9aab19ffc390225d1395ea430ee4d070d0fc72ac63d3798ab62d24ddca"
  bottle :unneeded
  depends_on :java => "1.8+"

  # Installs the distribution into libexec and exposes each bin/*.sh
  # helper as a bin/ wrapper that sets JETTY_HOME first.
  def install
    libexec.install Dir["*"]
    (libexec+"logs").mkpath
    bin.mkpath
    Dir.glob("#{libexec}/bin/*.sh") do |f|
      scriptname = File.basename(f, ".sh")
      (bin+scriptname).write <<~EOS
        #!/bin/bash
        JETTY_HOME=#{libexec}
        #{f} "$@"
      EOS
      chmod 0755, bin+scriptname
    end
  end

  # Boots a Jetty base in testpath, checks the server reports as running,
  # stops it, and force-kills the forked process as a safety net.
  test do
    ENV["JETTY_BASE"] = testpath
    cp_r Dir[libexec/"*"], testpath
    pid = fork { exec bin/"jetty", "start" }
    sleep 5 # grace time for server start
    begin
      # Parenthesized call: a bare regexp literal as first argument
      # triggers Ruby's "ambiguous first argument" parser warning.
      assert_match(/Jetty running pid=\d+/, shell_output("#{bin}/jetty check"))
      assert_equal "Stopping Jetty: OK\n", shell_output("#{bin}/jetty stop")
    ensure
      Process.kill 9, pid
      Process.wait pid
    end
  end
end
jetty 9.4.26.v20200117 (#49253)
# Jetty 9.4.26: repackages the upstream binary distribution and wraps
# its helper scripts so JETTY_HOME points into libexec.
class Jetty < Formula
  desc "Java servlet engine and webserver"
  homepage "https://www.eclipse.org/jetty/"
  url "https://search.maven.org/remotecontent?filepath=org/eclipse/jetty/jetty-distribution/9.4.26.v20200117/jetty-distribution-9.4.26.v20200117.tar.gz"
  version "9.4.26.v20200117"
  sha256 "31a157c493687e9b7be7366a5dc4ee7ef9cae1663ea279cd9fcf4070d53ef071"
  bottle :unneeded
  depends_on :java => "1.8+"

  # Installs the distribution into libexec and exposes each bin/*.sh
  # helper as a bin/ wrapper that sets JETTY_HOME first.
  def install
    libexec.install Dir["*"]
    (libexec+"logs").mkpath
    bin.mkpath
    Dir.glob("#{libexec}/bin/*.sh") do |f|
      scriptname = File.basename(f, ".sh")
      (bin+scriptname).write <<~EOS
        #!/bin/bash
        JETTY_HOME=#{libexec}
        #{f} "$@"
      EOS
      chmod 0755, bin+scriptname
    end
  end

  # Boots a Jetty base in testpath, checks the server reports as running,
  # stops it, and force-kills the forked process as a safety net.
  test do
    ENV["JETTY_BASE"] = testpath
    cp_r Dir[libexec/"*"], testpath
    pid = fork { exec bin/"jetty", "start" }
    sleep 5 # grace time for server start
    begin
      # Parenthesized call: a bare regexp literal as first argument
      # triggers Ruby's "ambiguous first argument" parser warning.
      assert_match(/Jetty running pid=\d+/, shell_output("#{bin}/jetty check"))
      assert_equal "Stopping Jetty: OK\n", shell_output("#{bin}/jetty stop")
    ensure
      Process.kill 9, pid
      Process.wait pid
    end
  end
end
|
# JRuby: repackages the upstream binary distribution, renaming commands
# that would clash with other Ruby installations.
class Jruby < Formula
desc "Ruby implementation in pure Java"
homepage "https://www.jruby.org/"
url "https://search.maven.org/remotecontent?filepath=org/jruby/jruby-dist/9.3.2.0/jruby-dist-9.3.2.0-bin.tar.gz"
sha256 "26699ca02beeafa8326573c1125c57a5971ba8b94d15f84e6b3baf2594244f33"
license any_of: ["EPL-2.0", "GPL-2.0-only", "LGPL-2.1-only"]
livecheck do
url "https://www.jruby.org/download"
regex(%r{href=.*?/jruby-dist[._-]v?(\d+(?:\.\d+)+)-bin\.t}i)
end
bottle do
sha256 cellar: :any, arm64_monterey: "d74a733c36f2ca87ec9636140f3b7faf4c8603ca154ff83124cd751a4f3601f9"
sha256 cellar: :any, arm64_big_sur: "6dcf340e69824cf06256e30d4da376082286f97927bbcc8ce4cf4229cafe02fa"
sha256 cellar: :any, monterey: "b3f81064a42a232096dffaa39095c81f80399289903b9c26884db588db03c060"
sha256 cellar: :any, big_sur: "b3f81064a42a232096dffaa39095c81f80399289903b9c26884db588db03c060"
sha256 cellar: :any, catalina: "b3f81064a42a232096dffaa39095c81f80399289903b9c26884db588db03c060"
sha256 cellar: :any_skip_relocation, x86_64_linux: "cce2a980edb9946bd83610aa772b6d95549bffb2161300365107b0c753a4fb9e"
end
depends_on "openjdk"
def install
# Remove Windows files
rm Dir["bin/*.{bat,dll,exe}"]
cd "bin" do
# Prefix a 'j' on some commands to avoid clashing with other rubies
%w[ast bundle bundler rake rdoc ri racc].each { |f| mv f, "j#{f}" }
# Delete some unnecessary commands
rm "gem" # gem is a wrapper script for jgem
rm "irb" # irb is an identical copy of jirb
end
# Only keep the macOS native libraries
rm_rf Dir["lib/jni/*"] - ["lib/jni/Darwin"]
libexec.install Dir["*"]
bin.install Dir["#{libexec}/bin/*"]
# Wraps each script with a user-overridable JAVA_HOME environment.
bin.env_script_all_files libexec/"bin", Language::Java.overridable_java_home_env
# Replace (prebuilt!) universal binaries with their native slices
# FIXME: Build libjffi-1.2.jnilib from source.
deuniversalize_machos
end
# Minimal runtime check: evaluate a one-liner and compare stdout.
test do
assert_equal "hello\n", shell_output("#{bin}/jruby -e \"puts 'hello'\"")
end
end
jruby: update 9.3.2.0 bottle.
# JRuby (bottle rebuild 1): repackages the upstream binary distribution,
# renaming commands that would clash with other Ruby installations.
class Jruby < Formula
desc "Ruby implementation in pure Java"
homepage "https://www.jruby.org/"
url "https://search.maven.org/remotecontent?filepath=org/jruby/jruby-dist/9.3.2.0/jruby-dist-9.3.2.0-bin.tar.gz"
sha256 "26699ca02beeafa8326573c1125c57a5971ba8b94d15f84e6b3baf2594244f33"
license any_of: ["EPL-2.0", "GPL-2.0-only", "LGPL-2.1-only"]
livecheck do
url "https://www.jruby.org/download"
regex(%r{href=.*?/jruby-dist[._-]v?(\d+(?:\.\d+)+)-bin\.t}i)
end
bottle do
rebuild 1
sha256 cellar: :any, arm64_monterey: "227867b3ebcaaddd621c00b2455d665a81b4c48329c1ed7e1b6d03e4443c44b7"
sha256 cellar: :any, arm64_big_sur: "ed636b1d558dfa99179e994364a7a4e857c6f76f19808ba9ac387ce9ba366a27"
sha256 cellar: :any, monterey: "57a054fac4a8352dd894ee5606cf6d78143f0df9eaa5f7c7404dce84dc70a4a8"
sha256 cellar: :any, big_sur: "57a054fac4a8352dd894ee5606cf6d78143f0df9eaa5f7c7404dce84dc70a4a8"
sha256 cellar: :any, catalina: "57a054fac4a8352dd894ee5606cf6d78143f0df9eaa5f7c7404dce84dc70a4a8"
sha256 cellar: :any_skip_relocation, x86_64_linux: "df795ec4b9c97332d783538b8e4e729cabb8460a3c04f82f494ab42694319229"
end
depends_on "openjdk"
def install
# Remove Windows files
rm Dir["bin/*.{bat,dll,exe}"]
cd "bin" do
# Prefix a 'j' on some commands to avoid clashing with other rubies
%w[ast bundle bundler rake rdoc ri racc].each { |f| mv f, "j#{f}" }
# Delete some unnecessary commands
rm "gem" # gem is a wrapper script for jgem
rm "irb" # irb is an identical copy of jirb
end
# Only keep the macOS native libraries
rm_rf Dir["lib/jni/*"] - ["lib/jni/Darwin"]
libexec.install Dir["*"]
bin.install Dir["#{libexec}/bin/*"]
# Wraps each script with a user-overridable JAVA_HOME environment.
bin.env_script_all_files libexec/"bin", Language::Java.overridable_java_home_env
# Replace (prebuilt!) universal binaries with their native slices
# FIXME: Build libjffi-1.2.jnilib from source.
deuniversalize_machos
end
# Minimal runtime check: evaluate a one-liner and compare stdout.
test do
assert_equal "hello\n", shell_output("#{bin}/jruby -e \"puts 'hello'\"")
end
end
|
# KaHIP graph partitioner, built with CMake.
class Kahip < Formula
desc "Karlsruhe High Quality Partitioning"
homepage "https://algo2.iti.kit.edu/documents/kahip/index.html"
url "https://github.com/KaHIP/KaHIP/archive/v3.14.tar.gz"
sha256 "9da04f3b0ea53b50eae670d6014ff54c0df2cb40f6679b2f6a96840c1217f242"
license "MIT"
revision 1
head "https://github.com/KaHIP/KaHIP.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_monterey: "caefdd4a209465343d4b986895d17278c811acd876f7ecce50388ab0c4e7b250"
sha256 cellar: :any, arm64_big_sur: "a393a6470d7569acf1c2e1e0b402d5901cea07c9880a7d6f01423acdaad7262a"
sha256 cellar: :any, monterey: "8f147b571794bbc87b050e84edaca1eb90be0b7c3ed6f0976f3f22c7a6a6ed96"
sha256 cellar: :any, big_sur: "d6ef09d6bde208d85c59ea4d5748a0289a6eddec3e75315766a05e692b857c6d"
sha256 cellar: :any, catalina: "f3c5fee2f01f5d4dce03a9f5c43ec8bdb6ba2199aa199c0bb09eefcffe1cb425"
sha256 cellar: :any_skip_relocation, x86_64_linux: "6780ba35f379f397d06db1b6b6f5d1b4a236993959821400d94f5058b1686b83"
end
depends_on "cmake" => :build
depends_on "open-mpi"
# GCC is used on macOS because the build needs OpenMP, which Apple's
# clang lacks (a later revision encodes this as `fails_with :clang`).
on_macos do
depends_on "gcc"
end
def install
if OS.mac?
# Point CC/CXX at the versioned gcc/g++ binaries of the installed GCC.
gcc_major_ver = Formula["gcc"].any_installed_version.major
ENV["CC"] = Formula["gcc"].opt_bin/"gcc-#{gcc_major_ver}"
ENV["CXX"] = Formula["gcc"].opt_bin/"g++-#{gcc_major_ver}"
end
mkdir "build" do
system "cmake", *std_cmake_args, ".."
system "make", "install"
end
end
# Runs the bundled example and checks its expected partition result.
test do
output = shell_output("#{bin}/interface_test")
assert_match "edge cut 2", output
end
end
kahip: `fails_with :clang`
Closes #112705.
Signed-off-by: FX Coudert <c329953660db96eae534be5bbf1a735c2baf69b5@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# KaHIP graph partitioner, built with CMake (out-of-source build).
class Kahip < Formula
desc "Karlsruhe High Quality Partitioning"
homepage "https://algo2.iti.kit.edu/documents/kahip/index.html"
url "https://github.com/KaHIP/KaHIP/archive/v3.14.tar.gz"
sha256 "9da04f3b0ea53b50eae670d6014ff54c0df2cb40f6679b2f6a96840c1217f242"
license "MIT"
revision 1
head "https://github.com/KaHIP/KaHIP.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_monterey: "caefdd4a209465343d4b986895d17278c811acd876f7ecce50388ab0c4e7b250"
sha256 cellar: :any, arm64_big_sur: "a393a6470d7569acf1c2e1e0b402d5901cea07c9880a7d6f01423acdaad7262a"
sha256 cellar: :any, monterey: "8f147b571794bbc87b050e84edaca1eb90be0b7c3ed6f0976f3f22c7a6a6ed96"
sha256 cellar: :any, big_sur: "d6ef09d6bde208d85c59ea4d5748a0289a6eddec3e75315766a05e692b857c6d"
sha256 cellar: :any, catalina: "f3c5fee2f01f5d4dce03a9f5c43ec8bdb6ba2199aa199c0bb09eefcffe1cb425"
sha256 cellar: :any_skip_relocation, x86_64_linux: "6780ba35f379f397d06db1b6b6f5d1b4a236993959821400d94f5058b1686b83"
end
depends_on "cmake" => :build
depends_on "open-mpi"
# Homebrew GCC provides the OpenMP support Apple's clang lacks.
on_macos do
depends_on "gcc"
end
fails_with :clang do
cause "needs OpenMP support"
end
# Standard CMake configure/build/install cycle in a separate build dir.
def install
system "cmake", "-S", ".", "-B", "build", *std_cmake_args
system "cmake", "--build", "build"
system "cmake", "--install", "build"
end
# Runs the bundled example and checks its expected partition result.
test do
output = shell_output("#{bin}/interface_test")
assert_match "edge cut 2", output
end
end
|
khiva 0.5.0 (new formula)
Closes #67078.
Signed-off-by: chenrui <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
# Khiva time-series analysis library, built from a pinned git revision
# with CMake; bindings, tests, and benchmarks are disabled.
class Khiva < Formula
desc "Algorithms to analyse time series"
homepage "https://khiva.readthedocs.io/"
url "https://github.com/shapelets/khiva.git",
tag: "v0.5.0",
revision: "c2c72474f98ce3547cbde5f934deabb1b4eda1c9"
license "MPL-2.0"
depends_on "boost" => :build
depends_on "cmake" => :build
depends_on "arrayfire"
depends_on "eigen"
def install
mkdir "build" do
# Use Homebrew-provided dependencies instead of Conan, and skip
# tests/benchmarks/JNI bindings to keep the build minimal.
system "cmake", "..", *std_cmake_args,
"-DKHIVA_USE_CONAN=OFF",
"-DKHIVA_BUILD_TESTS=OFF",
"-DKHIVA_BUILD_BENCHMARKS=OFF",
"-DKHIVA_BUILD_JNI_BINDINGS=OFF"
system "make"
system "make", "install"
end
# Ship the examples so the test block can compile one of them.
pkgshare.install "examples"
end
# Compiles and runs the bundled matrix example against the installed
# library, linking arrayfire and khiva explicitly.
test do
cp pkgshare/"examples/matrixExample.cpp", testpath
system ENV.cxx, "-std=c++11", "matrixExample.cpp",
"-L#{Formula["arrayfire"].opt_lib}", "-laf",
"-L#{lib}", "-lkhiva",
"-o", "test"
system "./test"
end
end
|
require "language/perl"
# kpcli: a single-file Perl script; its CPAN dependencies are vendored
# into libexec and exposed to the script via PERL5LIB.
class Kpcli < Formula
include Language::Perl::Shebang
desc "Command-line interface to KeePass database files"
homepage "https://kpcli.sourceforge.io/"
url "https://downloads.sourceforge.net/project/kpcli/kpcli-3.6.pl"
sha256 "01f23882d458dfffc176fe5f268ced13c667de22b7fbf60d488eca87f3362deb"
license any_of: ["Artistic-1.0-Perl", "GPL-1.0-or-later"]
revision 1
livecheck do
url :stable
regex(%r{url=.*?/kpcli[._-]v?(\d+(?:\.\d+)+)\.pl}i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "ada893ae300554a13c7b56fce7bc91716d45294078a0f2006175a10e581eddc0"
sha256 cellar: :any, big_sur: "0918f51306694d5a21d117030f32ce49e08a4da4b47d0106d8542b826db03ce6"
sha256 cellar: :any, catalina: "8b88064bbd450ba7b3a7e902a85ac926d2d57b38b74b8a79f4cb72cc9a43aee9"
sha256 cellar: :any, mojave: "6cbffa6128e72f200e5216a6df39b07586048d165e69e45a8cb4821928b98a5d"
end
depends_on "readline"
uses_from_macos "perl"
# CPAN resources staged and installed under libexec in install below.
resource "Module::Build" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Module-Build-0.4231.tar.gz"
sha256 "7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717"
end
resource "File::KeePass" do
url "https://cpan.metacpan.org/authors/id/R/RH/RHANDOM/File-KeePass-2.03.tar.gz"
sha256 "c30c688027a52ff4f58cd69d6d8ef35472a7cf106d4ce94eb73a796ba7c7ffa7"
end
resource "Crypt::Rijndael" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Crypt-Rijndael-1.15.tar.gz"
sha256 "a0989b55990d7905d1b5bf524cd8b46aadc0de778414d4ca8d406aa2aa594163"
end
resource "Sort::Naturally" do
url "https://cpan.metacpan.org/authors/id/B/BI/BINGOS/Sort-Naturally-1.03.tar.gz"
sha256 "eaab1c5c87575a7826089304ab1f8ffa7f18e6cd8b3937623e998e865ec1e746"
end
resource "Term::ShellUI" do
url "https://cpan.metacpan.org/authors/id/B/BR/BRONSON/Term-ShellUI-0.92.tar.gz"
sha256 "3279c01c76227335eeff09032a40f4b02b285151b3576c04cacd15be05942bdb"
end
resource "Term::Readline::Gnu" do
url "https://cpan.metacpan.org/authors/id/H/HA/HAYASHI/Term-ReadLine-Gnu-1.37.tar.gz"
sha256 "3bd31a998a9c14748ee553aed3e6b888ec47ff57c07fc5beafb04a38a72f0078"
end
resource "Data::Password" do
url "https://cpan.metacpan.org/authors/id/R/RA/RAZINF/Data-Password-1.12.tar.gz"
sha256 "830cde81741ff384385412e16faba55745a54a7cc019dd23d7ed4f05d551a961"
end
resource "Clipboard" do
url "https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF/Clipboard-0.26.tar.gz"
sha256 "886ae43dc8538f9bfc4e07fdbcf09b7fbd6ee59c31f364618c859de14953c58a"
end
resource "Mac::Pasteboard" do
url "https://cpan.metacpan.org/authors/id/W/WY/WYANT/Mac-Pasteboard-0.011.tar.gz"
sha256 "bd8c4510b1e805c43e4b55155c0beaf002b649fe30b6a7841ff05e7399ba02a9"
end
resource "Capture::Tiny" do
url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/Capture-Tiny-0.48.tar.gz"
sha256 "6c23113e87bad393308c90a207013e505f659274736638d8c79bac9c67cc3e19"
end
resource "Term::ReadKey" do
url "https://cpan.metacpan.org/authors/id/J/JS/JSTOWE/TermReadKey-2.38.tar.gz"
sha256 "5a645878dc570ac33661581fbb090ff24ebce17d43ea53fd22e105a856a47290"
end
resource "Clone" do
url "https://cpan.metacpan.org/authors/id/A/AT/ATOOMIC/Clone-0.45.tar.gz"
sha256 "cbb6ee348afa95432e4878893b46752549e70dc68fe6d9e430d1d2e99079a9e6"
end
# Installs every CPAN resource under libexec, then wraps the kpcli
# script with the resulting PERL5LIB.
def install
ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
ENV.prepend_path "PERL5LIB", libexec/"lib"
# Resources using the ExtUtils::MakeMaker (Makefile.PL) build flow.
resources = [
"Module::Build",
"File::KeePass",
"Crypt::Rijndael",
"Sort::Naturally",
"Term::ShellUI",
"Data::Password",
"Clipboard",
"Capture::Tiny",
]
# Pasteboard access is macOS-only; Linux needs Term::ReadKey and Clone.
resources += (OS.mac? ? ["Mac::Pasteboard"] : ["Term::ReadKey", "Clone"])
resources.each do |r|
resource(r).stage do
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
system "make", "install"
end
end
# Clipboard uses the Module::Build (Build.PL) flow instead.
resource("Clipboard").stage do
system "perl", "Build.PL", "--install_base", libexec
system "./Build"
system "./Build", "install"
end
resource("Term::Readline::Gnu").stage do
# Prevent the Makefile to try and build universal binaries
ENV.refurbish_args
# Work around issue with Makefile.PL not detecting -ltermcap
# https://rt.cpan.org/Public/Bug/Display.html?id=133846
inreplace "Makefile.PL", "my $TERMCAP_LIB =", "my $TERMCAP_LIB = '-lncurses'; 0 &&"
system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}",
"--includedir=#{Formula["readline"].opt_include}",
"--libdir=#{Formula["readline"].opt_lib}"
system "make", "install"
end
# Point the script's shebang at the detected perl and expose it via a
# wrapper that carries the PERL5LIB built above.
rewrite_shebang detected_perl_shebang, "kpcli-#{version}.pl"
libexec.install "kpcli-#{version}.pl" => "kpcli"
chmod 0755, libexec/"kpcli"
(bin/"kpcli").write_env_script("#{libexec}/kpcli", PERL5LIB: ENV["PERL5LIB"])
end
# Smoke test: the script loads all its modules and prints usage.
test do
system bin/"kpcli", "--help"
end
end
kpcli: align with homebrew-core (#22264)
This does not build on Linux right now, so let's clean up the diff
require "language/perl"
class Kpcli < Formula
include Language::Perl::Shebang
desc "Command-line interface to KeePass database files"
homepage "https://kpcli.sourceforge.io/"
url "https://downloads.sourceforge.net/project/kpcli/kpcli-3.6.pl"
sha256 "01f23882d458dfffc176fe5f268ced13c667de22b7fbf60d488eca87f3362deb"
license any_of: ["Artistic-1.0-Perl", "GPL-1.0-or-later"]
revision 1
livecheck do
url :stable
regex(%r{url=.*?/kpcli[._-]v?(\d+(?:\.\d+)+)\.pl}i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "ada893ae300554a13c7b56fce7bc91716d45294078a0f2006175a10e581eddc0"
sha256 cellar: :any, big_sur: "0918f51306694d5a21d117030f32ce49e08a4da4b47d0106d8542b826db03ce6"
sha256 cellar: :any, catalina: "8b88064bbd450ba7b3a7e902a85ac926d2d57b38b74b8a79f4cb72cc9a43aee9"
sha256 cellar: :any, mojave: "6cbffa6128e72f200e5216a6df39b07586048d165e69e45a8cb4821928b98a5d"
end
depends_on "readline"
uses_from_macos "perl"
resource "Module::Build" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Module-Build-0.4231.tar.gz"
sha256 "7e0f4c692c1740c1ac84ea14d7ea3d8bc798b2fb26c09877229e04f430b2b717"
end
resource "File::KeePass" do
url "https://cpan.metacpan.org/authors/id/R/RH/RHANDOM/File-KeePass-2.03.tar.gz"
sha256 "c30c688027a52ff4f58cd69d6d8ef35472a7cf106d4ce94eb73a796ba7c7ffa7"
end
resource "Crypt::Rijndael" do
url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Crypt-Rijndael-1.15.tar.gz"
sha256 "a0989b55990d7905d1b5bf524cd8b46aadc0de778414d4ca8d406aa2aa594163"
end
resource "Sort::Naturally" do
url "https://cpan.metacpan.org/authors/id/B/BI/BINGOS/Sort-Naturally-1.03.tar.gz"
sha256 "eaab1c5c87575a7826089304ab1f8ffa7f18e6cd8b3937623e998e865ec1e746"
end
resource "Term::ShellUI" do
url "https://cpan.metacpan.org/authors/id/B/BR/BRONSON/Term-ShellUI-0.92.tar.gz"
sha256 "3279c01c76227335eeff09032a40f4b02b285151b3576c04cacd15be05942bdb"
end
resource "Term::Readline::Gnu" do
url "https://cpan.metacpan.org/authors/id/H/HA/HAYASHI/Term-ReadLine-Gnu-1.37.tar.gz"
sha256 "3bd31a998a9c14748ee553aed3e6b888ec47ff57c07fc5beafb04a38a72f0078"
end
resource "Data::Password" do
url "https://cpan.metacpan.org/authors/id/R/RA/RAZINF/Data-Password-1.12.tar.gz"
sha256 "830cde81741ff384385412e16faba55745a54a7cc019dd23d7ed4f05d551a961"
end
resource "Clipboard" do
url "https://cpan.metacpan.org/authors/id/S/SH/SHLOMIF/Clipboard-0.26.tar.gz"
sha256 "886ae43dc8538f9bfc4e07fdbcf09b7fbd6ee59c31f364618c859de14953c58a"
end
resource "Mac::Pasteboard" do
url "https://cpan.metacpan.org/authors/id/W/WY/WYANT/Mac-Pasteboard-0.011.tar.gz"
sha256 "bd8c4510b1e805c43e4b55155c0beaf002b649fe30b6a7841ff05e7399ba02a9"
end
resource "Capture::Tiny" do
url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/Capture-Tiny-0.48.tar.gz"
sha256 "6c23113e87bad393308c90a207013e505f659274736638d8c79bac9c67cc3e19"
end
def install
  # Vendored CPAN modules go under libexec so they never touch the
  # system Perl's site directories.
  ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
  ENV.prepend_path "PERL5LIB", libexec/"lib"

  # These resources all use the classic ExtUtils::MakeMaker flow.
  # NOTE(review): Mac::Pasteboard is macOS-specific — confirm it builds
  # when this formula is used on Linux.
  make_maker_modules = %w[
    Module::Build
    File::KeePass
    Crypt::Rijndael
    Sort::Naturally
    Term::ShellUI
    Data::Password
    Mac::Pasteboard
    Capture::Tiny
  ]
  make_maker_modules.each do |mod|
    resource(mod).stage do
      system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
      system "make", "install"
    end
  end

  # Clipboard ships a Build.PL (Module::Build) instead of a Makefile.PL.
  resource("Clipboard").stage do
    system "perl", "Build.PL", "--install_base", libexec
    system "./Build"
    system "./Build", "install"
  end

  resource("Term::Readline::Gnu").stage do
    # Keep the generated Makefile from attempting universal binaries.
    ENV.refurbish_args
    # Work around Makefile.PL not detecting -ltermcap
    # https://rt.cpan.org/Public/Bug/Display.html?id=133846
    inreplace "Makefile.PL", "my $TERMCAP_LIB =", "my $TERMCAP_LIB = '-lncurses'; 0 &&"
    system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}",
           "--includedir=#{Formula["readline"].opt_include}",
           "--libdir=#{Formula["readline"].opt_lib}"
    system "make", "install"
  end

  # Ship the script under libexec and expose a thin wrapper in bin that
  # carries the PERL5LIB built up above.
  rewrite_shebang detected_perl_shebang, "kpcli-#{version}.pl"
  libexec.install "kpcli-#{version}.pl" => "kpcli"
  chmod 0755, libexec/"kpcli"
  (bin/"kpcli").write_env_script("#{libexec}/kpcli", PERL5LIB: ENV["PERL5LIB"])
end
test do
# Smoke test: running --help exercises the wrapper script and requires the
# vendored Perl modules to load successfully.
system bin/"kpcli", "--help"
end
end
|
# Homebrew formula for libav, the ffmpeg fork. Upstream is unmaintained —
# see the deprecate! call below. Kept byte-stable: patch order and pinned
# checksums are load-bearing.
class Libav < Formula
desc "Audio and video processing tools"
homepage "https://libav.org/"
url "https://libav.org/releases/libav-12.3.tar.xz"
sha256 "6893cdbd7bc4b62f5d8fd6593c8e0a62babb53e323fbc7124db3658d04ab443b"
license "GPL-2.0-or-later"
revision 8
head "https://git.libav.org/libav.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "0654bef05e6d8a3fa7fbeb6e9be5a02abe411ebbb3eec69c7a2e1f4b309cb6f5"
sha256 cellar: :any, big_sur: "0bd97c8c39f11b5b29d5c271a28eb4ea4a40b4062a4331f8d97f738c9a82fb05"
sha256 cellar: :any, catalina: "fcfafef0bb5eeee417c1d69d8ddb1fe0d7a8f8fe70edf39b8499a0df841f6905"
sha256 cellar: :any, mojave: "f71b7acc7dd972d60176b7d6c9bfe247181867d98ff991d771dcff54a6beace5"
end
# See: https://lists.libav.org/pipermail/libav-devel/2020-April/086589.html
deprecate! date: "2019-04-16", because: :unmaintained
depends_on "pkg-config" => :build
# manpages won't be built without texi2html
depends_on "texi2html" => :build
depends_on "yasm" => :build
# External codec libraries; each maps to an --enable-lib* configure flag below.
depends_on "faac"
depends_on "fdk-aac"
depends_on "freetype"
depends_on "lame"
depends_on "libvorbis"
depends_on "libvpx"
depends_on "opus"
depends_on "sdl"
depends_on "theora"
depends_on "x264"
depends_on "xvid"
# Patches apply in declaration order; do not reorder.
# Cherry-picked hunk from https://github.com/libav/libav/commit/fe7bc1f16abaefe66d8a20f734ca3eb8a4ce4d43
# (second hunk in above commit conflicts with released source)
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/e07f287/libav/remove_unconditional_X11_probe.patch"
sha256 "093364c5cb0d79fb80566b5b466e6e8877d01c70e32b6f8ad624205005caba26"
end
# https://bugzilla.libav.org/show_bug.cgi?id=1033
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b6e917c/libav/Check-for--no_weak_imports-in-ldflags-on-macOS.patch"
sha256 "986d748ba2c7c83319a59d76fbb0dca22dcd51f0252b3d1f3b80dbda2cf79742"
end
# Upstream patch for x264 version >= 153, should be included in libav > 12.3
patch do
url "https://github.com/libav/libav/commit/c6558e8840fbb2386bf8742e4d68dd6e067d262e.patch?full_index=1"
sha256 "0fcfe69274cccbca33825414f526300a1fbbf0c464ac32577e1cc137b8618820"
end
# Upstream patch to fix building with fdk-aac 2
patch do
url "https://github.com/libav/libav/commit/141c960e21d2860e354f9b90df136184dd00a9a8.patch?full_index=1"
sha256 "7081183fed875f71d53cce1e71f6b58fb5d5eee9f30462d35f9367ec2210507b"
end
# Fix for image formats removed from libvpx
# https://github.com/shirkdog/hardenedbsd-ports/blob/HEAD/multimedia/libav/files/patch-libavcodec_libvpx.c
patch :DATA
def install
# Static build (--disable-shared) with the external encoders matching the
# depends_on list above. NOTE(review): --enable-nonfree presumably required
# by faac/fdk-aac — confirm before redistributing binaries.
args = %W[
--disable-debug
--disable-shared
--disable-indev=jack
--prefix=#{prefix}
--cc=#{ENV.cc}
--host-cflags=#{ENV.cflags}
--host-ldflags=#{ENV.ldflags}
--enable-gpl
--enable-libfaac
--enable-libfdk-aac
--enable-libfreetype
--enable-libmp3lame
--enable-libopus
--enable-libvorbis
--enable-libvpx
--enable-libx264
--enable-libxvid
--enable-nonfree
--enable-vda
--enable-version3
--enable-libtheora
--disable-libxcb
--disable-vaapi
--disable-vdpau
]
system "./configure", *args
system "make"
# No `make install`: only the three CLI tools and their manpages are shipped.
bin.install "avconv", "avprobe", "avplay"
man1.install "doc/avconv.1", "doc/avprobe.1", "doc/avplay.1"
end
test do
# Create an example mp4 file
mp4out = testpath/"video.mp4"
system bin/"avconv", "-y", "-filter_complex", "testsrc=rate=1:duration=1", mp4out
assert_predicate mp4out, :exist?
end
end
__END__
--- a/libavcodec/libvpx.c
+++ b/libavcodec/libvpx.c
@@ -25,6 +25,7 @@
enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt_t img)
{
switch (img) {
+#if VPX_IMAGE_ABI_VERSION < 5
case VPX_IMG_FMT_RGB24: return AV_PIX_FMT_RGB24;
case VPX_IMG_FMT_RGB565: return AV_PIX_FMT_RGB565BE;
case VPX_IMG_FMT_RGB555: return AV_PIX_FMT_RGB555BE;
@@ -36,10 +37,13 @@ enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt
case VPX_IMG_FMT_ARGB_LE: return AV_PIX_FMT_BGRA;
case VPX_IMG_FMT_RGB565_LE: return AV_PIX_FMT_RGB565LE;
case VPX_IMG_FMT_RGB555_LE: return AV_PIX_FMT_RGB555LE;
+#endif
case VPX_IMG_FMT_I420: return AV_PIX_FMT_YUV420P;
case VPX_IMG_FMT_I422: return AV_PIX_FMT_YUV422P;
case VPX_IMG_FMT_I444: return AV_PIX_FMT_YUV444P;
+#if VPX_IMAGE_ABI_VERSION < 5
case VPX_IMG_FMT_444A: return AV_PIX_FMT_YUVA444P;
+#endif
#if VPX_IMAGE_ABI_VERSION >= 3
case VPX_IMG_FMT_I440: return AV_PIX_FMT_YUV440P;
case VPX_IMG_FMT_I42016: return AV_PIX_FMT_YUV420P16BE;
@@ -53,6 +57,7 @@ enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt
vpx_img_fmt_t ff_vpx_pixfmt_to_imgfmt(enum AVPixelFormat pix)
{
switch (pix) {
+#if VPX_IMAGE_ABI_VERSION < 5
case AV_PIX_FMT_RGB24: return VPX_IMG_FMT_RGB24;
case AV_PIX_FMT_RGB565BE: return VPX_IMG_FMT_RGB565;
case AV_PIX_FMT_RGB555BE: return VPX_IMG_FMT_RGB555;
@@ -64,10 +69,13 @@ vpx_img_fmt_t ff_vpx_pixfmt_to_imgfmt(enum AVPixelForm
case AV_PIX_FMT_BGRA: return VPX_IMG_FMT_ARGB_LE;
case AV_PIX_FMT_RGB565LE: return VPX_IMG_FMT_RGB565_LE;
case AV_PIX_FMT_RGB555LE: return VPX_IMG_FMT_RGB555_LE;
+#endif
case AV_PIX_FMT_YUV420P: return VPX_IMG_FMT_I420;
case AV_PIX_FMT_YUV422P: return VPX_IMG_FMT_I422;
case AV_PIX_FMT_YUV444P: return VPX_IMG_FMT_I444;
+#if VPX_IMAGE_ABI_VERSION < 5
case AV_PIX_FMT_YUVA444P: return VPX_IMG_FMT_444A;
+#endif
#if VPX_IMAGE_ABI_VERSION >= 3
case AV_PIX_FMT_YUV440P: return VPX_IMG_FMT_I440;
case AV_PIX_FMT_YUV420P16BE: return VPX_IMG_FMT_I42016;
libav: update 12.3_8 bottle.
# Homebrew formula for libav, the ffmpeg fork (upstream unmaintained; see
# deprecate! below). This revision adds a Linux bottle.
class Libav < Formula
desc "Audio and video processing tools"
homepage "https://libav.org/"
url "https://libav.org/releases/libav-12.3.tar.xz"
sha256 "6893cdbd7bc4b62f5d8fd6593c8e0a62babb53e323fbc7124db3658d04ab443b"
license "GPL-2.0-or-later"
revision 8
head "https://git.libav.org/libav.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "0654bef05e6d8a3fa7fbeb6e9be5a02abe411ebbb3eec69c7a2e1f4b309cb6f5"
sha256 cellar: :any, big_sur: "0bd97c8c39f11b5b29d5c271a28eb4ea4a40b4062a4331f8d97f738c9a82fb05"
sha256 cellar: :any, catalina: "fcfafef0bb5eeee417c1d69d8ddb1fe0d7a8f8fe70edf39b8499a0df841f6905"
sha256 cellar: :any, mojave: "f71b7acc7dd972d60176b7d6c9bfe247181867d98ff991d771dcff54a6beace5"
sha256 cellar: :any_skip_relocation, x86_64_linux: "e9084c8190150a220fdffd6da4d93297336f5d9b59cdd6895eeb839af950efd0"
end
# See: https://lists.libav.org/pipermail/libav-devel/2020-April/086589.html
deprecate! date: "2019-04-16", because: :unmaintained
depends_on "pkg-config" => :build
# manpages won't be built without texi2html
depends_on "texi2html" => :build
depends_on "yasm" => :build
# External codec libraries; each maps to an --enable-lib* configure flag below.
depends_on "faac"
depends_on "fdk-aac"
depends_on "freetype"
depends_on "lame"
depends_on "libvorbis"
depends_on "libvpx"
depends_on "opus"
depends_on "sdl"
depends_on "theora"
depends_on "x264"
depends_on "xvid"
# Patches apply in declaration order; do not reorder.
# Cherry-picked hunk from https://github.com/libav/libav/commit/fe7bc1f16abaefe66d8a20f734ca3eb8a4ce4d43
# (second hunk in above commit conflicts with released source)
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/e07f287/libav/remove_unconditional_X11_probe.patch"
sha256 "093364c5cb0d79fb80566b5b466e6e8877d01c70e32b6f8ad624205005caba26"
end
# https://bugzilla.libav.org/show_bug.cgi?id=1033
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b6e917c/libav/Check-for--no_weak_imports-in-ldflags-on-macOS.patch"
sha256 "986d748ba2c7c83319a59d76fbb0dca22dcd51f0252b3d1f3b80dbda2cf79742"
end
# Upstream patch for x264 version >= 153, should be included in libav > 12.3
patch do
url "https://github.com/libav/libav/commit/c6558e8840fbb2386bf8742e4d68dd6e067d262e.patch?full_index=1"
sha256 "0fcfe69274cccbca33825414f526300a1fbbf0c464ac32577e1cc137b8618820"
end
# Upstream patch to fix building with fdk-aac 2
patch do
url "https://github.com/libav/libav/commit/141c960e21d2860e354f9b90df136184dd00a9a8.patch?full_index=1"
sha256 "7081183fed875f71d53cce1e71f6b58fb5d5eee9f30462d35f9367ec2210507b"
end
# Fix for image formats removed from libvpx
# https://github.com/shirkdog/hardenedbsd-ports/blob/HEAD/multimedia/libav/files/patch-libavcodec_libvpx.c
patch :DATA
def install
# Static build (--disable-shared) with external encoders matching the
# depends_on list above. NOTE(review): --enable-nonfree presumably required
# by faac/fdk-aac — confirm before redistributing binaries.
args = %W[
--disable-debug
--disable-shared
--disable-indev=jack
--prefix=#{prefix}
--cc=#{ENV.cc}
--host-cflags=#{ENV.cflags}
--host-ldflags=#{ENV.ldflags}
--enable-gpl
--enable-libfaac
--enable-libfdk-aac
--enable-libfreetype
--enable-libmp3lame
--enable-libopus
--enable-libvorbis
--enable-libvpx
--enable-libx264
--enable-libxvid
--enable-nonfree
--enable-vda
--enable-version3
--enable-libtheora
--disable-libxcb
--disable-vaapi
--disable-vdpau
]
system "./configure", *args
system "make"
# No `make install`: only the three CLI tools and their manpages are shipped.
bin.install "avconv", "avprobe", "avplay"
man1.install "doc/avconv.1", "doc/avprobe.1", "doc/avplay.1"
end
test do
# Create an example mp4 file
mp4out = testpath/"video.mp4"
system bin/"avconv", "-y", "-filter_complex", "testsrc=rate=1:duration=1", mp4out
assert_predicate mp4out, :exist?
end
end
__END__
--- a/libavcodec/libvpx.c
+++ b/libavcodec/libvpx.c
@@ -25,6 +25,7 @@
enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt_t img)
{
switch (img) {
+#if VPX_IMAGE_ABI_VERSION < 5
case VPX_IMG_FMT_RGB24: return AV_PIX_FMT_RGB24;
case VPX_IMG_FMT_RGB565: return AV_PIX_FMT_RGB565BE;
case VPX_IMG_FMT_RGB555: return AV_PIX_FMT_RGB555BE;
@@ -36,10 +37,13 @@ enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt
case VPX_IMG_FMT_ARGB_LE: return AV_PIX_FMT_BGRA;
case VPX_IMG_FMT_RGB565_LE: return AV_PIX_FMT_RGB565LE;
case VPX_IMG_FMT_RGB555_LE: return AV_PIX_FMT_RGB555LE;
+#endif
case VPX_IMG_FMT_I420: return AV_PIX_FMT_YUV420P;
case VPX_IMG_FMT_I422: return AV_PIX_FMT_YUV422P;
case VPX_IMG_FMT_I444: return AV_PIX_FMT_YUV444P;
+#if VPX_IMAGE_ABI_VERSION < 5
case VPX_IMG_FMT_444A: return AV_PIX_FMT_YUVA444P;
+#endif
#if VPX_IMAGE_ABI_VERSION >= 3
case VPX_IMG_FMT_I440: return AV_PIX_FMT_YUV440P;
case VPX_IMG_FMT_I42016: return AV_PIX_FMT_YUV420P16BE;
@@ -53,6 +57,7 @@ enum AVPixelFormat ff_vpx_imgfmt_to_pixfmt(vpx_img_fmt
vpx_img_fmt_t ff_vpx_pixfmt_to_imgfmt(enum AVPixelFormat pix)
{
switch (pix) {
+#if VPX_IMAGE_ABI_VERSION < 5
case AV_PIX_FMT_RGB24: return VPX_IMG_FMT_RGB24;
case AV_PIX_FMT_RGB565BE: return VPX_IMG_FMT_RGB565;
case AV_PIX_FMT_RGB555BE: return VPX_IMG_FMT_RGB555;
@@ -64,10 +69,13 @@ vpx_img_fmt_t ff_vpx_pixfmt_to_imgfmt(enum AVPixelForm
case AV_PIX_FMT_BGRA: return VPX_IMG_FMT_ARGB_LE;
case AV_PIX_FMT_RGB565LE: return VPX_IMG_FMT_RGB565_LE;
case AV_PIX_FMT_RGB555LE: return VPX_IMG_FMT_RGB555_LE;
+#endif
case AV_PIX_FMT_YUV420P: return VPX_IMG_FMT_I420;
case AV_PIX_FMT_YUV422P: return VPX_IMG_FMT_I422;
case AV_PIX_FMT_YUV444P: return VPX_IMG_FMT_I444;
+#if VPX_IMAGE_ABI_VERSION < 5
case AV_PIX_FMT_YUVA444P: return VPX_IMG_FMT_444A;
+#endif
#if VPX_IMAGE_ABI_VERSION >= 3
case AV_PIX_FMT_YUV440P: return VPX_IMG_FMT_I440;
case AV_PIX_FMT_YUV420P16BE: return VPX_IMG_FMT_I42016;
|
# Homebrew formula for liblo, an implementation of the Open Sound Control
# (OSC) protocol.
class Liblo < Formula
  desc "Lightweight Open Sound Control implementation"
  homepage "https://liblo.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
  sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"
  # "LGPL-2.1" is a deprecated SPDX identifier; upstream headers allow
  # "version 2.1 ... or (at your option) any later version".
  license "LGPL-2.1-or-later"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "95b358e3f04623998f6c2d734599ec7e63b3c389f9d6e0cc9fc6311850929f55"
    sha256 cellar: :any, big_sur: "19eef0619f05faa15a7d5368973dcd3e5ed2e44291b56cc6ff72825fe8879845"
    sha256 cellar: :any, catalina: "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444"
    sha256 cellar: :any, mojave: "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e"
    sha256 cellar: :any, high_sierra: "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e4abab6d8b5735e6b1dac973850d9608e71c644255c13b954365398daf8aeec4"
  end

  head do
    # Name the branch explicitly: Homebrew requires it for git heads and it
    # guards against an upstream default-branch rename.
    url "https://git.code.sf.net/p/liblo/git.git", branch: "master"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  # Configure, build and install; HEAD checkouts generate configure via
  # autogen.sh first.
  def install
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    if build.head?
      system "./autogen.sh", *args
    else
      system "./configure", *args
    end
    system "make", "install"
  end

  test do
    # lo_version fills the buffer with the library's version string, which
    # must match the formula's version.
    (testpath/"lo_version.c").write <<~EOS
      #include <stdio.h>
      #include "lo/lo.h"
      int main() {
      char version[6];
      lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
      printf("%s", version);
      return 0;
      }
    EOS
    system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
    lo_version = `./lo_version`
    assert_equal version.to_str, lo_version
  end
end
liblo: add `head` branch
Signed-off-by: Rui Chen <907c7afd57be493757f13ccd1dd45dddf02db069@chenrui.dev>
# Homebrew formula for liblo, an implementation of the Open Sound Control
# (OSC) protocol.
class Liblo < Formula
  desc "Lightweight Open Sound Control implementation"
  homepage "https://liblo.sourceforge.io/"
  url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
  sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"
  # "LGPL-2.1" is a deprecated SPDX identifier; upstream headers allow
  # "version 2.1 ... or (at your option) any later version".
  license "LGPL-2.1-or-later"

  bottle do
    sha256 cellar: :any, arm64_big_sur: "95b358e3f04623998f6c2d734599ec7e63b3c389f9d6e0cc9fc6311850929f55"
    sha256 cellar: :any, big_sur: "19eef0619f05faa15a7d5368973dcd3e5ed2e44291b56cc6ff72825fe8879845"
    sha256 cellar: :any, catalina: "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444"
    sha256 cellar: :any, mojave: "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e"
    sha256 cellar: :any, high_sierra: "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e4abab6d8b5735e6b1dac973850d9608e71c644255c13b954365398daf8aeec4"
  end

  head do
    url "https://git.code.sf.net/p/liblo/git.git", branch: "master"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  # Configure, build and install; HEAD checkouts generate configure via
  # autogen.sh first.
  def install
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    if build.head?
      system "./autogen.sh", *args
    else
      system "./configure", *args
    end
    system "make", "install"
  end

  test do
    # lo_version fills the buffer with the library's version string, which
    # must match the formula's version.
    (testpath/"lo_version.c").write <<~EOS
      #include <stdio.h>
      #include "lo/lo.h"
      int main() {
      char version[6];
      lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
      printf("%s", version);
      return 0;
      }
    EOS
    system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
    lo_version = `./lo_version`
    assert_equal version.to_str, lo_version
  end
end
|
# Homebrew formula for liblo, an implementation of the Open Sound Control
# (OSC) protocol.
class Liblo < Formula
  desc "Lightweight Open Sound Control implementation"
  homepage "https://liblo.sourceforge.io/"
  # "LGPL-2.1" is a deprecated SPDX identifier; upstream headers allow
  # "version 2.1 ... or (at your option) any later version".
  license "LGPL-2.1-or-later"

  stable do
    url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
    sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"

    # Fix -flat_namespace being used on Big Sur and later.
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/03cf8088210822aa2c1ab544ed58ea04c897d9c4/libtool/configure-big_sur.diff"
      sha256 "35acd6aebc19843f1a2b3a63e880baceb0f5278ab1ace661e57a502d9d78c93c"
    end
  end

  bottle do
    sha256 cellar: :any, arm64_big_sur: "95b358e3f04623998f6c2d734599ec7e63b3c389f9d6e0cc9fc6311850929f55"
    sha256 cellar: :any, big_sur: "19eef0619f05faa15a7d5368973dcd3e5ed2e44291b56cc6ff72825fe8879845"
    sha256 cellar: :any, catalina: "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444"
    sha256 cellar: :any, mojave: "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e"
    sha256 cellar: :any, high_sierra: "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e4abab6d8b5735e6b1dac973850d9608e71c644255c13b954365398daf8aeec4"
  end

  head do
    url "https://git.code.sf.net/p/liblo/git.git", branch: "master"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  # Configure, build and install; HEAD checkouts generate configure via
  # autogen.sh first.
  def install
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    if build.head?
      system "./autogen.sh", *args
    else
      system "./configure", *args
    end
    system "make", "install"
  end

  test do
    # lo_version fills the buffer with the library's version string, which
    # must match the formula's version.
    (testpath/"lo_version.c").write <<~EOS
      #include <stdio.h>
      #include "lo/lo.h"
      int main() {
      char version[6];
      lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
      printf("%s", version);
      return 0;
      }
    EOS
    system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
    lo_version = `./lo_version`
    assert_equal version.to_str, lo_version
  end
end
liblo: update 0.31 bottle.
# Homebrew formula for liblo, an implementation of the Open Sound Control
# (OSC) protocol. This revision adds Monterey bottles.
class Liblo < Formula
  desc "Lightweight Open Sound Control implementation"
  homepage "https://liblo.sourceforge.io/"
  # "LGPL-2.1" is a deprecated SPDX identifier; upstream headers allow
  # "version 2.1 ... or (at your option) any later version".
  license "LGPL-2.1-or-later"

  stable do
    url "https://downloads.sourceforge.net/project/liblo/liblo/0.31/liblo-0.31.tar.gz"
    sha256 "2b4f446e1220dcd624ecd8405248b08b7601e9a0d87a0b94730c2907dbccc750"

    # Fix -flat_namespace being used on Big Sur and later.
    patch do
      url "https://raw.githubusercontent.com/Homebrew/formula-patches/03cf8088210822aa2c1ab544ed58ea04c897d9c4/libtool/configure-big_sur.diff"
      sha256 "35acd6aebc19843f1a2b3a63e880baceb0f5278ab1ace661e57a502d9d78c93c"
    end
  end

  bottle do
    sha256 cellar: :any, arm64_monterey: "ec5eeaedb57fa7e93e3f5ff00fc5092427ca254fcbaab1306350fed40329c832"
    sha256 cellar: :any, arm64_big_sur: "95b358e3f04623998f6c2d734599ec7e63b3c389f9d6e0cc9fc6311850929f55"
    sha256 cellar: :any, monterey: "375403935f81443482f672921c5b2d5ca2802f31186fd2834f0ba1d6c7cea19f"
    sha256 cellar: :any, big_sur: "19eef0619f05faa15a7d5368973dcd3e5ed2e44291b56cc6ff72825fe8879845"
    sha256 cellar: :any, catalina: "aac4280d5e147a6baab53c252bbf7cda296fe5bdeceb26d7aa60acb10ecc5444"
    sha256 cellar: :any, mojave: "3310110ec91fb412b8d5c727bda03454aebec087d78ebada20bb53ad9582088e"
    sha256 cellar: :any, high_sierra: "034eaec236ee4df490d16db9998ec7a4d88223d929b333c8b08ade641bc74bcb"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "e4abab6d8b5735e6b1dac973850d9608e71c644255c13b954365398daf8aeec4"
  end

  head do
    url "https://git.code.sf.net/p/liblo/git.git", branch: "master"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  # Configure, build and install; HEAD checkouts generate configure via
  # autogen.sh first.
  def install
    args = %W[
      --disable-debug
      --disable-dependency-tracking
      --prefix=#{prefix}
    ]
    if build.head?
      system "./autogen.sh", *args
    else
      system "./configure", *args
    end
    system "make", "install"
  end

  test do
    # lo_version fills the buffer with the library's version string, which
    # must match the formula's version.
    (testpath/"lo_version.c").write <<~EOS
      #include <stdio.h>
      #include "lo/lo.h"
      int main() {
      char version[6];
      lo_version(version, 6, 0, 0, 0, 0, 0, 0, 0);
      printf("%s", version);
      return 0;
      }
    EOS
    system ENV.cc, "lo_version.c", "-I#{include}", "-L#{lib}", "-llo", "-o", "lo_version"
    lo_version = `./lo_version`
    assert_equal version.to_str, lo_version
  end
end
|
# Homebrew formula for libpq, the PostgreSQL client library. Keg-only so it
# does not clash with the full postgres formula.
class Libpq < Formula
desc "Postgres C API library"
homepage "https://www.postgresql.org/docs/14/libpq.html"
url "https://ftp.postgresql.org/pub/source/v14.2/postgresql-14.2.tar.bz2"
sha256 "2cf78b2e468912f8101d695db5340cf313c2e9f68a612fb71427524e8c9a977a"
license "PostgreSQL"
# Track new versions via the main postgresql formula.
livecheck do
formula "postgresql"
end
bottle do
sha256 arm64_monterey: "36b074f07ded99c1945dbcc26f54e45abeba0dbf34d16e63fb6ab16d371158ee"
sha256 arm64_big_sur: "a3fff4783cf1f60544db79e6476a3adb6b6d3398a558e6be62c4cb9f07977725"
sha256 monterey: "9f7a628d2ca6f3ef1613b1ca4f754cb270e18a28ca5f7bed30001f4a51fdd9f2"
sha256 big_sur: "a85a1932a49c8cbba9cf90f9d1f1af30190a8effabda965ce2a4b9a618a26fd3"
sha256 catalina: "b4263f4a513e3e97f0735de8d5919af8a1aa574101e8fcb9db414f1cc2173583"
sha256 x86_64_linux: "2e935bd76326ff8254db26eb04256a672e75604b1ddcf8505ad3a6aee6f8d5ec"
end
keg_only "conflicts with postgres formula"
# GSSAPI provided by Kerberos.framework crashes when forked.
# See https://github.com/Homebrew/homebrew-core/issues/47494.
depends_on "krb5"
depends_on "openssl@1.1"
on_linux do
depends_on "readline"
end
def install
# Configure records opt_ paths so installed artifacts reference the stable
# opt prefix; the dirs overrides below install into the versioned keg.
# NOTE(review): confirm this opt_lib/opt_include intent — it mirrors the
# postgresql formula's pattern.
system "./configure", "--disable-debug",
"--prefix=#{prefix}",
"--with-gssapi",
"--with-openssl",
"--libdir=#{opt_lib}",
"--includedir=#{opt_include}"
dirs = %W[
libdir=#{lib}
includedir=#{include}
pkgincludedir=#{include}/postgresql
includedir_server=#{include}/postgresql/server
includedir_internal=#{include}/postgresql/internal
]
system "make"
# Install only the client-side subsets (bin, headers, interfaces, common,
# port, docs) — not the server.
system "make", "-C", "src/bin", "install", *dirs
system "make", "-C", "src/include", "install", *dirs
system "make", "-C", "src/interfaces", "install", *dirs
system "make", "-C", "src/common", "install", *dirs
system "make", "-C", "src/port", "install", *dirs
system "make", "-C", "doc", "install", *dirs
end
test do
# Link against libpq and attempt a connection; with no server running the
# connection must fail, proving the library loads and runs.
(testpath/"libpq.c").write <<~EOS
#include <stdlib.h>
#include <stdio.h>
#include <libpq-fe.h>
int main()
{
const char *conninfo;
PGconn *conn;
conninfo = "dbname = postgres";
conn = PQconnectdb(conninfo);
if (PQstatus(conn) != CONNECTION_OK) // This should always fail
{
printf("Connection to database attempted and failed");
PQfinish(conn);
exit(0);
}
return 0;
}
EOS
system ENV.cc, "libpq.c", "-L#{lib}", "-I#{include}", "-lpq", "-o", "libpqtest"
assert_equal "Connection to database attempted and failed", shell_output("./libpqtest")
end
end
libpq 14.3
Closes #101365.
Signed-off-by: Alexander Bayandin <673dbf9b1367181cd47bae83bf10b2ffe51be6ac@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for libpq, the PostgreSQL client library (keg-only).
# Version bump to 14.3.
class Libpq < Formula
desc "Postgres C API library"
homepage "https://www.postgresql.org/docs/14/libpq.html"
url "https://ftp.postgresql.org/pub/source/v14.3/postgresql-14.3.tar.bz2"
sha256 "279057368bf59a919c05ada8f95c5e04abb43e74b9a2a69c3d46a20e07a9af38"
license "PostgreSQL"
# Track new versions via the main postgresql formula.
livecheck do
formula "postgresql"
end
# NOTE(review): these bottle checksums are unchanged from 14.2 — they
# predate the 14.3 source bump and should be regenerated by CI; confirm.
bottle do
sha256 arm64_monterey: "36b074f07ded99c1945dbcc26f54e45abeba0dbf34d16e63fb6ab16d371158ee"
sha256 arm64_big_sur: "a3fff4783cf1f60544db79e6476a3adb6b6d3398a558e6be62c4cb9f07977725"
sha256 monterey: "9f7a628d2ca6f3ef1613b1ca4f754cb270e18a28ca5f7bed30001f4a51fdd9f2"
sha256 big_sur: "a85a1932a49c8cbba9cf90f9d1f1af30190a8effabda965ce2a4b9a618a26fd3"
sha256 catalina: "b4263f4a513e3e97f0735de8d5919af8a1aa574101e8fcb9db414f1cc2173583"
sha256 x86_64_linux: "2e935bd76326ff8254db26eb04256a672e75604b1ddcf8505ad3a6aee6f8d5ec"
end
keg_only "conflicts with postgres formula"
# GSSAPI provided by Kerberos.framework crashes when forked.
# See https://github.com/Homebrew/homebrew-core/issues/47494.
depends_on "krb5"
depends_on "openssl@1.1"
on_linux do
depends_on "readline"
end
def install
# Configure records opt_ paths so installed artifacts reference the stable
# opt prefix; the dirs overrides below install into the versioned keg.
system "./configure", "--disable-debug",
"--prefix=#{prefix}",
"--with-gssapi",
"--with-openssl",
"--libdir=#{opt_lib}",
"--includedir=#{opt_include}"
dirs = %W[
libdir=#{lib}
includedir=#{include}
pkgincludedir=#{include}/postgresql
includedir_server=#{include}/postgresql/server
includedir_internal=#{include}/postgresql/internal
]
system "make"
# Install only the client-side subsets (bin, headers, interfaces, common,
# port, docs) — not the server.
system "make", "-C", "src/bin", "install", *dirs
system "make", "-C", "src/include", "install", *dirs
system "make", "-C", "src/interfaces", "install", *dirs
system "make", "-C", "src/common", "install", *dirs
system "make", "-C", "src/port", "install", *dirs
system "make", "-C", "doc", "install", *dirs
end
test do
# Link against libpq and attempt a connection; with no server running the
# connection must fail, proving the library loads and runs.
(testpath/"libpq.c").write <<~EOS
#include <stdlib.h>
#include <stdio.h>
#include <libpq-fe.h>
int main()
{
const char *conninfo;
PGconn *conn;
conninfo = "dbname = postgres";
conn = PQconnectdb(conninfo);
if (PQstatus(conn) != CONNECTION_OK) // This should always fail
{
printf("Connection to database attempted and failed");
PQfinish(conn);
exit(0);
}
return 0;
}
EOS
system ENV.cc, "libpq.c", "-L#{lib}", "-I#{include}", "-lpq", "-o", "libpqtest"
assert_equal "Connection to database attempted and failed", shell_output("./libpqtest")
end
end
|
# Homebrew formula for libuv, the cross-platform asynchronous I/O library.
class Libuv < Formula
  desc "Multi-platform support library with a focus on asynchronous I/O"
  homepage "https://github.com/libuv/libuv"
  url "https://github.com/libuv/libuv/archive/v1.35.0.tar.gz"
  sha256 "ff84a26c79559e511f087aa67925c3b4e0f0aac60cd8039d4d38b292f208ff58"
  # Keyword-argument form replaces the legacy `:branch =>` hashrocket;
  # upstream development happens on the v1.x branch.
  head "https://github.com/libuv/libuv.git", branch: "v1.x"

  bottle do
    cellar :any
    sha256 "d8baf122f9a30c593cb7170941c74a4bbb6954c6c6bab3aea46219b88a42b78d" => :catalina
    sha256 "6e59fa700af314f6e6a0d30b39d04fb23df6e6ed831449ed4a8d3079804ee880" => :mojave
    sha256 "aeb0d5c115db3b94032a5bc56b2032bdde25891e12586c49a5f2e19f101a0323" => :high_sierra
  end

  # sphinx-doc builds the man page and HTML docs; the rest drive autotools.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build

  def install
    # This isn't yet handled by the make install process sadly.
    cd "docs" do
      system "make", "man"
      system "make", "singlehtml"
      man1.install "build/man/libuv.1"
      doc.install Dir["build/singlehtml/*"]
    end

    system "./autogen.sh"
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Minimal API exercise: initialise and tear down an event loop.
    (testpath/"test.c").write <<~EOS
      #include <uv.h>
      #include <stdlib.h>
      int main()
      {
      uv_loop_t* loop = malloc(sizeof *loop);
      uv_loop_init(loop);
      uv_loop_close(loop);
      free(loop);
      return 0;
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-luv", "-o", "test"
    system "./test"
  end
end
libuv: update 1.35.0 bottle.
# Homebrew formula for libuv, the cross-platform asynchronous I/O library.
# This revision refreshes the 1.35.0 bottles.
class Libuv < Formula
  desc "Multi-platform support library with a focus on asynchronous I/O"
  homepage "https://github.com/libuv/libuv"
  url "https://github.com/libuv/libuv/archive/v1.35.0.tar.gz"
  sha256 "ff84a26c79559e511f087aa67925c3b4e0f0aac60cd8039d4d38b292f208ff58"
  # Keyword-argument form replaces the legacy `:branch =>` hashrocket;
  # upstream development happens on the v1.x branch.
  head "https://github.com/libuv/libuv.git", branch: "v1.x"

  bottle do
    cellar :any
    sha256 "f9e218fe31eec56662a5a10b0e736ac4de35d3fb22f0faa4a1c6b6fadf923b09" => :catalina
    sha256 "e5d3596b963e5ee7fa09db56190768d6b12c249485c79558e99ec69415e8ac84" => :mojave
    sha256 "33e80baa14f51bbf8944935e7291472baa5a430861a39ebb6f134ecceb20aec9" => :high_sierra
  end

  # sphinx-doc builds the man page and HTML docs; the rest drive autotools.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build

  def install
    # This isn't yet handled by the make install process sadly.
    cd "docs" do
      system "make", "man"
      system "make", "singlehtml"
      man1.install "build/man/libuv.1"
      doc.install Dir["build/singlehtml/*"]
    end

    system "./autogen.sh"
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Minimal API exercise: initialise and tear down an event loop.
    (testpath/"test.c").write <<~EOS
      #include <uv.h>
      #include <stdlib.h>
      int main()
      {
      uv_loop_t* loop = malloc(sizeof *loop);
      uv_loop_init(loop);
      uv_loop_close(loop);
      free(loop);
      return 0;
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-luv", "-o", "test"
    system "./test"
  end
end
|
# Homebrew formula for libuv, the cross-platform asynchronous I/O library
# (1.27.0 snapshot).
class Libuv < Formula
  desc "Multi-platform support library with a focus on asynchronous I/O"
  homepage "https://github.com/libuv/libuv"
  url "https://github.com/libuv/libuv/archive/v1.27.0.tar.gz"
  sha256 "4afcdc84cd315b77c8e532e7b3fde43d536af0e2e835eafbd0e75518ed26dbed"
  # Keyword-argument form replaces the legacy `:branch =>` hashrocket;
  # upstream development happens on the v1.x branch.
  head "https://github.com/libuv/libuv.git", branch: "v1.x"

  bottle do
    cellar :any
    sha256 "03c85cc0bf74bbe423d69f54ceddb076d892da6f6124171d4afe332a2dd6f543" => :mojave
    sha256 "73585642837684c0604a51714c9290dcebb7bddd71c9e909a0c6423f16a35b0d" => :high_sierra
    sha256 "6d1e7a80887edb23fa0f02e3171fa7c6fbb46e22f07a2f20c59c79711e0042fb" => :sierra
  end

  # sphinx-doc builds the man page and HTML docs; the rest drive autotools.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build

  def install
    # This isn't yet handled by the make install process sadly.
    cd "docs" do
      system "make", "man"
      system "make", "singlehtml"
      man1.install "build/man/libuv.1"
      doc.install Dir["build/singlehtml/*"]
    end

    system "./autogen.sh"
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Minimal API exercise: initialise and tear down an event loop.
    (testpath/"test.c").write <<~EOS
      #include <uv.h>
      #include <stdlib.h>
      int main()
      {
      uv_loop_t* loop = malloc(sizeof *loop);
      uv_loop_init(loop);
      uv_loop_close(loop);
      free(loop);
      return 0;
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-luv", "-o", "test"
    system "./test"
  end
end
libuv: update 1.27.0 bottle.
# Homebrew formula for libuv, the cross-platform asynchronous I/O library.
# This revision refreshes the 1.27.0 bottles.
class Libuv < Formula
  desc "Multi-platform support library with a focus on asynchronous I/O"
  homepage "https://github.com/libuv/libuv"
  url "https://github.com/libuv/libuv/archive/v1.27.0.tar.gz"
  sha256 "4afcdc84cd315b77c8e532e7b3fde43d536af0e2e835eafbd0e75518ed26dbed"
  # Keyword-argument form replaces the legacy `:branch =>` hashrocket;
  # upstream development happens on the v1.x branch.
  head "https://github.com/libuv/libuv.git", branch: "v1.x"

  bottle do
    cellar :any
    sha256 "62c01d11ecde6423eeb1124423f7040ed76be389d954219bb1baac31ab64243e" => :mojave
    sha256 "c74e6fd13bdcd4ee6808028ae523a780abd1b2f4009bd2aa73effc2e65d24877" => :high_sierra
    sha256 "eedccfc120ae823659d0362d9e64b707ccecc6ccd5cd21ac559b92e5ca68d143" => :sierra
  end

  # sphinx-doc builds the man page and HTML docs; the rest drive autotools.
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  depends_on "pkg-config" => :build
  depends_on "sphinx-doc" => :build

  def install
    # This isn't yet handled by the make install process sadly.
    cd "docs" do
      system "make", "man"
      system "make", "singlehtml"
      man1.install "build/man/libuv.1"
      doc.install Dir["build/singlehtml/*"]
    end

    system "./autogen.sh"
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make"
    system "make", "install"
  end

  test do
    # Minimal API exercise: initialise and tear down an event loop.
    (testpath/"test.c").write <<~EOS
      #include <uv.h>
      #include <stdlib.h>
      int main()
      {
      uv_loop_t* loop = malloc(sizeof *loop);
      uv_loop_init(loop);
      uv_loop_close(loop);
      free(loop);
      return 0;
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-luv", "-o", "test"
    system "./test"
  end
end
|
# Homebrew formula for libxc, exchange-correlation functionals for
# density-functional codes.
class Libxc < Formula
  desc "Library of exchange and correlation functionals for codes"
  homepage "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc"
  url "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz"
  sha256 "6ca1d0bb5fdc341d59960707bc67f23ad54de8a6018e19e02eee2b16ea7cc642"
  revision 2

  bottle do
    cellar :any
    sha256 "b3b385ead0069356959d76135b9aa72d6cb492172c06a232720adadf243f9eeb" => :el_capitan
    sha256 "909e86fd8eccf2b7c356d3c4d17179cbaa3e9663846a27381d0268c1c9c00975" => :yosemite
    sha256 "993e4eb1dbe2c4f7cd1b73b46db0413bca8dc1128df6aab4f2d8cacef09d8e5f" => :mavericks
  end

  # NOTE(review): `depends_on :fortran` is the legacy requirement form
  # (modern formulae use "gcc"); left as-is for compatibility.
  depends_on :fortran

  def install
    # FCCPP drives Fortran source preprocessing through the C compiler.
    system "./configure", "--prefix=#{prefix}",
                          "--enable-shared",
                          "FCCPP=#{ENV.fc} -E -x c",
                          "CC=#{ENV.cc}",
                          "CFLAGS=-pipe"
    system "make"
    system "make", "check"
    system "make", "install"
  end

  test do
    # C linkage check: print the library version.
    # `<<~` (squiggly heredoc) replaces the deprecated `<<-EOS.undent`
    # Homebrew idiom; the emitted source is identical.
    (testpath/"test.c").write <<~EOS
      #include <stdio.h>
      #include <xc.h>
      int main()
      {
      int i, vmajor, vminor, func_id = 1;
      xc_version(&vmajor, &vminor);
      printf(\"%d.%d\", vmajor, vminor);
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-lxc", "-I#{include}", "-o", "ctest"
    system "./ctest"

    # Fortran linkage check: the module files must be usable.
    (testpath/"test.f90").write <<~EOS
      program lxctest
      use xc_f90_types_m
      use xc_f90_lib_m
      end program lxctest
    EOS
    ENV.fortran
    system ENV.fc, "test.f90", "-L#{lib}", "-lxc", "-I#{include}", "-o", "ftest"
    system "./ftest"
  end
end
libxc: update 2.2.2_2 bottle.
# Homebrew formula for libxc, exchange-correlation functionals for
# density-functional codes. This revision adds a Sierra bottle.
class Libxc < Formula
  desc "Library of exchange and correlation functionals for codes"
  homepage "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc"
  url "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz"
  sha256 "6ca1d0bb5fdc341d59960707bc67f23ad54de8a6018e19e02eee2b16ea7cc642"
  revision 2

  bottle do
    cellar :any
    sha256 "9946ee821225893e5e4b8ef9b453d1df61472934bf415a8ddc47ccf1eb842ec3" => :sierra
    sha256 "b3b385ead0069356959d76135b9aa72d6cb492172c06a232720adadf243f9eeb" => :el_capitan
    sha256 "909e86fd8eccf2b7c356d3c4d17179cbaa3e9663846a27381d0268c1c9c00975" => :yosemite
    sha256 "993e4eb1dbe2c4f7cd1b73b46db0413bca8dc1128df6aab4f2d8cacef09d8e5f" => :mavericks
  end

  # NOTE(review): `depends_on :fortran` is the legacy requirement form
  # (modern formulae use "gcc"); left as-is for compatibility.
  depends_on :fortran

  def install
    # FCCPP drives Fortran source preprocessing through the C compiler.
    system "./configure", "--prefix=#{prefix}",
                          "--enable-shared",
                          "FCCPP=#{ENV.fc} -E -x c",
                          "CC=#{ENV.cc}",
                          "CFLAGS=-pipe"
    system "make"
    system "make", "check"
    system "make", "install"
  end

  test do
    # C linkage check: print the library version.
    # `<<~` (squiggly heredoc) replaces the deprecated `<<-EOS.undent`
    # Homebrew idiom; the emitted source is identical.
    (testpath/"test.c").write <<~EOS
      #include <stdio.h>
      #include <xc.h>
      int main()
      {
      int i, vmajor, vminor, func_id = 1;
      xc_version(&vmajor, &vminor);
      printf(\"%d.%d\", vmajor, vminor);
      }
    EOS
    system ENV.cc, "test.c", "-L#{lib}", "-lxc", "-I#{include}", "-o", "ctest"
    system "./ctest"

    # Fortran linkage check: the module files must be usable.
    (testpath/"test.f90").write <<~EOS
      program lxctest
      use xc_f90_types_m
      use xc_f90_lib_m
      end program lxctest
    EOS
    ENV.fortran
    system ENV.fc, "test.f90", "-L#{lib}", "-lxc", "-I#{include}", "-o", "ftest"
    system "./ftest"
  end
end
|
# Linuxbrew formula for libXi, the X11 Input extension client library.
class Libxi < Formula
desc "X.Org Libraries: libXi"
homepage "https://www.x.org/" ### http://www.linuxfromscratch.org/blfs/view/svn/x/x7lib.html
url "https://ftp.x.org/pub/individual/lib/libXi-1.7.9.tar.bz2"
sha256 "c2e6b8ff84f9448386c1b5510a5cf5a16d788f76db018194dacdc200180faf45"
# tag "linuxbrew"
bottle do
cellar :any_skip_relocation
sha256 "84d0e465048c45d9236563a7803716fcb0bdafbbcf670992653fdcab6ca68d84" => :x86_64_linux
end
option "without-test", "Skip compile-time tests"
option "with-static", "Build static libraries (not recommended)"
option "with-docs", "Build documentation"
option "with-specs", "Build specifications"
depends_on "linuxbrew/xorg/xextproto" => :build
depends_on "linuxbrew/xorg/xproto" => :build
depends_on "pkg-config" => :build
depends_on "linuxbrew/xorg/inputproto"
depends_on "linuxbrew/xorg/libx11"
depends_on "linuxbrew/xorg/libxext"
depends_on "linuxbrew/xorg/libxfixes"
# Building the docs/specs needs a patched configure plus the doc toolchain.
if build.with?("docs") || build.with?("specs")
patch do
url "https://raw.githubusercontent.com/Linuxbrew/homebrew-xorg/master/Patches/patch_configure.diff"
sha256 "e3aff4be9c8a992fbcbd73fa9ea6202691dd0647f73d1974ace537f3795ba15f"
end
depends_on "xmlto" => :build
depends_on "fop" => [:build, :recommended]
depends_on "libxslt" => [:build, :recommended]
depends_on "asciidoc" => [:build, :optional]
depends_on "linuxbrew/xorg/xorg-sgml-doctools" => [:build, :recommended]
end
def install
# Translate the brew options into configure switches.
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
--enable-static=#{build.with?("static") ? "yes" : "no"}
--enable-docs=#{build.with?("docs") ? "yes" : "no"}
--enable-specs=#{build.with?("specs") ? "yes" : "no"}
]
system "./configure", *args
system "make"
system "make", "check" if build.with? "test"
system "make", "install"
end
end
libxi: clean up dependencies
Remove 'xextproto' and 'xproto' dependencies
as they're brought in by other packages.
Signed-off-by: Maxim Belkin <032baa6ccf1d4eaa4536fc4f9b83b9a1f593449a@gmail.com>
# Linuxbrew formula for libXi, the X11 Input extension client library.
# (xextproto/xproto build deps removed: pulled in transitively by other deps.)
class Libxi < Formula
desc "X.Org Libraries: libXi"
homepage "https://www.x.org/" ### http://www.linuxfromscratch.org/blfs/view/svn/x/x7lib.html
url "https://ftp.x.org/pub/individual/lib/libXi-1.7.9.tar.bz2"
sha256 "c2e6b8ff84f9448386c1b5510a5cf5a16d788f76db018194dacdc200180faf45"
# tag "linuxbrew"
bottle do
cellar :any_skip_relocation
sha256 "84d0e465048c45d9236563a7803716fcb0bdafbbcf670992653fdcab6ca68d84" => :x86_64_linux
end
option "without-test", "Skip compile-time tests"
option "with-static", "Build static libraries (not recommended)"
option "with-docs", "Build documentation"
option "with-specs", "Build specifications"
depends_on "pkg-config" => :build
depends_on "linuxbrew/xorg/inputproto"
depends_on "linuxbrew/xorg/libx11"
depends_on "linuxbrew/xorg/libxext"
depends_on "linuxbrew/xorg/libxfixes"
# Building the docs/specs needs a patched configure plus the doc toolchain.
if build.with?("docs") || build.with?("specs")
patch do
url "https://raw.githubusercontent.com/Linuxbrew/homebrew-xorg/master/Patches/patch_configure.diff"
sha256 "e3aff4be9c8a992fbcbd73fa9ea6202691dd0647f73d1974ace537f3795ba15f"
end
depends_on "xmlto" => :build
depends_on "fop" => [:build, :recommended]
depends_on "libxslt" => [:build, :recommended]
depends_on "asciidoc" => [:build, :optional]
depends_on "linuxbrew/xorg/xorg-sgml-doctools" => [:build, :recommended]
end
def install
# Translate the brew options into configure switches.
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
--enable-static=#{build.with?("static") ? "yes" : "no"}
--enable-docs=#{build.with?("docs") ? "yes" : "no"}
--enable-specs=#{build.with?("specs") ? "yes" : "no"}
]
system "./configure", *args
system "make"
system "make", "check" if build.with? "test"
system "make", "install"
end
end
|
# Homebrew formula for mdcat, a Rust tool that renders Markdown to a terminal.
class Mdcat < Formula
desc "Show markdown documents on text terminals"
homepage "https://github.com/lunaryorn/mdcat"
url "https://github.com/lunaryorn/mdcat/archive/mdcat-0.22.1.tar.gz"
sha256 "b14846542fb8b60ac0235b399136372df7569aa59ed63f3faf88ff7a485abe5f"
license "MPL-2.0"
bottle do
cellar :any_skip_relocation
sha256 "6c609c9f35779be95823935df676648969795f55c4c73d7767fdb210e15e41d7" => :catalina
sha256 "7285471f52c43c0a8f761a04ee2e4b32538d223fc8793345041eb45de060cba3" => :mojave
sha256 "dfbca3e7c6197fa9d668d047f85842c9093a1a00bf7fe0e4db99d8d5d1c65f7e" => :high_sierra
end
depends_on "cmake" => :build
depends_on "rust" => :build
on_linux do
depends_on "pkg-config" => :build
end
def install
system "cargo", "install", *std_cargo_args
end
test do
# Render a small document and check the formatting markers are stripped.
(testpath/"test.md").write <<~EOS
_lorem_ **ipsum** dolor **sit** _amet_
EOS
output = shell_output("#{bin}/mdcat --no-colour test.md")
assert_match "lorem ipsum dolor sit amet", output
end
end
mdcat: update 0.22.1 bottle.
# Homebrew formula for mdcat, a Rust tool that renders Markdown to a terminal.
class Mdcat < Formula
desc "Show markdown documents on text terminals"
homepage "https://github.com/lunaryorn/mdcat"
url "https://github.com/lunaryorn/mdcat/archive/mdcat-0.22.1.tar.gz"
sha256 "b14846542fb8b60ac0235b399136372df7569aa59ed63f3faf88ff7a485abe5f"
license "MPL-2.0"
bottle do
cellar :any_skip_relocation
sha256 "8765ebb14e200949ef0cc2fa572aef8a84eea0c7b5b5b89ed6b8e2ee1896c4a1" => :catalina
sha256 "3d46736eb02798a8d9dc986bcf025d89b3e5c19bc4bf0900eab9ea7c7aafb519" => :mojave
sha256 "74d85385506c912257d520d7425ef9770cb7da76cd0edb1da1f4f22abeebaa2c" => :high_sierra
end
depends_on "cmake" => :build
depends_on "rust" => :build
on_linux do
depends_on "pkg-config" => :build
end
def install
system "cargo", "install", *std_cargo_args
end
test do
# Render a small document and check the formatting markers are stripped.
(testpath/"test.md").write <<~EOS
_lorem_ **ipsum** dolor **sit** _amet_
EOS
output = shell_output("#{bin}/mdcat --no-colour test.md")
assert_match "lorem ipsum dolor sit amet", output
end
end
|
require "formula"
# Homebrew formula for Apache Mesos 0.19.1 (cluster resource manager).
# The explicit `version` spec was removed: brew audit flags it as redundant
# because the version is inferable from the download URL.
# NOTE(review): upstream only provided SHA-1 here; switch to sha256 if one
# is published for this tarball.
class Mesos < Formula
homepage "http://mesos.apache.org"
url "http://mirror.cogentco.com/pub/apache/mesos/0.19.1/mesos-0.19.1.tar.gz"
sha1 "3f219313324f86e11df25688ccb86c2814ab29c5"
depends_on :java => "1.7"
depends_on "maven" => :build
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
require "timeout"
# Start a master and a slave in child processes, then run a one-shot task.
master = fork do
exec "#{sbin}/mesos-master", "--ip=127.0.0.1",
"--registry=in_memory"
end
slave = fork do
exec "#{sbin}/mesos-slave", "--master=127.0.0.1:5050",
"--work_dir=#{testpath}"
end
# Bound the whole round-trip so a wedged cluster fails the test quickly.
Timeout::timeout(15) do
system "#{bin}/mesos", "execute",
"--master=127.0.0.1:5050",
"--name=execute-touch",
"--command=touch\s#{testpath}/executed"
end
Process.kill("TERM", master)
Process.kill("TERM", slave)
# The executed task should have created this file via the slave.
system "[ -e #{testpath}/executed ]"
end
end
mesos: fix audit warning
require "formula"
# Homebrew formula for Apache Mesos 0.19.1 (cluster resource manager).
# NOTE(review): upstream only provided SHA-1 here; switch to sha256 if one
# is published for this tarball.
class Mesos < Formula
homepage "http://mesos.apache.org"
url "http://mirror.cogentco.com/pub/apache/mesos/0.19.1/mesos-0.19.1.tar.gz"
sha1 "3f219313324f86e11df25688ccb86c2814ab29c5"
depends_on :java => "1.7"
depends_on "maven" => :build
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
require "timeout"
# Start a master and a slave in child processes, then run a one-shot task.
master = fork do
exec "#{sbin}/mesos-master", "--ip=127.0.0.1",
"--registry=in_memory"
end
slave = fork do
exec "#{sbin}/mesos-slave", "--master=127.0.0.1:5050",
"--work_dir=#{testpath}"
end
# Bound the whole round-trip so a wedged cluster fails the test quickly.
Timeout::timeout(15) do
system "#{bin}/mesos", "execute",
"--master=127.0.0.1:5050",
"--name=execute-touch",
"--command=touch\s#{testpath}/executed"
end
Process.kill("TERM", master)
Process.kill("TERM", slave)
# The executed task should have created this file via the slave.
system "[ -e #{testpath}/executed ]"
end
end
|
# Homebrew formula for mfcuk, an NFC/MiFare Classic key-recovery toolkit.
class Mfcuk < Formula
desc "MiFare Classic Universal toolKit"
homepage "https://github.com/nfc-tools/mfcuk"
url "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/mfcuk/mfcuk-0.3.8.tar.gz"
sha256 "977595765b4b46e4f47817e9500703aaf5c1bcad39cb02661f862f9d83f13a55"
bottle do
cellar :any
sha256 "a4ae4d6f6cdec9dd28c52ff04da99b9de86c79a19c6e182ef3a557f48dde0741" => :sierra
sha256 "8b329dbd3feb25bc4f04f40451cf25e832395721a5184eb4ee287366aaa06334" => :el_capitan
sha256 "bdf696192e1a660b2fa1ad58498bdce941b1d45c4b51847b95427f41debd4c2d" => :yosemite
sha256 "1394e4115a4e65abacc23e81659fd77475d6039ac39979cea7fd335ee5cf09e6" => :mavericks
end
depends_on "pkg-config" => :build
depends_on "libnfc"
depends_on "libusb"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
# No hardware available in CI; just check the binary runs and prints help.
system bin/"mfcuk", "-h"
end
end
mfcuk: update 0.3.8 bottle.
# Homebrew formula for mfcuk, an NFC/MiFare Classic key-recovery toolkit.
class Mfcuk < Formula
desc "MiFare Classic Universal toolKit"
homepage "https://github.com/nfc-tools/mfcuk"
url "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/mfcuk/mfcuk-0.3.8.tar.gz"
sha256 "977595765b4b46e4f47817e9500703aaf5c1bcad39cb02661f862f9d83f13a55"
bottle do
cellar :any
sha256 "1dddb64e65b9fb1e387f2bd18ed2ae0f600aa868fa92a0141f66fb835a1e8dac" => :high_sierra
sha256 "a4ae4d6f6cdec9dd28c52ff04da99b9de86c79a19c6e182ef3a557f48dde0741" => :sierra
sha256 "8b329dbd3feb25bc4f04f40451cf25e832395721a5184eb4ee287366aaa06334" => :el_capitan
sha256 "bdf696192e1a660b2fa1ad58498bdce941b1d45c4b51847b95427f41debd4c2d" => :yosemite
sha256 "1394e4115a4e65abacc23e81659fd77475d6039ac39979cea7fd335ee5cf09e6" => :mavericks
end
depends_on "pkg-config" => :build
depends_on "libnfc"
depends_on "libusb"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
# No hardware available in CI; just check the binary runs and prints help.
system bin/"mfcuk", "-h"
end
end
|
# Homebrew formula for MinIO (object storage server), built from a tagged
# git checkout with release metadata baked into the binary via ldflags.
class Minio < Formula
desc "High Performance, Kubernetes Native Object Storage"
homepage "https://min.io"
url "https://github.com/minio/minio.git",
tag: "RELEASE.2021-10-13T00-23-17Z",
revision: "129f41cee9e061a2b311f82ddfbf7c0bb4263926"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20211013002317"
license "AGPL-3.0-or-later"
head "https://github.com/minio/minio.git"
livecheck do
url :stable
regex(%r{href=.*?/tag/(?:RELEASE[._-]?)?([\d\-TZ]+)["' >]}i)
# Strip non-digits from the matched tag to get the numeric version form.
strategy :github_latest do |page, regex|
page.scan(regex).map { |match| match&.first&.gsub(/\D/, "") }
end
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "41cef2139dff90e410915a0ebd58a2c7a4c2c25dcf68ba457301b3ad3a4f20b2"
sha256 cellar: :any_skip_relocation, big_sur: "acade1bf63e2a22e4b7b63afceccba935cbc77487d842582d910c06d726313d4"
sha256 cellar: :any_skip_relocation, catalina: "6270a9295b1fbd36ca9437a3cc11f73382cc894df8eb7d28b4f4f3e0d1c0bf79"
sha256 cellar: :any_skip_relocation, mojave: "def8121bf5735e3f083137c21a528cb266c8a6a42fb2c34743b79a018ca77065"
sha256 cellar: :any_skip_relocation, x86_64_linux: "aaae4ddd7aa604397310968f7ce06840d4a80a8c0fd69c4ae093c64eac074752"
end
depends_on "go" => :build
def install
if build.head?
system "go", "build", *std_go_args
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
ldflags = %W[
-s -w
-X github.com/minio/minio/cmd.Version=#{version}
-X github.com/minio/minio/cmd.ReleaseTag=#{release}
-X github.com/minio/minio/cmd.CommitID=#{Utils.git_head}
]
system "go", "build", *std_go_args(ldflags: ldflags.join(" "))
end
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
service do
run [opt_bin/"minio", "server", "--config-dir=#{etc}/minio", "--address=:9000", var/"minio"]
keep_alive true
working_dir HOMEBREW_PREFIX
log_path var/"log/minio.log"
error_log_path var/"log/minio.log"
end
test do
assert_match "minio server - start object storage server",
shell_output("#{bin}/minio server --help 2>&1")
assert_match "minio gateway - start object storage gateway",
shell_output("#{bin}/minio gateway 2>&1")
assert_match "ERROR Unable to validate credentials",
shell_output("#{bin}/minio gateway s3 2>&1", 1)
end
end
minio: update 20211013002317 bottle.
# Homebrew formula for MinIO (object storage server), built from a tagged
# git checkout with release metadata baked into the binary via ldflags.
class Minio < Formula
desc "High Performance, Kubernetes Native Object Storage"
homepage "https://min.io"
url "https://github.com/minio/minio.git",
tag: "RELEASE.2021-10-13T00-23-17Z",
revision: "129f41cee9e061a2b311f82ddfbf7c0bb4263926"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20211013002317"
license "AGPL-3.0-or-later"
head "https://github.com/minio/minio.git"
livecheck do
url :stable
regex(%r{href=.*?/tag/(?:RELEASE[._-]?)?([\d\-TZ]+)["' >]}i)
# Strip non-digits from the matched tag to get the numeric version form.
strategy :github_latest do |page, regex|
page.scan(regex).map { |match| match&.first&.gsub(/\D/, "") }
end
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "259145cd4bc8bd786dc8039dd5ea8eea2a03c82a08c04a49798aa0e5c50d17a1"
sha256 cellar: :any_skip_relocation, big_sur: "6d3675b1cdaf0f86f1c228cf452f006d890e679a5c4f7efc0c8dbd528f88ea0b"
sha256 cellar: :any_skip_relocation, catalina: "65281097a8dac7a9408d4084b4b76a7dc8a537793ed1cb4e87035c1970e18362"
sha256 cellar: :any_skip_relocation, mojave: "554de231c396a39cf36ffbe7f25af384b93f8d660b80435206b7582b748dc4db"
sha256 cellar: :any_skip_relocation, x86_64_linux: "4e80c7590f82c2fd5eee9c396c65f8256fee1c3b37638e5ada40b112b9afb4d4"
end
depends_on "go" => :build
def install
if build.head?
system "go", "build", *std_go_args
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
ldflags = %W[
-s -w
-X github.com/minio/minio/cmd.Version=#{version}
-X github.com/minio/minio/cmd.ReleaseTag=#{release}
-X github.com/minio/minio/cmd.CommitID=#{Utils.git_head}
]
system "go", "build", *std_go_args(ldflags: ldflags.join(" "))
end
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
service do
run [opt_bin/"minio", "server", "--config-dir=#{etc}/minio", "--address=:9000", var/"minio"]
keep_alive true
working_dir HOMEBREW_PREFIX
log_path var/"log/minio.log"
error_log_path var/"log/minio.log"
end
test do
assert_match "minio server - start object storage server",
shell_output("#{bin}/minio server --help 2>&1")
assert_match "minio gateway - start object storage gateway",
shell_output("#{bin}/minio gateway 2>&1")
assert_match "ERROR Unable to validate credentials",
shell_output("#{bin}/minio gateway s3 2>&1", 1)
end
end
|
# Homebrew formula for MinIO (S3-compatible object storage server).
class Minio < Formula
desc "Amazon S3 compatible object storage server"
homepage "https://github.com/minio/minio"
url "https://github.com/minio/minio.git",
:tag => "RELEASE.2019-06-27T21-13-50Z",
:revision => "36c19f1d653adf3ef70128eb3be1a35b6b032731"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20190627211350"
bottle do
cellar :any_skip_relocation
sha256 "bd5b82f9b054855baf74612bb2e1adedc01e65fa66fc75d825348180dff0336b" => :mojave
sha256 "89246290faf95055458e18604361e78b00fd57a9f627f954a4a13c14b1f8bb08" => :high_sierra
sha256 "cde25cfd1615d50a1ea1b791d83a9b61cb2adcab74f467c4ef2418a9a817123e" => :sierra
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "on"
src = buildpath/"src/github.com/minio/minio"
src.install buildpath.children
src.cd do
if build.head?
system "go", "build", "-o", buildpath/"minio"
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)\-(\d+)\-(\d+)Z/, 'T\1:\2:\3Z')
commit = `git rev-parse HEAD`.chomp
proj = "github.com/minio/minio"
system "go", "build", "-o", buildpath/"minio", "-ldflags", <<~EOS
-X #{proj}/cmd.Version=#{version}
-X #{proj}/cmd.ReleaseTag=#{release}
-X #{proj}/cmd.CommitID=#{commit}
EOS
end
end
bin.install buildpath/"minio"
prefix.install_metafiles
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
plist_options :manual => "minio server"
def plist
# Each launchd key must appear exactly once: the previous revision emitted
# KeepAlive and RunAtLoad twice, producing a malformed plist dictionary.
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/minio</string>
<string>server</string>
<string>--config-dir=#{etc}/minio</string>
<string>--address=:9000</string>
<string>#{var}/minio</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/minio.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/minio.log</string>
</dict>
</plist>
EOS
end
test do
system "#{bin}/minio", "version"
end
end
minio: update 20190627211350 bottle.
# Homebrew formula for MinIO (S3-compatible object storage server).
class Minio < Formula
desc "Amazon S3 compatible object storage server"
homepage "https://github.com/minio/minio"
url "https://github.com/minio/minio.git",
:tag => "RELEASE.2019-06-27T21-13-50Z",
:revision => "36c19f1d653adf3ef70128eb3be1a35b6b032731"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20190627211350"
bottle do
cellar :any_skip_relocation
sha256 "57a95c2223d20b597a2549175583983489056d97ff0569162cdb07df45d8db8a" => :mojave
sha256 "8c21be02f2db235f84366105ee0edbd829be5782536fdb01b74fa315b572c7f2" => :high_sierra
sha256 "160452bbf0fec46bed1340ad8e20bfbfa9e0adc217679f5f012200b2384b140e" => :sierra
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "on"
src = buildpath/"src/github.com/minio/minio"
src.install buildpath.children
src.cd do
if build.head?
system "go", "build", "-o", buildpath/"minio"
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)\-(\d+)\-(\d+)Z/, 'T\1:\2:\3Z')
commit = `git rev-parse HEAD`.chomp
proj = "github.com/minio/minio"
system "go", "build", "-o", buildpath/"minio", "-ldflags", <<~EOS
-X #{proj}/cmd.Version=#{version}
-X #{proj}/cmd.ReleaseTag=#{release}
-X #{proj}/cmd.CommitID=#{commit}
EOS
end
end
bin.install buildpath/"minio"
prefix.install_metafiles
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
plist_options :manual => "minio server"
def plist
# Each launchd key must appear exactly once: the previous revision emitted
# KeepAlive and RunAtLoad twice, producing a malformed plist dictionary.
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/minio</string>
<string>server</string>
<string>--config-dir=#{etc}/minio</string>
<string>--address=:9000</string>
<string>#{var}/minio</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/minio.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/minio.log</string>
</dict>
</plist>
EOS
end
test do
system "#{bin}/minio", "version"
end
end
|
# Homebrew formula for MinIO (object storage server), built from a tagged
# git checkout with release metadata baked into the binary via ldflags.
class Minio < Formula
desc "High Performance, Kubernetes Native Object Storage"
homepage "https://min.io"
url "https://github.com/minio/minio.git",
tag: "RELEASE.2020-08-26T00-00-49Z",
revision: "9acdeab73d7edd154c91481504036fb5209d583f"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20200826000049"
license "Apache-2.0"
head "https://github.com/minio/minio.git"
bottle do
cellar :any_skip_relocation
sha256 "6e338c2139a2b0a4c4e25db6ef4c5060800793e7b224e9d804ec78ad17bfd9a6" => :catalina
sha256 "6af9cde089e1865b2421a8cc744dd2fca589692c5dd6581bfc28410cc68303ad" => :mojave
sha256 "a62c3c38f871f8253b0c90a5a8bc232cbd92764f9fe22564caebbb9ed67b98e3" => :high_sierra
end
depends_on "go" => :build
def install
if build.head?
system "go", "build", *std_go_args
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
commit = `git rev-parse HEAD`.chomp
proj = "github.com/minio/minio"
system "go", "build", *std_go_args, "-ldflags", <<~EOS
-X #{proj}/cmd.Version=#{version}
-X #{proj}/cmd.ReleaseTag=#{release}
-X #{proj}/cmd.CommitID=#{commit}
EOS
end
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
plist_options manual: "minio server"
def plist
# Each launchd key must appear exactly once: the previous revision emitted
# KeepAlive and RunAtLoad twice, producing a malformed plist dictionary.
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/minio</string>
<string>server</string>
<string>--config-dir=#{etc}/minio</string>
<string>--address=:9000</string>
<string>#{var}/minio</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/minio.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/minio.log</string>
</dict>
</plist>
EOS
end
test do
assert_match "minio server - start object storage server",
shell_output("#{bin}/minio server --help 2>&1")
assert_match "minio gateway - start object storage gateway",
shell_output("#{bin}/minio gateway 2>&1")
assert_match "ERROR Unable to validate credentials",
shell_output("#{bin}/minio gateway s3 2>&1", 1)
end
end
minio: update 20200826000049 bottle.
# Homebrew formula for MinIO (object storage server), built from a tagged
# git checkout with release metadata baked into the binary via ldflags.
class Minio < Formula
desc "High Performance, Kubernetes Native Object Storage"
homepage "https://min.io"
url "https://github.com/minio/minio.git",
tag: "RELEASE.2020-08-26T00-00-49Z",
revision: "9acdeab73d7edd154c91481504036fb5209d583f"
# Upstream tags are date stamps, not semver, so pin an explicit version.
version "20200826000049"
license "Apache-2.0"
head "https://github.com/minio/minio.git"
bottle do
cellar :any_skip_relocation
sha256 "58636cfc063ef35189cb3073c825594145acbd898644dde2deeffbbbc979b507" => :catalina
sha256 "d493c582b6995a73efc96af382766f0e90f7176c5c94b423c65e8702b3e6c429" => :mojave
sha256 "cdf8416dd866e5dc132dc8610c10498b146a83f536e96db690b0778f08909f1b" => :high_sierra
end
depends_on "go" => :build
def install
if build.head?
system "go", "build", *std_go_args
else
# Derive release/version/commit from the tagged checkout for -ldflags.
release = `git tag --points-at HEAD`.chomp
version = release.gsub(/RELEASE\./, "").chomp.gsub(/T(\d+)-(\d+)-(\d+)Z/, 'T\1:\2:\3Z')
commit = `git rev-parse HEAD`.chomp
proj = "github.com/minio/minio"
system "go", "build", *std_go_args, "-ldflags", <<~EOS
-X #{proj}/cmd.Version=#{version}
-X #{proj}/cmd.ReleaseTag=#{release}
-X #{proj}/cmd.CommitID=#{commit}
EOS
end
end
def post_install
(var/"minio").mkpath
(etc/"minio").mkpath
end
plist_options manual: "minio server"
def plist
# Each launchd key must appear exactly once: the previous revision emitted
# KeepAlive and RunAtLoad twice, producing a malformed plist dictionary.
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/minio</string>
<string>server</string>
<string>--config-dir=#{etc}/minio</string>
<string>--address=:9000</string>
<string>#{var}/minio</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{HOMEBREW_PREFIX}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/minio.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/minio.log</string>
</dict>
</plist>
EOS
end
test do
assert_match "minio server - start object storage server",
shell_output("#{bin}/minio server --help 2>&1")
assert_match "minio gateway - start object storage gateway",
shell_output("#{bin}/minio gateway 2>&1")
assert_match "ERROR Unable to validate credentials",
shell_output("#{bin}/minio gateway s3 2>&1", 1)
end
end
|
# Homebrew formula for mmctl, the Mattermost server remote CLI (Go build).
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
url "https://github.com/mattermost/mmctl.git",
tag: "v6.1.0",
revision: "2097bbf9996308534db3a77e79d7600252066476"
license "Apache-2.0"
head "https://github.com/mattermost/mmctl.git", branch: "master"
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "e6bf894872872c70af9f8a65ee1d9d2ab1c45ef2ab26b22057a40cda5bb2b3df"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "63b3c764ba7c57d75b5ef142b75f266aeeb7a4b1f372b16f06579e0fd13cca9f"
sha256 cellar: :any_skip_relocation, monterey: "3dced8034513a782ec50a239c4e02f115cebdf2577a10d8549b256236abaf66f"
sha256 cellar: :any_skip_relocation, big_sur: "e1b297a593c96dd42f75e03396fd24293e6ba77067851560f3fa21ab700f1014"
sha256 cellar: :any_skip_relocation, catalina: "6b8e26f417984114729de87ad39fd8c2e156d8e28ed36739d1497e91985fc565"
sha256 cellar: :any_skip_relocation, x86_64_linux: "6ec9ca96cd33f6e58310458b02dfeb038ee9facffbacb2db75a03ba419ddb628"
end
depends_on "go" => :build
def install
# Embed the checked-out commit hash into the binary; build from vendored deps.
ldflags = "-s -w -X github.com/mattermost/mmctl/commands.BuildHash=#{Utils.git_head}"
system "go", "build", *std_go_args(ldflags: ldflags), "-mod=vendor"
# Install shell completions
output = Utils.safe_popen_read(bin/"mmctl", "completion", "bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read(bin/"mmctl", "completion", "zsh")
(zsh_completion/"_mmctl").write output
end
test do
output = pipe_output("#{bin}/mmctl help 2>&1")
refute_match(/.*No such file or directory.*/, output)
refute_match(/.*command not found.*/, output)
assert_match(/.*mmctl \[command\].*/, output)
end
end
mmctl 6.2.0
Closes #89996.
Signed-off-by: Carlo Cabrera <3ffc397d0e4bded29cb84b56167de54c01e3a55b@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for mmctl, the Mattermost server remote CLI (Go build).
# NOTE(review): these bottle checksums are identical to the previous v6.1.0
# revision — likely stale until BrewTestBot rebuilds; verify after bottling.
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
url "https://github.com/mattermost/mmctl.git",
tag: "v6.2.0",
revision: "025ed3414054b39ccca681dd498e3021741ae1e6"
license "Apache-2.0"
head "https://github.com/mattermost/mmctl.git", branch: "master"
livecheck do
url :stable
strategy :github_latest
end
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "e6bf894872872c70af9f8a65ee1d9d2ab1c45ef2ab26b22057a40cda5bb2b3df"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "63b3c764ba7c57d75b5ef142b75f266aeeb7a4b1f372b16f06579e0fd13cca9f"
sha256 cellar: :any_skip_relocation, monterey: "3dced8034513a782ec50a239c4e02f115cebdf2577a10d8549b256236abaf66f"
sha256 cellar: :any_skip_relocation, big_sur: "e1b297a593c96dd42f75e03396fd24293e6ba77067851560f3fa21ab700f1014"
sha256 cellar: :any_skip_relocation, catalina: "6b8e26f417984114729de87ad39fd8c2e156d8e28ed36739d1497e91985fc565"
sha256 cellar: :any_skip_relocation, x86_64_linux: "6ec9ca96cd33f6e58310458b02dfeb038ee9facffbacb2db75a03ba419ddb628"
end
depends_on "go" => :build
def install
# Embed the checked-out commit hash into the binary; build from vendored deps.
ldflags = "-s -w -X github.com/mattermost/mmctl/commands.BuildHash=#{Utils.git_head}"
system "go", "build", *std_go_args(ldflags: ldflags), "-mod=vendor"
# Install shell completions
output = Utils.safe_popen_read(bin/"mmctl", "completion", "bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read(bin/"mmctl", "completion", "zsh")
(zsh_completion/"_mmctl").write output
end
test do
output = pipe_output("#{bin}/mmctl help 2>&1")
refute_match(/.*No such file or directory.*/, output)
refute_match(/.*command not found.*/, output)
assert_match(/.*mmctl \[command\].*/, output)
end
end
|
# Homebrew formula for mmctl, the Mattermost server remote CLI (Go build).
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
url "https://github.com/mattermost/mmctl.git",
tag: "v5.29.0",
revision: "fc2ffd482f5062fef373444f3878252a33367683"
license "Apache-2.0"
head "https://github.com/mattermost/mmctl.git"
bottle do
cellar :any_skip_relocation
sha256 "e8286b3dca1fc493717c788e4b4c826966ad54bb09b46d2138cc0f1086a0797c" => :catalina
sha256 "865ba6d1a37ce690118b408c001310b31ee20f7d86e0c0cf5c2862f819512714" => :mojave
sha256 "bb55ff1ce984bab1079145f84fb1cd5c37ffe29538451f582ee9eaba9571569d" => :high_sierra
end
depends_on "go" => :build
def install
ENV["GOBIN"] = buildpath/bin
ENV["ADVANCED_VET"] = "FALSE"
# Bake the checked-out commit and version into the build via the Makefile.
ENV["BUILD_HASH"] = Utils.safe_popen_read("git", "rev-parse", "HEAD").chomp
ENV["BUILD_VERSION"] = version.to_s
(buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
cd "src/github.com/mattermost/mmctl" do
system "make", "install"
# Install the zsh and bash completions
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "zsh")
(zsh_completion/"_mmctl").write output
end
end
test do
output = pipe_output("#{bin}/mmctl help 2>&1")
# refute_match replaces minitest's deprecated assert_no_match and, with
# parentheses, avoids Ruby's "ambiguous first argument" regexp warning.
refute_match(/.*No such file or directory.*/, output)
refute_match(/.*command not found.*/, output)
assert_match(/.*mmctl \[command\].*/, output)
end
end
mmctl: update 5.29.0 bottle.
# Homebrew formula for mmctl, the Mattermost server remote CLI (Go build).
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
url "https://github.com/mattermost/mmctl.git",
tag: "v5.29.0",
revision: "fc2ffd482f5062fef373444f3878252a33367683"
license "Apache-2.0"
head "https://github.com/mattermost/mmctl.git"
bottle do
cellar :any_skip_relocation
sha256 "e8286b3dca1fc493717c788e4b4c826966ad54bb09b46d2138cc0f1086a0797c" => :catalina
sha256 "865ba6d1a37ce690118b408c001310b31ee20f7d86e0c0cf5c2862f819512714" => :mojave
sha256 "bb55ff1ce984bab1079145f84fb1cd5c37ffe29538451f582ee9eaba9571569d" => :high_sierra
sha256 "0c509cde727b111e9614d2c48a97e2e2585042ac87b9a8636a245a159a1279b3" => :x86_64_linux
end
depends_on "go" => :build
def install
ENV["GOBIN"] = buildpath/bin
ENV["ADVANCED_VET"] = "FALSE"
# Bake the checked-out commit and version into the build via the Makefile.
ENV["BUILD_HASH"] = Utils.safe_popen_read("git", "rev-parse", "HEAD").chomp
ENV["BUILD_VERSION"] = version.to_s
(buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
cd "src/github.com/mattermost/mmctl" do
system "make", "install"
# Install the zsh and bash completions
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "zsh")
(zsh_completion/"_mmctl").write output
end
end
test do
output = pipe_output("#{bin}/mmctl help 2>&1")
# refute_match replaces minitest's deprecated assert_no_match and, with
# parentheses, avoids Ruby's "ambiguous first argument" regexp warning.
refute_match(/.*No such file or directory.*/, output)
refute_match(/.*command not found.*/, output)
assert_match(/.*mmctl \[command\].*/, output)
end
end
|
# Homebrew formula for mmctl, the Mattermost server remote CLI (Go build).
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
url "https://github.com/mattermost/mmctl.git",
:tag => "v5.24",
:revision => "2aba70fdf7ba551b0c21019abd7da33f844ea61e"
head "https://github.com/mattermost/mmctl.git"
bottle do
cellar :any_skip_relocation
sha256 "fcaa45d467dfe23b1416194abf9f925a2d0ba8f964ede6a9a5f3158e321261e8" => :catalina
sha256 "b26a63c5ff5f6b4c77cbfd9e8ea19b7891db87a3181771191062bb53a66e8286" => :mojave
sha256 "e7336db2804579040d3fb43e2afeecb1bcd3e4f500b7d3fe47a98612d96ffbde" => :high_sierra
end
depends_on "go" => :build
def install
ENV["GOBIN"] = buildpath/bin
ENV["ADVANCED_VET"] = "FALSE"
# Pass the command and its arguments separately: safe_popen_read does not
# invoke a shell, so a single "git rev-parse HEAD" string would be treated
# as one executable name and fail to spawn.
ENV["BUILD_HASH"] = Utils.safe_popen_read("git", "rev-parse", "HEAD").chomp
ENV["BUILD_VERSION"] = version.to_s
(buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
cd "src/github.com/mattermost/mmctl" do
system "make", "install"
# Install the zsh and bash completions
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read("#{bin}/mmctl", "completion", "zsh")
(zsh_completion/"_mmctl").write output
end
end
test do
output = pipe_output("#{bin}/mmctl help 2>&1")
# refute_match replaces minitest's deprecated assert_no_match and, with
# parentheses, avoids Ruby's "ambiguous first argument" regexp warning.
refute_match(/.*No such file or directory.*/, output)
refute_match(/.*command not found.*/, output)
assert_match(/.*mmctl \[command\].*/, output)
end
end
mmctl 5.25.0
Closes #57376.
Signed-off-by: chenrui <5fd29470147430022ff146db88de16ee91dea376@gmail.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for mmctl 5.25.0: remote CLI tool for a Mattermost server,
# built from a pinned git tag/revision with Go. (Version bump of the 5.24
# formula; bottle hashes still reference the previous build until rebuilt.)
class Mmctl < Formula
desc "Remote CLI tool for Mattermost server"
homepage "https://github.com/mattermost/mmctl"
# Both tag and revision are pinned so the checkout is reproducible.
url "https://github.com/mattermost/mmctl.git",
:tag => "v5.25.0",
:revision => "c81bdf8d687f76c5da0205f8f612dc7f48969ca0"
head "https://github.com/mattermost/mmctl.git"
# Pre-built binary packages, one per supported macOS release.
bottle do
cellar :any_skip_relocation
sha256 "fcaa45d467dfe23b1416194abf9f925a2d0ba8f964ede6a9a5f3158e321261e8" => :catalina
sha256 "b26a63c5ff5f6b4c77cbfd9e8ea19b7891db87a3181771191062bb53a66e8286" => :mojave
sha256 "e7336db2804579040d3fb43e2afeecb1bcd3e4f500b7d3fe47a98612d96ffbde" => :high_sierra
end
depends_on "go" => :build
def install
# Build-time environment consumed by the project's Makefile; BUILD_HASH and
# BUILD_VERSION are presumably embedded as version metadata in the binary --
# confirm against the upstream Makefile.
ENV["GOBIN"] = buildpath/bin
ENV["ADVANCED_VET"] = "FALSE"
ENV["BUILD_HASH"] = Utils.safe_popen_read("git rev-parse HEAD").chomp
ENV["BUILD_VERSION"] = version.to_s
# Recreate a GOPATH-style src/ layout, which the build expects.
(buildpath/"src/github.com/mattermost/mmctl").install buildpath.children
cd "src/github.com/mattermost/mmctl" do
system "make", "install"
# Install the zsh and bash completions
output = Utils.safe_popen_read("#{bin}/mmctl completion bash")
(bash_completion/"mmctl").write output
output = Utils.safe_popen_read("#{bin}/mmctl completion zsh")
(zsh_completion/"_mmctl").write output
end
end
test do
# `mmctl help` must print the usage banner; the negative matches guard
# against a missing or unresolvable binary.
output = pipe_output("#{bin}/mmctl help 2>&1")
assert_no_match /.*No such file or directory.*/, output
assert_no_match /.*command not found.*/, output
assert_match /.*mmctl \[command\].*/, output
end
end
|
# Homebrew formula for morse 2.5: Morse-code trainer / QSO generator,
# built against X11 (DEVICE=X11 make target).
class Morse < Formula
desc "Morse-code training program and QSO generator"
homepage "http://www.catb.org/~esr/morse/"
url "http://www.catb.org/~esr/morse/morse-2.5.tar.gz"
sha256 "476d1e8e95bb173b1aadc755db18f7e7a73eda35426944e1abd57c20307d4987"
# Pre-built binary packages, one per supported macOS release.
bottle do
cellar :any_skip_relocation
sha256 "5fe911c0c4d71783759f9d8c4e6269c873a830d0511e0813edf7ec86f3c7f62f" => :mojave
sha256 "fb58a8af73002f98fe7ff1274c1712eb4bf0cab8b08640d2836fc6951c5cb2e9" => :high_sierra
sha256 "d779902b961e9ebbfa41b0906d8d41357232fd4da83a393e112cde87f5bcdcaa" => :sierra
sha256 "491a1ea5455d058af9adb607e0e49d95b94e52f0068cd5fb197c1ea71666b524" => :el_capitan
sha256 "c89c45cdc2ff59d6ac327188c484659c769fe94a07e5e1f38f4d568f0b1a943d" => :yosemite
end
depends_on :x11
def install
# DEVICE=X11 selects the X11 display backend for the build.
system "make", "all", "DEVICE=X11"
bin.install "morse"
man1.install "morse.1"
end
end
morse: update 2.5 bottle.
# Homebrew formula for morse 2.5 (bottle update adding a Catalina build):
# Morse-code trainer / QSO generator, built against X11.
class Morse < Formula
desc "Morse-code training program and QSO generator"
homepage "http://www.catb.org/~esr/morse/"
url "http://www.catb.org/~esr/morse/morse-2.5.tar.gz"
sha256 "476d1e8e95bb173b1aadc755db18f7e7a73eda35426944e1abd57c20307d4987"
# Pre-built binary packages, one per supported macOS release.
bottle do
cellar :any_skip_relocation
sha256 "b97853e5c7a071e81c9fc7845f9467ebe00fab07fd0c738532230d3463d1826b" => :catalina
sha256 "5fe911c0c4d71783759f9d8c4e6269c873a830d0511e0813edf7ec86f3c7f62f" => :mojave
sha256 "fb58a8af73002f98fe7ff1274c1712eb4bf0cab8b08640d2836fc6951c5cb2e9" => :high_sierra
sha256 "d779902b961e9ebbfa41b0906d8d41357232fd4da83a393e112cde87f5bcdcaa" => :sierra
sha256 "491a1ea5455d058af9adb607e0e49d95b94e52f0068cd5fb197c1ea71666b524" => :el_capitan
sha256 "c89c45cdc2ff59d6ac327188c484659c769fe94a07e5e1f38f4d568f0b1a943d" => :yosemite
end
depends_on :x11
def install
# DEVICE=X11 selects the X11 display backend for the build.
system "make", "all", "DEVICE=X11"
bin.install "morse"
man1.install "morse.1"
end
end
|
# Homebrew formula for MPICH 3.4.2, an implementation of the MPI standard.
# Builds Fortran bindings with Homebrew GCC's gfortran; on Linux it uses
# libfabric for the ch4 device. On arm64 an inline patch (the DATA section
# after __END__) removes the real128/complex128 f08 bindings that gfortran
# cannot compile there.
class Mpich < Formula
desc "Implementation of the MPI Message Passing Interface standard"
homepage "https://www.mpich.org/"
url "https://www.mpich.org/static/downloads/3.4.2/mpich-3.4.2.tar.gz"
mirror "https://fossies.org/linux/misc/mpich-3.4.2.tar.gz"
sha256 "5c19bea8b84e8d74cca5f047e82b147ff3fba096144270e3911ad623d6c587bf"
license "mpich2"
# Version discovery for `brew livecheck`: scrape the downloads index.
livecheck do
url "https://www.mpich.org/static/downloads/"
regex(%r{href=.*?v?(\d+(?:\.\d+)+)/?["' >]}i)
end
# Pre-built binary packages per platform/OS release.
bottle do
sha256 cellar: :any, arm64_monterey: "693fc352b5ab2f761fcc047c07e0f37b13d5a047783eb7aee2303dd27ba332c6"
sha256 cellar: :any, arm64_big_sur: "8c79cf6b224e90fbe5fbe111cdafb03d1b54f8f4a88c32a157af6d3ea29de7c4"
sha256 cellar: :any, monterey: "25c3d92e834e00f8d079605e366e4fa78328c542f01c052a77e381ce8c300526"
sha256 cellar: :any, big_sur: "23d95423962812214194ca039fdd2d7dcca76626d291e02566989460b748a8df"
sha256 cellar: :any, catalina: "3d5324ae766872fb9c2d6229ea97f49930c6a8239e38cc89bf5fb6a8f71d88d4"
sha256 cellar: :any, mojave: "80d73225cb6e7242190049addb88898f920991a637f6dd7af07f13ffa28095fd"
sha256 cellar: :any_skip_relocation, x86_64_linux: "36cc3d6dc61cf19eeea37e07ce456200353b034c89952abc89e8999e2c198274"
end
# HEAD builds regenerate the build system, so they need the autotools chain.
head do
url "https://github.com/pmodels/mpich.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "gcc" # for gfortran
depends_on "hwloc"
on_macos do
conflicts_with "libfabric", because: "both install `fabric.h`"
end
on_linux do
# Can't be enabled on mac:
# https://lists.mpich.org/pipermail/discuss/2021-May/006192.html
depends_on "libfabric"
end
conflicts_with "open-mpi", because: "both install MPI compiler wrappers"
if Hardware::CPU.arm?
# gfortran from 10.2.0 on arm64 does not seem to know about real128 and complex128
# the recommended solution by upstream is to comment out the declaration of
# real128 and complex128 in the source code as they do not have the resources
# to update the f08 binding generation script at the moment
# https://lists.mpich.org/pipermail/discuss/2021-March/006167.html
patch :DATA
end
def install
if build.head?
# ensure that the consistent set of autotools built by homebrew is used to
# build MPICH, otherwise very bizarre build errors can occur
ENV["MPICH_AUTOTOOLS_DIR"] = HOMEBREW_PREFIX + "bin"
system "./autogen.sh"
end
# FC/F77 point at the versioned gfortran from the Homebrew gcc dependency.
args = %W[
--disable-dependency-tracking
--enable-fast=all,O3
--enable-g=dbg
--enable-romio
--enable-shared
--with-pm=hydra
FC=gfortran-#{Formula["gcc"].any_installed_version.major}
F77=gfortran-#{Formula["gcc"].any_installed_version.major}
--disable-silent-rules
--prefix=#{prefix}
--mandir=#{man}
]
# Flag for compatibility with GCC 10
# https://lists.mpich.org/pipermail/discuss/2020-January/005863.html
args << "FFLAGS=-fallow-argument-mismatch"
args << "CXXFLAGS=-Wno-deprecated"
args << "CFLAGS=-fgnu89-inline -Wno-deprecated"
if OS.linux?
# Use libfabric https://lists.mpich.org/pipermail/discuss/2021-January/006092.html
args << "--with-device=ch4:ofi"
args << "--with-libfabric=#{Formula["libfabric"].opt_prefix}"
end
system "./configure", *args
system "make"
system "make", "install"
end
# Compile and run C and Fortran MPI hello-world programs, single-process
# and with 4 ranks via mpirun.
test do
(testpath/"hello.c").write <<~EOS
#include <mpi.h>
#include <stdio.h>
int main()
{
int size, rank, nameLen;
char name[MPI_MAX_PROCESSOR_NAME];
MPI_Init(NULL, NULL);
MPI_Comm_size(MPI_COMM_WORLD, &size);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Get_processor_name(name, &nameLen);
printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
MPI_Finalize();
return 0;
}
EOS
system "#{bin}/mpicc", "hello.c", "-o", "hello"
system "./hello"
system "#{bin}/mpirun", "-np", "4", "./hello"
(testpath/"hellof.f90").write <<~EOS
program hello
include 'mpif.h'
integer rank, size, ierror, tag, status(MPI_STATUS_SIZE)
call MPI_INIT(ierror)
call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)
print*, 'node', rank, ': Hello Fortran world'
call MPI_FINALIZE(ierror)
end
EOS
system "#{bin}/mpif90", "hellof.f90", "-o", "hellof"
system "./hellof"
system "#{bin}/mpirun", "-np", "4", "./hellof"
end
end
__END__
--- a/src/binding/fortran/use_mpi_f08/mpi_f08_types.f90
+++ b/src/binding/fortran/use_mpi_f08/mpi_f08_types.f90
@@ -248,10 +248,8 @@
module procedure MPI_Sizeof_xint64
module procedure MPI_Sizeof_xreal32
module procedure MPI_Sizeof_xreal64
- module procedure MPI_Sizeof_xreal128
module procedure MPI_Sizeof_xcomplex32
module procedure MPI_Sizeof_xcomplex64
- module procedure MPI_Sizeof_xcomplex128
end interface
private :: MPI_Sizeof_character
@@ -263,10 +261,8 @@
private :: MPI_Sizeof_xint64
private :: MPI_Sizeof_xreal32
private :: MPI_Sizeof_xreal64
-private :: MPI_Sizeof_xreal128
private :: MPI_Sizeof_xcomplex32
private :: MPI_Sizeof_xcomplex64
-private :: MPI_Sizeof_xcomplex128
contains
@@ -350,16 +346,6 @@
ierror = 0
end subroutine MPI_Sizeof_xreal64
-subroutine MPI_Sizeof_xreal128 (x, size, ierror)
- use,intrinsic :: iso_fortran_env, only: real128
- real(real128),dimension(..) :: x
- integer, intent(out) :: size
- integer, optional, intent(out) :: ierror
-
- size = storage_size(x)/8
- ierror = 0
-end subroutine MPI_Sizeof_xreal128
-
subroutine MPI_Sizeof_xcomplex32 (x, size, ierror)
use,intrinsic :: iso_fortran_env, only: real32
complex(real32),dimension(..) :: x
@@ -380,16 +366,6 @@
ierror = 0
end subroutine MPI_Sizeof_xcomplex64
-subroutine MPI_Sizeof_xcomplex128 (x, size, ierror)
- use,intrinsic :: iso_fortran_env, only: real128
- complex(real128),dimension(..) :: x
- integer, intent(out) :: size
- integer, optional, intent(out) :: ierror
-
- size = storage_size(x)/8
- ierror = 0
-end subroutine MPI_Sizeof_xcomplex128
-
subroutine MPI_Status_f2f08(f_status, f08_status, ierror)
integer, intent(in) :: f_status(MPI_STATUS_SIZE)
type(MPI_Status), intent(out) :: f08_status
mpich 3.4.3
Closes #91625.
Signed-off-by: Michka Popoff <5d406f95fb0e0230f83654e4a22d0115cc205d59@users.noreply.github.com>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for MPICH 3.4.3 (version bump of the 3.4.2 formula; bottle
# hashes still reference the previous build until rebuilt). Builds Fortran
# bindings with Homebrew GCC's gfortran; on Linux it uses libfabric for the
# ch4 device. On arm64 an inline patch (the DATA section after __END__)
# removes the real128/complex128 f08 bindings that gfortran cannot compile.
class Mpich < Formula
desc "Implementation of the MPI Message Passing Interface standard"
homepage "https://www.mpich.org/"
url "https://www.mpich.org/static/downloads/3.4.3/mpich-3.4.3.tar.gz"
mirror "https://fossies.org/linux/misc/mpich-3.4.3.tar.gz"
sha256 "8154d89f3051903181018166678018155f4c2b6f04a9bb6fe9515656452c4fd7"
license "mpich2"
# Version discovery for `brew livecheck`: scrape the downloads index.
livecheck do
url "https://www.mpich.org/static/downloads/"
regex(%r{href=.*?v?(\d+(?:\.\d+)+)/?["' >]}i)
end
# Pre-built binary packages per platform/OS release.
bottle do
sha256 cellar: :any, arm64_monterey: "693fc352b5ab2f761fcc047c07e0f37b13d5a047783eb7aee2303dd27ba332c6"
sha256 cellar: :any, arm64_big_sur: "8c79cf6b224e90fbe5fbe111cdafb03d1b54f8f4a88c32a157af6d3ea29de7c4"
sha256 cellar: :any, monterey: "25c3d92e834e00f8d079605e366e4fa78328c542f01c052a77e381ce8c300526"
sha256 cellar: :any, big_sur: "23d95423962812214194ca039fdd2d7dcca76626d291e02566989460b748a8df"
sha256 cellar: :any, catalina: "3d5324ae766872fb9c2d6229ea97f49930c6a8239e38cc89bf5fb6a8f71d88d4"
sha256 cellar: :any, mojave: "80d73225cb6e7242190049addb88898f920991a637f6dd7af07f13ffa28095fd"
sha256 cellar: :any_skip_relocation, x86_64_linux: "36cc3d6dc61cf19eeea37e07ce456200353b034c89952abc89e8999e2c198274"
end
# HEAD builds regenerate the build system, so they need the autotools chain.
head do
url "https://github.com/pmodels/mpich.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "gcc" # for gfortran
depends_on "hwloc"
on_macos do
conflicts_with "libfabric", because: "both install `fabric.h`"
end
on_linux do
# Can't be enabled on mac:
# https://lists.mpich.org/pipermail/discuss/2021-May/006192.html
depends_on "libfabric"
end
conflicts_with "open-mpi", because: "both install MPI compiler wrappers"
if Hardware::CPU.arm?
# gfortran from 10.2.0 on arm64 does not seem to know about real128 and complex128
# the recommended solution by upstream is to comment out the declaration of
# real128 and complex128 in the source code as they do not have the resources
# to update the f08 binding generation script at the moment
# https://lists.mpich.org/pipermail/discuss/2021-March/006167.html
patch :DATA
end
def install
if build.head?
# ensure that the consistent set of autotools built by homebrew is used to
# build MPICH, otherwise very bizarre build errors can occur
ENV["MPICH_AUTOTOOLS_DIR"] = HOMEBREW_PREFIX + "bin"
system "./autogen.sh"
end
# FC/F77 point at the versioned gfortran from the Homebrew gcc dependency.
args = %W[
--disable-dependency-tracking
--enable-fast=all,O3
--enable-g=dbg
--enable-romio
--enable-shared
--with-pm=hydra
FC=gfortran-#{Formula["gcc"].any_installed_version.major}
F77=gfortran-#{Formula["gcc"].any_installed_version.major}
--disable-silent-rules
--prefix=#{prefix}
--mandir=#{man}
]
# Flag for compatibility with GCC 10
# https://lists.mpich.org/pipermail/discuss/2020-January/005863.html
args << "FFLAGS=-fallow-argument-mismatch"
args << "CXXFLAGS=-Wno-deprecated"
args << "CFLAGS=-fgnu89-inline -Wno-deprecated"
if OS.linux?
# Use libfabric https://lists.mpich.org/pipermail/discuss/2021-January/006092.html
args << "--with-device=ch4:ofi"
args << "--with-libfabric=#{Formula["libfabric"].opt_prefix}"
end
system "./configure", *args
system "make"
system "make", "install"
end
# Compile and run C and Fortran MPI hello-world programs, single-process
# and with 4 ranks via mpirun.
test do
(testpath/"hello.c").write <<~EOS
#include <mpi.h>
#include <stdio.h>
int main()
{
int size, rank, nameLen;
char name[MPI_MAX_PROCESSOR_NAME];
MPI_Init(NULL, NULL);
MPI_Comm_size(MPI_COMM_WORLD, &size);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
MPI_Get_processor_name(name, &nameLen);
printf("[%d/%d] Hello, world! My name is %s.\\n", rank, size, name);
MPI_Finalize();
return 0;
}
EOS
system "#{bin}/mpicc", "hello.c", "-o", "hello"
system "./hello"
system "#{bin}/mpirun", "-np", "4", "./hello"
(testpath/"hellof.f90").write <<~EOS
program hello
include 'mpif.h'
integer rank, size, ierror, tag, status(MPI_STATUS_SIZE)
call MPI_INIT(ierror)
call MPI_COMM_SIZE(MPI_COMM_WORLD, size, ierror)
call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierror)
print*, 'node', rank, ': Hello Fortran world'
call MPI_FINALIZE(ierror)
end
EOS
system "#{bin}/mpif90", "hellof.f90", "-o", "hellof"
system "./hellof"
system "#{bin}/mpirun", "-np", "4", "./hellof"
end
end
__END__
--- a/src/binding/fortran/use_mpi_f08/mpi_f08_types.f90
+++ b/src/binding/fortran/use_mpi_f08/mpi_f08_types.f90
@@ -248,10 +248,8 @@
module procedure MPI_Sizeof_xint64
module procedure MPI_Sizeof_xreal32
module procedure MPI_Sizeof_xreal64
- module procedure MPI_Sizeof_xreal128
module procedure MPI_Sizeof_xcomplex32
module procedure MPI_Sizeof_xcomplex64
- module procedure MPI_Sizeof_xcomplex128
end interface
private :: MPI_Sizeof_character
@@ -263,10 +261,8 @@
private :: MPI_Sizeof_xint64
private :: MPI_Sizeof_xreal32
private :: MPI_Sizeof_xreal64
-private :: MPI_Sizeof_xreal128
private :: MPI_Sizeof_xcomplex32
private :: MPI_Sizeof_xcomplex64
-private :: MPI_Sizeof_xcomplex128
contains
@@ -350,16 +346,6 @@
ierror = 0
end subroutine MPI_Sizeof_xreal64
-subroutine MPI_Sizeof_xreal128 (x, size, ierror)
- use,intrinsic :: iso_fortran_env, only: real128
- real(real128),dimension(..) :: x
- integer, intent(out) :: size
- integer, optional, intent(out) :: ierror
-
- size = storage_size(x)/8
- ierror = 0
-end subroutine MPI_Sizeof_xreal128
-
subroutine MPI_Sizeof_xcomplex32 (x, size, ierror)
use,intrinsic :: iso_fortran_env, only: real32
complex(real32),dimension(..) :: x
@@ -380,16 +366,6 @@
ierror = 0
end subroutine MPI_Sizeof_xcomplex64
-subroutine MPI_Sizeof_xcomplex128 (x, size, ierror)
- use,intrinsic :: iso_fortran_env, only: real128
- complex(real128),dimension(..) :: x
- integer, intent(out) :: size
- integer, optional, intent(out) :: ierror
-
- size = storage_size(x)/8
- ierror = 0
-end subroutine MPI_Sizeof_xcomplex128
-
subroutine MPI_Status_f2f08(f_status, f08_status, ierror)
integer, intent(in) :: f_status(MPI_STATUS_SIZE)
type(MPI_Status), intent(out) :: f08_status
|
# Homebrew formula for msmtp 1.8.15: SMTP client usable as a Mutt plugin.
# TLS is provided by gnutls; macOS keychain integration is enabled via
# --with-macosx-keyring.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "https://marlam.de/msmtp/"
url "https://marlam.de/msmtp/releases/msmtp-1.8.15.tar.xz"
sha256 "2265dc639ebf2edf3069fffe0a3bd76749f8b58f4001d5cdeae19873949099ce"
license "GPL-3.0-or-later"
# Version discovery for `brew livecheck`: scrape the download page.
livecheck do
url "https://marlam.de/msmtp/download/"
regex(/href=.*?msmtp[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
# Pre-built binary packages per platform/OS release.
bottle do
sha256 arm64_big_sur: "9e321f6cbb4178c04ead2f89ee357d7a216d886e27e8902780180f7ee583b5e9"
sha256 big_sur: "f2f12ecc517a43485ad6b4de45bba8a3a0434f6e568ff40f0dcd9b0ca0aab7b3"
sha256 catalina: "905c4115c7457ef7a063a94b0eb7f31e5c9713858b75edf711410b39c4c0991e"
sha256 mojave: "beffeb0167849f87a790624c01ab67ad2e007c2c0b0b2e3bd9a7f7522ca1ea29"
sha256 x86_64_linux: "f0419a89b057dea2b2630330ca311e23b729c278b7253807c0841f589f32596d"
end
depends_on "pkg-config" => :build
depends_on "gnutls"
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--with-macosx-keyring
--prefix=#{prefix}
]
system "./configure", *args
system "make", "install"
# Ship the msmtpq queueing helper script alongside the formula's share dir.
(pkgshare/"scripts").install "scripts/msmtpq"
end
test do
# Smoke test: the binary runs and prints its help text.
system bin/"msmtp", "--help"
end
end
msmtp 1.8.16
Closes #85239.
Signed-off-by: rui <907c7afd57be493757f13ccd1dd45dddf02db069@chenrui.dev>
Signed-off-by: BrewTestBot <8a898ee6867e4f2028e63d2a6319b2224641c06c@users.noreply.github.com>
# Homebrew formula for msmtp 1.8.16 (version bump of the 1.8.15 formula;
# bottle hashes still reference the previous build until rebuilt). SMTP
# client usable as a Mutt plugin, with TLS from gnutls.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "https://marlam.de/msmtp/"
url "https://marlam.de/msmtp/releases/msmtp-1.8.16.tar.xz"
sha256 "c04b5d89f3df0dee9772f50197c2602c97c5cdb439b6af539c37bf81b20f47d8"
license "GPL-3.0-or-later"
# Version discovery for `brew livecheck`: scrape the download page.
livecheck do
url "https://marlam.de/msmtp/download/"
regex(/href=.*?msmtp[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
# Pre-built binary packages per platform/OS release.
bottle do
sha256 arm64_big_sur: "9e321f6cbb4178c04ead2f89ee357d7a216d886e27e8902780180f7ee583b5e9"
sha256 big_sur: "f2f12ecc517a43485ad6b4de45bba8a3a0434f6e568ff40f0dcd9b0ca0aab7b3"
sha256 catalina: "905c4115c7457ef7a063a94b0eb7f31e5c9713858b75edf711410b39c4c0991e"
sha256 mojave: "beffeb0167849f87a790624c01ab67ad2e007c2c0b0b2e3bd9a7f7522ca1ea29"
sha256 x86_64_linux: "f0419a89b057dea2b2630330ca311e23b729c278b7253807c0841f589f32596d"
end
depends_on "pkg-config" => :build
depends_on "gnutls"
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--disable-silent-rules
--with-macosx-keyring
--prefix=#{prefix}
]
system "./configure", *args
system "make", "install"
# Ship the msmtpq queueing helper script alongside the formula's share dir.
(pkgshare/"scripts").install "scripts/msmtpq"
end
test do
# Smoke test: the binary runs and prints its help text.
system bin/"msmtp", "--help"
end
end
|
# Homebrew formula for msmtp 1.6.5 (older SourceForge-era version): SMTP
# client usable as a Mutt plugin, built against OpenSSL for TLS, with
# optional GNU SASL support.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "http://msmtp.sourceforge.net"
url "https://downloads.sourceforge.net/project/msmtp/msmtp/1.6.5/msmtp-1.6.5.tar.xz"
sha256 "76a0d60693c7e65d0c7a12f01d300882d280b1e1be0202f54730ae44d44a5006"
# Pre-built binary packages, one per supported macOS release.
bottle do
sha256 "6d1eef02a990fc1355f9d47da7237870d43ce0b5d24cb30a45c15952fdd815c4" => :el_capitan
sha256 "d006ac74d71d76fb5c1881513c8204408c88863f38f37c5d2c1face8c7aeadfd" => :yosemite
sha256 "a87c7d5ee59c48fdb1151cca93acea417db67f17cde2994ad97c2b0ee43722e3" => :mavericks
end
option "with-gsasl", "Use GNU SASL authentication library"
depends_on "pkg-config" => :build
depends_on "openssl"
depends_on "gsasl" => :optional
def install
args = %W[
--disable-dependency-tracking
--with-macosx-keyring
--prefix=#{prefix}
--with-tls=openssl
]
# SASL support only when the user opted in via --with-gsasl.
args << "--with-libsasl" if build.with? "gsasl"
system "./configure", *args
system "make", "install"
# Ship the msmtpq queueing helper script under share/.
(share/"msmtp/scripts").install "scripts/msmtpq"
end
end
msmtp: update 1.6.5 bottle.
# Homebrew formula for msmtp 1.6.5 (bottle rebuild -- only the bottle hashes
# changed): SMTP client usable as a Mutt plugin, built against OpenSSL for
# TLS, with optional GNU SASL support.
class Msmtp < Formula
desc "SMTP client that can be used as an SMTP plugin for Mutt"
homepage "http://msmtp.sourceforge.net"
url "https://downloads.sourceforge.net/project/msmtp/msmtp/1.6.5/msmtp-1.6.5.tar.xz"
sha256 "76a0d60693c7e65d0c7a12f01d300882d280b1e1be0202f54730ae44d44a5006"
# Pre-built binary packages, one per supported macOS release.
bottle do
sha256 "587b8765d86f2b497bbb081ac4532d274e89d9e4ebd0a3917a798c67e921d712" => :el_capitan
sha256 "5605ee05b8922d12d228c271cb3e8df87c2294cccc083501f19ea5316e19e539" => :yosemite
sha256 "fc48e443dfe5c98f3ca5d40e66e05b6def31b06f17927d5743ea9906fc228775" => :mavericks
end
option "with-gsasl", "Use GNU SASL authentication library"
depends_on "pkg-config" => :build
depends_on "openssl"
depends_on "gsasl" => :optional
def install
args = %W[
--disable-dependency-tracking
--with-macosx-keyring
--prefix=#{prefix}
--with-tls=openssl
]
# SASL support only when the user opted in via --with-gsasl.
args << "--with-libsasl" if build.with? "gsasl"
system "./configure", *args
system "make", "install"
# Ship the msmtpq queueing helper script under share/.
(share/"msmtp/scripts").install "scripts/msmtpq"
end
end
|
# Homebrew formula for MuPDF 1.11 (revision 1): lightweight PDF/XPS viewer.
# Stable builds apply three upstream-cherry-picked fixes from the Debian
# packaging tarball.
class Mupdf < Formula
desc "Lightweight PDF and XPS viewer"
homepage "https://mupdf.com/"
revision 1
head "https://git.ghostscript.com/mupdf.git"
stable do
url "https://mupdf.com/downloads/mupdf-1.11-source.tar.gz"
sha256 "209474a80c56a035ce3f4958a63373a96fad75c927c7b1acdc553fc85855f00a"
# Upstream already. Remove on next stable release.
patch do
url "https://mirrors.ocf.berkeley.edu/debian/pool/main/m/mupdf/mupdf_1.11+ds1-1.1.debian.tar.xz"
mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/m/mupdf/mupdf_1.11+ds1-1.1.debian.tar.xz"
sha256 "cb274532e34f818b2f1871fee6303cfffda37251937dd7d731a898b2ca736433"
apply "patches/0003-Fix-698539-Don-t-use-xps-font-if-it-could-not-be-loa.patch",
"patches/0004-Fix-698540-Check-name-comment-and-meta-size-field-si.patch",
"patches/0005-Fix-698558-Handle-non-tags-in-tag-name-comparisons.patch"
end
end
# Pre-built binary packages, one per supported macOS release.
bottle do
cellar :any_skip_relocation
sha256 "f1ed1cfc2a01b0137f1d91c116acadba78051908572e476590e8ba19df045f18" => :high_sierra
sha256 "3ac9b816de24b8eb26593811248333b22cae4804cfdaad4b6f1e71808e0aadb8" => :sierra
sha256 "6641ed8ff85c427a03008439645e36675d3dad30c187ada3278d2c384b9daac0" => :el_capitan
end
depends_on :x11
depends_on "openssl"
conflicts_with "mupdf-tools",
:because => "mupdf and mupdf-tools install the same binaries."
def install
# Work around bug: https://bugs.ghostscript.com/show_bug.cgi?id=697842
inreplace "Makerules", "RANLIB_CMD := xcrun", "RANLIB_CMD = xcrun"
system "make", "install",
"build=release",
"verbose=yes",
"CC=#{ENV.cc}",
"prefix=#{prefix}",
"HAVE_GLFW=no" # Do not build OpenGL viewer: https://bugs.ghostscript.com/show_bug.cgi?id=697842
# mudraw was folded into mutool; keep the old command name as a symlink.
bin.install_symlink "mutool" => "mudraw"
end
test do
# Render the bundled fixture PDF to text and check its content survives.
assert_match "Homebrew test", shell_output("#{bin}/mudraw -F txt #{test_fixtures("test.pdf")}")
end
end
mupdf: patch CVE-2017-15587
# Homebrew formula for MuPDF 1.11 (revision 2): lightweight PDF/XPS viewer.
# This revision switches to the -2 Debian patch tarball and adds an inline
# source fix for CVE-2017-15587 (integer overflow in pdf-xref.c).
class Mupdf < Formula
desc "Lightweight PDF and XPS viewer"
homepage "https://mupdf.com/"
revision 2
head "https://git.ghostscript.com/mupdf.git"
stable do
url "https://mupdf.com/downloads/mupdf-1.11-source.tar.gz"
sha256 "209474a80c56a035ce3f4958a63373a96fad75c927c7b1acdc553fc85855f00a"
# Upstream already. Remove on next stable release.
patch do
url "https://mirrors.ocf.berkeley.edu/debian/pool/main/m/mupdf/mupdf_1.11+ds1-2.debian.tar.xz"
mirror "https://mirrorservice.org/sites/ftp.debian.org/debian/pool/main/m/mupdf/mupdf_1.11+ds1-2.debian.tar.xz"
sha256 "da7445a8063d7c81b97d2c373aa112df69d3ad29989b67621387e88d9c38b668"
apply "patches/0004-Fix-698539-Don-t-use-xps-font-if-it-could-not-be-loa.patch",
"patches/0005-Fix-698540-Check-name-comment-and-meta-size-field-si.patch",
"patches/0006-Fix-698558-Handle-non-tags-in-tag-name-comparisons.patch"
end
end
# Pre-built binary packages, one per supported macOS release.
bottle do
cellar :any_skip_relocation
sha256 "f1ed1cfc2a01b0137f1d91c116acadba78051908572e476590e8ba19df045f18" => :high_sierra
sha256 "3ac9b816de24b8eb26593811248333b22cae4804cfdaad4b6f1e71808e0aadb8" => :sierra
sha256 "6641ed8ff85c427a03008439645e36675d3dad30c187ada3278d2c384b9daac0" => :el_capitan
end
depends_on :x11
depends_on "openssl"
conflicts_with "mupdf-tools",
:because => "mupdf and mupdf-tools install the same binaries."
def install
# Work around bug: https://bugs.ghostscript.com/show_bug.cgi?id=697842
inreplace "Makerules", "RANLIB_CMD := xcrun", "RANLIB_CMD = xcrun"
# We're using an inreplace here because Debian's version of this patch
# breaks when using Clang as a compiler rather than GCC. This fixes
# CVE-2017-15587.
if build.stable?
inreplace "source/pdf/pdf-xref.c", "if (i0 < 0 || i1 < 0)",
"if (i0 < 0 || i1 < 0 || i0 > INT_MAX - i1)"
end
system "make", "install",
"build=release",
"verbose=yes",
"CC=#{ENV.cc}",
"prefix=#{prefix}",
"HAVE_GLFW=no" # Do not build OpenGL viewer: https://bugs.ghostscript.com/show_bug.cgi?id=697842
# mudraw was folded into mutool; keep the old command name as a symlink.
bin.install_symlink "mutool" => "mudraw"
end
test do
# Render the bundled fixture PDF to text and check its content survives.
assert_match "Homebrew test", shell_output("#{bin}/mudraw -F txt #{test_fixtures("test.pdf")}")
end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.