text stringlengths 1 1.05M |
|---|
<filename>zinbeiw.rb<gh_stars>1-10
#!/usr/bin/env ruby
# -*- coding: sjis -*-
# vim: filetype=ruby
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
puts '
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
))wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww((
(( ))
)) ========= ====== ||\\ || ||======== ||========= ====== ((
(( / / || || \\ || || )) || || ))
)) / / || || \\ || || )) || || ((
(( / / || || \\ || ||======== ||======= || ))
)) / / || || \\ || || )) || || ((
(( / / || || \\ || || )) || || ))
)) ========= ====== || \\|| ||======== ||========= ====== ((
(( ))
))wwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwwww((
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
'
# Grab-bag of demo commands: interpreter version info, Fibonacci numbers,
# FizzBuzz and a jellyfish doodle. Every method writes to stdout.
class Emerald
  attr_accessor :fibonacci, :fizzbizz, :version, :kurage

  # Prints the running Ruby interpreter's version string.
  def version
    print 'Ruby‚̃o[ƒWƒ‡ƒ“î•ñ‚ðo—Í‚µ‚Ü‚·!'
    print RUBY_VERSION
    puts ''
  end

  # Prints the Fibonacci numbers below 50, space separated.
  def fibonacci
    puts 'ƒtƒBƒ{ƒiƒbƒ`”—ñ‚ðo—Í‚µ‚Ü‚·! '
    puts ''
    prev, curr = 1, 1
    print "#{prev} "
    until curr >= 50
      print "#{curr} "
      prev, curr = curr, prev + curr
    end
    puts ''
  end

  # Classic FizzBuzz over 1..15 ("Bizz" spelling kept from the original).
  def fizzbizz
    puts 'FizzBizz–â‘è‚Ìo—ÍŒ‹‰Ê‚Å‚·!'
    puts ''
    1.upto(15) do |n|
      label =
        if (n % 15).zero? then 'FizzBizz'
        elsif (n % 3).zero? then 'Fizz'
        elsif (n % 5).zero? then 'Bizz'
        else n
        end
      print label
      print ' '
    end
    puts ''
  end

  # Draws a jellyfish: a random-length (2..16) run of backticks plus a tail.
  def kurage
    print '`' * (rand(15) + 2)
    puts 'ƒƒRFœc'
    puts ''
  end
end
# Roll a 4-sided die and run one of the demo commands.
demo = Emerald.new
case Random.rand(4)
when 2 then demo.kurage
when 1 then demo.fibonacci
when 0 then demo.fizzbizz
else demo.version
end
puts ''
puts '‚Ü‚¸‚ÍAhelp‚Æ“ü—Í‚µ‚Ä‚‚¾‚³‚¢!ƒRƒ}ƒ“ƒh‚ª‚í‚©‚è‚Ü‚·!'
# search words
begin
str = %w[sessi kashi exit help].map!(&:freeze).freeze
once = %w[find delete remove rename makefolder wordcount NumberGame Encode week].map!(&:freeze).freeze
lucky = %w[loto6 Numbers kuji].map!(&:freeze).freeze
dark = %w[OneRunner Eval].map!(&:freeze).freeze
denki = %w[Anpea Volt Rister].map!(&:freeze).freeze
print '>'
# search words exit!
# Wainting Action code!
while line = ARGF.gets
line.chomp!
case
when line.match(str[0])
  # Fahrenheit -> Celsius (integer arithmetic, as in the original).
  def seru(b)
    (b - 32) * 5 / 9
  end
  # FIX: the original line ended with a stray '.' which chained
  # String#print onto the literal and raised NoMethodError at runtime.
  puts '‰ØŽ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢!'
  print '>'
  f = gets.to_i
  m = seru(f)
  puts "ÛŽ‚ÍA#{m.round}‚Å‚·B"
  print '>'
when line.match(str[1])
def seru(b)
y = (1.8 * b) + 32
return y
end
puts 'ÛŽ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
print '>'
f = gets.to_i
m = seru(f)
puts "‰ØŽ‚ÍA#{m.round}‚Å‚·B"
print '>'
when line.match(str[2])
exit!
when line.match(lucky[0])
# Draws six pseudo-random lottery numbers.
class Loto6
  # Each argument is the upper bound of one draw; the i-th result is a
  # uniform integer in 1..bound. Returns an array of six integers
  # (duplicates are possible, unlike a real Loto 6 draw).
  def sum(a, b, c, d, e, f)
    x = Random.rand(a) + 1
    y = Random.rand(b) + 1
    z = Random.rand(c) + 1
    s = Random.rand(d) + 1
    t = Random.rand(e) + 1
    u = Random.rand(f) + 1 # was `d = ...`, silently shadowing the parameter
    return x, y, z, s, t, u
  end
end
taka = Loto6.new
n = taka.sum(43,43,43,43,43,43)
puts 'Loto6‚ÅŽQl‚É‚·‚锎š‚ðƒ‰ƒ“ƒ_ƒ€‚Å•\ަ‚µ‚Ü‚·I'
puts ''
print n
print ' '
puts ''
puts ''
puts '¦“–‚½‚é‚à”ªŒTA“–‚½‚ç‚Ê‚à”ªŒTBŸ•‰‚ÍŽž‚̉^‚Å‚·B'
print '>'
when line.match(lucky[1])
puts 'ƒiƒ“ƒo[ƒY‚ÅŽQl‚É‚·‚锎š‚ðƒ‰ƒ“ƒ_ƒ€‚Å•\ަ‚µ‚Ü‚·I'
puts ''
# Draws four pseudo-random digits for a Numbers-style lottery.
class Numbers
  # Returns an array of four integers, the i-th uniform in 0...bound.
  def sum(a, b, c, d)
    [a, b, c, d].map { |bound| Random.rand(bound) }
  end
end
taka = Numbers.new
p taka.sum(10,10,10,10)
puts ''
puts '¦“–‚½‚é‚à”ªŒTA“–‚½‚ç‚Ê‚à”ªŒTBŸ•‰‚ÍŽž‚̉^‚Å‚·B'
print '>'
when line.match(lucky[2])
puts '¡“ú‚̉^¨‚ðè‚Á‚Ă݂܂µ‚傤B'
puts '‘å‹g¨’†‹g¨‹g¨¬‹g¨‹¥¨‘å‹¥'
a=rand(16)
case a
when 0..2
puts '‘å‹g'
puts '‚³‚ŸAŠO‚Éo‚ÄŠæ’£‚낤BŽdŽ–‚ÖGo!'
when 3..5
puts '’†‹g'
puts '‚قǂقǂȕ’ÊBƒXƒgƒŒƒX‚ÆŒü‚«‡‚¦!'
when 6..8
puts '‹g'
puts '‚¾‚ñ‚¾‚ñ”æ‚ê‚Ä‚«‚½‚ñ‚¾‚ÈB‚ä‚Á‚‚è‹x‚ß!'
when 9..11
puts '¬‹g'
puts '‚ ‚܂肢‚¢‚±‚Æ‚ª‚È‚¢‚©‚çAƒWƒb‚Æ‚µ‚Ä‚¢‚é‚ÉŒÀ‚éB'
when 12..14
puts '‹¥'
puts 'ƒsƒ“ƒ`‚Ì‚ ‚Ƃ̃`ƒƒƒ“ƒX“ž—ˆB‚ ‚«‚ç‚ß‚é‚ÈB'
when 15..16
puts '‘å‹¥'
puts '¡“ú‚͉½‚©‚Ü‚¸‚¢‹C‚ð‚‚¯‚ë!'
else
puts '”Žš‚ª‚ ‚Ó‚ê‚Ä‚¢‚Ü‚·I’ˆÓI'
end
print '>'
when line.match(dark[0])
begin
puts '
ƒƒ“ƒ‰ƒCƒi[A‚PsƒR[ƒh‚̊قւ悤‚±‚»B‚³‚ŸA‚Í‚¶‚߂܂µ‚傤I'
print '>'
re = gets.chomp
puts ''
eval(re)
puts ''
rescue => et
p et
print '>'
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
puts ''
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(denki[0])
def anpea(a,b)
x = b / a
return x
end
puts
puts '’ïR‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
f = gets.to_i
puts
puts '“dˆ³‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
g = gets.to_i
m = anpea(f,g)
puts
puts "“d—¬‚Í#{m.round}‚Å‚·B"
print '>'
when line.match(denki[1])
def volt(c,d)
y = c * d
return y
end
puts
puts '’ïR‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
h = gets.to_i
puts
puts '“d—¬‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
i = gets.to_i
j = volt(h,i)
puts
puts "“dˆ³‚Í#{j.round}‚Å‚·B"
print '>'
when line.match(denki[2])
def rister(k,l)
z = k / l
return z
end
puts
puts '“d—¬‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
l = gets.to_i
puts
puts '“dˆ³‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
k = gets.to_i
q = rister(k,l)
puts
puts "’ïR‚Í#{q.round}‚Å‚·B"
print '>'
# It is a help document of zinbeic.
when line.match(str[3])
puts 'COMMAND'.center(60,'-')
puts '
sessi ‰ØŽ‚©‚çÛŽ‚Ö•ÏŠ·‚µ‚Ü‚·
kashi ÛŽ‚©‚ç‰ØŽ‚Ö•ÏŠ·‚µ‚Ü‚·
kuji ¡“ú‚̉^¨‚ð—”‚ðŽg‚Á‚Äè‚¢‚Ü‚·
Numbers Numbers‚S‚ÌŒó•â‚Æ‚Ȃ锎š‚ð—”‚ðŽg‚Á‚Äo—Í‚µ‚Ü‚·
loto6 Loto‚U‚ÌŒó•â‚Æ‚Ȃ锎š‚ð—”‚ðŽg‚Á‚Äo—Í‚µ‚Ü‚·
OneRunner ƒƒ“ƒ‰ƒCƒi[‚ÌŠÙ‚ÖˆÄ“à‚µ‚Ü‚·
delete ƒtƒ@ƒCƒ‹‚ðŽw’肵‚Ä휂ł«‚Ü‚·
find ’PŒê‚ð’T‚µ‚Ü‚·
remove ƒtƒHƒ‹ƒ_‚ðÁ‚µ‚Ü‚·
rename ƒtƒ@ƒCƒ‹‚Ì–¼‘O‚ð•Ï‚¦‚Ü‚·
makefolder ƒtƒHƒ‹ƒ_‚ð쬂µ‚Ü‚·
wordcount ‘Îۂ̃tƒ@ƒCƒ‹‘S‘Ì‚ð“‚Å’PŒêƒJƒEƒ“ƒg‚µ‚Ü‚·
NumberGame ”Žš“–‚ăQ[ƒ€‚Å‚·
Encode •¶Žš—ñ‚̃Gƒ“ƒR[ƒh‚ð•]‰¿‚µ‚Ü‚·
Eval eval‘°‚̊قւ悤‚±‚»A‘«‚µŽZ‚Ȃǂªs‚¦‚Ü‚·
Anpea ƒI[ƒ€‚Ì–@‘¥‚ðŽg‚¢A“d—¬‚𒲂ׂ܂·
Volt ƒI[ƒ€‚Ì–@‘¥‚ðŽg‚¢A“dˆ³‚𒲂ׂ܂·
Rister ƒI[ƒ€‚Ì–@‘¥‚ðŽg‚¢A’ïR‚𒲂ׂ܂·
week ‘¾—z—ï‚Å“ú•t‚Ì—j“ú‚ð^‚Æ‹U‚Å”»’肵‚Ü‚·
I—¹‚·‚邯‚«‚ÍAexit‚Æ“ü—Í‚µ‚Ü‚·'
puts 'COMMAND'.center(60,'-')
print '>'
# It is a help document of zinbeic exit!
# The command which can be called is only once. @
when line.match(once[0])
begin
print '’T‚µ‚½‚¢•¶Žš—ñ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
re = gets.chomp
rescue => et
p et
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
retry
end
begin
print '’T‚µ‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
fo = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(fo)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print 'ƒtƒ@ƒCƒ‹–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
filename = gets.chomp
File.open(filename) do |d|
puts ' '
while liner = d.gets
liner.chomp!
if liner.match(re)
printf('%4d s–Ú : %s ',d.lineno,liner)
print 'F Find!(”Œ©!)'
puts ' '
puts ' '
else
end
end
end
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[1])
begin
print 'Á‚µ‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
mo = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(mo)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print 'ƒtƒ@ƒCƒ‹–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
del = readline.chomp!
File.delete(del)
puts '휂µ‚Ü‚µ‚½B'
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[2])
begin
print '휂µ‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
st = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(st)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print '휂µ‚½‚¢ƒtƒHƒ‹ƒ_–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
rem = readline.chomp!
Dir.rmdir(rem)
puts '휂µ‚Ü‚µ‚½B'
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[3])
begin
print '–¼‘O‚ð•Ï‚¦‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
nem = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(nem)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print 'Œ»Ý‚̃tƒ@ƒCƒ‹–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
re = gets.chomp!
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
end
begin
print '•ÏX‚·‚éƒtƒ@ƒCƒ‹–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
no = gets.chomp!
File.rename(re,no)
puts '•ÏX‚µ‚Ü‚µ‚½B'
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[4])
begin
print '쬂µ‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
mk = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(mk)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print 'ƒtƒHƒ‹ƒ_–¼‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
ma = readline.chomp!
Dir.mkdir(ma)
puts '쬂µ‚Ü‚µ‚½B'
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[5])
begin
print '’T‚µ‚½‚¢ƒtƒHƒ‹ƒ_‚̃pƒX‚ðŽw’肵‚Ä‚‚¾‚³‚¢F@'
mik = gets.chomp
rescue => es
p es
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
begin
Dir.chdir(mik)
rescue Errno::EEXIST
Errno::EEXIST
end
begin
print '”‚¦‚½‚¢ƒtƒ@ƒCƒ‹–¼‚ðŠg’£Žq‚ðŠÜ‚ñ‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢F@'
count = Hash.new(0)
#•¶Žš—ñ‚ÌoŒ»•p“x‚𒲂ׂéB
def char_stat(str)
table = Hash.new(0)
str.scan(/./m){|ch|
table[ch] += 1
}
table.to_a.sort_by{|_,con| -con}
end
#’PŒê‚ÌWŒv
filename = gets.chomp
File.open(filename) do |d|
while line == d.gets
wa = line.split
wa.each do |w|
count[w] += 1
end
end
#Œ‹‰Ê‚Ìo—Í
count.sort do |a, b|
a[1] <=> b[1]
end.each do |web, con|
print "#{web.inspect}: "
print "#{con % 100 / wa.length}%"
puts ' '
puts ' '
end
end
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·Bƒtƒ@ƒCƒ‹–¼‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢B\‚Ž'
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[6])
x = Random.rand(100) + 1
numb = 0
numb = numb + 1
puts '‰½‰ñ‚Ü‚ÅANumberGame‚ÅŒJ‚è•Ô‚·‚©Žw’肵‚Ä‚‚¾‚³‚¢'
print '>'
num = gets.chomp.to_i
puts "#{num}‰ñAŒJ‚è•Ô‚·‚܂łɓ–‚ĂĂ‚¾‚³‚¢"
puts
puts '1~100‚Ü‚Å‚Ì”Žš‚ð‚¢‚ê‚Ä‚‚¾‚³‚¢NumberGameƒXƒ^[ƒgI'
print '>'
num.times do |z|
begin
i_num = gets.chomp.to_i
if i_num < z
puts 'ƒRƒ“ƒsƒ…[ƒ^‚Ì‘I‚ñ‚¾”Žš‚Ì•û‚ª‘å‚«‚¢‚½‚ߑ傫‚¢”Žš‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
numb = numb + 1
elsif i_num > z
puts 'ƒRƒ“ƒsƒ…[ƒ^‚Ì‘I‚ñ‚¾”Žš‚Ì•û‚ª¬‚³‚¢‚½‚߬‚³‚¢”Žš‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
numb = numb + 1
else i_num == x
puts
puts '‚¨‚߂łƂ¤‚²‚´‚¢‚Ü‚·A³‰ð‚Å‚·'
puts
puts "#{numb}‰ñ‚ų‰ð‚µ‚Ü‚µ‚½"
puts
break
end
if i == (num - 1)
puts
puts '³‰ð‚Í' + x.to_s + '‚Å‚µ‚½'
puts
puts 'Žc”OA‚à‚¤ˆê“xƒ`ƒƒƒŒƒ“ƒWI'
puts
end
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·A”Žš‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢\‚Ž'
retry
end
end
puts '‚¨”æ‚ꂳ‚܂łµ‚½B'
print '>'
when line.match(once[7])
puts
class Enco
begin
puts '•¶Žš—ñ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
filename = gets.chomp
rescue => ee
p ee
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
puts ''
puts '•ÏX‘O‚̃Gƒ“ƒR[ƒh‚ðŽw’肵‚Ä‚‚¾‚³‚¢'
puts 'ISO-2022-JP,SJIS,eucJP,UTF-8@‚ªŒó•â‚Å‚·B'
print '>'
befenco = gets.chomp
begin
puts ''
puts 'ƒGƒ“ƒR[ƒh‚ðŽw’肵‚Ä‚‚¾‚³‚¢'
puts 'ISO-2022-JP,SJIS,eucJP,UTF-8@‚ªŒó•â‚Å‚·B'
print '>'
aftenco = gets.chomp
puts
rescue => ed
p ed
puts 'ƒGƒ‰[‚Å‚·BƒGƒ“ƒR[ƒfƒBƒ“ƒO‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
ut = Encoding::Converter.new(befenco, aftenco)
p ut.convert(filename)
ut.finish.dump
puts
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
puts
end
print '>'
when line.match(dark[1])
begin
puts 'eval‘°‚̊قւ悤‚±‚»I
ŠÈ’P‚È•¶Žš—ñ‚ð•]‰¿‚µ‚Ä‚‚¾‚³‚¢'
print '>'
re = gets.chomp
puts ''
p eval(re)
puts ''
rescue => et
p et
print '>'
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
puts ''
retry
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
print '>'
when line.match(once[8])
puts ''
puts '“ú•t‚Ì—j“ú‚ðtrue,false‚Å”»’肵‚Ü‚·'
puts '”¼Šp”Žš‚Å“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
puts '‚»‚ê‚Å‚ÍAƒXƒ^[ƒgI'
puts ''
begin
puts '’²‚ׂ½‚¢”N‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
nen = gets.chomp.to_i
rescue => ee
p ee
puts 'ƒGƒ‰[‚Å‚·B”Žš‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
puts ''
begin
puts '’²‚ׂ½‚¢ŒŽ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
mon = gets.chomp.to_i
rescue => ey
p ey
puts 'ƒGƒ‰[‚Å‚·B”Žš‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
puts ''
begin
puts '’²‚ׂ½‚¢“ú‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢'
print '>'
days = gets.chomp.to_i
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·B”Žš‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
end
puts ''
t = Time.local(nen, mon, days)
print'“ú—j“ú‚Å‚·‚©HF'
puts t.sunday?
print 'ŒŽ—j“ú‚Å‚·‚©HF'
puts t.monday?
print '‰Î—j“ú‚Å‚·‚©HF'
puts t.tuesday?
print '…—j“ú‚Å‚·‚©HF'
puts t.wednesday?
print '–Ø—j“ú‚Å‚·‚©HF'
puts t.thursday?
print '‹à—j“ú‚Å‚·‚©HF'
puts t.friday?
print '“y—j“ú‚Å‚·‚©HF'
puts t.saturday?
puts
print '>'
# The command which can be called is only once.
else
puts "Can't Enter this Words. Please input help"
print '>'
end
end
# Wainting Action Code exit!
# Exception throw!
rescue => ex
p ex
puts 'ƒGƒ‰[‚Å‚·B•¶Žš—ñ‚ðŠm”F‚µ‚Ä‚‚¾‚³‚¢I'
retry
print '>'
# Ruby ensure is Java Exception of finally.
ensure
puts 'I—¹‚Í exit ‚ð“ü—Í‚µ‚Ä‚‚¾‚³‚¢I'
end
__END__
|
# -*- coding: utf-8 -*-
"""Handy.Clamp()."""
import gi
gi.require_version(namespace='Gtk', version='3.0')
gi.require_version('Handy', '1')
from gi.repository import Gtk, Gio
from gi.repository import Handy
class MainWindow(Gtk.ApplicationWindow):
    """Demo window showing a Handy.Clamp that wraps a single button."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.set_title(title='Handy.Clamp')
        # Half of a 1366x768 screen.
        self.set_default_size(width=1366 / 2, height=768 / 2)
        self.set_position(position=Gtk.WindowPosition.CENTER)
        # Relative path — raises GLib.Error if the icon file is missing.
        self.set_default_icon_from_file(filename='../../assets/icons/icon.png')
        hdy_clamp = Handy.Clamp.new()
        self.add(widget=hdy_clamp)
        button = Gtk.Button.new_with_label(label='Botão')
        hdy_clamp.add(widget=button)
        self.show_all()

    def on_headerbar_squeezer_notify(self):
        # NOTE(review): appears unused — no squeezer/notify signal is
        # connected anywhere in this file; confirm before removing.
        print('aqui')
class Application(Gtk.Application):
    """Single-instance GTK application that presents one MainWindow."""

    def __init__(self):
        super().__init__(application_id='br.natorsc.Exemplo',
                         flags=Gio.ApplicationFlags.FLAGS_NONE)

    def do_startup(self):
        Gtk.Application.do_startup(self)

    def do_activate(self):
        # Reuse the existing window when the app is activated again.
        win = self.props.active_window
        if not win:
            win = MainWindow(application=self)
        win.present()

    def do_shutdown(self):
        Gtk.Application.do_shutdown(self)
if __name__ == '__main__':
    import sys
    app = Application()
    # Enters the GTK main loop; blocks until the application quits.
    app.run(sys.argv)
|
/* Small parts were taken from Mesa's glext.h and gl.h, here's the license: */
/*
* Mesa 3-D graphics library
* Version: 6.5.1
*
* Copyright (C) 1999-2006 <NAME> All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* <NAME> BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/* Some parts derived from files copyright (c) 2001-2002 <NAME> under this license: */
/* ----------------------------------------------------------------------------
Copyright (c) 2002, <NAME>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* The name of the author may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
------------------------------------------------------------------------------*/
/*
GL_draw_range_elements support added by <NAME>
<NAME> contact information:
<EMAIL>
http://www.uni-karlsruhe.de/~uli2/
*/
#ifndef __EXTGL_H__
#define __EXTGL_H__
#include <jni.h>
#include <string.h>
#include <stddef.h>
#include "common_tools.h"
#include <EGL/egl.h>
//#include <GLES2/gl2.h>
#include <GLES2/gl2platform.h>
#include "extgl_types.h"
/* initializes everything, call this right after the rc is created. the function returns true if successful */
extern bool extgl_Open(JNIEnv *env);
/* Tears down whatever extgl_Open set up. */
extern void extgl_Close(void);
/* Registers num_functions native methods on clazz.
   NOTE(review): exact semantics inferred from names — confirm against extgl.c. */
extern void extgl_InitializeClass(JNIEnv *env, jclass clazz, int num_functions, JavaMethodAndExtFunction *functions);
/* Resolves num_functions extension function pointers; presumably returns false on any failed lookup — confirm. */
extern bool extgl_InitializeFunctions(int num_functions, ExtFunction *functions);
/* Presumably tests whether name occurs in the extensions string — confirm. */
extern bool extgl_QueryExtension(const char *extensions, const char *name);
/* Looks up a GL/EGL entry point by name. */
extern void *extgl_GetProcAddress(const char *name);
#endif /* __EXTGL_H__ */
|
<reponame>softls/FogFrame-2.0
package at.ac.tuwien.infosys.reasoner.resourceProvisioning.impl;
import at.ac.tuwien.infosys.communication.impl.CommunicationService;
import at.ac.tuwien.infosys.database.impl.DatabaseService;
import at.ac.tuwien.infosys.model.*;
import at.ac.tuwien.infosys.model.exception.ResourceProvisioningException;
import at.ac.tuwien.infosys.reasoner.resourceProvisioning.IResourceProvisioning;
import at.ac.tuwien.infosys.util.Constants;
import at.ac.tuwien.infosys.watchdog.WatchdogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Primary;
import org.springframework.stereotype.Service;
import java.util.*;
/**
* Created by <NAME> on 02/11/2016.
* Modified for multiple colonies <NAME>
*/
/**
 * First-fit resource provisioning strategy.
 * <p>
 * Task requests and fog devices are both sorted by service type; each request
 * is deployed on the first device that offers the matching service type,
 * still satisfies the watchdog utilization rules and has not reached
 * {@code Constants.MAX_CONTAINERS}. Requests that remain open are retried for
 * up to {@code Constants.PROVISIONING_ROUNDS} extra rounds.
 */
@Primary
@Service
@Slf4j
public class ResourceProvisioningFirstFit implements IResourceProvisioning {
    @Autowired
    private CommunicationService commService;
    @Autowired
    private DatabaseService dbService;
    @Autowired
    private WatchdogService watchdogService;
    /** Prefix used on every log line emitted by this component. */
    private String logstarter = "--- RESPROV: ";

    /**
     * Assigns the given task requests to the child devices (and to this node
     * itself when it offers service types) using a first-fit heuristic.
     *
     * @param children candidate fog devices
     * @param requests tasks to deploy
     * @return the assignment, including any requests that stayed open
     * @throws ResourceProvisioningException wrapping any failure; carries the
     *         partially filled assignment
     */
    public ApplicationAssignment handleTaskRequests(Set<Fogdevice> children, Set<TaskRequest> requests)
            throws ResourceProvisioningException {
        ApplicationAssignment ass = new ApplicationAssignment();
        try {
            List<TaskAssignment> taskAssignments = new ArrayList<TaskAssignment>();
            Fogdevice thisFN = dbService.getDeviceInformation();
            // 1. sort requests according to service-type (Comparable)
            ArrayList<TaskRequest> sortedRequests = new ArrayList<TaskRequest>(requests);
            Collections.sort(sortedRequests);
            Iterator<TaskRequest> requestIt = sortedRequests.iterator();
            log.info(logstarter + "sorted task requests: " + sortedRequests);
            // 2. sort devices according to service-type; this node only takes
            // part when it offers at least one service type itself
            ArrayList<Fogdevice> sortedDevices = new ArrayList<Fogdevice>(children);
            if (thisFN.getServiceTypes().size() > 0) {
                sortedDevices.add(thisFN);
            }
            Collections.sort(sortedDevices);
            Iterator<Fogdevice> deviceIt = sortedDevices.iterator();
            log.info(logstarter + "sorted devices by service type: " + sortedDevices.toString());
            long startTime = System.currentTimeMillis();
            long computTime = 0;
            int round = 0;
            // 3. assign requests to devices, first fit
            while (deviceIt.hasNext()) {
                Fogdevice fd = deviceIt.next();
                // fix: removed an unused local copy of fd.getServiceTypes()
                log.info(logstarter + "------- device: " + fd.getIp() + " with types:" + fd.getServiceTypes() + " -------");
                for (String deviceServiceType : fd.getServiceTypes()) {
                    requestIt = sortedRequests.iterator();
                    while (requestIt.hasNext()) {
                        TaskRequest req = requestIt.next();
                        String reqType = req.getServiceType();
                        if (deviceServiceType.equals(reqType)) {
                            // Poll until the device reports plausible (non-zero)
                            // utilization figures.
                            // NOTE(review): this busy-waits forever if the device
                            // never reports — consider bounding with a timeout.
                            Utilization u = null;
                            do {
                                u = commService.getChildUtilization(fd);
                                if (u == null || u.getStorage() == 0 || u.getCpu() == 0 || u.getRam() == 0)
                                    Thread.sleep(10);
                            } while (u == null || u.getStorage() == 0 || u.getCpu() == 0 || u.getRam() == 0);
                            log.info(logstarter + "util of " + fd.getIp() + ": " + u.toString());
                            Set<DockerContainer> deployedContainers = commService.requestDeployedContainers(fd);
                            int containerCount = 0;
                            if (deployedContainers != null) {
                                containerCount = deployedContainers.size();
                            }
                            log.info(logstarter + containerCount + " already deployed containers");
                            if (containerCount >= Constants.MAX_CONTAINERS) {
                                break; // device is full: stop matching requests against it
                            }
                            if (watchdogService.checkRules(u)) {
                                // assign the request to this device
                                log.info(logstarter + "send deployment request to " + fd.getIp() + ": " + req);
                                long intermediateTime = System.currentTimeMillis();
                                computTime = computTime + intermediateTime - startTime;
                                startTime = intermediateTime;
                                DockerContainer container = commService.sendServiceDeploymentRequest(fd, req);
                                TaskAssignment taskAssignment = new TaskAssignment(fd, req, container, false);
                                taskAssignments.add(taskAssignment);
                                // remove so the request cannot be assigned twice
                                requestIt.remove();
                            }
                        }
                    }
                }
                // retry remaining requests for a bounded number of extra rounds
                if (!deviceIt.hasNext() && sortedRequests.size() > 0 && round < Constants.PROVISIONING_ROUNDS) {
                    round++;
                    deviceIt = sortedDevices.iterator();
                    log.info(logstarter + "-------------- ROUND " + (round) + " --------------");
                }
            }
            log.info(logstarter + "Computational time, millis = " + computTime);
            if (sortedRequests.size() > 0) {
                log.info(logstarter + "The following task requests could not be deployed\n" + sortedRequests + "\n----------------------------------------");
            }
            log.info(logstarter + "finished the resource provisioning of the fog tasks");
            ass.setAssignedTasks(taskAssignments);
            ass.setOpenRequests(sortedRequests);
        } catch (Exception e) {
            throw new ResourceProvisioningException("", ass, e);
        }
        return ass;
    }
}
|
#!/bin/bash
# Creates a tree of directories and symlinks
# in the current directory so that any dir
# found at ./dir1/dir2/dir3 etc. (for any depth)
# can also be found at any permutation of those names
# e.g. ./dir3/dir2/dir1 and ./dir2/dir3/dir1 will
# both be symlinks to ./dir1/dir2/dir3
#
# This is useful as a simple tagging system when you're interested in
# files with a specific combination of tags and excluding all others
# returns all dirs (not symlinks) breadth-first
function get_real_dirs {
# Emits every real directory under ., one per line, shallowest first.
# NOTE(review): the space-delimited cut truncates paths containing spaces —
# confirm directory names are space-free.
# http://stackoverflow.com/questions/11703979/sort-files-by-depth-bash
find . -type d | # find all real directories
sed '/^\.$/d; s|^\./||g' | # remove . line and leading ./'s
perl -lne 'print tr:/::, " $_"' | # add / count to each line
sort -n | # sort by / count
cut -d' ' -f2 # cut down to just path
}
# links all other possible permutations to the given dir
# Links all other possible permutations to the given dir.
function canonize { # args: <path of dir to canonize>
    realdir=$1
    # get all locations that should link to realdir
    # (fix: expansions quoted so paths survive word splitting/globbing)
    allpaths=( $(get_permutations "$realdir") )
    for path in "${allpaths[@]}"; do
        make_link "$realdir" "$path"
    done
}
# ouputs all other possible permutations of the given dir
function get_permutations { # args: <path of dir to permute>
# split path into array of names
elements=$( echo $1 | tr "/" "\n" )
# use perl magic to get each permutation
# NOTE(review): relies on an external `permute.pl` being on PATH, and IFS is
# changed here without being restored — affects later word splitting.
IFS=$'\n'
for perm in `permute.pl ${elements[@]}`; do
# permute.pl output separates elements with spaces
# replace with / to form path and return
echo $perm | tr " " "/"
done
}
# makes a link to the first arg at the path given in the second
# creates any dirs in the path which do not already exist, and
# recursively canonizes them to avoid making two dirs with the
# same combination (an inconsistent state)
# Makes a link to the first arg at the path given in the second.
# Creates any missing dirs along the link path and recursively canonizes
# them so no two dirs ever hold the same tag combination.
# (fix: variable expansions quoted so paths survive word splitting/globbing)
function make_link { # args: <path to target> <path to link>
    target=$1
    linkpath=$2
    # nothing to do if the link (or a real dir) already exists there
    if [ ! -e "$linkpath" ]; then
        # split the link path into its components (split on /)
        parts=( ${linkpath//\// } )
        i=1
        # ensure every ancestor dir of linkpath exists (and is canonized)
        while [ "$i" -lt "${#parts[@]}" ]; do
            # generate the path up to i dirs deep
            partialpath=$(echo "${parts[@]:0:$i}" | tr " " "/")
            if [ ! -d "$partialpath" ]; then
                mkdir "$partialpath"
                # canonize the new dir in a subshell. Yay recursion!
                # Terminates: partialpath is strictly shorter than linkpath,
                # and for depth-1 linkpaths this loop never runs.
                ( canonize "$partialpath" )
            fi
            let "i+=1" # increment i
        done
        # prefix one ../ per extra path component to climb back to the base dir
        for j in $( seq 2 "${#parts[@]}" ); do
            target="../$target"
        done
        # all ancestor dirs exist now; create the link itself
        ln -s "$target" "$linkpath"
    fi
}
# canonizes all existing dirs, shallowest first
# Canonizes every existing real directory, shallowest first.
function canonize_all {
    for dir in $(get_real_dirs); do
        canonize $dir
    done
}
canonize_all
|
<reponame>defudef/typexpress
// Barrel module: re-export the shared column definitions from one entry point.
export { default as createdAtColumn } from './CreatedAtColumn';
export { default as pkUuidColumn } from './PkUuidColumn';
export { default as updatedAtColumn } from './UpdatedAtColumn';
|
package org.mvel2;

/**
 * Empty stub of MVEL2's ParserContext — presumably a compile-time
 * placeholder/shim; confirm intent before extending.
 */
public class ParserContext {}
#!/bin/sh
# POSTs the JSON body in curlreq.json to the local voucher endpoint,
# trusting the self-signed server certificate in etc/server.crt.
curl --cacert etc/server.crt --data-binary @curlreq.json https://localhost:8179/requestvoucher
|
<reponame>CaptainM777/thunder
# This file contains the schema for the database.
# Under most circumstances, you shouldn't need to run this file directly.
require 'sequel'
module Schema
  # Opens the SQLite database at ENV['DB_PATH'] and ensures the muted_users
  # table exists; create_table? is a no-op when the table is already there.
  Sequel.sqlite(ENV['DB_PATH']) do |db|
    db.create_table?(:muted_users) do
      primary_key :user_id          # one row per muted user
      String :job_id, :size=>255    # presumably a scheduler job id — confirm
      DateTime :mute_start
      DateTime :mute_end
      String :reason, :size=>255
    end
  end
end
from traits.api import Any, HasTraits
class Person(HasTraits):
    """Example of a traits name wildcard: the trailing underscore in
    ``temp_`` makes every attribute whose name starts with ``temp_``
    accept any value."""

    temp_ = Any  # Using a wildcard with a Trait attribute name
import { Controller, Get, Post, Body, Patch, Param, Delete, ValidationPipe, UsePipes } from '@nestjs/common';
import { CommitsService } from './commits.service';
import { CreateCommitDto } from './dto/create-commit.dto';
// REST controller for the /commits resource.
@Controller('commits')
export class CommitsController {
  // Service is injected by Nest's DI container.
  constructor(private commitsService: CommitsService) {}
  /**
   * POST /commits — the body is validated against CreateCommitDto by the
   * ValidationPipe, then creation is delegated to CommitsService.
   */
  @Post()
  @UsePipes(new ValidationPipe())
  async create(@Body() createCommitDto: CreateCommitDto) {
    return await this.commitsService.create(createCommitDto);
  }
}
|
/** Builds the BST whose preorder traversal is {@code preorder}. */
public static Node createTree(int[] preorder)
{
    // Delegate to the recursive helper over the full array range.
    return constructTree(preorder, 0, preorder.length - 1);
}
// The main function to construct BST from given preorder traversal.
// This function mainly uses constructTree()
public static Node constructTree(int preorder[], int low, int high)
{
// Base case
if (low > high) {
return null;
}
// The first node in pre order traverse is root.
Node root = new Node(preorder[low]);
// If the current subarray has only one element,
// no need to recur
if (low == high) {
return root;
}
// Search for the first element greater than root
int i=0;
for(i=low; i<=high; i++)
{
if (preorder[i] > root.data) {
break;
}
}
// Use the index of element found in
// preorder to divide preorder array in
// two parts. Left part and right part
root.left = constructTree(preorder, low+1, i-1);
root.right = constructTree(preorder, i, high);
return root;
} |
/**
* Author: <NAME> <<EMAIL>>
* Copyright (c) 2020 Gothel Software e.K.
* Copyright (c) 2020 ZAFENA AB
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.tinyb.AdapterSettings;
import org.tinyb.BluetoothAdapter;
import org.tinyb.BluetoothAddressType;
import org.tinyb.BluetoothDevice;
import org.tinyb.AdapterStatusListener;
import org.tinyb.BLERandomAddressType;
import org.tinyb.BTMode;
import org.tinyb.BluetoothException;
import org.tinyb.BluetoothFactory;
import org.tinyb.BluetoothGattCharacteristic;
import org.tinyb.BluetoothGattService;
import org.tinyb.BluetoothManager;
import org.tinyb.BluetoothNotification;
import org.tinyb.BluetoothType;
import org.tinyb.BluetoothUtils;
import org.tinyb.EIRDataTypeSet;
import org.tinyb.GATTCharacteristicListener;
import org.tinyb.HCIStatusCode;
import org.tinyb.HCIWhitelistConnectType;
import direct_bt.tinyb.DBTManager;
/**
* This Java scanner example uses the Direct-BT fully event driven workflow
* and adds multithreading, i.e. one thread processes each found device found
* as notified via the event listener.
* <p>
* This example represents the recommended utilization of Direct-BT.
* </p>
*/
public class ScannerTinyB10 {
static final String EUI48_ANY_DEVICE = "00:00:00:00:00:00";
final List<String> waitForDevices = new ArrayList<String>();
final boolean isDirectBT;
final BluetoothManager manager;
long timestamp_t0;
int MULTI_MEASUREMENTS = 8;
boolean KEEP_CONNECTED = true;
boolean REMOVE_DEVICE = true;
boolean USE_WHITELIST = false;
final List<String> whitelist = new ArrayList<String>();
final List<String> characteristicList = new ArrayList<String>();
boolean SHOW_UPDATE_EVENTS = false;
int dev_id = -1; // use default
int shutdownTest = 0;
// Printf-style logger: prepends the elapsed time in milliseconds
// (comma-grouped, via the extra leading %,9d argument) and writes to stderr.
static void printf(final String format, final Object... args) {
    final Object[] args2 = new Object[args.length+1];
    args2[0] = BluetoothUtils.getElapsedMillisecond();
    System.arraycopy(args, 0, args2, 1, args.length);
    System.err.printf("[%,9d] "+format, args2);
    // System.err.printf("[%,9d] ", BluetoothUtils.getElapsedMillisecond());
    // System.err.printf(format, args);
}
// Writes msg to stderr prefixed with the elapsed time in milliseconds,
// followed by the platform line separator.
static void println(final String msg) {
    System.err.printf("[%,9d] %s%s", BluetoothUtils.getElapsedMillisecond(), msg, System.lineSeparator());
}
// Addresses currently in the connect/GATT-processing pipeline (thread-safe wrapper).
Collection<String> devicesInProcessing = Collections.synchronizedCollection(new ArrayList<>());
// Addresses whose GATT processing completed successfully.
Collection<String> devicesProcessed = Collections.synchronizedCollection(new ArrayList<>());

// Central adapter event sink driving the whole test:
//  - deviceFound     -> spawns an off-thread connect task
//  - deviceConnected -> spawns an off-thread GATT-processing task
// Both paths apply the same gating: skip devices already in flight, honor the
// optional wait list, and stop once all measurement rounds are consumed.
final AdapterStatusListener statusListener = new AdapterStatusListener() {
    @Override
    public void adapterSettingsChanged(final BluetoothAdapter adapter, final AdapterSettings oldmask,
                                       final AdapterSettings newmask, final AdapterSettings changedmask, final long timestamp) {
        println("****** SETTINGS: "+oldmask+" -> "+newmask+", changed "+changedmask);
        println("Status Adapter:");
        println(adapter.toString());
        // (Re)start discovery whenever the adapter transitions to powered-on.
        if( changedmask.isSet(AdapterSettings.SettingType.POWERED) &&
            newmask.isSet(AdapterSettings.SettingType.POWERED) )
        {
            // powered on adapter ....
            if( !adapter.startDiscovery( true ) ) {
                println("Adapter (powered-on): Start discovery failed");
            }
        }
    }

    @Override
    public void discoveringChanged(final BluetoothAdapter adapter, final boolean enabled, final boolean keepAlive, final long timestamp) {
        println("****** DISCOVERING: enabled "+enabled+", keepAlive "+keepAlive+" on "+adapter);
    }

    @Override
    public void deviceFound(final BluetoothDevice device, final long timestamp) {
        println("****** FOUND__: "+device.toString());

        // Only handle LE devices with a public or static-public random address.
        if( BluetoothAddressType.BDADDR_LE_PUBLIC != device.getAddressType()
            && BLERandomAddressType.STATIC_PUBLIC != device.getBLERandomAddressType() ) {
            println("****** FOUND__-2: Skip 'non public' or 'random static public' LE "+device.toString());
            return;
        }
        // Connect when not already in flight and either no wait list is set,
        // or this device is wanted and measurements are still outstanding.
        if( !devicesInProcessing.contains( device.getAddress() ) &&
            ( waitForDevices.isEmpty() ||
              ( waitForDevices.contains(device.getAddress()) &&
                ( 0 < MULTI_MEASUREMENTS || !devicesProcessed.containsAll(waitForDevices) )
              )
            )
          )
        {
            println("****** FOUND__-0: Connecting "+device.toString());
            {
                final long td = BluetoothUtils.getCurrentMilliseconds() - timestamp_t0; // adapter-init -> now
                println("PERF: adapter-init -> FOUND__-0 " + td + " ms");
            }
            // Connect off-thread so this event callback returns promptly.
            final Thread deviceConnectTask = new Thread( new Runnable() {
                @Override
                public void run() {
                    connectDiscoveredDevice(device);
                }
            }, "DBT-Connect-"+device.getAddress());
            deviceConnectTask.setDaemon(true); // detach thread
            deviceConnectTask.start();
        } else {
            println("****** FOUND__-1: NOP "+device.toString());
        }
    }

    @Override
    public void deviceUpdated(final BluetoothDevice device, final EIRDataTypeSet updateMask, final long timestamp) {
        // Chatty; only logged when explicitly enabled via '-show_update_events'.
        if( SHOW_UPDATE_EVENTS ) {
            println("****** UPDATED: "+updateMask+" of "+device);
        }
    }

    @Override
    public void deviceConnected(final BluetoothDevice device, final short handle, final long timestamp) {
        // Same gating as deviceFound(): skip in-flight / unwanted devices.
        if( !devicesInProcessing.contains( device.getAddress() ) &&
            ( waitForDevices.isEmpty() ||
              ( waitForDevices.contains(device.getAddress()) &&
                ( 0 < MULTI_MEASUREMENTS || !devicesProcessed.containsAll(waitForDevices) )
              )
            )
          )
        {
            println("****** CONNECTED-0: Processing "+device.toString());
            {
                final long td = BluetoothUtils.getCurrentMilliseconds() - timestamp_t0; // adapter-init -> now
                println("PERF: adapter-init -> CONNECTED-0 " + td + " ms");
            }
            final Thread deviceProcessingTask = new Thread( new Runnable() {
                @Override
                public void run() {
                    processConnectedDevice(device);
                }
            }, "DBT-Process-"+device.getAddress());
            // Mark in-flight BEFORE starting the worker to avoid double processing.
            devicesInProcessing.add(device.getAddress());
            deviceProcessingTask.setDaemon(true); // detach thread
            deviceProcessingTask.start();
        } else {
            println("****** CONNECTED-1: NOP " + device.toString());
        }
    }

    @Override
    public void deviceDisconnected(final BluetoothDevice device, final HCIStatusCode reason, final short handle, final long timestamp) {
        println("****** DISCONNECTED: Reason "+reason+", old handle 0x"+Integer.toHexString(handle)+": "+device+" on "+device.getAdapter());
    }
};
// Issues the connect command for a freshly discovered device. Discovery is
// stopped first; if the connect command fails and no other device is in
// flight, discovery is restarted so the test can keep going.
private void connectDiscoveredDevice(final BluetoothDevice device) {
    println("****** Connecting Device: Start " + device.toString());
    {
        final boolean r = device.getAdapter().stopDiscovery();
        println("****** Connecting Device: stopDiscovery result "+r);
    }
    HCIStatusCode res;
    if( !USE_WHITELIST ) {
        res = device.connect();
    } else {
        // Whitelist mode: the controller auto-connects, nothing to issue here.
        res = HCIStatusCode.SUCCESS;
    }
    println("****** Connecting Device Command, res "+res+": End result "+res+" of " + device.toString());
    // Resume discovery only when this connect failed and the pipeline is idle.
    if( !USE_WHITELIST && 0 == devicesInProcessing.size() && HCIStatusCode.SUCCESS != res ) {
        final boolean r = device.getAdapter().startDiscovery( true );
        println("****** Connecting Device: startDiscovery result "+r);
    }
}
/**
 * Runs {@code task} either detached on a fresh daemon thread or synchronously
 * on the caller's thread.
 *
 * @param task      work to perform
 * @param offThread when true, run on a new daemon thread; otherwise inline
 */
void execute(final Runnable task, final boolean offThread) {
    if (!offThread) {
        task.run();
        return;
    }
    final Thread worker = new Thread(task);
    worker.setDaemon(true);
    worker.start();
}
/**
 * Dispatches the configured shutdown stress-test variant; a no-op unless
 * the {@code shutdownTest} field is 1 or 2.
 */
void shutdownTest() {
    if (1 == shutdownTest) {
        shutdownTest01();
    } else if (2 == shutdownTest) {
        shutdownTest02();
    }
    // any other value: nothing to do
}
// Shutdown variant 1: tear down the DBTManager from a detached thread while
// event callbacks may still be in flight.
void shutdownTest01() {
    execute( () -> {
        final DBTManager mngr = (DBTManager) DBTManager.getBluetoothManager();
        mngr.shutdown();
    }, true);
}
/** Shutdown variant 2: terminate the whole JVM from a detached thread. */
void shutdownTest02() {
    final Runnable killVM = () -> System.exit(1);
    execute(killVM, true);
}
// Full GATT processing of one connected device:
//   stop discovery -> enumerate primary services (implicit GATT connect)
//   -> PERF logging -> resolve requested characteristics via three find() scopes
//   -> attach a notification/indication listener -> dump all services and
//   readable characteristic values -> optional keep-alive ping loop
//   -> disconnect (and optionally remove the device).
// Restarts discovery once this was the last in-flight device.
private void processConnectedDevice(final BluetoothDevice device) {
    println("****** Processing Device: Start " + device.toString());
    {
        // make sure for pending connections on failed connect*(..) command
        final boolean r = device.getAdapter().stopDiscovery();
        println("****** Processing Device: stopDiscovery result "+r);
    }
    final long t1 = BluetoothUtils.getCurrentMilliseconds();
    boolean success = false;
    //
    // GATT Service Processing
    //
    try {
        final List<BluetoothGattService> primServices = device.getServices(); // implicit GATT connect...
        if( null == primServices || 0 == primServices.size() ) {
            // Cheating the flow, but avoiding: goto, do-while-false and lastly unreadable intendations
            // And it is an error case nonetheless ;-)
            throw new RuntimeException("Processing Device: getServices() failed " + device.toString());
        }
        final long t5 = BluetoothUtils.getCurrentMilliseconds();
        {
            // Performance report; all deltas relative to adapter-init (timestamp_t0).
            final long td01 = t1 - timestamp_t0; // adapter-init -> processing-start
            final long td15 = t5 - t1; // get-gatt-services
            final long tdc5 = t5 - device.getLastDiscoveryTimestamp(); // discovered to gatt-complete
            final long td05 = t5 - timestamp_t0; // adapter-init -> gatt-complete
            println(System.lineSeparator()+System.lineSeparator());
            println("PERF: GATT primary-services completed\n");
            println("PERF: adapter-init to processing-start " + td01 + " ms,"+System.lineSeparator()+
                    "PERF: get-gatt-services " + td15 + " ms,"+System.lineSeparator()+
                    "PERF: discovered to gatt-complete " + tdc5 + " ms (connect " + (tdc5 - td15) + " ms),"+System.lineSeparator()+
                    "PERF: adapter-init to gatt-complete " + td05 + " ms"+System.lineSeparator());
        }
        {
            // Resolve each requested characteristic UUID through all three
            // find() scopes (manager, adapter, device) to exercise the API.
            for(final String characteristic : characteristicList) {
                final BluetoothGattCharacteristic char0 = (BluetoothGattCharacteristic)
                        manager.find(BluetoothType.GATT_CHARACTERISTIC, null, characteristic, null);
                final BluetoothGattCharacteristic char1 = (BluetoothGattCharacteristic)
                        manager.find(BluetoothType.GATT_CHARACTERISTIC, null, characteristic, device.getAdapter());
                final BluetoothGattCharacteristic char2 = (BluetoothGattCharacteristic)
                        manager.find(BluetoothType.GATT_CHARACTERISTIC, null, characteristic, device);
                println("Char UUID "+characteristic);
                println("  over manager: "+char0);
                println("  over adapter: "+char1);
                println("  over device : "+char2);
            }
        }
        {
            // Listener logs every notification/indication and triggers the
            // configured shutdown stress test, if any.
            final GATTCharacteristicListener myCharacteristicListener = new GATTCharacteristicListener(null) {
                @Override
                public void notificationReceived(final BluetoothGattCharacteristic charDecl,
                                                 final byte[] value, final long timestamp) {
                    println("****** GATT notificationReceived: "+charDecl+
                            ", value "+BluetoothUtils.bytesHexString(value, true, true));
                    shutdownTest();
                }
                @Override
                public void indicationReceived(final BluetoothGattCharacteristic charDecl,
                                               final byte[] value, final long timestamp, final boolean confirmationSent) {
                    println("****** GATT indicationReceived: "+charDecl+
                            ", value "+BluetoothUtils.bytesHexString(value, true, true));
                    shutdownTest();
                }
            };
            final boolean addedCharacteristicListenerRes =
                    BluetoothGattService.addCharacteristicListenerToAll(device, primServices, myCharacteristicListener);
            println("Added GATTCharacteristicListener: "+addedCharacteristicListenerRes);
        }
        try {
            // Dump every primary service and its characteristics; read the
            // value of any characteristic advertising the 'read' property.
            int i=0, j=0;
            for(final Iterator<BluetoothGattService> srvIter = primServices.iterator(); srvIter.hasNext(); i++) {
                final BluetoothGattService primService = srvIter.next();
                printf("  [%02d] Service %s\n", i, primService.toString());
                printf("  [%02d] Service Characteristics\n", i);
                final List<BluetoothGattCharacteristic> serviceCharacteristics = primService.getCharacteristics();
                for(final Iterator<BluetoothGattCharacteristic> charIter = serviceCharacteristics.iterator(); charIter.hasNext(); j++) {
                    final BluetoothGattCharacteristic serviceChar = charIter.next();
                    printf("  [%02d.%02d] Decla: %s\n", i, j, serviceChar.toString());
                    final List<String> properties = Arrays.asList(serviceChar.getFlags());
                    if( properties.contains("read") ) {
                        final byte[] value = serviceChar.readValue();
                        final String svalue = BluetoothUtils.decodeUTF8String(value, 0, value.length);
                        printf("  [%02d.%02d] Value: %s ('%s')\n",
                                i, j, BluetoothUtils.bytesHexString(value, true, true), svalue);
                    }
                }
            }
        } catch( final Exception ex) {
            println("Caught "+ex.getMessage());
            ex.printStackTrace();
        }
        // FIXME sleep 1s for potential callbacks ..
        try {
            Thread.sleep(1000);
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
        success = true;
    } catch (final Throwable t ) {
        println("****** Processing Device: Exception caught for " + device.toString() + ": "+t.getMessage());
        t.printStackTrace();
    }
    // Device is no longer in flight; resume discovery when the pipeline is idle.
    devicesInProcessing.remove(device.getAddress());
    if( !USE_WHITELIST && 0 == devicesInProcessing.size() ) {
        final boolean r = device.getAdapter().startDiscovery( true );
        println("****** Processing Device: startDiscovery result "+r);
    }
    // Keep-alive: ping GATT once per second until the link drops.
    if( KEEP_CONNECTED && success ) {
        while( device.pingGATT() ) {
            println("****** Processing Device: pingGATT OK: "+device.getAddress());
            try {
                Thread.sleep(1000);
            } catch (final InterruptedException e) {
                e.printStackTrace();
            }
        }
        println("****** Processing Device: pingGATT failed: "+device.getAddress());
    }
    println("****** Processing Device: disconnecting: "+device.getAddress());
    device.disconnect(); // will implicitly purge the GATT data, including GATTCharacteristic listener.
    // Busy-wait for the disconnect to complete before (optionally) removing.
    while( device.getConnected() ) {
        try {
            Thread.sleep(100);
        } catch (final InterruptedException e) {
            e.printStackTrace();
        }
    }
    if( REMOVE_DEVICE ) {
        println("****** Processing Device: removing: "+device.getAddress());
        device.remove();
    }
    if( 0 < MULTI_MEASUREMENTS ) {
        MULTI_MEASUREMENTS--;
        println("****** Processing Device: MULTI_MEASUREMENTS left "+MULTI_MEASUREMENTS+": "+device.getAddress());
    }
    println("****** Processing Device: End: Success " + success +
            " on " + device.toString() + "; devInProc "+devicesInProcessing.size());
    if( success ) {
        devicesProcessed.add(device.getAddress());
    }
}
/**
 * Resolves the requested BluetoothManager implementation and instantiates it.
 * Terminates the JVM with exit code -1 when the implementation cannot be
 * found or constructed.
 *
 * @param bluetoothManagerClazzName fully qualified BluetoothManager class name
 */
public ScannerTinyB10(final String bluetoothManagerClazzName) {
    BluetoothManager _manager = null;
    final BluetoothFactory.ImplementationIdentifier implID = BluetoothFactory.getImplementationIdentifier(bluetoothManagerClazzName);
    if( null == implID ) {
        System.err.println("Unable to find BluetoothManager "+bluetoothManagerClazzName);
        System.exit(-1);
    }
    isDirectBT = BluetoothFactory.DirectBTImplementationID.equals(implID);
    System.err.println("Using BluetoothManager "+bluetoothManagerClazzName);
    System.err.println("Using Implementation "+implID+", isDirectBT "+isDirectBT);
    try {
        _manager = BluetoothFactory.getBluetoothManager( implID );
    } catch (BluetoothException | NoSuchMethodException | SecurityException
            | IllegalAccessException | IllegalArgumentException
            | InvocationTargetException | ClassNotFoundException e) {
        System.err.println("Unable to instantiate BluetoothManager via "+implID);
        e.printStackTrace();
        System.exit(-1);
    }
    manager = _manager;
    println("BluetoothManager "+bluetoothManagerClazzName+" initialized!");
}
/**
 * Runs the scanner test: selects and validates the adapter, registers the
 * status listener and property-change notifications, installs the whitelist
 * or starts discovery, then blocks until the measurement goals are reached.
 */
public void runTest() {
    final BluetoothAdapter adapter;
    {
        // Adapter selection: dev_id < 0 -> default adapter, else by index.
        final List<BluetoothAdapter> adapters = manager.getAdapters();
        for(int i=0; i < adapters.size(); i++) {
            println("Adapter["+i+"]: "+adapters.get(i));
        }
        if( adapters.size() <= dev_id ) {
            println("No adapter dev_id "+dev_id+" available, adapter count "+adapters.size());
            System.exit(-1);
        }
        if( 0 > dev_id ) {
            adapter = manager.getDefaultAdapter();
        } else {
            adapter = adapters.get(dev_id);
        }
        if( !adapter.isEnabled() ) {
            println("Adapter not enabled: device "+adapter.getName()+", address "+adapter.getAddress()+": "+adapter.toString());
            System.exit(-1);
        }
    }
    // Baseline timestamp for all PERF measurements.
    timestamp_t0 = BluetoothUtils.getCurrentMilliseconds();
    adapter.addStatusListener(statusListener, null);
    adapter.enableDiscoverableNotifications(new BooleanNotification("Discoverable", timestamp_t0));
    adapter.enableDiscoveringNotifications(new BooleanNotification("Discovering", timestamp_t0));
    adapter.enablePairableNotifications(new BooleanNotification("Pairable", timestamp_t0));
    adapter.enablePoweredNotifications(new BooleanNotification("Powered", timestamp_t0));
    boolean done = false;
    if( USE_WHITELIST ) {
        // Whitelist mode: the controller auto-connects; no active discovery.
        for(final Iterator<String> wliter = whitelist.iterator(); wliter.hasNext(); ) {
            final String addr = wliter.next();
            final boolean res = adapter.addDeviceToWhitelist(addr, BluetoothAddressType.BDADDR_LE_PUBLIC, HCIWhitelistConnectType.HCI_AUTO_CONN_ALWAYS);
            println("Added to whitelist: res "+res+", address "+addr);
        }
    } else {
        if( !adapter.startDiscovery( true ) ) {
            println("Adapter start discovery failed");
            done = true;
        }
    }
    // Poll every 3s until the measurement rounds are exhausted, or (in
    // single-run mode, -1) all wait-listed devices have been processed.
    while( !done ) {
        if( 0 == MULTI_MEASUREMENTS ||
            ( -1 == MULTI_MEASUREMENTS && !waitForDevices.isEmpty() && devicesProcessed.containsAll(waitForDevices) )
          )
        {
            println("****** EOL Test MULTI_MEASUREMENTS left "+MULTI_MEASUREMENTS+
                    ", processed "+devicesProcessed.size()+"/"+waitForDevices.size());
            println("****** WaitForDevices "+Arrays.toString(waitForDevices.toArray()));
            println("****** DevicesProcessed "+Arrays.toString(devicesProcessed.toArray()));
            done = true;
        } else {
            try {
                Thread.sleep(3000);
            } catch (final InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
    // All implicit via destructor or shutdown hook!
    // manager.shutdown(); /* implies: adapter.close(); */
}
/**
 * Entry point. Arguments are parsed in two passes: the first pass sets JVM
 * system properties that must be in place BEFORE the BluetoothManager is
 * instantiated; the second pass configures the constructed test instance.
 *
 * @param args command-line options, see the usage line printed below
 */
public static void main(final String[] args) throws InterruptedException {
    String bluetoothManagerClazzName = null;
    // Pass 1: manager selection and system-property options.
    for(int i=0; i< args.length; i++) {
        final String arg = args[i];
        if( arg.equals("-bluetoothManager") && args.length > (i+1) ) {
            bluetoothManagerClazzName = args[++i];
        } else if( arg.equals("-debug") ) {
            System.setProperty("org.tinyb.verbose", "true");
            System.setProperty("org.tinyb.debug", "true");
        } else if( arg.equals("-verbose") ) {
            System.setProperty("org.tinyb.verbose", "true");
        } else if( arg.equals("-dbt_debug") && args.length > (i+1) ) {
            System.setProperty("direct_bt.debug", args[++i]);
        } else if( arg.equals("-dbt_verbose") && args.length > (i+1) ) {
            System.setProperty("direct_bt.verbose", args[++i]);
        } else if( arg.equals("-dbt_gatt") && args.length > (i+1) ) {
            System.setProperty("direct_bt.gatt", args[++i]);
        } else if( arg.equals("-dbt_hci") && args.length > (i+1) ) {
            System.setProperty("direct_bt.hci", args[++i]);
        } else if( arg.equals("-dbt_mgmt") && args.length > (i+1) ) {
            System.setProperty("direct_bt.mgmt", args[++i]);
        } else if( arg.equals("-default_dev_id") && args.length > (i+1) ) {
            final int default_dev_id = Integer.valueOf(args[++i]).intValue();
            if( 0 <= default_dev_id ) {
                System.setProperty("org.tinyb.default_adapter", String.valueOf(default_dev_id));
                System.err.println("Setting 'org.tinyb.default_adapter' to "+default_dev_id);
            }
        } else if( arg.equals("-btmode") && args.length > (i+1) ) {
            final BTMode btmode = BTMode.get(args[++i]);
            System.setProperty("org.tinyb.btmode", btmode.toString());
            System.err.println("Setting 'org.tinyb.btmode' to "+btmode.toString());
        }
    }
    // Drop BluetoothGattCharacteristic value cache and notification compatibility using direct_bt.
    System.setProperty("direct_bt.tinyb.characteristic.compat", "false");
    if( null == bluetoothManagerClazzName ) {
        // Default to the DirectBT implementation when none was requested.
        bluetoothManagerClazzName = BluetoothFactory.DirectBTImplementationID.BluetoothManagerClassName;
    }
    final ScannerTinyB10 test = new ScannerTinyB10(bluetoothManagerClazzName);
    boolean waitForEnter=false;
    {
        // Pass 2: per-test options applied to the constructed instance.
        for(int i=0; i< args.length; i++) {
            final String arg = args[i];
            if( arg.equals("-wait") ) {
                waitForEnter = true;
            } else if( arg.equals("-show_update_events") ) {
                test.SHOW_UPDATE_EVENTS = true;
            } else if( arg.equals("-dev_id") && args.length > (i+1) ) {
                test.dev_id = Integer.valueOf(args[++i]).intValue();
            } else if( arg.equals("-shutdown") && args.length > (i+1) ) {
                test.shutdownTest = Integer.valueOf(args[++i]).intValue();
            } else if( arg.equals("-mac") && args.length > (i+1) ) {
                test.waitForDevices.add(args[++i]);
            } else if( arg.equals("-wl") && args.length > (i+1) ) {
                final String addr = args[++i];
                println("Whitelist + "+addr);
                test.whitelist.add(addr);
                test.USE_WHITELIST = true;
            } else if( arg.equals("-char") && args.length > (i+1) ) {
                test.characteristicList.add(args[++i]);
            } else if( arg.equals("-disconnect") ) {
                test.KEEP_CONNECTED = false;
            } else if( arg.equals("-keepDevice") ) {
                test.REMOVE_DEVICE = false;
            } else if( arg.equals("-count") && args.length > (i+1) ) {
                test.MULTI_MEASUREMENTS = Integer.valueOf(args[++i]).intValue();
            } else if( arg.equals("-single") ) {
                test.MULTI_MEASUREMENTS = -1;
            }
        }
        println("Run with '[-default_dev_id <adapter-index>] [-dev_id <adapter-index>] (-mac <device_address>)* "+
                "[-disconnect] [-count <number>] [-single] (-wl <device_address>)* (-char <uuid>)* [-show_update_events] "+
                "[-bluetoothManager <BluetoothManager-Implementation-Class-Name>] "+
                "[-verbose] [-debug] "+
                "[-dbt_verbose [true|false]] "+
                "[-dbt_debug [true|false|hci.event,mgmt.event,adapter.event,gatt.data]] "+
                "[-dbt_mgmt cmd.timeout=3000,ringsize=64,... "+
                "[-dbt_hci cmd.complete.timeout=10000,cmd.status.timeout=3000,ringsize=64,... "+
                "[-dbt_gatt cmd.read.timeout=500,cmd.write.timeout=500,cmd.init.timeout=2500,ringsize=128,... "+
                "[-shutdown <int>]'");
    }
    // Echo the effective configuration before starting.
    println("BluetoothManager "+bluetoothManagerClazzName);
    println("MULTI_MEASUREMENTS "+test.MULTI_MEASUREMENTS);
    println("KEEP_CONNECTED "+test.KEEP_CONNECTED);
    println("REMOVE_DEVICE "+test.REMOVE_DEVICE);
    println("USE_WHITELIST "+test.USE_WHITELIST);
    println("dev_id "+test.dev_id);
    println("waitForDevice: "+Arrays.toString(test.waitForDevices.toArray()));
    println("characteristicList: "+Arrays.toString(test.characteristicList.toArray()));
    if( waitForEnter ) {
        // Pause so a debugger/profiler can be attached before the run.
        println("Press ENTER to continue\n");
        try{ System.in.read();
        } catch(final Exception e) { }
    }
    test.runTest();
}
// Latch-style BluetoothNotification: records the latest boolean value, logs
// it together with the delta to the reference time t0, and wakes any threads
// waiting on this instance.
static class BooleanNotification implements BluetoothNotification<Boolean> {
    private final long t0;     // reference timestamp [ms] used in log output
    private final String name; // label used in log output
    private boolean v;         // last received value (guarded by 'this')

    public BooleanNotification(final String name, final long t0) {
        this.t0 = t0;
        this.name = name;
        this.v = false;
    }

    @Override
    public void run(final Boolean v) {
        synchronized(this) {
            final long t1 = BluetoothUtils.getCurrentMilliseconds();
            this.v = v.booleanValue();
            System.out.println("###### "+name+": "+v+" in td "+(t1-t0)+" ms!");
            this.notifyAll(); // wake any waiter observing getValue()
        }
    }

    // Returns the last received value (false until the first notification).
    public boolean getValue() {
        synchronized(this) {
            return v;
        }
    }
}
}
|
<reponame>elliotsegler/altimeter<filename>altimeter/aws/resource/ec2/internet_gateway.py
"""Resource for Internet Gateways"""
from typing import Type
from botocore.client import BaseClient
from altimeter.aws.resource.resource_spec import ListFromAWSResult
from altimeter.aws.resource.ec2 import EC2ResourceSpec
from altimeter.aws.resource.ec2.vpc import VPCResourceSpec
from altimeter.core.graph.field.dict_field import EmbeddedDictField
from altimeter.core.graph.field.list_field import ListField
from altimeter.core.graph.field.resource_link_field import ResourceLinkField
from altimeter.core.graph.field.scalar_field import ScalarField
from altimeter.core.graph.field.tags_field import TagsField
from altimeter.core.graph.schema import Schema
class InternetGatewayResourceSpec(EC2ResourceSpec):
    """Resource for InternetGateways"""

    # altimeter resource type suffix, used when generating ARNs / graph keys
    type_name = "internet-gateway"

    # Graph schema: owning account id, plus one embedded entry per VPC
    # attachment (state + link to the attached VPC), plus resource tags.
    schema = Schema(
        ScalarField("OwnerId"),
        ListField(
            "Attachments",
            EmbeddedDictField(ScalarField("State"), ResourceLinkField("VpcId", VPCResourceSpec),),
            optional=True,
            alti_key="attachment",
        ),
        TagsField(),
    )

    @classmethod
    def list_from_aws(
        cls: Type["InternetGatewayResourceSpec"], client: BaseClient, account_id: str, region: str
    ) -> ListFromAWSResult:
        """Return a dict of dicts of the format:

            {'igw_1_arn': {igw_1_dict},
             'igw_2_arn': {igw_2_dict},
             ...}

        Where the dicts represent results from describe_internet_gateways.

        :param client: boto2 EC2 client for the target region
        :param account_id: AWS account id, used to build each resource ARN
        :param region: AWS region name, used to build each resource ARN
        :return: ListFromAWSResult keyed by generated IGW ARN
        """
        igws = {}
        # Paginate so accounts with many IGWs are fully enumerated.
        paginator = client.get_paginator("describe_internet_gateways")
        for resp in paginator.paginate():
            for igw in resp["InternetGateways"]:
                resource_arn = cls.generate_arn(
                    resource_id=igw["InternetGatewayId"], account_id=account_id, region=region
                )
                igws[resource_arn] = igw
        return ListFromAWSResult(resources=igws)
|
<reponame>NoftScarlet/mwc-gatsby-netlifycms<filename>src/components/PractitionerServiceCard.js<gh_stars>0
import React, {useEffect} from 'react';
import PropTypes from 'prop-types';
import {makeStyles, useTheme, withStyles} from '@material-ui/core/styles';
import Card from '@material-ui/core/Card';
import CardContent from '@material-ui/core/CardContent';
import IconButton from '@material-ui/core/IconButton';
import Typography from '@material-ui/core/Typography';
import SkipPreviousIcon from '@material-ui/icons/SkipPrevious';
import PlayArrowIcon from '@material-ui/icons/PlayArrow';
import SkipNextIcon from '@material-ui/icons/SkipNext';
import {Row, Col, Container, Button} from "reactstrap"
import geImage from "../img/geoff.jpg"
import SwipeableViews from 'react-swipeable-views';
import AppBar from '@material-ui/core/AppBar';
import Tabs from '@material-ui/core/Tabs';
import Tab from '@material-ui/core/Tab';
import Box from '@material-ui/core/Box';
function TabPanel(props) {
const { children, value, index, ...other } = props;
const [opacity, setOpacity] = React.useState(0);
useEffect(()=>{
if (value ===index) {
setOpacity(1);
}
else {
setOpacity(0);
}
//console.log('Hook - ComponentDidMount');
})
return (
<div
className={"practitioner-panel"}
style={{'-webkit-transition':'opacity 1.4s ease-out',opacity: `${opacity}`}}
role="tabpanel"
hidden={value !== index}
id={`full-width-tabpanel-${index}`}
aria-labelledby={`full-width-tab-${index}`}
{...other}
>
{value === index && (
<Box className="pt-3">
<Typography>{children}</Typography>
</Box>
)}
</div>
);
}
// Runtime prop validation for TabPanel.
// NOTE(review): 'opacity' is managed internally via useState and never passed
// as a prop — presumably a leftover declaration; confirm before removing.
TabPanel.propTypes = {
  children: PropTypes.node,
  index: PropTypes.any.isRequired,
  value: PropTypes.any.isRequired,
  opacity: PropTypes.string
};
/**
 * Builds the accessibility attributes linking tab `index` to its panel,
 * matching the ids TabPanel renders.
 *
 * @param {number} index tab position
 * @returns {{id: string, 'aria-controls': string}}
 */
function a11yProps(index) {
  const tabId = `full-width-tab-${index}`;
  const panelId = `full-width-tabpanel-${index}`;
  return { id: tabId, 'aria-controls': panelId };
}
// JSS style hook for the card's media-control layout (flex root, stacked
// details column, 151px cover image, centered control strip, 38px play icon).
const useStyles = makeStyles((theme) => ({
  root: {
    display: 'flex',
  },
  details: {
    display: 'flex',
    flexDirection: 'column',
  },
  content: {
    flex: '1 0 auto',
  },
  cover: {
    width: 151,
  },
  controls: {
    display: 'flex',
    alignItems: 'center',
    paddingLeft: theme.spacing(1),
    paddingBottom: theme.spacing(1),
  },
  playIcon: {
    height: 38,
    width: 38,
  }
}));
/**
 * MediaControlCard — practitioner card with a header image, a booking button
 * and a swipeable three-tab body (Service Info / Experience / Video
 * Introduction). Tab content is driven by props.cardItems[0].
 *
 * @param {object} props
 * @param {Array}  props.cardItems practitioner records; only index 0 is rendered
 */
export default function MediaControlCard(props) {
  const classes = useStyles();
  const theme = useTheme();
  // Active tab index, shared between the Tabs bar and SwipeableViews.
  const [value, setValue] = React.useState(0);
  // Tab clicked in the AppBar.
  const handleChange = (event, newValue) => {
    setValue(newValue);
  };
  // Panel swiped directly.
  const handleChangeIndex = (index) => {
    setValue(index);
  };
  return (
    <>
      <img src={"https://media.macphun.com/img/uploads/customer/how-to/579/15531840725c93b5489d84e9.43781620.jpg"}/>
      <Button className="btn-warning border-radius-0">
        Book Appointment
      </Button><br/>
      <Card className={` border-radius-0 practitioner-card`}>
        <div>
          <Typography component="h6" variant="h5">
            {props.cardItems[0].name}
          </Typography>
          <Typography variant="subtitle1" color="textSecondary">
            {props.cardItems[0].role}
          </Typography>
          {/* Tab bar; a11yProps wires each tab to its panel for screen readers. */}
          <AppBar position="static" color="default">
            <Tabs
              value={value}
              onChange={handleChange}
              indicatorColor="primary"
              textColor="primary"
              variant="fullWidth"
              aria-label="full width tabs example"
            >
              <Tab label="Service Info" {...a11yProps(0)} />
              <Tab label="Experience" {...a11yProps(1)} />
              <Tab label="Video Introduction" {...a11yProps(2)} />
            </Tabs>
          </AppBar>
          <SwipeableViews
            axis='x'
            index={value}
            onChangeIndex={handleChangeIndex}
          >
            {/* Tab 0: language / location / service summary plus media controls. */}
            <TabPanel value={value} index={0} dir={theme.direction}>
              <div className={`${classes.details} `}>
                <Container>
                  <Row>
                    <Col md="4" lg={"4"}>
                      <div style={{height:"1px",backgroundColor:"gray"}}>&nbsp;</div>
                    </Col>
                    <Col md={"8"} lg={"8"}>
                      <div className={classes.content}>
                        <strong>Language: </strong>{props.cardItems[0].language}<br/>
                        <strong>Location: </strong>{props.cardItems[0].location}<br/>
                        <strong>Service: </strong>{props.cardItems[0].serviceProvided}<br/>
                      </div>
                    </Col>
                  </Row>
                </Container>
                <div className={classes.controls}>
                  <IconButton aria-label="previous">
                    <SkipPreviousIcon/>
                  </IconButton>
                  <IconButton aria-label="play/pause">
                    <PlayArrowIcon className={classes.playIcon}/>
                  </IconButton>
                  <IconButton aria-label="next">
                    <SkipNextIcon/>
                  </IconButton>
                </div>
              </div>
            </TabPanel>
            {/* Tab 1: professional background fields. */}
            <TabPanel value={value} index={1} dir={theme.direction}>
              <div >
                <strong>Education/Training: </strong><p>{props.cardItems[0].eduTraining}</p>
                <strong>Specialization: </strong><p>{props.cardItems[0].specialization}</p>
                <strong>Experience with Musicians: </strong><p>{props.cardItems[0].experienceWithMusicians}</p>
              </div>
            </TabPanel>
            {/* Tab 2: placeholder content — video embed not implemented yet. */}
            <TabPanel value={value} index={2} dir={theme.direction}>
              <div>
                Item Three
              </div>
            </TabPanel>
          </SwipeableViews>
        </div>
      </Card>
    </>
  );
}
// NOTE(review): relies on Swift standard-library-internal APIs
// (_stdlib_atomicLoadARCRef, _ContiguousArrayStorageBase); this only builds
// inside the stdlib source tree.
class HeapBufferStorage {
  // Atomically published pointer slot for a bridged heap buffer, if any.
  private var _heapBufferBridgedPtr: UnsafeMutablePointer<Void>?
  // The native contiguous-array storage this wrapper owns.
  private var _nativeStorage: _ContiguousArrayStorageBase

  // Atomically loads the bridged buffer; nil when none has been published.
  internal var _heapBufferBridged: HeapBufferStorage? {
    if let ref = _stdlib_atomicLoadARCRef(object: _heapBufferBridgedPtr) {
      return unsafeBitCast(ref, to: HeapBufferStorage.self)
    }
    return nil
  }

  @_versioned
  @nonobjc
  internal init(_nativeStorage: _ContiguousArrayStorageBase) {
    self._nativeStorage = _nativeStorage
  }
}
import styled from 'styled-components/native';
import { Image } from 'react-native'
// Styled-components (react-native) for a user card.

// Outer wrapper; layout is left to the parent.
export const Container = styled.View``;

// 54x54 avatar with rounded corners and a 4px white border.
export const Avatar = styled(Image)`
  width: 54px;
  height: 54px;
  border-radius: 4px;
  border-width: 4px;
  border-color: #fff;
`;

// Fixed-width text column next to the avatar.
export const Info = styled.View`
  width: 260px;
`;

// User display name.
export const Name = styled.Text`
  font-weight: bold;
  font-size: 16px;
`;

// Secondary bio line in muted gray.
export const Bio = styled.Text`
  color: #666;
  margin-top:5px;
`;

// Comma-separated tech list line.
export const Techs = styled.Text`
  margin-top: 5px;
`;
#!/usr/bin/env bash
# Sanity-check the ONVM/DPDK build environment:
#  - RTE_SDK and RTE_TARGET must be exported
#  - $RTE_SDK/$RTE_TARGET/lib/ldflags.txt must exist; if not, patch dpdk's
#    rte.app.mk so the next build generates it, and ask for a dpdk rebuild.

GREEN='\033[0;32m'
NC='\033[0m'

if [ -z "$RTE_SDK" ]; then
    echo "Please follow onvm install instructions to export \$RTE_SDK"
    exit 1
fi

if [ -z "$RTE_TARGET" ]; then
    echo "Please follow onvm install instructions to export \$RTE_TARGET"
    exit 1
fi

# Get to script directory.
# FIX: quote the command substitution so paths containing spaces work, and
# abort if the cd fails instead of running the rest from the wrong directory.
cd "$(dirname "${BASH_SOURCE[0]}")/" || exit 1

printf "${GREEN}Checking ldflags.txt...\n$NC"
# FIX: quote the path in the test and the sed target for space-safe expansion.
if [ ! -f "$RTE_SDK/$RTE_TARGET/lib/ldflags.txt" ]; then
    echo "File $RTE_SDK/$RTE_TARGET/lib/ldflags.txt does not exist, please reinstall dpdk."
    # Inject an ldflags.txt-generating snippet ahead of O_TO_EXE_STR in dpdk's rte.app.mk.
    sed -i -e 's/O_TO_EXE_STR =/\$(shell if [ \! -d \${RTE_SDK}\/\${RTE_TARGET}\/lib ]\; then mkdir \${RTE_SDK}\/\${RTE_TARGET}\/lib\; fi)\nLINKER_FLAGS = \$(call linkerprefix,\$(LDLIBS))\n\$(shell echo \${LINKER_FLAGS} \> \${RTE_SDK}\/\${RTE_TARGET}\/lib\/ldflags\.txt)\nO_TO_EXE_STR =/g' "$RTE_SDK/mk/rte.app.mk"
    exit 1
fi

printf "${GREEN}RTE_SDK$NC env variable is set to $RTE_SDK\n"
printf "${GREEN}RTE_TARGET$NC env variable is set to $RTE_TARGET\n"
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.debug.gui.copying;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Range;
import docking.action.DockingActionIf;
import generic.Unique;
import ghidra.app.plugin.core.debug.gui.AbstractGhidraHeadedDebuggerGUITest;
import ghidra.app.plugin.core.debug.gui.action.AutoReadMemorySpec;
import ghidra.app.plugin.core.debug.gui.action.NoneAutoReadMemorySpec;
import ghidra.app.plugin.core.debug.gui.copying.DebuggerCopyIntoProgramDialog.RangeEntry;
import ghidra.app.plugin.core.debug.gui.listing.DebuggerListingPlugin;
import ghidra.app.plugin.core.debug.gui.listing.DebuggerListingProvider;
import ghidra.app.plugin.core.debug.service.modules.DebuggerStaticMappingServicePlugin;
import ghidra.app.services.ActionSource;
import ghidra.app.services.DebuggerStaticMappingService;
import ghidra.dbg.DebuggerModelListener;
import ghidra.dbg.target.TargetObject;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.Program;
import ghidra.program.model.mem.MemoryBlock;
import ghidra.program.util.ProgramLocation;
import ghidra.test.ToyProgramBuilder;
import ghidra.trace.database.memory.DBTraceMemoryManager;
import ghidra.trace.model.DefaultTraceLocation;
import ghidra.trace.model.TraceLocation;
import ghidra.trace.model.memory.TraceMemoryFlag;
import ghidra.util.database.UndoableTransaction;
public class DebuggerCopyActionsPluginTest extends AbstractGhidraHeadedDebuggerGUITest {
// Plugins and services under test; resolved in setupCopyActionsPluginTest().
DebuggerCopyActionsPlugin copyActionsPlugin;
DebuggerListingPlugin listingPlugin;
DebuggerStaticMappingService mappingService;
DebuggerListingProvider listingProvider;
// Installs the static-mapping, copy-actions and listing plugins into the test
// tool and grabs the listing provider used by every test in this class.
@Before
public void setupCopyActionsPluginTest() throws Exception {
    mappingService = addPlugin(tool, DebuggerStaticMappingServicePlugin.class);
    copyActionsPlugin = addPlugin(tool, DebuggerCopyActionsPlugin.class);
    listingPlugin = addPlugin(tool, DebuggerListingPlugin.class);
    listingProvider = waitForComponentProvider(DebuggerListingProvider.class);
}
// Convenience wrappers binding the inherited test helpers to this class's
// listing provider, so tests can omit the provider argument.

// Asserts the action is disabled in the listing provider's context.
protected void assertDisabled(DockingActionIf action) {
    assertDisabled(listingProvider, action);
}

// Performs the action in the listing provider's context; fails if disabled.
protected void performEnabledAction(DockingActionIf action) {
    performEnabledAction(listingProvider, action, false);
}

// Selects the inclusive address range [min, max] in the listing.
protected void select(Address min, Address max) {
    select(listingProvider, min, max);
}

// Selects an arbitrary address set in the listing.
protected void select(AddressSetView set) {
    select(listingProvider, set);
}
// Verifies that copying a selected trace region into the current program
// WITHOUT relocation proposes exactly one new block at the identical address
// range, and that executing the dialog creates that block.
@Test
public void testActionCopyIntoCurrentProgramWithoutRelocationCreateBlocks() throws Exception {
    // Action must stay disabled until a program, a trace and a selection exist.
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    createProgram();
    AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
    programManager.openProgram(program);
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    createAndOpenTrace();
    try (UndoableTransaction tid = tb.startTransaction()) {
        tb.trace.getMemoryManager()
                .createRegion(".text", 0, tb.range(0x00400000, 0x0040ffff),
                    TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
    }
    traceManager.activateTrace(tb.trace);
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    // A selection finally enables the action.
    select(tb.addr(0x00400000), tb.addr(0x0040ffff));
    performEnabledAction(copyActionsPlugin.actionCopyIntoCurrentProgram);
    DebuggerCopyIntoProgramDialog dialog =
        waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
    dialog.setRelocate(false);
    dialog.reset();
    // Exactly one proposed entry: identical src/dst range, creating a block.
    RangeEntry entry = Unique.assertOne(dialog.tableModel.getModelData());
    assertEquals(tb.range(stSpace, 0x00400000, 0x0040ffff), entry.getSrcRange());
    assertEquals(tb.range(stSpace, 0x00400000, 0x0040ffff), entry.getDstRange());
    assertEquals(".text", entry.getRegionName());
    assertEquals(".text", entry.getBlockName());
    assertTrue(entry.isCreate());
    dialog.okCallback();
    dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
    waitForSwing();
    // The copy must have created the single '.text' block in the program.
    MemoryBlock text = Unique.assertOne(Arrays.asList(program.getMemory().getBlocks()));
    assertEquals(".text", text.getName());
}
// Verifies copying from a 64-bit trace into a 32-bit program WITHOUT
// relocation: ranges already backed by existing blocks are split along block
// boundaries (no create), ranges unrepresentable in 32 bits are dropped, and
// a straddling range is truncated at the 32-bit limit. Also checks the bytes
// actually land in the program.
@Test
public void testActionCopyIntoCurrentProgramWithoutRelocationCrossLanguage() throws Exception {
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    createProgram(getSLEIGH_X86_LANGUAGE());
    createAndOpenTrace(ToyProgramBuilder._X64);
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
    // Two adjacent pre-existing 32K blocks covering the .text region's range.
    try (UndoableTransaction tid = UndoableTransaction.start(program, "Add blocks", true)) {
        program.getMemory()
                .createInitializedBlock(".text", tb.addr(stSpace, 0x00400000), 0x8000,
                    (byte) 0, monitor, false);
        program.getMemory()
                .createInitializedBlock(".text2", tb.addr(stSpace, 0x00408000), 0x8000,
                    (byte) 0, monitor, false);
    }
    try (UndoableTransaction tid = tb.startTransaction()) {
        DBTraceMemoryManager mm = tb.trace.getMemoryManager();
        mm.createRegion(".text", 0, tb.range(0x00400000, 0x0040ffff),
            TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
        mm.putBytes(0, tb.addr(0x00401234), tb.buf(1, 2, 3, 4));
        // This region should be excluded, since it cannot be mapped identically into 32-bits
        mm.createRegion("lib:.text", 0, tb.range(0x7fff00400000L, 0x7fff0040ffffL),
            TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
        // This region should be partially excluded, because 32-bits
        // This is not likely to ever happen in practice, but be prepared
        mm.createRegion(".straddle", 0, tb.range(0xfffff000L, 0x100000fffL),
            TraceMemoryFlag.READ, TraceMemoryFlag.WRITE);
    }
    programManager.openProgram(program);
    traceManager.activateTrace(tb.trace);
    assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
    // Select all three regions, including the unmappable and straddling ones.
    select(tb.set(
        tb.range(0x00400000, 0x0040ffff),
        tb.range(0x7fff00400000L, 0x7fff0040ffffL),
        tb.range(0xfffff000L, 0x100000fffL)));
    performEnabledAction(copyActionsPlugin.actionCopyIntoCurrentProgram);
    DebuggerCopyIntoProgramDialog dialog =
        waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
    dialog.setRelocate(false);
    dialog.reset();
    List<RangeEntry> entries = List.copyOf(dialog.tableModel.getModelData());
    assertEquals(3, entries.size());
    RangeEntry entry;
    // Entry 0: first half of .text, mapped into the existing '.text' block.
    entry = entries.get(0);
    assertEquals(tb.range(0x00400000, 0x00407fff), entry.getSrcRange());
    assertEquals(tb.range(stSpace, 0x00400000, 0x00407fff), entry.getDstRange());
    assertEquals(".text", entry.getRegionName());
    assertEquals(".text *", entry.getBlockName());
    assertFalse(entry.isCreate());
    // Entry 1: second half of .text, mapped into the existing '.text2' block.
    entry = entries.get(1);
    assertEquals(tb.range(0x00408000, 0x0040ffff), entry.getSrcRange());
    assertEquals(tb.range(stSpace, 0x00408000, 0x0040ffff), entry.getDstRange());
    assertEquals(".text", entry.getRegionName());
    assertEquals(".text2 *", entry.getBlockName());
    assertFalse(entry.isCreate());
    // Entry 2: .straddle truncated at the 32-bit boundary; new block created.
    entry = entries.get(2);
    assertEquals(tb.range(0xfffff000L, 0xffffffffL), entry.getSrcRange());
    assertEquals(tb.range(stSpace, 0xfffff000L, 0xffffffffL), entry.getDstRange());
    assertEquals(".straddle", entry.getRegionName());
    assertEquals(".straddle", entry.getBlockName());
    assertTrue(entry.isCreate());
    dialog.okCallback();
    dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
    waitForSwing();
    // The trace bytes written at 0x00401234 must now be present in the program.
    byte[] dest = new byte[4];
    program.getMemory().getBytes(tb.addr(stSpace, 0x00401234), dest);
    assertArrayEquals(tb.arr(1, 2, 3, 4), dest);
}
/**
 * Copy-into-current-program WITH relocation: a static mapping relocates the trace's
 * .text at 0x55550000 onto the program's existing block at 0x00400000, so the copy
 * must reuse that block rather than create a new one.
 */
@Test
public void testActionCopyIntoCurrentProgramWithRelocationExistingBlocks() throws Exception {
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
createAndOpenTrace();
createProgramFromTrace();
// Both artifacts must be in the project for the mapping service to link them
intoProject(program);
intoProject(tb.trace);
try (UndoableTransaction tid = tb.startTransaction()) {
tb.trace.getMemoryManager()
.createRegion(".text", 0, tb.range(0x55550000, 0x5555ffff),
TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
}
traceManager.activateTrace(tb.trace);
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
programManager.openProgram(program);
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
MemoryBlock block;
// Pre-existing destination block the relocated copy should land in
try (UndoableTransaction tid = UndoableTransaction.start(program, "Create block", true)) {
block = program.getMemory()
.createUninitializedBlock(".text", tb.addr(stSpace, 0x00400000), 0x10000,
false);
}
// Map trace 0x55550000 (length 0x10000) onto program 0x00400000
TraceLocation tloc =
new DefaultTraceLocation(tb.trace, null, Range.atLeast(0L), tb.addr(0x55550000));
ProgramLocation ploc = new ProgramLocation(program, tb.addr(stSpace, 0x00400000));
try (UndoableTransaction tid = tb.startTransaction()) {
mappingService.addMapping(tloc, ploc, 0x10000, true);
}
// Block until the mapping service exposes the mapped view for this program
waitForValue(() -> mappingService
.getOpenMappedViews(tb.trace, tb.set(tb.range(0x55550000, 0x5555ffff)), 0)
.get(program));
select(tb.addr(0x55550000), tb.addr(0x5555ffff));
performEnabledAction(copyActionsPlugin.actionCopyIntoCurrentProgram);
DebuggerCopyIntoProgramDialog dialog =
waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
dialog.setRelocate(true);
dialog.reset();
// Single entry: src in trace addresses, dst relocated into the existing block.
// isCreate() is false and the name carries a " *" suffix (presumably marking
// an existing block — confirm against the dialog's naming convention).
RangeEntry entry = Unique.assertOne(dialog.tableModel.getModelData());
assertEquals(tb.range(stSpace, 0x55550000, 0x5555ffff), entry.getSrcRange());
assertEquals(tb.range(stSpace, 0x00400000, 0x0040ffff), entry.getDstRange());
assertEquals(".text", entry.getRegionName());
assertEquals(".text *", entry.getBlockName());
assertFalse(entry.isCreate());
dialog.okCallback();
dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
waitForSwing();
// No new block created; the pre-existing one was reused
MemoryBlock text = Unique.assertOne(Arrays.asList(program.getMemory().getBlocks()));
assertEquals(block, text);
}
/**
 * Same relocation scenario as the existing-blocks test, but with "use overlays"
 * enabled: instead of reusing the pre-existing .text block, the copy must create a
 * new overlay block named ".text_2".
 */
@Test
public void testActionCopyIntoCurrentProgramWithRelocationOverlayBlocks() throws Exception {
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
createAndOpenTrace();
createProgramFromTrace();
// Both artifacts must be in the project for the mapping service to link them
intoProject(program);
intoProject(tb.trace);
try (UndoableTransaction tid = tb.startTransaction()) {
tb.trace.getMemoryManager()
.createRegion(".text", 0, tb.range(0x55550000, 0x5555ffff),
TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
}
traceManager.activateTrace(tb.trace);
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
programManager.openProgram(program);
assertDisabled(copyActionsPlugin.actionCopyIntoCurrentProgram);
AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
MemoryBlock block;
// Pre-existing block at the mapped destination; with overlays on it must NOT be reused
try (UndoableTransaction tid = UndoableTransaction.start(program, "Create block", true)) {
block = program.getMemory()
.createUninitializedBlock(".text", tb.addr(stSpace, 0x00400000), 0x10000,
false);
}
// Map trace 0x55550000 (length 0x10000) onto program 0x00400000
TraceLocation tloc =
new DefaultTraceLocation(tb.trace, null, Range.atLeast(0L), tb.addr(0x55550000));
ProgramLocation ploc = new ProgramLocation(program, tb.addr(stSpace, 0x00400000));
try (UndoableTransaction tid = tb.startTransaction()) {
mappingService.addMapping(tloc, ploc, 0x10000, true);
}
waitForValue(() -> mappingService
.getOpenMappedViews(tb.trace, tb.set(tb.range(0x55550000, 0x5555ffff)), 0)
.get(program));
select(tb.addr(0x55550000), tb.addr(0x5555ffff));
performEnabledAction(copyActionsPlugin.actionCopyIntoCurrentProgram);
DebuggerCopyIntoProgramDialog dialog =
waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
dialog.setRelocate(true);
dialog.setUseOverlays(true);
dialog.reset();
// Overlay mode: a fresh block ".text_2" is to be created (isCreate true)
RangeEntry entry = Unique.assertOne(dialog.tableModel.getModelData());
assertEquals(tb.range(stSpace, 0x55550000, 0x5555ffff), entry.getSrcRange());
assertEquals(tb.range(stSpace, 0x00400000, 0x0040ffff), entry.getDstRange());
assertEquals(".text", entry.getRegionName());
assertEquals(".text_2", entry.getBlockName());
assertTrue(entry.isCreate());
dialog.okCallback();
dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
waitForSwing();
// The new overlay block is distinct from the pre-existing one
MemoryBlock text2 =
Unique.assertOne(Arrays.asList(program.getMemory().getBlock(".text_2")));
assertNotEquals(block, text2);
assertTrue(text2.isOverlay());
}
/**
 * Copies a trace region into a NEW (temporary) program. Also exercises renaming the
 * destination block in the dialog before the copy runs.
 */
@Test
public void testActionCopyIntoNewProgram() throws Exception {
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
createAndOpenTrace();
try (UndoableTransaction tid = tb.startTransaction()) {
tb.trace.getMemoryManager()
.createRegion(".text", 0, tb.range(0x55550000, 0x5555ffff),
TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
}
traceManager.activateTrace(tb.trace);
// Still disabled until there is a selection
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
select(tb.addr(0x55550000), tb.addr(0x5555ffff));
performEnabledAction(copyActionsPlugin.actionCopyIntoNewProgram);
DebuggerCopyIntoProgramDialog dialog =
waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
dialog.setDestination(DebuggerCopyIntoProgramDialog.TEMP_PROGRAM);
// New program: src and dst ranges are identical; the block must be created
RangeEntry entry = Unique.assertOne(dialog.tableModel.getModelData());
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getSrcRange());
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getDstRange());
assertEquals(".text", entry.getRegionName());
assertEquals(".text", entry.getBlockName());
assertTrue(entry.isCreate());
// Rename the destination block before confirming
entry.setBlockName(".my_text");
dialog.okCallback();
dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
waitForSwing();
// Declare my own, or the @After will try to release it erroneously
Program program = waitForValue(() -> programManager.getCurrentProgram());
AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
MemoryBlock text = Unique.assertOne(Arrays.asList(program.getMemory().getBlocks()));
assertEquals(tb.addr(stSpace, 0x55550000), text.getStart());
assertEquals(".my_text", text.getName());
}
/**
 * Two adjacent trace regions (.text then .data) selected together must yield two
 * separate table entries, not one merged range. Also checks the "capture" checkbox
 * is disabled and unselected (presumably because the trace has no live target —
 * compare the CaptureLive test below, where it is enabled).
 */
@Test
public void testActionCopyIntoNewProgramAdjacentRegions() throws Exception {
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
createAndOpenTrace();
try (UndoableTransaction tid = tb.startTransaction()) {
tb.trace.getMemoryManager()
.createRegion(".text", 0, tb.range(0x55550000, 0x5555ffff),
TraceMemoryFlag.READ, TraceMemoryFlag.EXECUTE);
tb.trace.getMemoryManager()
.createRegion(".data", 0, tb.range(0x55560000, 0x5556ffff),
TraceMemoryFlag.READ, TraceMemoryFlag.WRITE);
}
traceManager.activateTrace(tb.trace);
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
// Single contiguous selection spanning both regions
select(tb.addr(0x55550000), tb.addr(0x5556ffff));
performEnabledAction(copyActionsPlugin.actionCopyIntoNewProgram);
DebuggerCopyIntoProgramDialog dialog =
waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
assertFalse(dialog.cbCapture.isEnabled());
assertFalse(dialog.cbCapture.isSelected());
dialog.setDestination(DebuggerCopyIntoProgramDialog.TEMP_PROGRAM);
// One row per region, in address order
assertEquals(2, dialog.tableModel.getRowCount());
RangeEntry entry;
entry = dialog.tableModel.getRowObject(0);
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getSrcRange());
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getDstRange());
assertEquals(".text", entry.getRegionName());
assertEquals(".text", entry.getBlockName());
assertTrue(entry.isCreate());
entry = dialog.tableModel.getRowObject(1);
assertEquals(tb.range(0x55560000, 0x5556ffff), entry.getSrcRange());
assertEquals(tb.range(0x55560000, 0x5556ffff), entry.getDstRange());
assertEquals(".data", entry.getRegionName());
assertEquals(".data", entry.getBlockName());
assertTrue(entry.isCreate());
dialog.okCallback();
dialog.lastTask.get(1000, TimeUnit.MILLISECONDS);
waitForSwing();
// Declare my own, or the @After will try to release it erroneously
Program program = waitForValue(() -> programManager.getCurrentProgram());
assertEquals(2, program.getMemory().getBlocks().length);
}
/**
 * Copy from a LIVE target with "capture" enabled: bytes must be read through the
 * model (observed via memoryUpdated callbacks) rather than from the stale trace
 * snapshot, since auto-read is set to NONE.
 */
@Test
public void testActionCopyIntoNewProgramCaptureLive() throws Exception {
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
createTestModel();
// Counts model memory-read callbacks so we can prove the capture hit the target
var listener = new DebuggerModelListener() {
int count = 0;
@Override
public void memoryUpdated(TargetObject memory, Address address, byte[] data) {
count++;
}
};
mb.testModel.addModelListener(listener);
mb.createTestProcessesAndThreads();
modelService.recordTarget(mb.testProcess1, createTargetTraceMapper(mb.testProcess1),
ActionSource.AUTOMATIC);
mb.testProcess1.memory.addRegion(".text", mb.rng(0x55550000, 0x5555ffff), "rx");
mb.testProcess1.memory.setMemory(mb.addr(0x55550000), mb.arr(1, 2, 3, 4, 5, 6, 7, 8));
// Wait for the recorder to pick up the region
waitForPass(() -> {
assertEquals(1, tb.trace.getMemoryManager().getAllRegions().size());
});
// Disable auto-read so the only reads are those triggered by the capture itself
listingProvider.setAutoReadMemorySpec(
AutoReadMemorySpec.fromConfigName(NoneAutoReadMemorySpec.CONFIG_NAME));
traceManager.openTrace(tb.trace);
traceManager.activateTrace(tb.trace);
assertDisabled(copyActionsPlugin.actionCopyIntoNewProgram);
select(tb.addr(0x55550000), tb.addr(0x5555ffff));
performEnabledAction(copyActionsPlugin.actionCopyIntoNewProgram);
DebuggerCopyIntoProgramDialog dialog =
waitForDialogComponent(DebuggerCopyIntoProgramDialog.class);
// Live target: capture is available and on by default
assertTrue(dialog.cbCapture.isEnabled());
assertTrue(dialog.cbCapture.isSelected());
dialog.setDestination(DebuggerCopyIntoProgramDialog.TEMP_PROGRAM);
RangeEntry entry = Unique.assertOne(dialog.tableModel.getModelData());
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getSrcRange());
assertEquals(tb.range(0x55550000, 0x5555ffff), entry.getDstRange());
assertEquals("[.text]", entry.getRegionName());
assertEquals("[.text]", entry.getBlockName());
assertTrue(entry.isCreate());
entry.setBlockName(".my_text");
// No model reads have happened yet
assertEquals(0, listener.count);
dialog.okCallback();
dialog.lastTask.get(10000, TimeUnit.MILLISECONDS);
waitForSwing();
// 16 reads for the 0x10000-byte region — presumably one callback per page-sized
// chunk (0x1000); confirm against the recorder's read granularity.
assertEquals(16, listener.count);
// Declare my own, or the @After will try to release it erroneously
Program program = waitForValue(() -> programManager.getCurrentProgram());
AddressSpace stSpace = program.getAddressFactory().getDefaultAddressSpace();
MemoryBlock text = Unique.assertOne(Arrays.asList(program.getMemory().getBlocks()));
assertEquals(tb.addr(stSpace, 0x55550000), text.getStart());
assertEquals(".my_text", text.getName());
// Bytes set on the live target must have arrived in the new program
byte[] arr = new byte[8];
text.getBytes(tb.addr(stSpace, 0x55550000), arr);
assertArrayEquals(tb.arr(1, 2, 3, 4, 5, 6, 7, 8), arr);
}
}
|
<reponame>TheLMiffy1111/RSLargePatterns
package thelm.rslargepatterns.network;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.fml.common.network.IGuiHandler;
import thelm.rslargepatterns.RSLargePatterns;
import thelm.rslargepatterns.client.gui.IGuiProvider;
/**
 * Forge GUI handler: resolves GUI elements for tile entities that implement
 * {@link IGuiProvider}. Registered as a singleton via {@link #INSTANCE}.
 */
public class GuiHandler implements IGuiHandler {

	public static final GuiHandler INSTANCE = new GuiHandler();

	// Singleton: construction restricted to subclasses.
	protected GuiHandler() {
	}

	/**
	 * Returns the client-side GUI element for the tile entity at (x, y, z),
	 * or null if there is no tile there or it is not an IGuiProvider.
	 */
	@Override
	public Object getClientGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) {
		// NOTE(review): the original computed an EnumFacing from ID here but never
		// used it; that dead code has been removed.
		IGuiProvider provider = findProvider(world, x, y, z);
		return provider == null ? null : provider.getClientGuiElement(player);
	}

	/**
	 * Returns the server-side GUI element for the tile entity at (x, y, z),
	 * or null if there is no tile there or it is not an IGuiProvider.
	 */
	@Override
	public Object getServerGuiElement(int ID, EntityPlayer player, World world, int x, int y, int z) {
		IGuiProvider provider = findProvider(world, x, y, z);
		return provider == null ? null : provider.getServerGuiElement(player);
	}

	// Shared lookup for both sides: fetch the tile entity and narrow it to IGuiProvider.
	private IGuiProvider findProvider(World world, int x, int y, int z) {
		TileEntity tile = world.getTileEntity(new BlockPos(x, y, z));
		return tile instanceof IGuiProvider ? (IGuiProvider)tile : null;
	}

	/**
	 * Opens the GUI identified by ID for the given player at the given coordinates.
	 */
	public void launchGui(int ID, EntityPlayer player, World world, int x, int y, int z) {
		player.openGui(RSLargePatterns.instance, ID, world, x, y, z);
	}
}
|
def removeEmptyElements(obj):
    """Return a copy of obj without keys whose value is the empty string.

    Other falsy values (None, 0, [], False) are kept; only "" is dropped.
    """
    cleaned = {}
    for key, value in obj.items():
        if value != "":
            cleaned[key] = value
    return cleaned
<reponame>Marmelatze/docker-controller<filename>docker-controller/src/main/java/de/schub/docker_controller/Metadata/Storage/MetadataStorage.java
package de.schub.docker_controller.Metadata.Storage;
import de.schub.docker_controller.Metadata.ContainerMetadata;
import java.util.List;
/**
 * Storage backend for container metadata: lookup by container id, bulk retrieval,
 * and persistence (add / replace-all / delete).
 */
public interface MetadataStorage
{
/**
 * get metadata from storage by container id
 * @param containerId id of the container whose metadata is requested
 * @return the stored metadata for that container (behavior when the id is
 *         unknown is implementation-defined — not specified here)
 */
ContainerMetadata get(String containerId);
/**
 * get metadata for all containers
 * @return all stored container metadata entries
 */
List<ContainerMetadata> getAll();
/**
 * persist metadata
 * @param metadata the entry to store
 */
void add(ContainerMetadata metadata);
/**
 * override all metadata saved for this node
 * @param metadatas the complete replacement set of entries
 */
void set(List<ContainerMetadata> metadatas);
/**
 * delete metadata for a single container
 * @param metadata the entry to remove
 */
void delete(ContainerMetadata metadata);
}
|
import json
import os
import random
import shutil
from collections import defaultdict
from tqdm import tqdm
import settings
import argparse
def main():
    """Parse command-line options and split the dataset into train/test parts.

    Options: -s/--seed (int, default 7), -p/--path (record directory,
    default <DATA_DIR>/raw/train), -f/--factor (float fraction, default 0.1).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--seed',
                        help='seed for random split',
                        type=int,
                        default=7)
    parser.add_argument('-p', '--path',
                        help='path to the record to be split',
                        type=str,
                        default=os.path.join(settings.DATA_DIR, 'raw', 'train'))
    parser.add_argument('-f', '--factor',
                        help='factor for random split',
                        type=float,
                        default=0.1)
    args = parser.parse_args()
    # Group annotation files by pair_id, then move a random fraction of pairs.
    pairs = make_image_dict(os.path.join(args.path, 'annos'))
    split_dataset(args.path, args.factor, args.seed, pairs)
def make_image_dict(annotations_dir: str) -> dict:
    """Map each annotation's pair_id to the list of filenames sharing it.

    Reads every JSON file in annotations_dir; each file must contain a
    'pair_id' key. Returns a defaultdict(list) of pair_id -> [filename, ...].
    """
    pairs = defaultdict(list)
    for name in tqdm(os.listdir(annotations_dir)):
        with open(os.path.join(annotations_dir, name)) as handle:
            annotation = json.load(handle)
        pairs[annotation['pair_id']].append(name)
    return pairs
def split_dataset(data_dir: str, factor: float, seed: int, pairs_dict: dict):
    """Move a random fraction of image pairs from data_dir into a sibling 'test' dir.

    Selects ``int(len(pairs) * factor)`` pair_ids (deterministically, via the
    given seed) and moves their annotation JSONs and matching .jpg images from
    ``data_dir/annos`` and ``data_dir/image`` into ``../test/annos`` and
    ``../test/image``. Moving whole pairs keeps paired items in the same split.
    """
    test_dir = os.path.join(os.path.dirname(data_dir), 'test')
    image_dir = os.path.join(test_dir, 'image')
    annotations_dir = os.path.join(test_dir, 'annos')
    os.makedirs(test_dir, exist_ok=True)
    os.makedirs(image_dir, exist_ok=True)
    os.makedirs(annotations_dir, exist_ok=True)
    pair_ids = list(pairs_dict.keys())
    random.seed(seed)
    random.shuffle(pair_ids)
    selected = []
    for pair_id in pair_ids[:int(len(pair_ids) * factor)]:
        selected.extend(pairs_dict[pair_id])
    for name in selected:
        annotation_path = os.path.join(data_dir, 'annos', name)
        # os.path.splitext (not str.split('.')) so filenames containing extra
        # dots keep their full stem when mapped to the .jpg image.
        image_path = os.path.join(data_dir, 'image', os.path.splitext(name)[0] + '.jpg')
        shutil.move(image_path, os.path.join(image_dir, os.path.basename(image_path)))
        shutil.move(annotation_path,
                    os.path.join(annotations_dir, os.path.basename(annotation_path)))
# Run the split only when executed as a script (not when imported).
if __name__ == '__main__':
    main()
|
#!/bin/bash
# Succeed (status 0) when $1 is a symbolic link, fail (status 1) otherwise.
function isValidSymlink() {
    [ -L "$1" ]
}
# If nginx.conf is not already a symlink, back up the real file before linking ours.
NGINX_CONF=/etc/nginx/nginx.conf
if ! isValidSymlink "$NGINX_CONF"; then
    if [ -f "$NGINX_CONF" ]; then
        # $(...) instead of legacy backticks; expansions quoted for safety.
        TSTAMP=$(date '+%s')
        NEW_CONF="$NGINX_CONF.$TSTAMP"
        echo "Existing nginx.conf detected; moving to $NEW_CONF..."
        mv "$NGINX_CONF" "$NEW_CONF"
    fi
    echo "Linking our nginx.conf config..."
    ln -s /etc/wlanpi-webui/nginx/nginx.conf "$NGINX_CONF"
fi

# If the distro's default site is enabled, disable it.
DEFAULT_FILE=/etc/nginx/sites-enabled/default
if isValidSymlink "$DEFAULT_FILE"; then
    echo "Unlinking $DEFAULT_FILE"
    unlink "$DEFAULT_FILE"
fi

# Enable the speedtest site if not already linked.
WLANPI_SPEEDTEST=/etc/nginx/sites-enabled/wlanpi_speedtest.conf
if ! isValidSymlink "$WLANPI_SPEEDTEST"; then
    echo "Linking wlanpi_speedtest.conf..."
    ln -s /etc/wlanpi-webui/nginx/sites-enabled/wlanpi_speedtest.conf "$WLANPI_SPEEDTEST"
fi

# Enable the webui site if not already linked.
WLANPI_WEBUI=/etc/nginx/sites-enabled/wlanpi_webui.conf
if ! isValidSymlink "$WLANPI_WEBUI"; then
    echo "Linking wlanpi_webui.conf..."
    ln -s /etc/wlanpi-webui/nginx/sites-enabled/wlanpi_webui.conf "$WLANPI_WEBUI"
fi

# Validate the configuration, then restart nginx to apply it.
nginx -t
systemctl restart nginx.service
/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {elementByTag, waitForBodyPromise} from '../../../../src/dom';
import {isExperimentOn} from '../../../../src/experiments';
import {dev} from '../../../../src/log';
// Tags considered for automatic `lightbox` attribute discovery.
const ELIGIBLE_TAGS = [
'amp-img',
'amp-anim',
'amp-ad',
'amp-dailymotion',
'amp-jwplayer',
'amp-kaltura-player',
'amp-o2-player',
'amp-pinterest',
'amp-reach-player',
'amp-vimeo',
'amp-vine',
'amp-youtube',
'amp-video',
'amp-twitter',
'amp-facebook',
'amp-instagram',
];
// Subset of tags that may additionally get an automatic tap-to-open handler.
const ELIGIBLE_TAP_TAGS = {
'amp-img': true,
'amp-anim': true,
};
// Id assigned to an auto-installed viewer element (or to an existing one lacking an id).
const DEFAULT_VIEWER_ID = 'amp-lightbox-viewer';
// Tag name of the lightbox viewer element.
const VIEWER_TAG = 'amp-lightbox-viewer';
/**
 * Finds elements in the document that meet our heuristics for automatically
 * becoming lightboxable and adds the `lightbox` attribute to them. Elements
 * that also meet the tap heuristics get a tap action opening the viewer.
 * @param {!../../../../src/service/ampdoc-impl.AmpDoc} ampdoc
 * @return {!Promise}
 */
export function autoDiscoverLightboxables(ampdoc) {
  // Extra safety check, manager should not call this if experiments are off
  dev().assert(isExperimentOn(ampdoc.win, 'amp-lightbox-viewer'));
  dev().assert(isExperimentOn(ampdoc.win, 'amp-lightbox-viewer-auto'));
  return maybeInstallLightboxViewer(ampdoc).then(viewerId => {
    const candidates = ampdoc.getRootNode().querySelectorAll(ELIGIBLE_TAGS.join(','));
    for (let i = 0; i < candidates.length; i++) {
      const candidate = candidates[i];
      // Skip elements already marked, or failing the size/position heuristics.
      if (candidate.hasAttribute('lightbox') || !meetsHeuristics(candidate)) {
        continue;
      }
      candidate.setAttribute('lightbox', '');
      // TODO(aghassemi): This is best to do via default action. E.g. we can add
      // a tap listener via Action service and invoke lightbox if conditions are
      // met.
      if (meetsHeuristicsForTap(candidate)) {
        candidate.setAttribute('on', 'tap:' + viewerId + '.activate');
      }
    }
  });
}
/**
 * Decides whether an element meets the heuristics to become lightboxable:
 * it must not be off-screen to the left and must be at least 50x50 when a
 * layout box is available.
 * @param {!Element} element
 * @return {boolean}
 */
function meetsHeuristics(element) {
  dev().assert(element);
  // TODO(aghassemi): This will become complicated soon, create a pluggable
  // system for this.
  if (!element.getLayoutBox) {
    // No layout information: accept by default.
    return true;
  }
  const box = element.getLayoutBox();
  return box.left >= 0 && box.width >= 50 && box.height >= 50;
}
/**
 * Decides whether an already lightboxable element should automatically get
 * a tap handler to open in the lightbox: its tag must be tap-eligible and it
 * must not already define an `on` action.
 * @param {!Element} element
 * @return {boolean}
 */
function meetsHeuristicsForTap(element) {
  dev().assert(element);
  dev().assert(element.hasAttribute('lightbox'));
  const tag = element.tagName.toLowerCase();
  return !!ELIGIBLE_TAP_TAGS[tag] && !element.hasAttribute('on');
}
/**
 * Tries to find an existing amp-lightbox-viewer, if there is none, it adds a
 * default one.
 * @param {!../../../../src/service/ampdoc-impl.AmpDoc} ampdoc
 * @return {!Promise<string>} Resolves with the id of the amp-lightbox-viewer.
 */
function maybeInstallLightboxViewer(ampdoc) {
// TODO(aghassemi): Use the upcoming ampdoc.waitForBody
return waitForBodyPromise(ampdoc.getRootNode()).then(() => {
const existingViewer = elementByTag(ampdoc.getRootNode(), VIEWER_TAG);
if (existingViewer) {
// Reuse the existing viewer, assigning it the default id if it has none.
if (!existingViewer.id) {
existingViewer.id = DEFAULT_VIEWER_ID;
}
return existingViewer.id;
}
// No viewer found: create a hidden default one and append it to the body.
const viewer = ampdoc.getRootNode().createElement(VIEWER_TAG);
viewer.setAttribute('layout', 'nodisplay');
viewer.setAttribute('id', DEFAULT_VIEWER_ID);
ampdoc.getRootNode().body.appendChild(viewer);
return viewer.id;
});
}
|
/**
 * A FIFO queue backed by a plain array (front of the queue is index 0).
 */
class Queue {
  constructor() {
    this.data = [];
  }

  /**
   * Adds an element to the back of the queue.
   * @param {*} element
   * @returns {number} the new size of the queue (Array#push's return value —
   *   the previous doc incorrectly claimed a Queue was returned)
   */
  enqueue(element) {
    return this.data.push(element);
  }

  /**
   * Removes and returns the front element; logs and returns null when empty.
   * @returns {(?*)}
   */
  dequeue() {
    if (this.isEmpty()) {
      console.log('Queue has no elements');
      return null;
    }
    return this.data.shift();
  }

  /**
   * Returns the front element without removing it; logs and returns null when empty.
   * @returns {(?*)}
   */
  peek() {
    if (this.isEmpty()) {
      console.log('Queue has no elements');
      return null;
    }
    return this.data[0];
  }

  /**
   * Java-style alias for dequeue; logs and returns null when empty.
   * @returns {(?*)}
   */
  poll() {
    if (this.isEmpty()) {
      console.log('Queue has no elements');
      return null;
    }
    return this.dequeue();
  }

  /**
   * @returns {boolean} true when the queue holds no elements
   */
  isEmpty() {
    return this.data.length === 0;
  }

  /**
   * @returns {number} length of the queue
   */
  size() {
    return this.data.length;
  }

  /**
   * @returns {Queue} a shallow clone of the queue (elements are shared)
   */
  clone() {
    const copy = new Queue();
    copy.data = this.data.slice();
    return copy;
  }

  /**
   * Logs the queue contents as one space-separated line (leading space
   * preserved from the original format).
   */
  print() {
    const line = this.data.reduce((acc, element) => `${acc} ${element}`, '');
    return console.log(line);
  }
}

module.exports = Queue;
#!/bin/bash
# Runs a single dieharder RNG test with a fixed seed.
# NOTE(review): per dieharder's CLI conventions, -d selects the test number,
# -g the generator id, and -S the seed — confirm with `dieharder -h`.
dieharder -d 101 -g 25 -S 3009296997
|
<reponame>sergiomarchio/SolvdTACourseProjects<filename>CarFactory/src/main/java/com/solvd/carfactory/dao/IDepartmentDAO.java
package com.solvd.carfactory.dao;
import com.solvd.carfactory.models.employee.Department;
/**
 * DAO for {@link Department} entities. Declares no department-specific
 * operations; everything is inherited from {@link IBaseDAO}.
 */
public interface IDepartmentDAO extends IBaseDAO<Department> {
}
|
def check_monotonic(list):
    """Return True if the sequence is non-decreasing (each element <= the next).

    Despite the name, only the non-decreasing direction is checked — a strictly
    decreasing sequence returns False. Empty and single-element sequences are
    considered monotonic. Equal neighbors are allowed (non-strict comparison),
    matching the original behavior.

    The parameter name shadows the builtin ``list``; it is kept as-is so
    keyword calls remain compatible.
    """
    # all() short-circuits on the first violation; the original scanned the
    # whole sequence even after finding one.
    return all(a <= b for a, b in zip(list, list[1:]))
# Demo: a non-decreasing sample (note: the variable name shadows the builtin `list`).
list = [1, 3, 4, 5, 9, 10]
print(check_monotonic(list)) # Output: True
<reponame>tedzhou-okta/okta-idx-js<filename>babel.config.js
// Babel configuration: browser-targeted preset-env with usage-based core-js
// polyfills, runtime-transform helpers, and inline substitution of the
// package version wherever SDK_VERSION appears in source.
const SDK_VERSION = require('./package.json').version;
module.exports = {
presets: [[ '@babel/env', {
// targets or browserlist recommended per babel-preset-env docs
targets: { // Last update: 2021-03-09
edge: '17',
firefox: '70',
chrome: '77',
safari: '13',
ie: '11',
},
useBuiltIns: 'usage',
corejs: '3.8', // Minor version recommended per babel-preset-env docs
}]],
plugins: [
['@babel/plugin-transform-runtime', {
corejs: 3,
proposals: true, // required for proposal polyfills
}],
// Replaces the SDK_VERSION identifier with the literal version string.
['inline-replace-variables', {
'SDK_VERSION': SDK_VERSION
}]
],
};
|
#!/usr/bin/env bash
##########################################################################
# This is the Fake bootstrapper script for Linux and OS X.
##########################################################################

# Define directories.
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
TOOLS_DIR="$SCRIPT_DIR/tools"
NUGET_EXE="$TOOLS_DIR/nuget.exe"
NUGET_URL=https://dist.nuget.org/win-x86-commandline/v4.0.0/nuget.exe
FAKE_VERSION=4.61.2
FAKE_EXE="$TOOLS_DIR/FAKE/tools/FAKE.exe"
DOTNET_VERSION=1.0.4
DOTNET_INSTALLER_URL=https://raw.githubusercontent.com/dotnet/cli/rel/1.0.0/scripts/obtain/dotnet-install.sh

# Define default arguments.
TARGET="Default"
CONFIGURATION="Release"
VERBOSITY="verbose"
DRYRUN=
SCRIPT_ARGUMENTS=()

# Parse arguments.
# Fixed: the original iterated `for i in "$@"` while also shifting inside the
# loop, which appended spurious empty strings to SCRIPT_ARGUMENTS once the
# positional parameters were exhausted. A while-loop over the remaining
# argument count consumes exactly the arguments present.
while [ $# -gt 0 ]; do
    case $1 in
        -t|--target) TARGET="$2"; shift ;;
        -c|--configuration) CONFIGURATION="$2"; shift ;;
        -v|--verbosity) VERBOSITY="$2"; shift ;;
        -d|--dryrun) DRYRUN="-dryrun" ;;
        --) shift; SCRIPT_ARGUMENTS+=("$@"); break ;;
        *) SCRIPT_ARGUMENTS+=("$1") ;;
    esac
    shift
done

# Make sure the tools folder exists.
if [ ! -d "$TOOLS_DIR" ]; then
    mkdir "$TOOLS_DIR"
fi

###########################################################################
# INSTALL .NET CORE CLI
###########################################################################

echo "Installing .NET CLI..."
if [ ! -d "$SCRIPT_DIR/.dotnet" ]; then
    mkdir "$SCRIPT_DIR/.dotnet"
fi
curl -Lsfo "$SCRIPT_DIR/.dotnet/dotnet-install.sh" "$DOTNET_INSTALLER_URL"
bash "$SCRIPT_DIR/.dotnet/dotnet-install.sh" --version "$DOTNET_VERSION" --install-dir .dotnet --no-path
export PATH="$SCRIPT_DIR/.dotnet":$PATH
export DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1
export DOTNET_CLI_TELEMETRY_OPTOUT=1
chmod -R 0755 ".dotnet"
"$SCRIPT_DIR/.dotnet/dotnet" --info

###########################################################################
# INSTALL NUGET
###########################################################################

# Download NuGet if it does not exist.
if [ ! -f "$NUGET_EXE" ]; then
    echo "Downloading NuGet..."
    curl -Lsfo "$NUGET_EXE" "$NUGET_URL"
    if [ $? -ne 0 ]; then
        echo "An error occurred while downloading nuget.exe."
        exit 1
    fi
fi

###########################################################################
# INSTALL FAKE
###########################################################################

if [ ! -f "$FAKE_EXE" ]; then
    mono "$NUGET_EXE" install Fake -ExcludeVersion -Version "$FAKE_VERSION" -OutputDirectory "$TOOLS_DIR"
    if [ $? -ne 0 ]; then
        # Fixed copy-paste error: this message previously said "Cake".
        echo "An error occurred while installing Fake."
        exit 1
    fi
fi

# Make sure that Fake has been installed.
if [ ! -f "$FAKE_EXE" ]; then
    echo "Could not find Fake.exe at '$FAKE_EXE'."
    exit 1
fi

###########################################################################
# WORKAROUND FOR MONO
###########################################################################

export FrameworkPathOverride=/usr/lib/mono/4.5/

###########################################################################
# RUN BUILD SCRIPT
###########################################################################

# Start Fake
exec mono "$FAKE_EXE" build.fsx "${SCRIPT_ARGUMENTS[@]}" --verbosity=$VERBOSITY --configuration=$CONFIGURATION --target=$TARGET $DRYRUN
|
/*
* Copyright (c) 2019, Livio, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the
* distribution.
*
* Neither the name of the Livio Inc. nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
import { RpcStruct } from '../RpcStruct.js';
import { ImageResolution } from './ImageResolution.js';
import { FileType } from '../enums/FileType.js';
import { ImageFieldName } from '../enums/ImageFieldName.js';
/**
 * RPC struct describing an image field: its name, the file types it supports,
 * and its resolution. All setters type-check via validateType and return
 * `this` for chaining.
 */
class ImageField extends RpcStruct {
constructor (parameters) {
super(parameters);
}
/**
 * Sets the image field's name.
 * @param {ImageFieldName} imageFieldName - type-checked via validateType
 * @return {ImageField} this instance, for chaining
 */
setImageFieldName (imageFieldName) {
this.validateType(ImageFieldName, imageFieldName);
this.setParameter(ImageField.KEY_NAME, imageFieldName);
return this;
}
/**
 * @return {ImageFieldName} the stored image field name
 */
getImageFieldName () {
return this.getObject(ImageFieldName, ImageField.KEY_NAME);
}
/**
 * Sets the file types supported for this field.
 * @param {FileType[]} imageTypeSupported - type-checked as an array via validateType
 * @return {ImageField} this instance, for chaining
 */
setImageTypeSupported (imageTypeSupported) {
this.validateType(FileType, imageTypeSupported, true);
this.setParameter(ImageField.KEY_IMAGE_TYPE_SUPPORTED, imageTypeSupported);
return this;
}
/**
 * @return {FileType[]} the supported file types — the setter stores an array;
 *   the original doc said {FileType}; confirm getObject's unwrapping behavior
 */
getImageTypeSupported () {
return this.getObject(FileType, ImageField.KEY_IMAGE_TYPE_SUPPORTED);
}
/**
 * Sets the image resolution for this field.
 * @param {ImageResolution} imageResolution - type-checked via validateType
 * @return {ImageField} this instance, for chaining
 */
setImageResolution (imageResolution) {
this.validateType(ImageResolution, imageResolution);
this.setParameter(ImageField.KEY_IMAGE_RESOLUTION, imageResolution);
return this;
}
/**
 * @return {ImageResolution} the stored image resolution
 */
getImageResolution () {
return this.getObject(ImageResolution, ImageField.KEY_IMAGE_RESOLUTION);
}
}
// Parameter keys used in the underlying RPC struct store.
ImageField.KEY_NAME = 'name';
ImageField.KEY_IMAGE_TYPE_SUPPORTED = 'imageTypeSupported';
ImageField.KEY_IMAGE_RESOLUTION = 'imageResolution';
export { ImageField };
|
#!/usr/bin/env bash
#
# 1) do a "$source activate.sh" to activate this project's Python virtual environment
# 2) do a "$zappa init" before executing these steps. That will create a zappa_settings.json.
# 3) edit the zappa_settings.json to add the "domain" and "certificate_arn" items.
# The certificate ARN can be found at https://console.aws.amazon.com/acm (navigate to the certificate for this
# app's domain).
#
# Deploy the "dev" stage, attach the domain certificate non-interactively
# (--yes skips the confirmation prompt), then push the latest code.
zappa deploy dev
zappa certify --yes
zappa update dev
|
<gh_stars>0
'use strict';
// NOTE(review): this file is Babel-compiled CommonJS output (note the interop
// wrappers and `(0, fn)` call forms); prefer editing the original source over
// this artifact.
exports.__esModule = true;
var _assign = require('babel-runtime/core-js/object/assign');
var _assign2 = _interopRequireDefault(_assign);
// Merges the common basic components (Icon, Loading, Cell, CellGroup) into the
// SFC's component registry, then delegates to create-basic.
exports.default = function (sfc) {
sfc.components = (0, _assign2.default)(sfc.components || {}, {
Icon: _icon2.default,
Loading: _loading2.default,
Cell: _cell2.default,
CellGroup: _cellGroup2.default
});
return (0, _createBasic2.default)(sfc);
};
var _createBasic = require('./create-basic');
var _createBasic2 = _interopRequireDefault(_createBasic);
var _icon = require('../icon');
var _icon2 = _interopRequireDefault(_icon);
var _loading = require('../loading');
var _loading2 = _interopRequireDefault(_loading);
var _cell = require('../cell');
var _cell2 = _interopRequireDefault(_cell);
var _cellGroup = require('../cell-group');
var _cellGroup2 = _interopRequireDefault(_cellGroup);
// Standard Babel CommonJS/ES-module interop helper.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Create a component with common options
*/
; |
#!/bin/bash
# Build the OpenTracing nginx module binaries for every supported nginx version.
set -e
[ -z "${OPENTRACING_VERSION}" ] && export OPENTRACING_VERSION="v1.6.0"
NAME_SUFFIX=${NAME_SUFFIX:-""}
NGINX_VERSIONS=(1.21.0 1.20.1 1.20.0 1.19.10 1.19.9 1.19.8 1.19.7 1.19.6 1.19.5 1.19.4 1.19.3 1.19.2 1.18.0 1.17.8 1.17.3 1.17.2 1.17.1 1.17.0 1.16.1 1.16.0 1.15.8 1.15.1 1.15.0 1.14.2 1.13.6)
# Compile for a portable cpu architecture
export CFLAGS="-march=x86-64 -fPIC"
export CXXFLAGS="-march=x86-64 -fPIC"
export LDFLAGS="-fPIC"
./ci/build_static_opentracing.sh
# Iterate with "${NGINX_VERSIONS[@]}" (quoted, element-wise expansion) rather
# than the unquoted ${NGINX_VERSIONS[*]}, which joins and re-splits on IFS and
# is glob-expanded.
for NGINX_VERSION in "${NGINX_VERSIONS[@]}"
do
    ./ci/build_module_binary.sh "${NGINX_VERSION}" "${NAME_SUFFIX}"
done
|
"""
Analyze the following code and suggest how to optimize it.
The given code could be optimized by using a regular expression to check for numbers and special characters instead of iterating over each character in the password. This would reduce the number of iterations and hence improve the performance of the code.
The code could also be optimized by checking for the password length and the requirement of special characters and numbers simultaneously instead of checking for them separately.
""" |
<gh_stars>0
import React from 'react';
import { Link } from 'react-router';
import { connect } from 'react-redux';
import { deleteMinionThunk } from '../store/minions';
const AllMinions = props => {
const minions = props.minions.map(minion => {
let name = minion.name.match(/.{1,11}/g).join('\n');
return (
<div className="minion-grid" key={minion.id}>
<Link to={`/minions/${minion.id}`} >
<img className="button minion-thumbnail" src="public/img/minion.svg" alt="minion" />
<p>{name}</p>
<p>ID #{minion.id}</p>
</Link>
<img onClick={() => props.deleteMinion(minion.id)} className="button x-button" src="public/img/x_button.svg" alt="" />
</div>
)
});
return (
<div>
<div id="minions-landing">
<div id="minions-title" className="label minions-label">MINIONS.exe</div>
<div id="all-minions">
{ minions }
<div id="add-minion-button-grid" className="minion-grid">
<Link to="/minions/new">
<img id="add-minion-button" className="button" src="public/img/add_button.svg" alt="" />
</Link>
</div>
</div>
</div>
<div className="button back-button">
<Link to="/">
<img className="button" src="public/img/arrow.svg" />
</Link>
</div>
</div>
)
}
// Expose the `minions` slice of the store as a prop.
const mapState = state => ({ minions: state.minions });

// Provide a pre-bound action creator for deleting a minion by id.
const mapDispatch = dispatch => ({
  deleteMinion: minionId => dispatch(deleteMinionThunk(minionId))
});

export default connect(mapState, mapDispatch)(AllMinions);
# Register the docker-compose plugin sub-commands for the given repo.
# $1 = repo name (required); remaining args are forwarded verbatim.
function _do_docker_compose() {
    local repo=${1?'repo arg required'}
    shift 1
    # ${_DO_DOCKER_COMPOSE_CMDS} is deliberately left unquoted so it
    # word-splits into the individual command names.
    # shellcheck disable=SC2086
    # "$@" is now quoted: the previous bare $@ re-split any forwarded
    # argument that contained whitespace.
    _do_repo_plugin_cmd_add "${repo}" 'docker-compose' ${_DO_DOCKER_COMPOSE_CMDS} "$@"
}
|
<gh_stars>0
import {
IBuildState,
IPlugin,
IStep
} from '@spikedpunch/forge'
import { Readable, Writable, Transform } from 'readable-stream'
import { JsonChunkReadStream, JsonObjectReadStream, JsonSaxReadStream } from './Readable'
import { JsonWritableStream } from './Writable'
import { JsonStreamOptions } from './JsonStreamOptions'
import { JsonTransformStream } from './Transform'
/** How JSON input files are turned into a Readable (see JsonPlugin.read). */
export enum StreamMode {
    /** Handled by JsonObjectReadStream. */
    Object = 0,
    /** Handled by JsonSaxReadStream. */
    Sax = 1,
    /** Handled by JsonChunkReadStream. */
    Chunk = 2
}
/**
*
*/
export class JsonPlugin implements IPlugin {
readonly name: string = 'forge-plugin-json'
constructor() {
}
// async createEnvoy(state: IBuildState, info: StepInfo): Promise<IEnvoy> {
// let options = await JsonStreamOptions.fromStep(state, info)
// return new JsonEnvoy(options)
// }
async read(state: IBuildState, step: IStep): Promise<Readable> {
let options = await JsonStreamOptions.fromStep(state, step.info)
switch (options.mode) {
case StreamMode.Chunk: {
return new JsonChunkReadStream(options.files)
}
case StreamMode.Object: {
return new JsonObjectReadStream(options.files)
}
case StreamMode.Sax: {
return new JsonSaxReadStream(options.files)
}
default: {
throw new Error(`Unsupported 'mode' encoutnered when processing a JSON step.`)
}
}
}
async write(state: IBuildState, step: IStep): Promise<Writable> {
let options = await JsonStreamOptions.fromStep(state, step.info)
return new JsonWritableStream(options.outFile)
}
async transform(state: IBuildState, step: IStep): Promise<Transform> {
let options = await JsonStreamOptions.fromStep(state, step.info)
return new JsonTransformStream(options.outFile)
}
} |
/// Computes `num`! recursively.
/// - Parameter num: the value to take the factorial of; values <= 1 return 1.
func Factorial(num: Int) -> Int {
    // Base case broadened from `== 1` to `<= 1`: the original recursed
    // forever for 0 or negative input.
    if (num <= 1) {
        return 1
    }
    // The recursive call must repeat the `num:` argument label; the original
    // `Factorial(num - 1)` does not compile in Swift.
    return num * Factorial(num: num - 1)
}

let result = Factorial(num: 7)
print(result) // Output: 5040
<reponame>scoslo5512/LIRI
require("dotenv").config();
const keys = require("./keys.js");
const Spotify = require('node-spotify-api');
const Twitter = require('twitter');
let request = require("request");
let fs = require("fs");
let spotify = new Spotify(keys.spotify);
let client = new Twitter(keys.twitter);
let action = process.argv[2];
console.log(keys.spotify);
console.log(action);
// functions depending on the input
// twitter
// Prints the text and creation timestamp of the authenticated account's 10
// most recent tweets.
// NOTE(review): API errors are silently swallowed (the `if (!error)` branch
// has no else) -- consider logging `error`.
function twitter(){
    client.get('statuses/user_timeline', {count: 10}, function(error, tweets, response) {
        if (!error) {
            tweets.forEach((results) => {
                // need to console log text and the date time (figure out that element)
                console.log(results.text)
                console.log(results.created_at)
            })
            //console.log("these are my tweets", tweets);
        }
    })
}
// spotify function
// Searches Spotify for `songName` as a track and dumps the raw result items.
// NOTE(review): no fallback when songName is undefined -- the assignment's
// usual default ("The Sign") is not implemented here; confirm requirements.
function music(songName){
    spotify.search({ type: 'track', query: songName }, function(err, data) {
        if (err) {
            return console.log('Error occurred: ' + err);
        }
        console.log(data.tracks.items);
    })
};
// omdb function
// Queries OMDb for `movieName` and prints a summary object to the console.
// Errors and non-200 responses are silently ignored (original behavior kept).
function movie(movieName) {
    request(`http://www.omdbapi.com/?t=${movieName}&apikey=trilogy`, function(error, response, body) {
        // If the request is successful (i.e. if the response status code is 200)
        if (!error && response.statusCode === 200) {
            // Parse the body ONCE instead of re-parsing it for every field.
            let parsed = JSON.parse(body);
            // OMDb omits `Ratings` entirely when the title is not found, and
            // the array may hold fewer than three entries; guard the access so
            // a missing rating prints `undefined` instead of throwing.
            let ratings = parsed.Ratings || [];
            let movieData = {
                "Title of the movie: ": parsed.Title,
                "Year the movie came out: ": parsed.Year,
                "IMDB rating of this movie: ": parsed.imdbRating,
                // Typo fixed in the printed key: "thie" -> "the".
                "Rotten tomatoes rating of the movie :": ratings[2],
                "Country where the movie was produced :": parsed.Country,
                "Language of the movie: ": parsed.Language,
                "Plot of the movie: ": parsed.Plot,
                "Actors in the movie: ": parsed.Actors
            }
            console.log(movieData);
        }
    });
}
// bonus
// fs.appendFile will work to move data to the .txt file
// The original code called says() without defining it anywhere, so the
// "do-what-it-says" branch crashed with a ReferenceError. Reconstructed here
// per the standard LIRI behavior: read random.txt ("command,argument") and
// dispatch it. NOTE(review): confirm random.txt's format against the course
// assets.
function says() {
    fs.readFile("random.txt", "utf8", function(error, data) {
        if (error) {
            return console.log('Error occurred: ' + error);
        }
        let parts = data.split(",");
        let command = parts[0].trim();
        let argument = (parts[1] || "").trim();
        switch (command) {
            case "my-tweets":
                twitter();
                break;
            case "spotify-this-song":
                music(argument);
                break;
            case "movie-this":
                movie(argument);
                break;
        }
    });
}

// need to run different functions based off of argv
switch (action) {
    case "my-tweets":
        twitter();
        break;
    case "spotify-this-song":
        music(process.argv[3]);
        break;
    case "movie-this":
        movie(process.argv[3]);
        break;
    case "do-what-it-says":
        says();
        break;
}
#!/bin/bash
#
# SPDX-License-Identifier: Apache-2.0
#
# Watchdog: every 10 seconds, restart main.js via ./start.sh if it is not
# running. (Shebang moved to line 1 -- it was previously below the license
# header, where the kernel ignores it.)
while true; do
    sleep 10
    # Count main.js processes; `grep -v grep` drops this pipeline's own grep.
    process_num=$(ps -elf | grep -v grep | grep main.js | wc -l)
    # Quoted expansion so an empty result fails the test cleanly instead of
    # producing a [ syntax error.
    if [ "${process_num}" -eq 0 ]; then
        ./start.sh
    fi
done
|
package com.soothsayer.authn.oauth2;
import org.springframework.security.oauth2.common.*;
import org.springframework.security.oauth2.common.util.RandomValueStringGenerator;
import org.springframework.security.oauth2.provider.OAuth2Authentication;
import org.springframework.security.oauth2.provider.token.TokenEnhancer;
import java.util.Date;
/**
 * TokenEnhancer that replaces the access-token value (and, when present, the
 * refresh-token value) with fresh 40-character random strings, preserving the
 * refresh token's expiration date.
 */
public class OAuthAcessTokenEnhancer implements TokenEnhancer {
    private RandomValueStringGenerator generator = new RandomValueStringGenerator(40);

    @Override
    public OAuth2AccessToken enhance(OAuth2AccessToken accessToken, OAuth2Authentication authentication) {
        DefaultOAuth2AccessToken enhanced = new DefaultOAuth2AccessToken(accessToken);
        enhanced.setValue(generator.generate());

        OAuth2RefreshToken originalRefresh = enhanced.getRefreshToken();
        if (originalRefresh == null) {
            // No refresh token to re-issue; only the access token changes.
            return enhanced;
        }

        DefaultOAuth2RefreshToken reissued = new DefaultOAuth2RefreshToken(generator.generate());
        if (originalRefresh instanceof ExpiringOAuth2RefreshToken) {
            // Carry the original expiry over to the re-issued token.
            Date expiration = ((ExpiringOAuth2RefreshToken) originalRefresh).getExpiration();
            reissued = new DefaultExpiringOAuth2RefreshToken(reissued.getValue(), expiration);
        }
        enhanced.setRefreshToken(reissued);
        return enhanced;
    }
}
|
var assert = require('assert');
var DispatchManager = require('./../index');

// Token returned by #register(); reused by the #unregister() cases below
// (the cases are order-dependent).
var testToken;

describe('DispatchManager', () => {
    describe('#constructor', () => {
        it('should only return one and the same instance', () => {
            // Requiring the module a second time must yield the same singleton.
            var dm = require('./../index');
            assert.deepStrictEqual(DispatchManager, dm);
        })
    });
    describe('#register()', () => {
        it('should return token', () => {
            testToken = DispatchManager.register('test', () => { });
            assert.notEqual(testToken, undefined);
            assert.notEqual(testToken, null);
        });
    });
    describe('#dispatch()', () => {
        it('can find existing dispatcher', () => {
            var dispatched = DispatchManager.dispatch('test');
            assert.equal(dispatched, true);
        });
        it('returns false if dipatcher does not exist for action', () => {
            var dispatched = DispatchManager.dispatch('test-not-registered');
            // NOTE(review): asserts "not 1" rather than "=== false"; a very
            // loose check -- confirm dispatch()'s return type and tighten.
            assert.notEqual(dispatched, 1);
        });
    });
    describe('#unregister', () => {
        it('returns true if dispatcher found, and called unregister with token', () => {
            var exists = DispatchManager.unregister('test', testToken);
            // NOTE(review): equal(exists, 1) also passes for `true` via ==
            // coercion -- confirm the intended return value before tightening.
            assert.equal(exists, 1);
        });
        it('returns false if unregistering an event that doesn\'t exist', () => {
            var doesntExistAnymore = DispatchManager.unregister('test-not-registered', testToken);
            assert.equal(doesntExistAnymore, false);
        });
    });
});
export * from './extractPrInfoFromContext';
|
using System;

/// <summary>Holds process error codes, each tagged with a resource (.resx) key.</summary>
public class CustomErrorCodeEnumeration
{
    /// <summary>Exit codes returned by the tooling; the attribute names the
    /// localized message resource for each code.</summary>
    public enum ErrorCode
    {
        [ResxKey("RET_GIT_COMMAND_FAILED")]
        FailedToRunGitCommand = 2,
        [ResxKey("RET_NUGET_COMMAND_FAILED")]
        FailedToRunNugetCommand = 3,
        [ResxKey("RET_BUILD_SCRIPT_FAILED")]
        FailedToRunBuildScript // Unspecified value, will be assigned automatically (previous + 1 = 4)
    }

    /// <summary>Associates a resource key with an enum field; one per field,
    /// not inherited.</summary>
    [AttributeUsage(AttributeTargets.Field, Inherited = false, AllowMultiple = false)]
    public class ResxKeyAttribute : Attribute
    {
        /// <summary>The resource key supplied at the attribute site.</summary>
        public string Key { get; }

        public ResxKeyAttribute(string key)
        {
            Key = key;
        }
    }
}
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.coach;
/*
* Initial date: 17 Jun 2020<br>
* @author aboeckle, <EMAIL>
*/
/**
 * Per-role permission checks used by the coaching module to decide which
 * user-related views and actions a coach role may access.
 */
public interface RoleSecurityCallback {

    /**
     * Is this role allowed to reset passwords?
     *
     * @return boolean
     */
    public boolean canResetPassword();

    /**
     * Is this role allowed to view efficiency statements?
     *
     * @return true if efficiency statements may be viewed
     */
    public boolean canViewEfficiencyStatements();

    /**
     * Is this role allowed to show the user's calendar?
     *
     * @return true if the calendar may be shown
     */
    public boolean canViewCalendar();

    /**
     * Is this role allowed to receive certificates per mail?
     *
     * @return true if certificates may be received by mail
     */
    public boolean canReceiveCertificatesMail();

    /**
     * Is this role allowed to contact the user?
     *
     * @return true if contacting the user is allowed
     */
    public boolean canContact();

    /**
     * Is this role allowed to view the progress and status of a course?
     *
     * @return true if course progress and status may be viewed
     */
    public boolean canViewCourseProgressAndStatus();

    /**
     * Is this role allowed to see a list of courses and curriculums?
     *
     * @return true if courses and curriculums may be listed
     */
    public boolean canViewCoursesAndCurriculum();

    /**
     * Is this role allowed to see a user's lectures and absences?
     *
     * @return true if lectures and absences may be viewed
     */
    public boolean canViewLecturesAndAbsences();

    /**
     * Is this role allowed to check the quality report?
     *
     * @return true if the quality report may be viewed
     */
    public boolean canViewQualityReport();

    /**
     * Is this role allowed to view resources and bookings?
     *
     * @return true if resources and bookings may be viewed
     */
    public boolean canViewResourcesAndBookings();

    /**
     * Is this role allowed to view and edit a profile?
     *
     * @return true if the profile may be viewed and edited
     */
    public boolean canViewAndEditProfile();

    /**
     * Is this role allowed to list a user's group memberships?
     *
     * @return true if group memberships may be listed
     */
    public boolean canViewGroupMemberships();

    /**
     * Is this user allowed to see administrative properties?
     *
     * @return true for administrative users
     */
    public boolean isAdministrativeUser();

    /**
     * Is this user allowed to upload external certificates in the efficiency statement?
     *
     * @return true if external certificates may be uploaded
     */
    public boolean canUploadExternalCertificate();
}
|
// Create Java objects from script code via Nashorn's Java.type bridge.
var HashMap = Java.type("java.util.HashMap");
var mapDef = new HashMap();
var map100 = new HashMap(100);
print(Java.type("java.util.Map").Entry);
print('内部类:' + Java.type("java.util.Map$Entry")); // Access the nested class with the `$` separator. (The printed '内部类:' label — "inner class:" — is a runtime string and is left untouched.)
|
<gh_stars>0
package ytt
import (
. "code.cloudfoundry.org/yttk8smatchers/matchers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Verifies that the Istio sidecar inbound-port exclusion annotations (which
// expose the Prometheus metrics ports to external scrapers) are rendered only
// when `allow_prometheus_metrics_access` is true.
var _ = Describe("External Prometheus scraping access", func() {
	var ctx RenderingContext
	var data map[string]interface{}
	var templates []string

	BeforeEach(func() {
		// Template and value files shared by every example in this suite.
		templates = []string{
			pathToFile("config/capi"),
			pathToFile("config/metrics"),
			pathToFile("config/uaa"),
			pathToFile("config/namespaces.star"),
			pathToFile("tests/ytt/capi/capi-values.yml"),
			pathToFile("tests/ytt/metrics/metrics-values.yml"),
			pathToFile("tests/ytt/uaa/uaa-values.yml"),
		}
	})

	// Re-render after each Context has installed its `data` overrides.
	JustBeforeEach(func() {
		ctx = NewRenderingContext(templates...).WithData(data)
	})

	Context("disabled", func() {
		BeforeEach(func() {
			data = map[string]interface{}{
				"allow_prometheus_metrics_access": false,
			}
		})
		It("should not have Istio proxy inbound exclusion rule", func() {
			Expect(ctx).To(ProduceYAML(
				And(
					Not(WithDeployment("cf-api-server", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9102"`)),
					Not(WithDeployment("metric-proxy", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9090"`)),
					Not(WithDeployment("uaa", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9102"`)),
				)))
		})
	})

	Context("enabled", func() {
		BeforeEach(func() {
			data = map[string]interface{}{
				"allow_prometheus_metrics_access": true,
			}
		})
		It("should have Istio proxy inbound exclusion rule", func() {
			Expect(ctx).To(ProduceYAML(
				And(
					WithDeployment("cf-api-server", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9102"`),
					WithDeployment("metric-proxy", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9090"`),
					WithDeployment("uaa", "cf-system").WithSpecYaml(`
template:
metadata:
annotations:
traffic.sidecar.istio.io/excludeInboundPorts: "9102"`),
				)))
		})
	})
})
|
// Truck garage exercise: tracks trucks, their tire wear, and spare tire sets,
// wired to four buttons (add truck / add tires / work / end of shift).
function solve() {
    let outputElement = document.getElementsByTagName('textarea')[0];
    let newTruckButton = document.getElementsByTagName('button')[0];
    newTruckButton.addEventListener('click', addTruck);
    let newTiresButton = document.getElementsByTagName('button')[1];
    newTiresButton.addEventListener('click', addNewTires);
    let workButton = document.getElementsByTagName('button')[2];
    workButton.addEventListener('click', calculateWork);
    let endOfTheShiftButton = document.getElementsByTagName('button')[3];
    endOfTheShiftButton.addEventListener('click', showResult);
    // plate number -> { tires: "space-separated condition values", distance: total }
    let trucks = {};
    let trucksElement = document.querySelector('#exercise section:nth-child(2) > fieldset:nth-child(2)');
    let spareTiresElement = document.querySelector('#exercise section:nth-child(2) > fieldset');
    // FIFO queue of spare sets; each entry is a one-element array wrapping the
    // condition string.
    let spareTires = [];

    // Registers a truck (first occurrence of a plate wins) and renders a tile.
    // NOTE(review): a duplicate plate still appends a new div even though the
    // truck record is not re-created -- confirm this is the judge's expected
    // behavior before changing it.
    function addTruck() {
        let plateNumberInput = document.getElementById('newTruckPlateNumber').value;
        let tiresConditionInput = document.getElementById('newTruckTiresCondition').value;
        if (!trucks.hasOwnProperty(plateNumberInput)) {
            trucks[plateNumberInput] = {tires: tiresConditionInput, distance: 0};
        }
        let divElement = document.createElement('div');
        divElement.setAttribute('class', 'truck');
        divElement.textContent = plateNumberInput;
        trucksElement.appendChild(divElement);
    }

    // Queues a spare tire set and renders it in the spare-tires fieldset.
    function addNewTires() {
        let newTiresInput = document.getElementById('newTiresCondition').value;
        let divElement = document.createElement('div');
        spareTires.push([newTiresInput]);
        divElement.setAttribute('class', 'tireSet');
        divElement.textContent = newTiresInput;
        spareTiresElement.appendChild(divElement);
    }

    // Applies a trip to a truck: wears each tire by ceil(distance / 1000) and
    // keeps only tires still >= 0. A trip counts only if all 8 tires survive;
    // otherwise the oldest spare set is mounted and the trip is retried once.
    function calculateWork() {
        let inputPlateNumber = document.getElementById('workPlateNumber').value;
        let inputDistance = Number(document.getElementById('distance').value);
        if (trucks.hasOwnProperty(inputPlateNumber)) {
            let tiresCondition = trucks[inputPlateNumber].tires.split(' ');
            let neededCondition = Math.ceil(inputDistance / 1000);
            tiresCondition = tiresCondition
                .map(x => x - neededCondition)
                .filter(f => f >= 0);
            if (tiresCondition.length === 8) {
                trucks[inputPlateNumber].distance += inputDistance;
                tiresCondition = tiresCondition.join(' ');
                trucks[inputPlateNumber].tires = tiresCondition;
            } else {
                // if we have spare tires
                if (spareTires.length > 0) {
                    let newTires = spareTires
                        .shift()[0];
                    // Remove the matching spare-set div from the DOM.
                    // NOTE(review): matches by text content, so the first div
                    // with an identical condition string is removed -- may not
                    // be the dequeued one when duplicates exist.
                    let allTiresElement = Array.from(document.querySelectorAll('#exercise section:nth-child(2) > fieldset div'));
                    for (let currentTires of allTiresElement) {
                        if (currentTires.textContent === newTires) {
                            currentTires.parentNode.removeChild(currentTires);
                        }
                    }
                    trucks[inputPlateNumber].tires = newTires;
                    // Retry the same trip on the freshly mounted set.
                    let tiresCondition = trucks[inputPlateNumber].tires.split(' ');
                    tiresCondition = tiresCondition
                        .map(x => x - neededCondition)
                        .filter(f => f >= 0);
                    if (tiresCondition.length === 8) {
                        trucks[inputPlateNumber].distance += inputDistance;
                        tiresCondition = tiresCondition.join(' ');
                        trucks[inputPlateNumber].tires = tiresCondition;
                    }
                }
            }
        }
    }

    // Prints the distance traveled per truck plus the remaining spare count.
    function showResult() {
        for (let [plate, tiresAndDistance] of Object.entries(trucks)) {
            let distance = tiresAndDistance.distance;
            outputElement.value += `Truck ${plate} has traveled ${distance}.\n`;
        }
        outputElement.value += `You have ${spareTires.length} sets of tires left.\n`;
    }
}
import { api, authorize, entityProvider, HttpStatusError, route, val } from "plumier"
import { getRepository } from "typeorm"
import { Cart } from "../carts/carts-entity"
import { Product } from "../products/products-entity"
import { CartItem } from "./carts-items-entity"
@api.tag("Shopping Cart")
@route.root("carts/:pid/items")
@authorize.route("ResourceOwner")
export class CartItemController {
    // POST carts/:pid/items -- add a product to the cart identified by `pid`.
    // If the product is already in the cart, its quantity is incremented and
    // the existing row is returned; otherwise a new row's id is returned.
    // Raises 404 when the cart does not exist, 400 for an unknown product id.
    @route.post("")
    @entityProvider(Cart, "pid")
    async save(@val.required() pid: number, data: CartItem) {
        const cartItemRepo = getRepository(CartItem)
        const cartRepo = getRepository(Cart)
        const itemRepo = getRepository(Product)
        const cart = await cartRepo.findOne(pid)
        if (!cart) throw new HttpStatusError(404, "Cart not found")
        // Load the product with its shop so the shop can be copied onto the
        // new cart item below.
        const item = await itemRepo.findOne(data.product.id, { relations: ["shop"] })
        if (!item) throw new HttpStatusError(400, "Invalid item id provided")
        const exists = await cartItemRepo.findOne({ where: { cart: cart.id, product: item.id } })
        if (exists) {
            exists.quantity += data.quantity
            await cartItemRepo.save(exists)
            return exists
        }
        else {
            const inserted = await cartItemRepo.save({ ...data, cart: { id: cart.id }, shop: item.shop })
            return { id: inserted.id }
        }
    }
}
|
#pragma once

#include <argparse/argparse.hpp>

namespace argparse {
// Diagnostic list type filled in by parse()'s out-parameters.
using StringList = std::list<std::string>;

// Parses `args` against `registeredArguments` using the given option prefixes.
// Out-params: missingArguments (required but absent),
// loseArguments (NOTE(review): presumably unmatched/stray arguments -- confirm
// against the implementation), and remainingArguments (args after `terminator`).
ArgumentParserResult parse(
    const ArgumentParser &parserInstance,
    const std::vector<std::string> &args, argparse::ArgumentParser::Arguments &registeredArguments,
    const std::string &shortOptionPrefix, const std::string &longOptionPrefix,
    const std::string &terminator,
    StringList &missingArguments, StringList &loseArguments, StringList &remainingArguments);
} // namespace argparse
|
# Launch Horovod-style distributed training (2 MPI processes on localhost) of
# a textcnn single-sentence classifier via the t2t_bert train/eval entry point.
# NOTE(review): the /data/xuht paths and the relative script path are
# site-specific; run from the repo root on the training host.
# (Comments must stay above the command: every line below is part of one
# backslash-continued invocation.)
mpirun -np 2 \
 -H localhost:2 \
python ./t2t_bert/distributed_bin/hvd_train_eval_api.py \
 --buckets "/data/xuht" \
 --config_file "./data/textcnn/textcnn.json" \
 --init_checkpoint "" \
 --vocab_file "chinese_L-12_H-768_A-12/vocab.txt" \
 --label_id "/data/xuht/data_security/model/textcnn/data/label_dict.json" \
 --max_length 64 \
 --train_file "data_security/model/textcnn/data/train_tfrecords" \
 --dev_file "data_security/model/textcnn/data/dev_tfrecords" \
 --model_output "data_security/model/textcnn/model/textcnn_20190722" \
 --epoch 50 \
 --num_classes 12 \
 --train_size 6844 \
 --eval_size 767 \
 --batch_size 32 \
 --model_type "textcnn" \
 --if_shard 1 \
 --is_debug 1 \
 --run_type "sess" \
 --opt_type "hvd" \
 --num_gpus 2 \
 --parse_type "parse_batch" \
 --rule_model "normal" \
 --profiler "no" \
 --train_op "adam" \
 --running_type "train" \
 --cross_tower_ops_type "paisoar" \
 --distribution_strategy "MirroredStrategy" \
 --load_pretrained "no" \
 --w2v_path "chinese_L-12_H-768_A-12/vocab_w2v.txt" \
 --with_char "no_char" \
 --input_target "a" \
 --decay "no" \
 --warmup "no" \
 --distillation "normal" \
 --temperature 2.0 \
 --distillation_ratio 0.5 \
 --task_type "single_sentence_classification" \
 --classifier order_classifier \
 --mode 'single_task'
<filename>src/MSFileMerger.h
/******************************************************************************
Copyright 2015 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
******************************************************************************/
#ifndef MARACLUSTER_MSFILEMERGER_H_
#define MARACLUSTER_MSFILEMERGER_H_
#include <iostream>
#include <string>
#include <vector>
#include <algorithm>
#include <map>
#include <cstdio>
#include "pwiz/data/msdata/MSDataFile.hpp"
#include "pwiz/data/msdata/MSDataMerger.hpp"
#include <boost/lexical_cast.hpp>
#include <boost/foreach.hpp>
#include "Globals.h"
#include "MSFileHandler.h"
#include "MSClusterMerge.h"
#include "ScanId.h"
namespace maracluster {
// Locates one spectrum inside a cluster merge: which spectrum in the source
// file (spectrumIndex), its slot within the cluster (posInCluster), and the
// index of the merge it belongs to (mergeIdx).
struct MergeScanIndex {
  MergeScanIndex(unsigned int _spectrumIndex, unsigned int _posInCluster, unsigned int _mergeIdx) :
    spectrumIndex(_spectrumIndex), posInCluster(_posInCluster), mergeIdx(_mergeIdx) {}
  unsigned int spectrumIndex, posInCluster, mergeIdx;
};
/**
 * Merges clustered MS/MS spectra into consensus spectra, in-memory for small
 * inputs (mergeSpectraSmall) or via batched cluster bins for large ones
 * (mergeSpectraScalable).
 */
class MSFileMerger : public MSFileHandler {
 public:
  static int mergeMethod_;
  static bool normalize_;
  static int maxMSFilePtrs_, maxSpectraPerFile_;
  static unsigned int maxConsensusSpectraPerFile_;

  // Base-class initializer listed first: base sub-objects are always
  // constructed before members, and the original ordering (members before
  // MSFileHandler) triggered -Wreorder and misrepresented construction order.
  MSFileMerger(std::string& spectrumOutFN) :
    MSFileHandler(spectrumOutFN), numClusterBins_(0), numBatches_(0),
    numMSFilePtrsPerBatch_(0) {}

  void parseClusterFileForMerge(const std::string& clusterFile,
      const size_t minClusterSize);

  void mergeSpectra();

  void mergeAllSpectra(const std::string& spectrumInFN);
  void mergeSpectraSmall();
  void mergeSpectraScalable();

  // Buckets a scan into one of numClusterBins_ bins by scan number.
  inline size_t getClusterBin(const ScanId& si) {
    return si.scannr % numClusterBins_;
  }
  // Ordering predicate for sorting merge jobs by source spectrum index.
  inline static bool lessIndex(const MergeScanIndex& a,
    const MergeScanIndex& b) { return (a.spectrumIndex < b.spectrumIndex); }
 protected:
  unsigned int numClusterBins_, numBatches_, numMSFilePtrsPerBatch_;

  void mergeTwoSpectra(
      std::vector<MZIntensityPair>& mziPairsIn,
      std::vector<MZIntensityPair>& mziPairsFrom,
      double weight);

  void mergeSpectraSet(std::vector<pwiz::msdata::SpectrumPtr>& spectra,
                       pwiz::msdata::SpectrumPtr& consensusSpec);
  void mergeSpectraSetMSCluster(
      std::vector<pwiz::msdata::SpectrumPtr>& spectra,
      ScanId scannr, pwiz::msdata::SpectrumListSimplePtr mergedSpectra);

  void mergeSpectraBin(size_t clusterBin,
      std::vector<pwiz::msdata::MSDataPtr>& msdVector,
      pwiz::msdata::SpectrumListSimplePtr mergedSpectra);
  void mergeSplitSpecFiles();

  // Ceiling division: number of batches needed to cover `total` items.
  inline size_t calcNumBatches(size_t total, size_t batchSize) {
    return (total - 1u) / batchSize + 1u;
  }

  virtual void mergeMccs(std::vector<MassChargeCandidate>& allMccs,
    std::vector<MassChargeCandidate>& consensusMccs, int clusterIdx);

  void splitSpecFilesByConsensusSpec(
      std::map<ScanId, ScanId>& scannrToMergedScannr);
  void createScannrToMergedScannrMap(
      std::map<ScanId, ScanId>& scannrToMergedScannr);
  void writeClusterBins(unsigned int batchIdx,
      std::vector<pwiz::msdata::MSDataPtr>& msDataPtrs,
      std::vector<pwiz::msdata::SpectrumListSimplePtr>& spectrumListPtrMap);
};
} /* namespace maracluster */
#endif /* MARACLUSTER_MSFILEMERGER_H_ */
|
#!/bin/bash
# build libamrfile.so, for use with python
# updated: 2020-05-17
#
# build on a CentOS6 host, for forward compatibility.
#
# bisicles build instructions:
#
#   http://davis.lbl.gov/Manuals/BISICLES-DOCS/readme.html
#
# bisicles and chombo source can be checked out via:
#
#   svn co https://anag-repo.lbl.gov/svn/BISICLES/public/trunk
#   svn co https://anag-repo.lbl.gov/svn/Chombo/release/3.2.patch8
#
# this requires an account, which can be obtained here:
#
#   https://anag-repo.lbl.gov/
# verion information:
#
# bisicles 20200504:
#
#   > r3925 | dmartin | 2020-05-04 09:34:38 +0100 (Mon, 04 May 2020) | 3 lines
#   >
#   > first cut at all sectors done...
#
# chombo 3.2.patch8:
#
#   > r23611 | dmartin | 2019-08-05 20:58:03 +0100 (Mon, 05 Aug 2019) | 3 lines
#   >
#   > added patch8 branch, which is copied from the 3.2.patch7 branch...
# source directory:
SRC_DIR=$(readlink -f $(pwd)/../src)
# build directory:
BUILD_DIR=$(pwd)
# where to output directory containing python files + libamrfile.so:
OUT_DIR=${BUILD_DIR}
# set up modules / environment:
module purge
module load licenses bit gnu/4.8.1 hdf5
# build variables (-fPIC throughout so the result can be a shared object):
CFLAGS='-O2 -fPIC'
CXXFLAGS='-O2 -fPIC'
CPPFLAGS='-O2 -fPIC'
FFLAGS='-O2 -fPIC'
FCFLAGS='-O2 -fPIC'
export CFLAGS CXXFLAGS CPPFLAGS FFLAGS FCFLAGS
# make build directory, and cd:
mkdir -p ${BUILD_DIR}
cd ${BUILD_DIR}
# extract bisicles and chombo:
export BISICLES_HOME=${BUILD_DIR}
tar xzf ${SRC_DIR}/bisicles-20200504.tar.gz
tar xzf ${SRC_DIR}/chombo-3.2.patch8.tar.gz
mv bisicles-20200504 BISICLES
mv chombo-3.2.patch8 Chombo
# make definitions:
\cp ${BISICLES_HOME}/BISICLES/docs/Make.defs.local \
  ${BISICLES_HOME}/Make.defs.local
# update configuration ... (serial build: MPI wrappers and MPI HDF5 blanked):
sed -i "s|^\(BISICLES_HOME\).*$|\1 = ${BISICLES_HOME}|g" \
  ${BISICLES_HOME}/Make.defs.local
sed -i "s|^\(MPICXX\).*$|\1 =|g" \
  ${BISICLES_HOME}/Make.defs.local
sed -i "s|^\(HDFINCFLAGS\).*$|\1 = -I${HDF5_HOME}/include|g" \
  ${BISICLES_HOME}/Make.defs.local
HDF5_LIBS="${HDF5_HOME}/lib/libhdf5hl_fortran.a ${HDF5_HOME}/lib/libhdf5_hl.a ${HDF5_HOME}/lib/libhdf5_fortran.a ${HDF5_HOME}/lib/libhdf5.a -lz -ldl"
sed -i "s|^\(HDFLIBFLAGS\).*$|\1 = -L${HDF5_HOME}/lib ${HDF5_LIBS}|g" \
  ${BISICLES_HOME}/Make.defs.local
sed -i "s|^\(HDFMPIINCFLAGS\).*$|\1 =|g" \
  ${BISICLES_HOME}/Make.defs.local
sed -i "s|^\(HDFMPILIBFLAGS\).*$|\1 =|g" \
  ${BISICLES_HOME}/Make.defs.local
ln -s ${BISICLES_HOME}/Make.defs.local \
  ${BISICLES_HOME}/Chombo/lib/mk/Make.defs.local
# '-march=native' seems to cause issues ... :
\cp ${BISICLES_HOME}/Chombo/lib/mk/compiler/Make.defs.GNU \
  ${BISICLES_HOME}/Chombo/lib/mk/compiler/Make.defs.GNU.original
sed -i 's|-march=native||g' \
  ${BISICLES_HOME}/Chombo/lib/mk/compiler/Make.defs.GNU
# build libamrfile:
cd ${BISICLES_HOME}/BISICLES/code/libamrfile
# static link stdc++ and fortran libraries:
\cp GNUmakefile GNUmakefile.original
sed -i "s|\(\$(HDFLIBFLAGS)\)|\1 ${GNU_HOME}/lib64/libstdc++.a ${GNU_HOME}/lib64/libgfortran.a|g" \
  GNUmakefile
make libamrfile.so OPT=TRUE MPI=FALSE USE_PETSC=FALSE
# remove dynamic libstdc++ from requirements (it was statically linked above):
\cp libamrfile.so libamrfile.so.original
patchelf --remove-needed libstdc++.so.6 libamrfile.so
strip libamrfile.so
# python files:
cd python/AMRFile
/usr/bin/python setup.py build
cp -r build/lib/amrfile ${OUT_DIR}/
cd ../..
cp libamrfile.so ${OUT_DIR}/amrfile/
# patch io.py so the bundled libamrfile.so is loaded from the package dir:
sed -i 's|^\(import numpy\)|import os\n\1|g' \
  ${OUT_DIR}/amrfile/io.py
sed -i \
  's|^libamrfile = .*$|amr_dir = os.path.dirname(__file__)\namr_lib = "libamrfile.so"\nlibamrfile = CDLL(os.path.sep.join([amr_dir, amr_lib]))|g' \
  ${OUT_DIR}/amrfile/io.py
cd ${OUT_DIR}
chmod 644 amrfile/*
tar czf amrfile.tar.gz amrfile/
<reponame>tomasbasham/ember-cli-persistence<gh_stars>1-10
'use strict';
module.exports = {
description: 'Generates a persistence container adapter unit test.',
/*
* Define a series of custom
* template variables.
*
* @method locals
*
* @params {Object} options
* Object containing general and entity-specific options.
*/
locals: function(options) {
return {
friendlyTestDescription: 'Unit | Container | ' + options.entity.name
};
}
};
|
#!/usr/bin/env bash
# Copyright 2020 The Knative Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Documentation about this script and how to use it can be found
# at https://github.com/knative/test-infra/tree/master/ci
# Pull in the shared knative release helpers (defines main, run_go_tool, TAG,
# KO_FLAGS, ARTIFACTS_TO_PUBLISH handling, ...).
source $(dirname $0)/../vendor/knative.dev/hack/release.sh

# Yaml files to generate, and the source config dir for them.
declare -A COMPONENTS
COMPONENTS=(
  ["net-contour.yaml"]="config"
  ["contour.yaml"]="config/contour"
)
readonly COMPONENTS

# Release bundles: each release yaml is the concatenation of the listed
# component yamls, in order.
declare -A RELEASES
RELEASES=(
  ["release.yaml"]="net-contour.yaml contour.yaml"
)
readonly RELEASES
# Builds every component yaml with ko, stamps release labels on tagged builds,
# assembles the release bundles, and exports ARTIFACTS_TO_PUBLISH for the
# release tooling sourced above.
function build_release() {
  # Update release labels if this is a tagged release
  if [[ -n "${TAG}" ]]; then
    echo "Tagged release, updating release labels to serving.knative.dev/release: \"${TAG}\""
    LABEL_YAML_CMD=(sed -e "s|serving.knative.dev/release: devel|serving.knative.dev/release: \"${TAG}\"|")
  else
    echo "Untagged release, will NOT update release labels"
    LABEL_YAML_CMD=(cat)
  fi
  # Build the components
  local all_yamls=()
  for yaml in "${!COMPONENTS[@]}"; do
    local config="${COMPONENTS[${yaml}]}"
    echo "Building Knative net-contour - ${config}"
    ko resolve --platform=all ${KO_FLAGS} -f ${config}/ | "${LABEL_YAML_CMD[@]}" | run_go_tool github.com/dprotaso/dekupe dekupe > ${yaml}
    all_yamls+=(${yaml})
  done
  # Assemble the release
  for yaml in "${!RELEASES[@]}"; do
    echo "Assembling Knative net-contour - ${yaml}"
    echo "" > ${yaml}
    for component in ${RELEASES[${yaml}]}; do
      echo "---" >> ${yaml}
      echo "# ${component}" >> ${yaml}
      cat ${component} >> ${yaml}
    done
    all_yamls+=(${yaml})
  done
  ARTIFACTS_TO_PUBLISH="${all_yamls[@]}"
}

# "$@" quoted: the previous bare $@ re-split any argument containing spaces
# before handing it to main.
main "$@"
|
<gh_stars>1-10
import networkx as nx
import pickle
import main_pipelines as mp
def main():
    """Build a 21-node star graph, display it, and persist it both as a
    pickle and as a tab-separated edge-list text file."""
    graph = nx.Graph()
    for leaf in range(1, 21):
        graph.add_edge(0, leaf)
    mp.quick_display(graph)

    with open("output_files/star_pickle", 'wb') as pickle_file:
        pickle.dump(graph, pickle_file)

    with open("output_files/star_edgelist.txt", 'w') as txt_file:
        # Header line: node count and a directedness flag (0 = undirected).
        directed = 0
        txt_file.write("{}\t{}\n".format(len(graph.nodes), directed))
        for source, target in graph.edges:
            txt_file.write("{}\t{}\n".format(source, target))


if __name__ == '__main__':
    main()
|
<reponame>hejack0207/ssh-web-console<filename>src/utils/config.go
package utils
import (
"gopkg.in/yaml.v2"
"io/ioutil"
"log"
"os"
)
const (
KEY_SSH_IO_MODE = "ssh_io_mode"
)
// Config is the global application configuration, populated once at startup
// by this package's init() from conf/config.yaml.
var Config struct {
	// Site groups the web-server settings.
	Site struct {
		AppName string `yaml:"app_name"`
		RunMode string `yaml:"runmode"`
		DeployHost string `yaml:"deploy_host"`
		ListenAddr string `yaml:"listen_addr"`
		StaticPrefix string `yaml:"static_prefix"` // http prefix
		// hard static is that generate the files content into go code, and compile into go binary.
		HardStaticDir string `yaml:"hard_static_dir"` // filesystem dir
		// soft static is reading static files in this dir into memory.
		SoftStaticDir string `yaml:"soft_static_dir"`
	} `yaml:"site"`
	// VPN toggles the Juniper VPN integration.
	VPN struct {
		Enable bool `yaml:"enable"`
	} `yaml:"vpn_juniper"`
	SSH struct {
		// Interval for the SSH buffer checker (units defined by the consumer).
		BufferCheckerCycleTime int `yaml:"buffer_checker_cycle_time"`
	} `yaml:"ssh"`
	// Jwt configures token signing and lookup.
	Jwt struct {
		Secret string `yaml:"jwt_secret"`
		TokenLifetime int64 `yaml:"token_lifetime"`
		Issuer string `yaml:"issuer"`
		QueryTokenKey string `yaml:"query_token_key"`
	} `yaml:"jwt"`
}
// init loads conf/config.yaml into the package-level Config struct.
// Any open, read, or parse failure is fatal: the process cannot run
// without its configuration.
func init() {
	file, err := os.Open("conf/config.yaml")
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	raw, err := ioutil.ReadAll(file)
	if err != nil {
		log.Fatal(err)
	}
	if err = yaml.Unmarshal(raw, &Config); err != nil {
		log.Fatalf("error: %v", err)
	}
}
|
import React from 'react';
import { ComponentMeta, Story } from '@storybook/react';
import { withThemeProvider } from 'stories/decorators';
import { Icon, IconName, IIconProps } from '.';
// Storybook metadata: registers the Icon component under "Components/Icon"
// and wraps every story with the shared theme decorator.
export default {
  title: 'Components/Icon',
  component: Icon,
  decorators: [withThemeProvider],
} as ComponentMeta<typeof Icon>;
// Story that renders one <Icon> for every SVG found in ./icons.
export const IconDefault = () => {
  // Scan ./icons (recursively) for SVG files at build time via webpack.
  const context = require.context('./icons', true, /\.svg$/);
  // "./arrow.svg" -> "arrow": strip the path prefix and extension.
  const names = context
    .keys()
    .map(key => key.replace('./', '').replace('.svg', '')) as IconName[];
  return (
    <>
      {names.map(name => (
        <Icon name={name} key={name} />
      ))}
    </>
  );
};
// Template story: forwards all Storybook args straight to Icon.
const IconWithCustomColorAndSizeTemplate: Story<IIconProps> = args => <Icon {...args} />;
export const IconWithCustomColorAndSize = IconWithCustomColorAndSizeTemplate.bind({});
// Default controls: the "mask" icon rendered at 32px in a custom green.
IconWithCustomColorAndSize.args = {
  name: 'mask',
  size: '32px',
  color: '#345345',
};
|
def insertionSort(arr):
    """Sort ``arr`` in place in ascending order using insertion sort.

    Stable, O(n^2) worst case, O(n) on already-sorted input.

    Args:
        arr: A mutable sequence of mutually comparable items.

    Returns:
        None. ``arr`` is mutated in place.
    """
    # Bug fix: the original ended the loop body with
    # "if i > 0 and arr[i] > arr[i-1]: continue" — a no-op, since
    # ``continue`` at the end of a loop body does nothing. Removed.
    for i in range(1, len(arr)):
        key = arr[i]
        j = i - 1
        # Shift every element greater than ``key`` one slot to the right,
        # then drop ``key`` into the gap left behind.
        while j >= 0 and key < arr[j]:
            arr[j + 1] = arr[j]
            j -= 1
        arr[j + 1] = key
import java.util.Scanner;
/**
 * Reads a term count from stdin and prints that many Fibonacci numbers,
 * space-separated, on one line.
 */
public class Fibonacci {
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.print("Enter an integer limit: ");
        int limit = scanner.nextInt();
        // Seed terms. Note: both seeds are always printed, even when
        // limit < 2 — this matches the original behavior.
        int a = 0;
        int b = 1;
        System.out.print(a + " " + b + " ");
        int printed = 2;
        while (printed < limit) {
            int next = a + b;
            System.out.print(next + " ");
            a = b;
            b = next;
            printed++;
        }
        System.out.println();
    }
}
<filename>MySNiPs/app/models/gene.rb
# Gene: an Active Record model keyed by a unique title, owning many genotypes.
class Gene < ApplicationRecord
  has_many :genotypes
  validates :title, presence: true, uniqueness: true

  # Human-readable commonness suffix (e.g. ", 12% common") derived from gmaf,
  # or "" when gmaf is absent.
  # NOTE(review): gmaf is scaled by 200 rather than 100 — presumably counting
  # both alleles of a diploid genome; confirm the intended percentage basis.
  def gmaf_text
    return "" if gmaf.nil?

    ", #{(gmaf * 200).to_i}% common"
  end
end
|
import matplotlib.pyplot as plt
class Ecosystem:
    """Toy predator/prey model with fixed yearly growth and predation rates."""

    def __init__(self):
        # Current head-counts and the number of years simulated so far.
        self.rabbit_population = 0
        self.fox_population = 0
        self.years = 0
        # One entry per simulated year, recorded after that year's update.
        self.rabbit_population_log = []
        self.fox_population_log = []

    def initialize_population(self, rabbit_count, fox_count):
        """Set the starting head-counts before calling simulate()."""
        self.rabbit_population = rabbit_count
        self.fox_population = fox_count

    def simulate(self, years):
        """Advance the model by ``years`` steps, logging both populations."""
        for _year in range(years):
            # 10% rabbit growth and 5% fox growth, truncated to whole animals.
            grown_rabbits = int(self.rabbit_population * 1.10)
            grown_foxes = int(self.fox_population * 1.05)
            # Foxes consume 10% of the post-growth rabbit population.
            eaten = int(grown_rabbits * 0.10)
            self.rabbit_population = grown_rabbits - eaten
            self.fox_population = grown_foxes
            self.rabbit_population_log.append(self.rabbit_population)
            self.fox_population_log.append(self.fox_population)
            self.years += 1

    def plot_population(self):
        """Render both population logs with matplotlib (opens a window)."""
        year_axis = range(1, self.years + 1)
        plt.plot(year_axis, self.rabbit_population_log, label='Rabbit Population')
        plt.plot(year_axis, self.fox_population_log, label='Fox Population')
        plt.xlabel('Years')
        plt.ylabel('Population')
        plt.title('Ecosystem Population Simulation')
        plt.legend()
        plt.show()
# Example usage
# Seed 100 rabbits and 20 foxes, run ten simulated years, then display the
# chart (plot_population opens an interactive matplotlib window).
ecosystem = Ecosystem()
ecosystem.initialize_population(100, 20)
ecosystem.simulate(10)
ecosystem.plot_population()
# Import the SDK and required libraries
import boto3
import json
import os
import logging
import sys
import socket
import requests
import ssl
from botocore.exceptions import ClientError
# Module-level logger used throughout the handler.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# qradar configurations
# TODO pull tokens from secret store instead of config
# Main (primary) and destination (DR) QRadar console addresses and API
# tokens, all supplied via the Lambda environment.
console_main=os.environ['MAIN_SITE_ADDRESS']
token_main=os.environ['MAIN_SITE_TOKEN']
console_dest=os.environ['DEST_SITE_ADDRESS']
token_dest=os.environ['DEST_SITE_TOKEN']
# Configure the SNS topic which you want to use for sending notifications
# namespace is compared against "AWS/ApplicationELB" / "AWS/NetworkELB"
# in lambda_handler to pick the alarm dimensions to read.
namespace = os.environ['NAMESPACE']
sns_arn = os.environ['SNS_TOPIC']
def lambda_handler(event, context):
    """
    Main Lambda handler.

    Invoked with an SNS-delivered CloudWatch alarm notification. When the
    alarm state is ALARM, attempts a QRadar disaster-recovery failover:
    first promote the destination (DR) console from STANDBY to ACTIVE and
    deploy, then demote the main console to STANDBY, disable its Ariel copy
    profiles, and deploy. Any console API failure is reported via
    send_sns(). Returns None in all paths.
    """
    message = event['Records'][0]['Sns']['Message']
    global sns_client
    #print("Event: " + json.dumps(event, indent=2))
    print("Message: " + json.dumps(message, indent=2))
    try:
        sns_client = boto3.client('sns')
    except ClientError as e:
        logger.error(e.response['Error']['Message'])
    # Only CloudWatch alarm notifications carry an AlarmName field; anything
    # else is ignored.
    if "AlarmName" in message:
        json_message = json.loads(message)
        accountid = str(json_message['AWSAccountId'])
        alarm_name = str(json_message['AlarmName'])
        alarm_trigger = str(json_message['NewStateValue'])
        timestamp = str(json_message['StateChangeTime'])
        elb_name = ""
        az_name = ""
        region = os.environ["AWS_REGION"]
        # Pull the LoadBalancer / AvailabilityZone dimensions off the alarm
        # trigger; the two branches are currently identical for ALB and NLB.
        if namespace == "AWS/ApplicationELB":
            for entity in json_message['Trigger']['Dimensions']:
                if entity['name'] == "LoadBalancer":
                    elb_name = str(entity['value'])
                if entity['name'] == "AvailabilityZone":
                    az_name = str(entity['value'])
        elif namespace == "AWS/NetworkELB":
            for entity in json_message['Trigger']['Dimensions']:
                if entity['name'] == "LoadBalancer":
                    elb_name = str(entity['value'])
                if entity['name'] == "AvailabilityZone":
                    az_name = str(entity['value'])
        logger.info("AccountID: {}".format(accountid))
        logger.info("Region: {}".format(region))
        logger.info("AvailabilityZone: {}".format(az_name))
        logger.info("LoadBalancer: {}, {}".format(namespace,elb_name))
        logger.info("Alarm Name: {}".format(alarm_name))
        logger.info("Alarm State: {}".format(alarm_trigger))
        sns_message = ""
        api_in_error = False
        # Take actions when an Alarm is triggered
        if alarm_trigger == 'ALARM':
            dest_console_url="https://{}/api/".format(console_dest)
            main_console_url="https://{}/api/".format(console_main)
            # NOTE(review): verify=False disables TLS certificate validation
            # on every console call below — confirm this is intentional
            # (e.g. self-signed console certs) or supply a CA bundle.
            # Step 1: read the destination site's DR config.
            dr_config_dest=requests.get("{}{}".format(dest_console_url,"config/disaster_recovery/disaster_recovery_config"),verify=False,headers={"SEC":token_dest,"Allow-Hidden":"true"})
            if dr_config_dest.status_code != 200:
                print("failed dest config")
                api_in_error = True
            else:
                dr_config_dest_json=dr_config_dest.json()
                logger.info(json.dumps(dr_config_dest_json,indent=3,sort_keys=True))
                # if the destination site is DR-enabled and in STANDBY, activate it
                if dr_config_dest_json['is_dr'] == 'DR' and dr_config_dest_json['site_state'] == 'STANDBY':
                    dr_config_dest_json['site_state'] = 'ACTIVE'
                    dr_config_dest_json['is_dr'] = 'PRIMARY'
                    dr_config_dest_json['ariel_copy_enabled'] = True
                    logger.info(json.dumps(dr_config_dest_json,indent=3,sort_keys=True))
                    dest_result=requests.post("{}{}".format(dest_console_url,"staged_config/disaster_recovery/disaster_recovery_config"),verify=False,headers={"SEC":token_dest,"Allow-Hidden":"true"},data=json.dumps(dr_config_dest_json))
                    if dest_result.status_code == 200:
                        # deploy dest site asap, before attempting to reach the main site
                        dest_result=requests.post("{}{}".format(dest_console_url,"config/deploy_action?type=INCREMENTAL"),verify=False,headers={"SEC":token_dest,"Allow-Hidden":"true"})
                        if dest_result.status_code != 200:
                            print("dest deploy failed")
                            api_in_error = True
                    else:
                        print("dest config post failed")
                        api_in_error = True
            # Abort (with notification) if the destination could not be
            # activated — the main site is left untouched in that case.
            if api_in_error:
                sns_message = sns_message + "Attempted failover to destination failed, API failure!!"
                send_sns(accountid,region,az_name,elb_name,timestamp,alarm_name,sns_message)
                return
            # TODO optionally remove the main site hosts from the target group to avoid mixed recovery
            # assuming we can still reach the main site console, deactivate it
            dr_config_main=requests.get("{}{}".format(main_console_url,"config/disaster_recovery/disaster_recovery_config"),verify=False,headers={"SEC":token_main,"Allow-Hidden":"true"})
            if dr_config_main.status_code != 200:
                print("main config failed")
                api_in_error = True
            else:
                dr_config_main_json = dr_config_main.json()
                logger.info(json.dumps(dr_config_main_json,indent=3,sort_keys=True))
                if dr_config_main_json['is_dr'] == 'PRIMARY' and dr_config_main_json['site_state'] == 'ACTIVE':
                    # Step 2: demote the main site to STANDBY.
                    dr_config_main_json['site_state'] = 'STANDBY'
                    dr_config_main_json['ariel_copy_enabled'] = False
                    logger.info(json.dumps(dr_config_main_json,indent=3,sort_keys=True))
                    main_result = requests.post("{}{}".format(main_console_url,"staged_config/disaster_recovery/disaster_recovery_config"),verify=False,headers={"SEC":token_main,"Allow-Hidden":"true"},data=json.dumps(dr_config_main_json))
                    if main_result.status_code != 200:
                        print("main post config failed")
                        api_in_error = True
                    else:
                        # Step 3: disable every Ariel copy profile on the main
                        # site so it stops pushing data while on standby.
                        ariel_copy_main=requests.get("{}{}".format(main_console_url,"disaster_recovery/ariel_copy_profiles"),verify=False,headers={"SEC":token_main,"Allow-Hidden":"true"})
                        if ariel_copy_main.status_code == 200:
                            ariel_copy_main_json = ariel_copy_main.json()
                            for profile in ariel_copy_main_json:
                                pid=profile['id']
                                new_profile={elem: profile[elem] for elem in ('bandwidth_limit','destination_host_ip','destination_port','enabled','end_date','exclude_event_retention_bucket_ids','exclude_flow_retention_bucket_ids','frequency','start_date')}
                                new_profile['enabled'] = False
                                logger.info(json.dumps(new_profile,indent=3,sort_keys=True))
                                copy_result=requests.post("{}{}/{}".format(main_console_url,"disaster_recovery/ariel_copy_profiles",pid),verify=False,headers={"SEC":token_main,"Allow-Hidden":"true"},data=json.dumps(new_profile))
                                logger.info("Profile {} status: {}".format(pid,copy_result.status_code))
                    # Step 4: deploy the staged main-site changes.
                    if main_result.status_code == 200:
                        main_result=requests.post("{}{}".format(main_console_url,"config/deploy_action?type=INCREMENTAL"),verify=False,headers={"SEC":token_main,"Allow-Hidden":"true"})
                        if main_result.status_code != 200:
                            print("main deploy failed")
                            api_in_error = True
                else:
                    # Main site was not an active primary — unexpected state.
                    api_in_error = True
            if api_in_error:
                sns_message = sns_message + "Attempted failover to destination failed, API failure!!"
                send_sns(accountid,region,az_name,elb_name,timestamp,alarm_name,sns_message)
                return
    return
def send_sns(accountid,region,az_name,elb_name,timestamp,alarm_name,message):
    """Publish a failover notification to the configured SNS topic.

    Builds a plain-text body from the alarm metadata and publishes it via
    the module-level ``sns_client`` to ``sns_arn``. Publish errors are
    logged, not raised.
    """
    logger.info("SNS message:")
    body = (
        "accountid:{}\n region:{}\n az_name:{}\n elb_name:{}\n"
        " timestamp:{}\n alarm_name:{}\n\n message:{}\n"
    ).format(accountid, region, az_name, elb_name, timestamp, alarm_name, message)
    logger.info(body)
    try:
        sns_client.publish(
            TopicArn=sns_arn,
            Message=body,
            Subject='DR Failover: attempted failover to Destination',
            MessageStructure='string',
        )
    except ClientError as e:
        logger.error(e.response['Error']['Message'])
|
<gh_stars>1-10
package bus_test
import (
"context"
"github.com/stretchr/testify/assert"
"testing"
"github.com/GabrielCarpr/cqrs/bus"
"github.com/GabrielCarpr/cqrs/bus/message"
"github.com/stretchr/testify/require"
)
// routingCmd is a minimal valid command fixture for routing tests.
type routingCmd struct {
	bus.CommandType
}
// Command returns the routing key for this fixture.
func (routingCmd) Command() string {
	return "routingCmd"
}
// Valid always passes validation.
func (routingCmd) Valid() error {
	return nil
}
// routingCmd2 is a second, distinct command fixture.
type routingCmd2 struct {
	bus.CommandType
}
func (routingCmd2) Command() string {
	return "routingCmd2"
}
func (routingCmd2) Valid() error {
	return nil
}
// routingCmdHandler is a no-op handler used as a routing target.
type routingCmdHandler struct {
}
// routingCmdMiddleware is pass-through middleware used only to count
// middleware applications in the tests.
func routingCmdMiddleware(next bus.CommandHandler) bus.CommandHandler {
	return bus.CmdMiddlewareFunc(func(ctx context.Context, c bus.Command) (bus.CommandResponse, []message.Message) {
		return next.Execute(ctx, c)
	})
}
// Execute returns zero values; the tests only inspect routing, not results.
func (routingCmdHandler) Execute(ctx context.Context, c bus.Command) (res bus.CommandResponse, msgs []message.Message) {
	return
}
// TestRouteBasicCommand verifies a registered command routes to its handler.
func TestRouteBasicCommand(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
	}
	configure(registry)

	route, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.IsType(t, routingCmdHandler{}, route.Handler)
}

// TestRouteBasicCommandWithGlobalMiddleware verifies that middleware
// registered with Use before a command is attached to that command's route.
func TestRouteBasicCommandWithGlobalMiddleware(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Use(routingCmdMiddleware)
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
	}
	configure(registry)

	route, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.Len(t, route.Middleware, 1)
	assert.IsType(t, routingCmdHandler{}, route.Handler)
}
// TestRouteBasicCommandGlobalMiddlewareDeclarative verifies that Use applies
// even when called after the command is registered (declarative style).
func TestRouteBasicCommandGlobalMiddlewareDeclarative(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.Use(routingCmdMiddleware)
	}
	configure(registry)

	route, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.Len(t, route.Middleware, 1)
}

// TestRouteMultipleCmdMiddleware verifies middleware from multiple Use calls
// (variadic and repeated) accumulates on the route.
func TestRouteMultipleCmdMiddleware(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Use(routingCmdMiddleware, routingCmdMiddleware)
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.Use(routingCmdMiddleware)
	}
	configure(registry)

	route, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.Len(t, route.Middleware, 3)
}
// TestRouteCmdInGroup verifies that group middleware applies only to
// commands registered inside the group.
func TestRouteCmdInGroup(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd2{}).Handled(routingCmdHandler{})
		builder.Group(func(group bus.CmdBuilder) {
			group.Command(routingCmd{}).Handled(routingCmdHandler{})
			group.Use(routingCmdMiddleware)
		})
	}
	configure(registry)

	grouped, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.IsType(t, routingCmdHandler{}, grouped.Handler)
	assert.IsType(t, routingCmd{}, grouped.Command)
	assert.Len(t, grouped.Middleware, 1)

	ungrouped, found := registry.Route(routingCmd2{})
	require.True(t, found)
	assert.IsType(t, routingCmdHandler{}, ungrouped.Handler)
	assert.IsType(t, routingCmd2{}, ungrouped.Command)
	assert.Len(t, ungrouped.Middleware, 0)
}

// TestRouteGroupNoCmd verifies that an unregistered command is not routable,
// even when other commands are registered inside a group.
func TestRouteGroupNoCmd(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Group(func(group bus.CmdBuilder) {
			group.Command(routingCmd{}).Handled(routingCmdHandler{})
		})
	}
	configure(registry)

	_, found := registry.Route(routingCmd2{})
	require.False(t, found)
}
// TestRouteCmdWith verifies that With scopes middleware to the single
// command registered through it, leaving sibling commands untouched.
func TestRouteCmdWith(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.With(routingCmdMiddleware).Command(routingCmd2{}).Handled(routingCmdHandler{})
	}
	configure(registry)

	plain, found := registry.Route(routingCmd{})
	require.True(t, found)
	assert.Len(t, plain.Middleware, 0)

	wrapped, found := registry.Route(routingCmd2{})
	require.True(t, found)
	assert.Len(t, wrapped.Middleware, 1)
	assert.IsType(t, routingCmd2{}, wrapped.Command)
	assert.IsType(t, routingCmdHandler{}, wrapped.Handler)
}
// TestPanicsDuplicateCommands verifies that registering the same command
// twice panics. The deferred recover marks the panic; the final require only
// executes when no panic occurred, and then fails as intended.
func TestPanicsDuplicateCommands(t *testing.T) {
	registry := bus.NewCommandContext()
	didPanic := false
	defer func() {
		if err := recover(); err != nil {
			didPanic = true
		}
	}()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
	}
	configure(registry)
	require.True(t, didPanic, "Did not panic")
}

// TestEmptyCmdBuilder verifies that configuring nothing at all is valid.
func TestEmptyCmdBuilder(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
	}
	configure(registry)
}
// TestAppliesEachContextCmdMiddleware verifies middleware stacks across
// nested groups: one per enclosing scope, three in total.
func TestAppliesEachContextCmdMiddleware(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Use(routingCmdMiddleware)
		builder.Group(func(outer bus.CmdBuilder) {
			outer.Use(routingCmdMiddleware)
			outer.Group(func(inner bus.CmdBuilder) {
				inner.Use(routingCmdMiddleware)
				inner.Command(routingCmd{}).Handled(routingCmdHandler{})
			})
		})
	}
	configure(registry)

	route, found := registry.Route(routingCmd{})
	require.True(t, found)
	require.Len(t, route.Middleware, 3)
}

// TestCreatesCmdRoutingTable verifies Routes exposes every registered
// command keyed by name, with its accumulated middleware.
func TestCreatesCmdRoutingTable(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Use(routingCmdMiddleware)
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.Group(func(group bus.CmdBuilder) {
			group.Use(routingCmdMiddleware)
			group.Command(routingCmd2{}).Handled(routingCmdHandler{})
		})
	}
	configure(registry)

	table := registry.Routes()
	require.Len(t, table, 2)
	assert.Len(t, table[routingCmd{}.Command()].Middleware, 1)
	assert.Len(t, table[routingCmd2{}.Command()].Middleware, 2)
}
// TestCannotTakeMultipleCommands asserts duplicate registration panics.
// NOTE(review): this is an exact duplicate of TestPanicsDuplicateCommands
// above — consider removing one of the two.
func TestCannotTakeMultipleCommands(t *testing.T) {
	r := bus.NewCommandContext()
	panicked := false
	// recover() runs during unwinding; the final require only executes when
	// no panic occurred (and then fails, as intended).
	defer func() {
		if err := recover(); err != nil {
			panicked = true
		}
	}()
	func(b bus.CmdBuilder) {
		b.Command(routingCmd{}).Handled(routingCmdHandler{})
		b.Command(routingCmd{}).Handled(routingCmdHandler{})
	}(r)
	require.True(t, panicked)
}
// TestSelfTestMultipleCommands verifies SelfTest reports an error when the
// same command is registered twice (direct and via With).
func TestSelfTestMultipleCommands(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.With().Command(routingCmd{}).Handled(routingCmdHandler{})
	}
	configure(registry)

	require.Error(t, registry.SelfTest())
}

// TestSelfTestMultipleCommandsSiblings verifies duplicates are detected
// across sibling scopes (With at the top level vs. inside a group).
func TestSelfTestMultipleCommandsSiblings(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.With().Command(routingCmd{}).Handled(routingCmdHandler{})
		builder.Group(func(group bus.CmdBuilder) {
			group.Command(routingCmd{}).Handled(routingCmdHandler{})
		})
	}
	configure(registry)

	require.Error(t, registry.SelfTest())
}

// TestSelfTestCommandNoHandler verifies a command registered without a
// handler fails the self test.
func TestSelfTestCommandNoHandler(t *testing.T) {
	registry := bus.NewCommandContext()
	configure := func(builder bus.CmdBuilder) {
		builder.Command(routingCmd{})
	}
	configure(registry)

	require.Error(t, registry.SelfTest())
}
|
#!/usr/bin/env bash
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# A shell script installing/building all needed dependencies to build Blender, for some Linux distributions.
##### Args and Help Handling #####
# Parse command line!
ARGS=$( \
getopt \
-o s:i:t:h \
--long source:,install:,tmp:,info:,threads:,help,show-deps,no-sudo,no-build,no-confirm,\
with-all,with-opencollada,with-jack,with-embree,with-oidn,\
ver-ocio:,ver-oiio:,ver-llvm:,ver-osl:,ver-osd:,ver-openvdb:,\
force-all,force-python,force-numpy,force-boost,\
force-ocio,force-openexr,force-oiio,force-llvm,force-osl,force-osd,force-openvdb,\
force-ffmpeg,force-opencollada,force-alembic,force-embree,force-oidn,\
build-all,build-python,build-numpy,build-boost,\
build-ocio,build-openexr,build-oiio,build-llvm,build-osl,build-osd,build-openvdb,\
build-ffmpeg,build-opencollada,build-alembic,build-embree,build-oidn,\
skip-python,skip-numpy,skip-boost,\
skip-ocio,skip-openexr,skip-oiio,skip-llvm,skip-osl,skip-osd,skip-openvdb,\
skip-ffmpeg,skip-opencollada,skip-alembic,skip-embree,skip-oidn \
-- "$@" \
)
COMMANDLINE=$@
DISTRO=""
RPM=""
SRC="$HOME/src/blender-deps"
INST="/opt/lib"
TMP="/tmp"
CWD=$PWD
INFO_PATH=$CWD
SCRIPT_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
# Do not install some optional, potentially conflicting libs by default...
WITH_ALL=false
# Do not yet enable opencollada or embree, use --with-opencollada/--with-embree (or --with-all) option to try it.
WITH_OPENCOLLADA=false
WITH_EMBREE=false
WITH_OIDN=false
THREADS=$(nproc)
COMMON_INFO="\"Source code of dependencies needed to be compiled will be downloaded and extracted into '\$SRC'.
Built libs of dependencies needed to be compiled will be installed into '\$INST'.
Please edit \\\$SRC and/or \\\$INST variables at the beginning of this script,
or use --source/--install options, if you want to use other paths!
Number of threads for building: \$THREADS (automatically detected, use --threads=<nbr> to override it).
Full install: \$WITH_ALL (use --with-all option to enable it).
Building OpenCOLLADA: \$WITH_OPENCOLLADA (use --with-opencollada option to enable it).
Building Embree: \$WITH_EMBREE (use --with-embree option to enable it).
Building OpenImageDenoise: \$WITH_OIDN (use --with-oidn option to enable it).
Example:
Full install without OpenCOLLADA: --with-all --skip-opencollada
Use --help to show all available options!\""
ARGUMENTS_INFO="\"COMMAND LINE ARGUMENTS:
-h, --help
Show this message and exit.
--show-deps
Show main dependencies of Blender (including officially supported versions) and exit.
-s <path>, --source=<path>
Use a specific path where to store downloaded libraries sources (defaults to '\$SRC').
-i <path>, --install=<path>
Use a specific path where to install built libraries (defaults to '\$INST').
--tmp=<path>
Use a specific temp path (defaults to '\$TMP').
--info=<path>
Use a specific info path (to store BUILD_NOTES.txt, defaults to '\$INFO_PATH').
-t n, --threads=n
Use a specific number of threads when building the libraries (auto-detected as '\$THREADS').
--no-sudo
Disable use of sudo (this script won't be able to do much though, will just print needed packages...).
--no-build
Do not build (compile) anything, dependencies not installable with the package manager will remain missing.
--no-confirm
Disable any interaction with user (suitable for automated run).
--with-all
By default, a number of optional and not-so-often needed libraries are not installed.
This option will try to install them, at the cost of potential conflicts (depending on
how your package system is set…).
Note this option also implies all other (more specific) --with-foo options below.
--with-opencollada
Build and install the OpenCOLLADA libraries.
--with-embree
Build and install the Embree libraries.
--with-oidn
Build and install the OpenImageDenoise libraries.
--with-jack
Install the jack libraries.
--ver-ocio=<ver>
Force version of OCIO library.
--ver-oiio=<ver>
Force version of OIIO library.
--ver-llvm=<ver>
Force version of LLVM library.
--ver-osl=<ver>
Force version of OSL library.
--ver-osd=<ver>
Force version of OSD library.
--ver-openvdb=<ver>
Force version of OpenVDB library.
Note about the --ver-foo options:
It may not always work as expected (some libs are actually checked out from a git rev...), yet it might help
to fix some build issues (like LLVM mismatch with the version used by your graphic system).
--build-all
Force the build of all possible libraries.
--build-python
Force the build of Python.
--build-numpy
Force the build of NumPy.
--build-boost
Force the build of Boost.
--build-ocio
Force the build of OpenColorIO.
--build-openexr
Force the build of OpenEXR.
--build-oiio
Force the build of OpenImageIO.
--build-llvm
Force the build of LLVM.
--build-osl
Force the build of OpenShadingLanguage.
--build-osd
Force the build of OpenSubdiv.
--build-openvdb
Force the build of OpenVDB.
--build-alembic
Force the build of Alembic.
--build-opencollada
Force the build of OpenCOLLADA.
--build-embree
Force the build of Embree.
--build-oidn
Force the build of OpenImageDenoise.
--build-ffmpeg
Force the build of FFMpeg.
Note about the --build-foo options:
* They force the script to prefer building dependencies rather than using available packages.
This may make things simpler and allow working around some distribution bugs, but on the other hand it will
use much more space on your hard drive.
* Please be careful with the Blender building options if you have both 'official' dev packages and
install_deps' built ones on your system, by default CMake will prefer official packages, which may lead to
linking issues. Please ensure your CMake configuration always uses all correct library paths.
* If the “force-built” library is a dependency of others, it will force the build
of those libraries as well (e.g. --build-boost also implies --build-oiio and --build-osl...).
--force-all
Force the rebuild of all built libraries.
--force-python
Force the rebuild of Python.
--force-numpy
Force the rebuild of NumPy.
--force-boost
Force the rebuild of Boost.
--force-ocio
Force the rebuild of OpenColorIO.
--force-openexr
Force the rebuild of OpenEXR.
--force-oiio
Force the rebuild of OpenImageIO.
--force-llvm
Force the rebuild of LLVM.
--force-osl
Force the rebuild of OpenShadingLanguage.
--force-osd
Force the rebuild of OpenSubdiv.
--force-openvdb
Force the rebuild of OpenVDB.
--force-alembic
Force the rebuild of Alembic.
--force-opencollada
Force the rebuild of OpenCOLLADA.
--force-embree
Force the rebuild of Embree.
--force-oidn
Force the rebuild of OpenImageDenoise.
--force-ffmpeg
Force the rebuild of FFMpeg.
Note about the --force-foo options:
* They obviously only have an effect if those libraries are built by this script
(i.e. if there is no available and satisfactory package)!
* If the “force-rebuilt” library is a dependency of others, it will force the rebuild
of those libraries too (e.g. --force-boost will also rebuild oiio and osl...).
--skip-python
Unconditionally skip Python installation/building.
--skip-numpy
Unconditionally skip NumPy installation/building.
--skip-boost
Unconditionally skip Boost installation/building.
--skip-ocio
Unconditionally skip OpenColorIO installation/building.
--skip-openexr
Unconditionally skip OpenEXR installation/building.
--skip-oiio
Unconditionally skip OpenImageIO installation/building.
--skip-llvm
Unconditionally skip LLVM installation/building.
--skip-osl
Unconditionally skip OpenShadingLanguage installation/building.
--skip-osd
Unconditionally skip OpenSubdiv installation/building.
--skip-openvdb
Unconditionally skip OpenVDB installation/building.
--skip-alembic
Unconditionally skip Alembic installation/building.
--skip-opencollada
Unconditionally skip OpenCOLLADA installation/building.
    --skip-embree
Unconditionally skip Embree installation/building.
--skip-oidn
Unconditionally skip OpenImageDenoise installation/building.
--skip-ffmpeg
Unconditionally skip FFMpeg installation/building.\""
##### Main Vars #####
DO_SHOW_DEPS=false
SUDO="sudo"
NO_BUILD=false
NO_CONFIRM=false
USE_CXX11=true
CLANG_FORMAT_VERSION_MIN="6.0"
PYTHON_VERSION="3.7.4"
PYTHON_VERSION_MIN="3.7"
PYTHON_FORCE_BUILD=false
PYTHON_FORCE_REBUILD=false
PYTHON_SKIP=false
NUMPY_VERSION="1.17.0"
NUMPY_VERSION_MIN="1.8"
NUMPY_FORCE_BUILD=false
NUMPY_FORCE_REBUILD=false
NUMPY_SKIP=false
BOOST_VERSION="1.68.0"
BOOST_VERSION_MIN="1.49"
BOOST_FORCE_BUILD=false
BOOST_FORCE_REBUILD=false
BOOST_SKIP=false
OCIO_VERSION="1.1.0"
OCIO_VERSION_MIN="1.0"
OCIO_FORCE_BUILD=false
OCIO_FORCE_REBUILD=false
OCIO_SKIP=false
OPENEXR_VERSION="2.3.0"
OPENEXR_VERSION_MIN="2.0.1"
ILMBASE_VERSION="2.3.0"
ILMBASE_VERSION_MIN="2.3"
OPENEXR_FORCE_BUILD=false
OPENEXR_FORCE_REBUILD=false
OPENEXR_SKIP=false
_with_built_openexr=false
OIIO_VERSION="1.8.13"
OIIO_VERSION_MIN="1.8.13"
OIIO_VERSION_MAX="99.99.0" # UNKNOWN currently # Not supported by current OSL...
OIIO_FORCE_BUILD=false
OIIO_FORCE_REBUILD=false
OIIO_SKIP=false
LLVM_VERSION="6.0.1"
LLVM_VERSION_MIN="6.0"
LLVM_VERSION_FOUND=""
LLVM_FORCE_BUILD=false
LLVM_FORCE_REBUILD=false
LLVM_SKIP=false
# OSL needs to be compiled for now!
OSL_VERSION="1.9.9"
OSL_VERSION_MIN=$OSL_VERSION
OSL_FORCE_BUILD=false
OSL_FORCE_REBUILD=false
OSL_SKIP=false
# OpenSubdiv needs to be compiled for now
OSD_VERSION="3.4.0_RC2"
OSD_VERSION_MIN=$OSD_VERSION
OSD_FORCE_BUILD=false
OSD_FORCE_REBUILD=false
OSD_SKIP=false
# OpenVDB needs to be compiled for now
OPENVDB_BLOSC_VERSION="1.14.4"
OPENVDB_VERSION="5.1.0"
OPENVDB_VERSION_MIN=$OPENVDB_VERSION
OPENVDB_FORCE_BUILD=false
OPENVDB_FORCE_REBUILD=false
OPENVDB_SKIP=false
# Alembic needs to be compiled for now
ALEMBIC_VERSION="1.7.8"
ALEMBIC_VERSION_MIN=$ALEMBIC_VERSION
ALEMBIC_FORCE_BUILD=false
ALEMBIC_FORCE_REBUILD=false
ALEMBIC_SKIP=false
OPENCOLLADA_VERSION="1.6.68"
OPENCOLLADA_FORCE_BUILD=false
OPENCOLLADA_FORCE_REBUILD=false
OPENCOLLADA_SKIP=false
EMBREE_VERSION="3.2.4"
EMBREE_FORCE_BUILD=false
EMBREE_FORCE_REBUILD=false
EMBREE_SKIP=false
OIDN_VERSION="1.0.0"
OIDN_FORCE_BUILD=false
OIDN_FORCE_REBUILD=false
OIDN_SKIP=false
FFMPEG_VERSION="4.0.2"
FFMPEG_VERSION_MIN="2.8.4"
FFMPEG_FORCE_BUILD=false
FFMPEG_FORCE_REBUILD=false
FFMPEG_SKIP=false
_ffmpeg_list_sep=";"
# FFMPEG optional libs.
VORBIS_USE=false
VORBIS_DEV=""
OGG_USE=false
OGG_DEV=""
THEORA_USE=false
THEORA_DEV=""
XVID_USE=false
XVID_DEV=""
X264_USE=false
X264_DEV=""
X264_VERSION_MIN=0.118
VPX_USE=false
VPX_VERSION_MIN=0.9.7
VPX_DEV=""
MP3LAME_USE=false
MP3LAME_DEV=""
OPENJPEG_USE=false
OPENJPEG_DEV=""
# Whether to use system GLEW or not (OpenSubDiv needs recent glew to work).
NO_SYSTEM_GLEW=false
# Switch to english language, else some things (like check_package_DEB()) won't work!
LANG_BACK=$LANG
LANG=""
export LANG
##### Generic Helpers #####
BLACK=$(tput setaf 0)
RED=$(tput setaf 1)
GREEN=$(tput setaf 2)
YELLOW=$(tput setaf 3)
LIME_YELLOW=$(tput setaf 190)
POWDER_BLUE=$(tput setaf 153)
BLUE=$(tput setaf 4)
MAGENTA=$(tput setaf 5)
CYAN=$(tput setaf 6)
WHITE=$(tput setaf 7)
BRIGHT=$(tput bold)
NORMAL=$(tput sgr0)
BLINK=$(tput blink)
REVERSE=$(tput smso)
UNDERLINE=$(tput smul)
# _echo [-n] MSG...
# Print each argument on its own line; a leading -n suppresses the trailing
# newline (mirrors `echo -n`). All output goes through printf for portability.
_echo() {
  case "$1" in
    -n) shift; printf "%s" "$@" ;;
    *)  printf "%s\n" "$@" ;;
  esac
}
# ERROR MSG... — print MSG prefixed with a bold red "ERROR! ".
ERROR() {
  _echo "${BRIGHT}${RED}ERROR! ${NORMAL}${RED}$@${NORMAL}"
}
# WARNING MSG... — print MSG prefixed with a bold yellow "WARNING! ".
WARNING() {
  _echo "${BRIGHT}${YELLOW}WARNING! ${NORMAL}${YELLOW}$@${NORMAL}"
}
# INFO MSG... — print MSG in green.
INFO() {
  _echo "${GREEN}$@${NORMAL}"
}
# PRINT MSG... — print MSG with no coloring.
PRINT() {
  _echo "$@"
}
##### Args Handling #####
# Finish parsing the commandline args.
eval set -- "$ARGS"
while true; do
case $1 in
-s|--source)
SRC="$2"; shift; shift; continue
;;
-i|--install)
INST="$2"; shift; shift; continue
;;
--tmp)
TMP="$2"; shift; shift; continue
;;
--info)
INFO_PATH="$2"; shift; shift; continue
;;
-t|--threads)
THREADS="$2"; shift; shift; continue
;;
-h|--help)
PRINT ""
PRINT "USAGE:"
PRINT ""
PRINT "`eval _echo "$COMMON_INFO"`"
PRINT ""
PRINT "`eval _echo "$ARGUMENTS_INFO"`"
PRINT ""
exit 0
;;
--show-deps)
# We have to defer...
DO_SHOW_DEPS=true; shift; continue
;;
--no-sudo)
PRINT ""
WARNING "--no-sudo enabled, this script might not be able to do much..."
PRINT ""
SUDO=""; shift; continue
;;
--no-build)
PRINT ""
WARNING "--no-build enabled, this script will not be able to install all dependencies..."
PRINT ""
NO_BUILD=true; shift; continue
;;
--no-confirm)
NO_CONFIRM=true; shift; continue
;;
--with-all)
WITH_ALL=true; shift; continue
;;
--with-opencollada)
WITH_OPENCOLLADA=true; shift; continue
;;
--with-embree)
WITH_EMBREE=true; shift; continue
;;
--with-oidn)
WITH_OIDN=true; shift; continue
;;
--with-jack)
WITH_JACK=true; shift; continue;
;;
--ver-ocio)
OCIO_VERSION="$2"
OCIO_VERSION_MIN=$OCIO_VERSION
shift; shift; continue
;;
--ver-oiio)
OIIO_VERSION="$2"
OIIO_VERSION_MIN=$OIIO_VERSION
shift; shift; continue
;;
--ver-llvm)
LLVM_VERSION="$2"
LLVM_VERSION_MIN=$LLVM_VERSION
shift; shift; continue
;;
--ver-osl)
OSL_VERSION="$2"
OSL_VERSION_MIN=$OSL_VERSION
shift; shift; continue
;;
--ver-osd)
OSD_VERSION="$2"
OSD_VERSION_MIN=$OSD_VERSION
shift; shift; continue
;;
--ver-openvdb)
OPENVDB_VERSION="$2"
OPENVDB_VERSION_MIN=$OPENVDB_VERSION
shift; shift; continue
;;
--build-all)
PYTHON_FORCE_BUILD=true
NUMPY_FORCE_BUILD=true
BOOST_FORCE_BUILD=true
OCIO_FORCE_BUILD=true
OPENEXR_FORCE_BUILD=true
OIIO_FORCE_BUILD=true
LLVM_FORCE_BUILD=true
OSL_FORCE_BUILD=true
OSD_FORCE_BUILD=true
OPENVDB_FORCE_BUILD=true
OPENCOLLADA_FORCE_BUILD=true
EMBREE_FORCE_BUILD=true
OIDN_FORCE_BUILD=true
FFMPEG_FORCE_BUILD=true
ALEMBIC_FORCE_BUILD=true
shift; continue
;;
--build-python)
PYTHON_FORCE_BUILD=true
NUMPY_FORCE_BUILD=true
shift; continue
;;
--build-numpy)
PYTHON_FORCE_BUILD=true
NUMPY_FORCE_BUILD=true
shift; continue
;;
--build-boost)
BOOST_FORCE_BUILD=true; shift; continue
;;
--build-ocio)
OCIO_FORCE_BUILD=true; shift; continue
;;
--build-openexr)
OPENEXR_FORCE_BUILD=true; shift; continue
;;
--build-oiio)
OIIO_FORCE_BUILD=true; shift; continue
;;
--build-llvm)
LLVM_FORCE_BUILD=true; shift; continue
;;
--build-osl)
OSL_FORCE_BUILD=true; shift; continue
;;
--build-osd)
OSD_FORCE_BUILD=true; shift; continue
;;
--build-openvdb)
OPENVDB_FORCE_BUILD=true; shift; continue
;;
--build-opencollada)
OPENCOLLADA_FORCE_BUILD=true; shift; continue
;;
--build-embree)
EMBREE_FORCE_BUILD=true; shift; continue
;;
--build-oidn)
OIDN_FORCE_BUILD=true; shift; continue
;;
--build-ffmpeg)
FFMPEG_FORCE_BUILD=true; shift; continue
;;
--build-alembic)
ALEMBIC_FORCE_BUILD=true; shift; continue
;;
--force-all)
PYTHON_FORCE_REBUILD=true
NUMPY_FORCE_REBUILD=true
BOOST_FORCE_REBUILD=true
OCIO_FORCE_REBUILD=true
OPENEXR_FORCE_REBUILD=true
OIIO_FORCE_REBUILD=true
LLVM_FORCE_REBUILD=true
OSL_FORCE_REBUILD=true
OSD_FORCE_REBUILD=true
OPENVDB_FORCE_REBUILD=true
OPENCOLLADA_FORCE_REBUILD=true
EMBREE_FORCE_REBUILD=true
OIDN_FORCE_REBUILD=true
FFMPEG_FORCE_REBUILD=true
ALEMBIC_FORCE_REBUILD=true
shift; continue
;;
--force-python)
PYTHON_FORCE_REBUILD=true
NUMPY_FORCE_REBUILD=true
shift; continue
;;
--force-numpy)
NUMPY_FORCE_REBUILD=true; shift; continue
;;
--force-boost)
BOOST_FORCE_REBUILD=true; shift; continue
;;
--force-ocio)
OCIO_FORCE_REBUILD=true; shift; continue
;;
--force-openexr)
OPENEXR_FORCE_REBUILD=true; shift; continue
;;
--force-oiio)
OIIO_FORCE_REBUILD=true; shift; continue
;;
--force-llvm)
LLVM_FORCE_REBUILD=true; shift; continue
;;
--force-osl)
OSL_FORCE_REBUILD=true; shift; continue
;;
--force-osd)
OSD_FORCE_REBUILD=true; shift; continue
;;
--force-openvdb)
OPENVDB_FORCE_REBUILD=true; shift; continue
;;
--force-opencollada)
OPENCOLLADA_FORCE_REBUILD=true; shift; continue
;;
--force-embree)
EMBREE_FORCE_REBUILD=true; shift; continue
;;
--force-oidn)
OIDN_FORCE_REBUILD=true; shift; continue
;;
--force-ffmpeg)
FFMPEG_FORCE_REBUILD=true; shift; continue
;;
--force-alembic)
ALEMBIC_FORCE_REBUILD=true; shift; continue
;;
--skip-python)
PYTHON_SKIP=true; shift; continue
;;
--skip-numpy)
NUMPY_SKIP=true; shift; continue
;;
--skip-boost)
BOOST_SKIP=true; shift; continue
;;
--skip-ocio)
OCIO_SKIP=true; shift; continue
;;
--skip-openexr)
OPENEXR_SKIP=true; shift; continue
;;
--skip-oiio)
OIIO_SKIP=true; shift; continue
;;
--skip-llvm)
LLVM_SKIP=true; shift; continue
;;
--skip-osl)
OSL_SKIP=true; shift; continue
;;
--skip-osd)
OSD_SKIP=true; shift; continue
;;
--skip-openvdb)
OPENVDB_SKIP=true; shift; continue
;;
--skip-opencollada)
OPENCOLLADA_SKIP=true; shift; continue
;;
--skip-embree)
EMBREE_SKIP=true; shift; continue
;;
--skip-oidn)
OIDN_SKIP=true; shift; continue
;;
--skip-ffmpeg)
FFMPEG_SKIP=true; shift; continue
;;
--skip-alembic)
ALEMBIC_SKIP=true; shift; continue
;;
--)
# no more arguments to parse
break
;;
*)
PRINT ""
ERROR "Wrong parameter! Usage:"
PRINT ""
PRINT "`eval _echo "$COMMON_INFO"`"
PRINT ""
exit 1
;;
esac
done
if [ "$WITH_ALL" = true -a "$OPENCOLLADA_SKIP" = false ]; then
WITH_OPENCOLLADA=true
fi
if [ "$WITH_ALL" = true -a "$EMBREE_SKIP" = false ]; then
WITH_EMBREE=true
fi
if [ "$WITH_ALL" = true -a "$OIDN_SKIP" = false ]; then
WITH_OIDN=true
fi
if [ "$WITH_ALL" = true ]; then
WITH_JACK=true
fi
WARNING "****WARNING****"
PRINT "If you are experiencing issues building Blender, _*TRY A FRESH, CLEAN BUILD FIRST*_!"
PRINT "The same goes for install_deps itself, if you encounter issues, please first erase everything in $SRC and $INST"
PRINT "(provided obviously you did not add anything yourself in those dirs!), and run install_deps.sh again!"
PRINT "Often, changes in the libs built by this script, or in your distro package, cannot be handled simply, so..."
PRINT ""
PRINT "You may also try to use the '--build-foo' options to bypass your distribution's packages"
PRINT "for some troublesome/buggy libraries..."
PRINT ""
PRINT ""
PRINT "Ran with:"
PRINT " install_deps.sh $COMMANDLINE"
PRINT ""
PRINT ""
# This has to be done here, because user might force some versions...
# Per-library download locations; each FOO_SOURCE is an array so `download`
# can try several mirrors in turn.  FOO_USE_REPO selects git checkout vs
# tarball (the commented-out repo URLs/commits are kept for reference).
PYTHON_SOURCE=( "https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tgz" )
NUMPY_SOURCE=( "https://github.com/numpy/numpy/releases/download/v$NUMPY_VERSION/numpy-$NUMPY_VERSION.tar.gz" )

# Boost archives use underscores in the version (1.70.0 -> 1_70_0).
_boost_version_nodots=`echo "$BOOST_VERSION" | sed -r 's/\./_/g'`
BOOST_SOURCE=( "http://sourceforge.net/projects/boost/files/boost/$BOOST_VERSION/boost_$_boost_version_nodots.tar.bz2/download" )
BOOST_BUILD_MODULES="--with-system --with-filesystem --with-thread --with-regex --with-locale --with-date_time --with-wave --with-iostreams --with-python --with-program_options"

OCIO_USE_REPO=false
OCIO_SOURCE=( "https://github.com/imageworks/OpenColorIO/archive/v$OCIO_VERSION.tar.gz")
#~ OCIO_SOURCE_REPO=( "https://github.com/imageworks/OpenColorIO.git" )
#~ OCIO_SOURCE_REPO_UID="6de971097c7f552300f669ed69ca0b6cf5a70843"

OPENEXR_USE_REPO=false
#~ OPENEXR_SOURCE=( "https://github.com/openexr/openexr/releases/download/v$OPENEXR_VERSION/openexr-$OPENEXR_VERSION.tar.gz" )
# OpenEXR is pinned to a specific commit archive rather than a release tag.
OPENEXR_SOURCE_REPO_UID="0ac2ea34c8f3134148a5df4052e40f155b76f6fb"
OPENEXR_SOURCE=( "https://github.com/openexr/openexr/archive/$OPENEXR_SOURCE_REPO_UID.tar.gz" )
#~ OPENEXR_SOURCE_REPO=( "https://github.com/mont29/openexr.git" )
ILMBASE_SOURCE=( "https://github.com/openexr/openexr/releases/download/v$ILMBASE_VERSION/ilmbase-$ILMBASE_VERSION.tar.gz" )

OIIO_USE_REPO=false
OIIO_SOURCE=( "https://github.com/OpenImageIO/oiio/archive/Release-$OIIO_VERSION.tar.gz" )
#~ OIIO_SOURCE_REPO=( "https://github.com/OpenImageIO/oiio.git" )
#~ OIIO_SOURCE_REPO_UID="c9e67275a0b248ead96152f6d2221cc0c0f278a4"

# clang was renamed from "cfe" on older release servers; both URLs are tried.
LLVM_SOURCE=( "http://releases.llvm.org/$LLVM_VERSION/llvm-$LLVM_VERSION.src.tar.xz" )
LLVM_CLANG_SOURCE=( "http://releases.llvm.org/$LLVM_VERSION/clang-$LLVM_VERSION.src.tar.xz" "http://llvm.org/releases/$LLVM_VERSION/cfe-$LLVM_VERSION.src.tar.xz" )

OSL_USE_REPO=false
OSL_SOURCE=( "https://github.com/imageworks/OpenShadingLanguage/archive/Release-$OSL_VERSION.tar.gz" )
#~ OSL_SOURCE_REPO=( "https://github.com/imageworks/OpenShadingLanguage.git" )
#~ OSL_SOURCE_REPO_BRANCH="master"
#~ OSL_SOURCE_REPO_UID="85179714e1bc69cd25ecb6bb711c1a156685d395"
#~ OSL_SOURCE=( "https://github.com/Nazg-Gul/OpenShadingLanguage/archive/Release-1.5.11.tar.gz" )
#~ OSL_SOURCE_REPO=( "https://github.com/mont29/OpenShadingLanguage.git" )
#~ OSL_SOURCE_REPO_UID="85179714e1bc69cd25ecb6bb711c1a156685d395"
#~ OSL_SOURCE_REPO=( "https://github.com/Nazg-Gul/OpenShadingLanguage.git" )
#~ OSL_SOURCE_REPO_UID="7d40ff5fe8e47b030042afb92d0e955f5aa96f48"
#~ OSL_SOURCE_REPO_BRANCH="blender-fixes"

OSD_USE_REPO=false
# Script foo to make the version string compliant with the archive name:
# ${Varname//SearchForThisChar/ReplaceWithThisChar}
OSD_SOURCE=( "https://github.com/PixarAnimationStudios/OpenSubdiv/archive/v${OSD_VERSION//./_}.tar.gz" )
#~ OSD_SOURCE_REPO=( "https://github.com/PixarAnimationStudios/OpenSubdiv.git" )
#~ OSD_SOURCE_REPO_UID="404659fffa659da075d1c9416e4fc939139a84ee"
#~ OSD_SOURCE_REPO_BRANCH="dev"

OPENVDB_USE_REPO=false
OPENVDB_BLOSC_SOURCE=( "https://github.com/Blosc/c-blosc/archive/v${OPENVDB_BLOSC_VERSION}.tar.gz" )
OPENVDB_SOURCE=( "https://github.com/dreamworksanimation/openvdb/archive/v${OPENVDB_VERSION}.tar.gz" )
#~ OPENVDB_SOURCE_REPO=( "https:///dreamworksanimation/openvdb.git" )
#~ OPENVDB_SOURCE_REPO_UID="404659fffa659da075d1c9416e4fc939139a84ee"
#~ OPENVDB_SOURCE_REPO_BRANCH="dev"

ALEMBIC_USE_REPO=false
ALEMBIC_SOURCE=( "https://github.com/alembic/alembic/archive/${ALEMBIC_VERSION}.tar.gz" )
# ALEMBIC_SOURCE_REPO=( "https://github.com/alembic/alembic.git" )
# ALEMBIC_SOURCE_REPO_UID="e6c90d4faa32c4550adeaaf3f556dad4b73a92bb"
# ALEMBIC_SOURCE_REPO_BRANCH="master"

OPENCOLLADA_USE_REPO=false
OPENCOLLADA_SOURCE=( "https://github.com/KhronosGroup/OpenCOLLADA/archive/v${OPENCOLLADA_VERSION}.tar.gz" )
#~ OPENCOLLADA_SOURCE_REPO=( "https://github.com/KhronosGroup/OpenCOLLADA.git" )
#~ OPENCOLLADA_REPO_UID="e937c3897b86fc0da53cde97257f5156"
#~ OPENCOLLADA_REPO_BRANCH="master"

EMBREE_USE_REPO=false
EMBREE_SOURCE=( "https://github.com/embree/embree/archive/v${EMBREE_VERSION}.tar.gz" )
#~ EMBREE_SOURCE_REPO=( "https://github.com/embree/embree.git" )
#~ EMBREE_REPO_UID="4a12bfed63c90e85b6eab98b8cdd8dd2a3ba5809"
#~ EMBREE_REPO_BRANCH="master"

OIDN_USE_REPO=false
OIDN_SOURCE=( "https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz" )
#~ OIDN_SOURCE_REPO=( "https://github.com/OpenImageDenoise/oidn.git" )
#~ OIDN_REPO_UID="dabfd9c80101edae9d25a710160d12d6d963c591"
#~ OIDN_REPO_BRANCH="master"

FFMPEG_SOURCE=( "http://ffmpeg.org/releases/ffmpeg-$FFMPEG_VERSION.tar.bz2" )

# C++11 is required now
# Keep the original flags around (CXXFLAGS_BACK) so callers can restore them.
CXXFLAGS_BACK=$CXXFLAGS
CXXFLAGS="$CXXFLAGS -std=c++11"
export CXXFLAGS
#### Show Dependencies ####

# Need those to be after we defined versions...
# NOTE: both INFO strings below are intentionally wrapped in escaped quotes
# so that `eval _echo "$DEPS_..."` re-expands them later; their body lines
# must stay exactly as-is (they are string content, not script code).
DEPS_COMMON_INFO="\"COMMON DEPENDENCIES:

Those libraries should be available as packages in all recent distributions (optional ones are [between brackets]):

    * Basics of dev environment (cmake, gcc, svn , git, ...).
    * libjpeg, libpng, libtiff, [openjpeg2], [libopenal].
    * libx11, libxcursor, libxi, libxrandr, libxinerama (and other libx... as needed).
    * libsqlite3, libbz2, libssl, libfftw3, libxml2, libtinyxml, yasm, libyaml-cpp.
    * libsdl1.2, libglew, [libglewmx].\""

DEPS_SPECIFIC_INFO="\"BUILDABLE DEPENDENCIES:

The following libraries will probably not all be available as packages in your distribution
(install_deps will by default try to install packages, and fall back to building missing ones).
You can force install_deps to build those with '--build-all' or relevant 'build-foo' options, see '--help' message.
You may also want to build them yourself (optional ones are [between brackets]):

    * Python $PYTHON_VERSION_MIN (from $PYTHON_SOURCE).
    * [NumPy $NUMPY_VERSION_MIN] (from $NUMPY_SOURCE).
    * Boost $BOOST_VERSION_MIN (from $BOOST_SOURCE, modules: $BOOST_BUILD_MODULES).
    * [FFMpeg $FFMPEG_VERSION_MIN (needs libvorbis, libogg, libtheora, libx264, libmp3lame, libxvidcore, libvpx, ...)] (from $FFMPEG_SOURCE).
    * [OpenColorIO $OCIO_VERSION_MIN] (from $OCIO_SOURCE).
    * ILMBase $ILMBASE_VERSION_MIN (from $ILMBASE_SOURCE).
    * OpenEXR $OPENEXR_VERSION_MIN (from $OPENEXR_SOURCE).
    * OpenImageIO $OIIO_VERSION_MIN (from $OIIO_SOURCE).
    * [LLVM $LLVM_VERSION_MIN (with clang)] (from $LLVM_SOURCE, and $LLVM_CLANG_SOURCE).
    * [OpenShadingLanguage $OSL_VERSION_MIN] (from $OSL_SOURCE_REPO, branch $OSL_SOURCE_REPO_BRANCH, commit $OSL_SOURCE_REPO_UID).
    * [OpenSubDiv $OSD_VERSION_MIN] (from $OSD_SOURCE_REPO, branch $OSD_SOURCE_REPO_BRANCH, commit $OSD_SOURCE_REPO_UID).
    * [OpenVDB $OPENVDB_VERSION_MIN] (from $OPENVDB_SOURCE), [Blosc $OPENVDB_BLOSC_VERSION] (from $OPENVDB_BLOSC_SOURCE).
    * [OpenCollada $OPENCOLLADA_VERSION] (from $OPENCOLLADA_SOURCE).
    * [Embree $EMBREE_VERSION] (from $EMBREE_SOURCE).
    * [OpenImageDenoise $OIDN_VERSION] (from $OIDN_SOURCE).
    * [Alembic $ALEMBIC_VERSION] (from $ALEMBIC_SOURCE).\""

# --show-deps: print the two lists above and stop without building anything.
if [ "$DO_SHOW_DEPS" = true ]; then
  PRINT ""
  PRINT "Blender dependencies (libraries needed to build it):"
  PRINT ""
  PRINT "`eval _echo "$DEPS_COMMON_INFO"`"
  PRINT ""
  PRINT "`eval _echo "$DEPS_SPECIFIC_INFO"`"
  PRINT ""
  exit 0
fi
##### Generic Helpers #####
# Check return code of wget for success...
# download NAME_OF_SOURCE_ARRAY[@] DEST
#   $1 names an array of mirror URLs (passed as e.g. FOO_SOURCE[@]);
#   each URL is tried in turn with `wget -c` (resume partial downloads)
#   until one succeeds.  Exits the whole script when all mirrors fail.
download() {
  declare -a sources=("${!1}")
  sources_count=${#sources[@]}
  error=1

  for (( i=0; $i < $sources_count; i++ ))
  do
    # Quote URL and destination: unquoted expansions would word-split on
    # spaces or glob characters and feed wget broken arguments.
    if wget -c "${sources[$i]}" -O "$2"; then
      error=0
      break
    fi
  done

  if [ $error -eq 1 ]; then
    ERROR "wget could not find ${sources[@]}, or could not write it to $2, exiting"
    exit 1
  fi
}
# Return 0 if $1 = $2 (i.e. 1.01.0 = 1.1, but 1.1.1 != 1.1), else 1.
# $1 and $2 should be version numbers made of numbers only.
version_eq() {
  backIFS=$IFS
  IFS='.'

  # Split both dotted versions into their numeric fields.
  arr1=( $1 )
  arr2=( $2 )

  count1=${#arr1[@]}
  count2=${#arr2[@]}

  # Make arr1/count1 refer to the version with the most fields.
  if [ $count2 -ge $count1 ]; then
    _t=$count1
    count1=$count2
    count2=$_t
    arr1=( $2 )
    arr2=( $1 )
  fi

  ret=0
  # Shared fields must agree numerically (10# forces base ten, so
  # leading zeros like "01" compare equal to "1").
  i=0
  while [ $i -lt $count2 ]; do
    if [ $(( 10#${arr1[$i]} )) -ne $(( 10#${arr2[$i]} )) ]; then
      ret=1
      break
    fi
    i=$(( i + 1 ))
  done

  # Any extra trailing fields of the longer version must all be zero.
  if [ $ret -eq 0 ]; then
    i=$count2
    while [ $i -lt $count1 ]; do
      if [ $(( 10#${arr1[$i]} )) -ne 0 ]; then
        ret=1
        break
      fi
      i=$(( i + 1 ))
    done
  fi

  IFS=$backIFS
  return $ret
}
# Return 0 if $1 >= $2, else 1.
# $1 and $2 should be version numbers made of numbers only.
version_ge() {
  # Equal versions trivially satisfy >=.
  if version_eq $1 $2; then
    return 0
  fi
  # Not equal: $1 < $2 exactly when version-sort lists $1 first.
  if [ $(_echo "$1" "$2" | sort --version-sort | head --lines=1) = "$1" ]; then
    return 1
  fi
  return 0
}
# Return 0 if $3 > $1 >= $2, else 1.
# $1 and $2 should be version numbers made of numbers only.
version_ge_lt() {
  # Reject when $1 reaches the exclusive upper bound $3.
  if version_ge $1 $3; then
    return 1
  fi
  # Otherwise the result is just the lower-bound check.
  version_ge $1 $2
  return $?
}
# Return 0 if $1 is into $2 (e.g. 3.3.2 is into 3.3, but not 3.3.0 or 3.3.5), else 1.
# $1 and $2 should be version numbers made of numbers only.
# $1 should be at least as long as $2!
version_match() {
  backIFS=$IFS
  IFS='.'

  # Split both dotted versions into their numeric fields.
  arr1=( $1 )
  arr2=( $2 )

  count1=${#arr1[@]}
  count2=${#arr2[@]}

  ret=1
  # $1 can only fall inside $2 when it has at least as many fields;
  # then every field of $2 must match the corresponding field of $1.
  if [ $count1 -ge $count2 ]; then
    ret=0
    i=0
    while [ $i -lt $count2 ]; do
      if [ $(( 10#${arr1[$i]} )) -ne $(( 10#${arr2[$i]} )) ]; then
        ret=1
        break
      fi
      i=$(( i + 1 ))
    done
  fi

  IFS=$backIFS
  return $ret
}
##### Generic compile helpers #####
prepare_opt() {
  # Make sure the install root $INST exists and is writable by the current
  # user, escalating via $SUDO (empty when --no-sudo was given) as needed.
  INFO "Ensuring $INST exists and is writable by us"

  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, might be impossible to create install dir..."
  fi
  if [ ! -d $INST ]; then
    # Created with sudo, hence the chown/chmod fix-up below.
    $SUDO mkdir -p $INST
  fi

  if [ ! -w $INST ]; then
    $SUDO chown $USER $INST
    $SUDO chmod 775 $INST
  fi
}
# Check whether the current package needs to be recompiled, based on a dummy file containing a magic number in its name...
# Returns 0 (up to date) when the stamp for package $1 at magic number $2
# exists under $INST, 1 (rebuild needed) otherwise.
magic_compile_check() {
  [ -f "$INST/.$1-magiccheck-$2-$USE_CXX11" ]
}
# Record that package $1 was built at magic number $2: drop any stale
# stamp files for the package, then create the fresh one.
magic_compile_set() {
  rm -f "$INST/.$1-magiccheck-"*
  touch "$INST/.$1-magiccheck-$2-$USE_CXX11"
}
# Note: should clean nicely in $INST, but not in $SRC, when we switch to a new version of a lib...
# Removes the artifacts described by the caller's _init_* function:
# the resolved install dir behind $_inst_shortcut, the source tree $_src
# (only when not a git clone), $_inst and the shortcut symlink itself.
_clean() {
  # Resolve the shortcut first; only delete when it actually points somewhere
  # (the old unquoted form could word-split and, with an empty result,
  # silently change what got removed).
  _target=`readlink -f "$_inst_shortcut"`
  if [ -n "$_target" ]; then
    rm -rf "$_target"
  fi
  # Only remove $_src dir when not using git repo (avoids to re-clone the whole repo every time!!!).
  # Quoted comparison: with an unset $_git the old `[ $_git == false ]`
  # was a test-syntax error.
  if [ "$_git" = false ]; then
    rm -rf "$_src"
  fi
  rm -rf "$_inst"
  rm -rf "$_inst_shortcut"
}
# Refresh the convenience symlink ($_inst_shortcut) so it points at the
# current versioned install dir ($_inst).
_create_inst_shortcut() {
  rm -f "$_inst_shortcut"
  ln -s "$_inst" "$_inst_shortcut"
}
# ldconfig
# Register $INST/$1/lib and $INST/$1/lib64 with the dynamic linker by
# writing an /etc/ld.so.conf.d/$1.conf entry and re-running ldconfig.
# Requires $SUDO; with --no-sudo it only warns.
run_ldconfig() {
  _lib_path="$INST/$1/lib"
  _lib64_path="$INST/$1/lib64"
  _ldconf_path="/etc/ld.so.conf.d/$1.conf"

  PRINT ""
  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, impossible to run ldconfig for $1, you'll have to do it yourself..."
  else
    INFO "Running ldconfig for $1..."
    $SUDO sh -c "echo -e \"$_lib_path\n$_lib64_path\" > $_ldconf_path"
    $SUDO /sbin/ldconfig  # XXX OpenSuse does not include sbin in command path with sudo!!!
  fi
  PRINT ""
}
#### Build Python ####
# Set the scratch variables (_src/_git/_inst/_inst_shortcut) used by
# compile_Python and clean_Python for the CPython build.
_init_python() {
  _git=false                                         # built from a tarball, not a git clone
  _src="$SRC/Python-$PYTHON_VERSION"                 # unpacked source tree
  _inst="$INST/python-$PYTHON_VERSION"               # versioned install prefix
  _inst_shortcut="$INST/python-$PYTHON_VERSION_MIN"  # short-version symlink
}
clean_Python() {
  # Numpy lives inside Python's site-packages, so clean it first; then
  # re-initialise the scratch variables for Python itself before wiping it.
  clean_Numpy

  _init_python
  _clean
}
compile_Python() {
  # Download, build and install CPython into $INST/python-$PYTHON_VERSION,
  # then symlink it as $INST/python-$PYTHON_VERSION_MIN.  Skipped when the
  # existing install already matches the current magic number.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, Python will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  py_magic=1
  _init_python

  # Clean install if needed!
  magic_compile_check python-$PYTHON_VERSION $py_magic
  if [ $? -eq 1 -o "$PYTHON_FORCE_REBUILD" = true ]; then
    clean_Python
  fi

  if [ ! -d $_inst ]; then
    INFO "Building Python-$PYTHON_VERSION"

    prepare_opt

    if [ ! -d $_src ]; then
      mkdir -p $SRC
      download PYTHON_SOURCE[@] $_src.tgz

      INFO "Unpacking Python-$PYTHON_VERSION"
      tar -C $SRC -xf $_src.tgz
    fi

    cd $_src

    ./configure --prefix=$_inst --libdir=$_inst/lib --enable-ipv6 \
        --enable-loadable-sqlite-extensions --with-dbmliborder=bdb \
        --with-computed-gotos --with-pymalloc

    make -j$THREADS && make install
    make clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "Python--$PYTHON_VERSION failed to compile, exiting"
      exit 1
    fi

    # Stamp the build so later runs can skip it.
    magic_compile_set python-$PYTHON_VERSION $py_magic

    cd $CWD
    INFO "Done compiling Python-$PYTHON_VERSION!"
  else
    INFO "Own Python-$PYTHON_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-python option."
  fi
}
##### Build Numpy #####
# Set the scratch variables used by compile_Numpy/clean_Numpy; numpy is
# installed into the locally-built Python's site-packages tree.
_init_numpy() {
  _git=false
  _src="$SRC/numpy-$NUMPY_VERSION"
  _inst="$INST/numpy-$NUMPY_VERSION"
  # Interpreter prefix and its site-packages sub-path.
  _python="$INST/python-$PYTHON_VERSION"
  _site="lib/python$PYTHON_VERSION_MIN/site-packages"
  # The shortcut is the numpy package dir inside that Python install.
  _inst_shortcut="$_python/$_site/numpy"
}
clean_Numpy() {
  # Re-initialise the scratch variables for numpy, then remove its
  # source tree, install dir and site-packages symlink.
  _init_numpy
  _clean
}
compile_Numpy() {
  # Download, build and install numpy against the locally-built Python,
  # then link the package into that Python's site-packages.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, Numpy will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  numpy_magic=0
  _init_numpy

  # Clean install if needed!
  magic_compile_check numpy-$NUMPY_VERSION $numpy_magic
  if [ $? -eq 1 -o "$NUMPY_FORCE_REBUILD" = true ]; then
    clean_Numpy
  fi

  if [ ! -d $_inst ]; then
    INFO "Building Numpy-$NUMPY_VERSION"

    prepare_opt

    if [ ! -d $_src ]; then
      mkdir -p $SRC
      download NUMPY_SOURCE[@] $_src.tar.gz

      INFO "Unpacking Numpy-$NUMPY_VERSION"
      tar -C $SRC -xf $_src.tar.gz
    fi

    cd $_src

    # Build with the Python we installed ourselves, not the system one.
    $_python/bin/python3 setup.py install --old-and-unmanageable --prefix=$_inst

    if [ -d $_inst ]; then
      # Can't use _create_inst_shortcut here...
      # (the shortcut points at the package dir inside $_inst, not at $_inst).
      rm -f $_inst_shortcut
      ln -s $_inst/$_site/numpy $_inst_shortcut
    else
      ERROR "Numpy-$NUMPY_VERSION failed to compile, exiting"
      exit 1
    fi

    magic_compile_set numpy-$NUMPY_VERSION $numpy_magic

    cd $CWD
    INFO "Done compiling Numpy-$NUMPY_VERSION!"
  else
    INFO "Own Numpy-$NUMPY_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-numpy option."
  fi
}
#### Build Boost ####
# Set the scratch variables used by compile_Boost/clean_Boost.
_init_boost() {
  _git=false                           # tarball build
  _src="$SRC/boost-$BOOST_VERSION"
  _inst="$INST/boost-$BOOST_VERSION"
  _inst_shortcut="$INST/boost"         # unversioned symlink
}
clean_Boost() {
  # Re-initialise the scratch variables for boost, then wipe its artifacts.
  _init_boost
  _clean
}
compile_Boost() {
  # Download, bootstrap and build Boost (only $BOOST_BUILD_MODULES) into
  # $INST/boost-$BOOST_VERSION, symlinked as $INST/boost.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, Boost will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  boost_magic=11

  _init_boost

  # Clean install if needed!
  magic_compile_check boost-$BOOST_VERSION $boost_magic
  if [ $? -eq 1 -o "$BOOST_FORCE_REBUILD" = true ]; then
    clean_Boost
  fi

  if [ ! -d $_inst ]; then
    INFO "Building Boost-$BOOST_VERSION"

    # Rebuild dependencies as well!
    # (OIIO, OSL and OpenVDB all link against boost.)
    OIIO_FORCE_BUILD=true
    OIIO_FORCE_REBUILD=true
    OSL_FORCE_BUILD=true
    OSL_FORCE_REBUILD=true
    OPENVDB_FORCE_BUILD=true
    OPENVDB_FORCE_REBUILD=true

    prepare_opt

    if [ ! -d $_src ]; then
      INFO "Downloading Boost-$BOOST_VERSION"
      mkdir -p $SRC
      download BOOST_SOURCE[@] $_src.tar.bz2
      # Rename the archive's top-level dir to our boost-$VERSION layout.
      tar -C $SRC --transform "s,\w*,boost-$BOOST_VERSION,x" -xf $_src.tar.bz2
    fi

    cd $_src
    # Bootstrap only once (b2 is the generated build driver).
    if [ ! -f $_src/b2 ]; then
      ./bootstrap.sh
    fi
    ./b2 -j$THREADS -a $BOOST_BUILD_MODULES \
         --prefix=$_inst --disable-icu boost.locale.icu=off install
    ./b2 --clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "Boost-$BOOST_VERSION failed to compile, exiting"
      exit 1
    fi

    magic_compile_set boost-$BOOST_VERSION $boost_magic

    cd $CWD
    INFO "Done compiling Boost-$BOOST_VERSION!"
  else
    INFO "Own Boost-$BOOST_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-boost option."
  fi

  # Just always run it, much simpler this way!
  run_ldconfig "boost"
}
#### Build OCIO ####
# Set the scratch variables used by compile_OCIO/clean_OCIO.
# _git tracks whether the source tree is a git checkout (repo mode)
# or an unpacked release tarball.
_init_ocio() {
  _src="$SRC/OpenColorIO-$OCIO_VERSION"
  _inst="$INST/ocio-$OCIO_VERSION"
  _inst_shortcut="$INST/ocio"

  _git=false
  if [ "$OCIO_USE_REPO" = true ]; then
    _git=true
  fi
}
clean_OCIO() {
  # Re-initialise the scratch variables for OCIO, then wipe its artifacts.
  _init_ocio
  _clean
}
compile_OCIO() {
  # Download (tarball or git, per $OCIO_USE_REPO), build and install
  # OpenColorIO into $INST/ocio-$OCIO_VERSION, symlinked as $INST/ocio.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, OpenColorIO will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  ocio_magic=2
  _init_ocio

  # Clean install if needed!
  magic_compile_check ocio-$OCIO_VERSION $ocio_magic
  if [ $? -eq 1 -o "$OCIO_FORCE_REBUILD" = true ]; then
    clean_OCIO
  fi

  if [ ! -d $_inst ]; then
    INFO "Building OpenColorIO-$OCIO_VERSION"

    prepare_opt

    if [ ! -d $_src ]; then
      INFO "Downloading OpenColorIO-$OCIO_VERSION"
      mkdir -p $SRC

      if [ "$OCIO_USE_REPO" = true ]; then
        git clone ${OCIO_SOURCE_REPO[0]} $_src
      else
        download OCIO_SOURCE[@] $_src.tar.gz
        INFO "Unpacking OpenColorIO-$OCIO_VERSION"
        # Rename the GitHub archive's top-level dir to our layout.
        tar -C $SRC --transform "s,(.*/?)imageworks-OpenColorIO[^/]*(.*),\1OpenColorIO-$OCIO_VERSION\2,x" \
            -xf $_src.tar.gz
      fi
    fi

    cd $_src

    if [ "$OCIO_USE_REPO" = true ]; then
      # XXX For now, always update from latest repo...
      git pull origin master
      git checkout $OCIO_SOURCE_REPO_UID
      git reset --hard
    fi

    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build

    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_PREFIX_PATH=$_inst"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D OCIO_BUILD_APPS=OFF"
    cmake_d="$cmake_d -D OCIO_BUILD_PYGLUE=OFF"
    cmake_d="$cmake_d -D STOP_ON_WARNING=OFF"

    # Detect a 32-bit userland via the arch of a known system binary.
    if file /bin/cp | grep -q '32-bit'; then
      cflags="-fPIC -m32 -march=i686"
    else
      cflags="-fPIC"
    fi
    cflags="$cflags -Wno-error=unused-function -Wno-error=deprecated-declarations"

    cmake $cmake_d -D CMAKE_CXX_FLAGS="$cflags" -D CMAKE_EXE_LINKER_FLAGS="-lgcc_s -lgcc" ..

    make -j$THREADS && make install

    # Force linking against static libs
    rm -f $_inst/lib/*.so*

    # Additional dependencies (static libs built inside OCIO's ext/ tree).
    cp ext/dist/lib/libtinyxml.a $_inst/lib
    cp ext/dist/lib/libyaml-cpp.a $_inst/lib

    make clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "OpenColorIO-$OCIO_VERSION failed to compile, exiting"
      exit 1
    fi

    magic_compile_set ocio-$OCIO_VERSION $ocio_magic

    cd $CWD
    INFO "Done compiling OpenColorIO-$OCIO_VERSION!"
  else
    INFO "Own OpenColorIO-$OCIO_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-ocio option."
  fi

  run_ldconfig "ocio"
}
#### Build ILMBase ####
# Set the scratch variables used by compile_ILMBASE/clean_ILMBASE.
# Note the install goes under $TMP, not $INST: compile_OPENEXR later
# copies the ILMBase files into the final openexr install dir.
_init_ilmbase() {
  _git=false
  _src="$SRC/ILMBase-$ILMBASE_VERSION"
  _inst="$TMP/ilmbase-$ILMBASE_VERSION"
  _inst_shortcut="$TMP/ilmbase"
}
clean_ILMBASE() {
  # Re-initialise the scratch variables for ILMBase, then wipe its artifacts.
  _init_ilmbase
  _clean
}
compile_ILMBASE() {
  # Build ILMBase into a temp prefix ($TMP/ilmbase-...); compile_OPENEXR
  # copies the result into the final openexr install.  The "is it built
  # already" check uses $_openexr_inst, which the caller (compile_OPENEXR)
  # sets before invoking this.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, ILMBase will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  ilmbase_magic=10
  _init_ilmbase

  # Clean install if needed!
  magic_compile_check ilmbase-$ILMBASE_VERSION $ilmbase_magic
  if [ $? -eq 1 -o "$OPENEXR_FORCE_REBUILD" = true ]; then
    clean_ILMBASE
    rm -rf $_openexr_inst
  fi

  if [ ! -d $_openexr_inst ]; then
    INFO "Building ILMBase-$ILMBASE_VERSION"

    # Rebuild dependencies as well!
    OPENEXR_FORCE_BUILD=true
    OPENEXR_FORCE_REBUILD=true

    prepare_opt

    if [ ! -d $_src ]; then
      INFO "Downloading ILMBase-$ILMBASE_VERSION"
      mkdir -p $SRC
      download ILMBASE_SOURCE[@] $_src.tar.gz

      INFO "Unpacking ILMBase-$ILMBASE_VERSION"
      # Rename the archive's top-level dir to our layout.
      tar -C $SRC --transform "s,(.*/?)ilmbase-[^/]*(.*),\1ILMBase-$ILMBASE_VERSION\2,x" -xf $_src.tar.gz
    fi

    cd $_src

    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build

    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_PREFIX_PATH=$_inst"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D BUILD_SHARED_LIBS=ON"
    cmake_d="$cmake_d -D NAMESPACE_VERSIONING=OFF"  # VERY IMPORTANT!!!

    # Detect a 32-bit userland via the arch of a known system binary.
    if file /bin/cp | grep -q '32-bit'; then
      cflags="-fPIC -m32 -march=i686"
    else
      cflags="-fPIC"
    fi

    cmake $cmake_d -D CMAKE_CXX_FLAGS="$cflags" -D CMAKE_EXE_LINKER_FLAGS="-lgcc_s -lgcc" ..

    make -j$THREADS && make install

    make clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "ILMBase-$ILMBASE_VERSION failed to compile, exiting"
      exit 1
    fi
    cd $CWD
    INFO "Done compiling ILMBase-$ILMBASE_VERSION!"
  else
    INFO "Own ILMBase-$ILMBASE_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib (and openexr), use the --force-openexr option."
  fi

  # NOTE(review): unlike the other compile_* functions, the stamp is written
  # even when the build branch above was skipped — confirm this is intentional.
  magic_compile_set ilmbase-$ILMBASE_VERSION $ilmbase_magic
}
#### Build OpenEXR ####
# Set the scratch variables used by compile_OPENEXR/clean_OPENEXR.
# $_openexr_inst must already have been set by the caller
# (compile_OPENEXR does so before calling this).
_init_openexr() {
  _git=true
  _src="$SRC/OpenEXR-$OPENEXR_VERSION"
  _inst=$_openexr_inst
  _inst_shortcut="$INST/openexr"
}
clean_OPENEXR() {
  # ILMBase is bundled with the OpenEXR install, so clean it too.
  clean_ILMBASE

  _init_openexr
  _clean
}
compile_OPENEXR() {
  # Build ILMBase (into $TMP) then OpenEXR into $INST/openexr-$OPENEXR_VERSION,
  # copying the ILMBase files into the same prefix at the end.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, OpenEXR will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  openexr_magic=14

  # Clean install if needed!
  magic_compile_check openexr-$OPENEXR_VERSION $openexr_magic
  if [ $? -eq 1 -o "$OPENEXR_FORCE_REBUILD" = true ]; then
    clean_OPENEXR
  fi

  # compile_ILMBASE reads $_openexr_inst, so set it before calling.
  _openexr_inst=$INST/openexr-$OPENEXR_VERSION
  compile_ILMBASE
  PRINT ""
  # Remember where ILMBase landed ($_inst_shortcut still points there).
  _ilmbase_inst=$_inst_shortcut
  _init_openexr

  if [ ! -d $_inst ]; then
    INFO "Building OpenEXR-$OPENEXR_VERSION"

    # Rebuild dependencies as well!
    OIIO_FORCE_BUILD=true
    OIIO_FORCE_REBUILD=true

    prepare_opt

    if [ ! -d $_src ]; then
      INFO "Downloading OpenEXR-$OPENEXR_VERSION"
      mkdir -p $SRC

      if [ "$OPENEXR_USE_REPO" = true ]; then
        git clone ${OPENEXR_SOURCE_REPO[0]} $_src
      else
        download OPENEXR_SOURCE[@] $_src.tar.gz
        INFO "Unpacking OpenEXR-$OPENEXR_VERSION"
        tar -C $SRC --transform "s,(.*/?)openexr[^/]*(.*),\1OpenEXR-$OPENEXR_VERSION\2,x" -xf $_src.tar.gz
      fi
    fi

    cd $_src

    # In repo mode the CMakeLists live in the OpenEXR/ subdir of the clone;
    # in tarball mode they are at the tree root.
    # NOTE(review): the variable is named oiio_src_path but holds the
    # OpenEXR source path — presumably a copy-paste leftover.
    if [ "$OPENEXR_USE_REPO" = true ]; then
      # XXX For now, always update from latest repo...
      git pull origin master
      git checkout $OPENEXR_SOURCE_REPO_UID
      git reset --hard
      oiio_src_path="../OpenEXR"
    else
      oiio_src_path=".."
    fi

    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build

    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_PREFIX_PATH=$_inst"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D ILMBASE_PACKAGE_PREFIX=$_ilmbase_inst"
    cmake_d="$cmake_d -D BUILD_SHARED_LIBS=ON"
    cmake_d="$cmake_d -D NAMESPACE_VERSIONING=OFF"  # VERY IMPORTANT!!!

    # Detect a 32-bit userland via the arch of a known system binary.
    if file /bin/cp | grep -q '32-bit'; then
      cflags="-fPIC -m32 -march=i686"
    else
      cflags="-fPIC"
    fi

    cmake $cmake_d -D CMAKE_CXX_FLAGS="$cflags" -D CMAKE_EXE_LINKER_FLAGS="-lgcc_s -lgcc" $oiio_src_path

    make -j$THREADS && make install

    make clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
      # Copy ilmbase files here (blender expects same dir for ilmbase and openexr :/).
      cp -an $_ilmbase_inst/* $_inst_shortcut
    else
      ERROR "OpenEXR-$OPENEXR_VERSION failed to compile, exiting"
      exit 1
    fi

    magic_compile_set openexr-$OPENEXR_VERSION $openexr_magic

    cd $CWD
    INFO "Done compiling OpenEXR-$OPENEXR_VERSION!"
  else
    INFO "Own OpenEXR-$OPENEXR_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-openexr option."
  fi

  # Signals later builds (OIIO) to point ILMBASE_HOME/OPENEXR_HOME at our install.
  _with_built_openexr=true

  # Just always run it, much simpler this way!
  run_ldconfig "openexr"
}
#### Build OIIO ####
# Set the scratch variables used by compile_OIIO/clean_OIIO.
_init_oiio() {
  _git=true
  _src="$SRC/OpenImageIO-$OIIO_VERSION"
  _inst="$INST/oiio-$OIIO_VERSION"
  _inst_shortcut="$INST/oiio"
}
clean_OIIO() {
  # Re-initialise the scratch variables for OIIO, then wipe its artifacts.
  _init_oiio
  _clean
}
compile_OIIO() {
  # Download (tarball or git, per $OIIO_USE_REPO), build and install
  # OpenImageIO into $INST/oiio-$OIIO_VERSION, symlinked as $INST/oiio.
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, OpenImageIO will not be compiled!"
    return
  fi

  # To be changed each time we make edits that would modify the compiled result!
  oiio_magic=17
  _init_oiio

  # Clean install if needed!
  magic_compile_check oiio-$OIIO_VERSION $oiio_magic
  if [ $? -eq 1 -o "$OIIO_FORCE_REBUILD" = true ]; then
    clean_OIIO
  fi

  if [ ! -d $_inst ]; then
    INFO "Building OpenImageIO-$OIIO_VERSION"

    # Rebuild dependencies as well!
    # (OSL links against OIIO.)
    OSL_FORCE_BUILD=true
    OSL_FORCE_REBUILD=true

    prepare_opt

    if [ ! -d $_src ]; then
      mkdir -p $SRC

      if [ "$OIIO_USE_REPO" = true ]; then
        git clone ${OIIO_SOURCE_REPO[0]} $_src
      else
        download OIIO_SOURCE[@] "$_src.tar.gz"
        INFO "Unpacking OpenImageIO-$OIIO_VERSION"
        # Rename the GitHub archive's top-level dir to our layout.
        tar -C $SRC --transform "s,(.*/?)oiio-Release-[^/]*(.*),\1OpenImageIO-$OIIO_VERSION\2,x" -xf $_src.tar.gz
      fi
    fi

    cd $_src

    if [ "$OIIO_USE_REPO" = true ]; then
      # XXX For now, always update from latest repo...
      git pull origin master
      # Stick to same rev as windows' libs...
      git checkout $OIIO_SOURCE_REPO_UID
      git reset --hard
    fi

    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build

    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_PREFIX_PATH=$_inst"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D STOP_ON_WARNING=OFF"
    cmake_d="$cmake_d -D BUILDSTATIC=OFF"
    cmake_d="$cmake_d -D LINKSTATIC=OFF"
    cmake_d="$cmake_d -D USE_SIMD=sse2"

    cmake_d="$cmake_d -D OPENEXR_VERSION=$OPENEXR_VERSION"

    # Point OIIO at our own OpenEXR/ILMBase if we built them above.
    if [ "$_with_built_openexr" = true ]; then
      cmake_d="$cmake_d -D ILMBASE_HOME=$INST/openexr"
      cmake_d="$cmake_d -D OPENEXR_HOME=$INST/openexr"
      INFO "ILMBASE_HOME=$INST/openexr"
    fi

    # ptex is only needed when nicholas bishop is ready
    cmake_d="$cmake_d -D USE_PTEX=OFF"

    # Optional tests and cmd tools
    cmake_d="$cmake_d -D USE_QT=OFF"
    cmake_d="$cmake_d -D USE_PYTHON=OFF"
    cmake_d="$cmake_d -D USE_FFMPEG=OFF"
    cmake_d="$cmake_d -D USE_OPENCV=OFF"
    cmake_d="$cmake_d -D BUILD_TESTING=OFF"
    cmake_d="$cmake_d -D OIIO_BUILD_TESTS=OFF"
    cmake_d="$cmake_d -D OIIO_BUILD_TOOLS=OFF"
    cmake_d="$cmake_d -D TXT2MAN="
    #cmake_d="$cmake_d -D CMAKE_EXPORT_COMPILE_COMMANDS=ON"
    #cmake_d="$cmake_d -D CMAKE_VERBOSE_MAKEFILE=ON"

    if [ -d $INST/boost ]; then
      cmake_d="$cmake_d -D BOOST_ROOT=$INST/boost -D Boost_NO_SYSTEM_PATHS=ON"
    fi

    # Looks like we do not need ocio in oiio for now...
#    if [ -d $INST/ocio ]; then
#      cmake_d="$cmake_d -D OCIO_PATH=$INST/ocio"
#    fi
    cmake_d="$cmake_d -D USE_OCIO=OFF"

    cmake_d="$cmake_d -D OIIO_BUILD_CPP11=ON"

    # Detect a 32-bit userland via the arch of a known system binary.
    if file /bin/cp | grep -q '32-bit'; then
      cflags="-fPIC -m32 -march=i686"
    else
      cflags="-fPIC"
    fi

    cmake $cmake_d -D CMAKE_CXX_FLAGS="$cflags" -D CMAKE_EXE_LINKER_FLAGS="-lgcc_s -lgcc" ..

    make -j$THREADS && make install
    make clean

    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "OpenImageIO-$OIIO_VERSION failed to compile, exiting"
      exit 1
    fi

    magic_compile_set oiio-$OIIO_VERSION $oiio_magic

    cd $CWD
    INFO "Done compiling OpenImageIO-$OIIO_VERSION!"
  else
    INFO "Own OpenImageIO-$OIIO_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-oiio option."
  fi

  # Just always run it, much simpler this way!
  run_ldconfig "oiio"
}
#### Build LLVM ####
# Point the shared builder globals (_src, _inst, ...) at LLVM's locations.
# _src_clang is the extra source tree for the bundled CLANG tarball.
_init_llvm() {
  _git=false
  _src="$SRC/LLVM-$LLVM_VERSION"
  _src_clang="$SRC/CLANG-$LLVM_VERSION"
  _inst="$INST/llvm-$LLVM_VERSION"
  _inst_shortcut="$INST/llvm"
}
# Re-target the shared globals at LLVM, then wipe its source/install dirs.
clean_LLVM() { _init_llvm; _clean; }
# Build LLVM (with CLANG unpacked into its tools/ directory) and install it
# under $INST/llvm-$LLVM_VERSION. Uses globals set by _init_llvm; bumps
# llvm_magic whenever the build recipe changes so cached installs are redone.
compile_LLVM() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, LLVM will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
llvm_magic=3
_init_llvm
# Clean install if needed!
magic_compile_check llvm-$LLVM_VERSION $llvm_magic
if [ $? -eq 1 -o "$LLVM_FORCE_REBUILD" = true ]; then
clean_LLVM
fi
if [ ! -d $_inst ]; then
INFO "Building LLVM-$LLVM_VERSION (CLANG included!)"
# Rebuild dependencies as well!
OSL_FORCE_BUILD=true
OSL_FORCE_REBUILD=true
prepare_opt
# NOTE(review): the '-o true' makes this condition always true, so the
# tarballs are re-downloaded/re-unpacked on every build — presumably
# intentional (forced refresh), confirm before "fixing".
if [ ! -d $_src -o true ]; then
mkdir -p $SRC
download LLVM_SOURCE[@] "$_src.tar.xz"
download LLVM_CLANG_SOURCE[@] "$_src_clang.tar.xz"
INFO "Unpacking LLVM-$LLVM_VERSION"
# --transform renames the top-level dir inside the tarball to our
# canonical LLVM-$LLVM_VERSION name ('x' flag = extended regexps).
tar -C $SRC --transform "s,([^/]*/?)llvm-[^/]*(.*),\1LLVM-$LLVM_VERSION\2,x" \
-xf $_src.tar.xz
INFO "Unpacking CLANG-$LLVM_VERSION to $_src/tools/clang"
# Stupid clang guys renamed 'clang' to 'cfe' for now handle both cases... :(
tar -C $_src/tools \
--transform "s,([^/]*/?)(clang|cfe)-[^/]*(.*),\1clang\3,x" \
-xf $_src_clang.tar.xz
# NOTE(review): this cd pair has no lasting effect — looks like leftover code.
cd $_src
cd $CWD
fi
cd $_src
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D LLVM_ENABLE_FFI=ON"
cmake_d="$cmake_d -D LLVM_TARGETS_TO_BUILD=X86"
cmake_d="$cmake_d -D LLVM_ENABLE_TERMINFO=OFF"
# _FFI_INCLUDE_DIR is precomputed by the DEB installer (LLVM cannot find
# Debian's ffi.h location on its own).
if [ -d $_FFI_INCLUDE_DIR ]; then
cmake_d="$cmake_d -D FFI_INCLUDE_DIR=$_FFI_INCLUDE_DIR"
fi
cmake $cmake_d ..
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "LLVM-$LLVM_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set llvm-$LLVM_VERSION $llvm_magic
cd $CWD
INFO "Done compiling LLVM-$LLVM_VERSION (CLANG included)!"
else
INFO "Own LLVM-$LLVM_VERSION (CLANG included) is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-llvm option."
fi
}
#### Build OSL ####
# Point the shared builder globals at OpenShadingLanguage's locations.
_init_osl() {
  _git=true
  _src="$SRC/OpenShadingLanguage-$OSL_VERSION"
  _inst="$INST/osl-$OSL_VERSION"
  _inst_shortcut="$INST/osl"
}
# Re-target the shared globals at OSL, then wipe its source/install dirs.
clean_OSL() { _init_osl; _clean; }
# Build OpenShadingLanguage and install under $INST/osl-$OSL_VERSION,
# wiring in our own OpenEXR/ILMBase, boost, OIIO and LLVM installs when
# present. Uses globals set by _init_osl; requires LLVM_VERSION_FOUND.
compile_OSL() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, OpenShadingLanguage will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
osl_magic=21
_init_osl
# Clean install if needed!
magic_compile_check osl-$OSL_VERSION $osl_magic
if [ $? -eq 1 -o "$OSL_FORCE_REBUILD" = true ]; then
#~ rm -Rf $_src # XXX Radical, but not easy to change remote repo fully automatically
clean_OSL
fi
if [ ! -d $_inst ]; then
INFO "Building OpenShadingLanguage-$OSL_VERSION"
prepare_opt
if [ ! -d $_src ]; then
mkdir -p $SRC
if [ "$OSL_USE_REPO" = true ]; then
git clone ${OSL_SOURCE_REPO[0]} $_src
else
download OSL_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking OpenShadingLanguage-$OSL_VERSION"
# --transform renames the tarball's top dir to our canonical name.
tar -C $SRC --transform "s,(.*/?)OpenShadingLanguage-[^/]*(.*),\1OpenShadingLanguage-$OSL_VERSION\2,x" \
-xf $_src.tar.gz
fi
fi
cd $_src
if [ "$OSL_USE_REPO" = true ]; then
git remote set-url origin ${OSL_SOURCE_REPO[0]}
# XXX For now, always update from latest repo...
git pull --no-edit -X theirs origin $OSL_SOURCE_REPO_BRANCH
# Stick to same rev as windows' libs...
git checkout $OSL_SOURCE_REPO_UID
git reset --hard
fi
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D BUILD_TESTING=OFF"
cmake_d="$cmake_d -D STOP_ON_WARNING=OFF"
cmake_d="$cmake_d -D BUILDSTATIC=OFF"
cmake_d="$cmake_d -D OSL_BUILD_PLUGINS=OFF"
cmake_d="$cmake_d -D OSL_BUILD_TESTS=OFF"
cmake_d="$cmake_d -D USE_SIMD=sse2"
cmake_d="$cmake_d -D USE_LLVM_BITCODE=OFF"
cmake_d="$cmake_d -D USE_PARTIO=OFF"
#~ cmake_d="$cmake_d -D ILMBASE_VERSION=$ILMBASE_VERSION"
# Prefer our own OpenEXR/ILMBase build over the system one when we made it.
if [ "$_with_built_openexr" = true ]; then
INFO "ILMBASE_HOME=$INST/openexr"
cmake_d="$cmake_d -D ILMBASE_HOME=$INST/openexr"
# XXX Temp workaround... sigh, ILMBase really messed the things up by defining their custom names ON by default :(
cmake_d="$cmake_d -D ILMBASE_CUSTOM=ON"
cmake_d="$cmake_d -D ILMBASE_CUSTOM_LIBRARIES='Half;Iex;Imath;IlmThread'"
fi
if [ -d $INST/boost ]; then
cmake_d="$cmake_d -D BOOST_ROOT=$INST/boost -D Boost_NO_SYSTEM_PATHS=ON"
fi
if [ -d $INST/oiio ]; then
cmake_d="$cmake_d -D OPENIMAGEIOHOME=$INST/oiio"
fi
# LLVM_VERSION_FOUND is set earlier by the per-distro installer.
if [ ! -z $LLVM_VERSION_FOUND ]; then
cmake_d="$cmake_d -D LLVM_VERSION=$LLVM_VERSION_FOUND"
if [ -d $INST/llvm ]; then
cmake_d="$cmake_d -D LLVM_DIRECTORY=$INST/llvm"
cmake_d="$cmake_d -D LLVM_STATIC=ON"
fi
fi
#~ cmake_d="$cmake_d -D CMAKE_EXPORT_COMPILE_COMMANDS=ON"
#~ cmake_d="$cmake_d -D CMAKE_VERBOSE_MAKEFILE=ON"
cmake $cmake_d ..
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenShadingLanguage-$OSL_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set osl-$OSL_VERSION $osl_magic
cd $CWD
INFO "Done compiling OpenShadingLanguage-$OSL_VERSION!"
else
INFO "Own OpenShadingLanguage-$OSL_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-osl option."
fi
run_ldconfig "osl"
}
#### Build OSD ####
# Point the shared builder globals at OpenSubdiv's locations.
_init_osd() {
  _git=true
  _src="$SRC/OpenSubdiv-$OSD_VERSION"
  _inst="$INST/osd-$OSD_VERSION"
  _inst_shortcut="$INST/osd"
}
# Re-target the shared globals at OpenSubdiv, then wipe its dirs.
clean_OSD() { _init_osd; _clean; }
# Build OpenSubdiv and install it under $INST/osd-$OSD_VERSION, with all
# optional backends (ptex, CUDA, OpenCL) and extras (maya plugin, docs,
# tutorials, examples) disabled. Uses globals set by _init_osd.
compile_OSD() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, OpenSubdiv will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
osd_magic=2
_init_osd
# Clean install if needed!
magic_compile_check osd-$OSD_VERSION $osd_magic
if [ $? -eq 1 -o "$OSD_FORCE_REBUILD" = true ]; then
clean_OSD
fi
if [ ! -d $_inst ]; then
INFO "Building OpenSubdiv-$OSD_VERSION"
prepare_opt
if [ ! -d $_src ]; then
mkdir -p $SRC
if [ "$OSD_USE_REPO" = true ]; then
git clone ${OSD_SOURCE_REPO[0]} $_src
else
download OSD_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking OpenSubdiv-$OSD_VERSION"
# --transform renames the tarball's top dir to our canonical name.
tar -C $SRC --transform "s,(.*/?)OpenSubdiv-[^/]*(.*),\1OpenSubdiv-$OSD_VERSION\2,x" \
-xf $_src.tar.gz
fi
fi
cd $_src
if [ "$OSD_USE_REPO" = true ]; then
git remote set-url origin ${OSD_SOURCE_REPO[0]}
# XXX For now, always update from latest repo...
git pull --no-edit -X theirs origin $OSD_SOURCE_REPO_BRANCH
# Stick to same rev as windows' libs...
git checkout $OSD_SOURCE_REPO_UID
git reset --hard
fi
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
# ptex is only needed when nicholas bishop is ready
cmake_d="$cmake_d -D NO_PTEX=1"
cmake_d="$cmake_d -D NO_CLEW=1 -D NO_CUDA=1 -D NO_OPENCL=1"
# maya plugin, docs, tutorials, regression tests and examples are not needed
cmake_d="$cmake_d -D NO_MAYA=1 -D NO_DOC=1 -D NO_TUTORIALS=1 -D NO_REGRESSION=1 -DNO_EXAMPLES=1"
cmake $cmake_d ..
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenSubdiv-$OSD_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set osd-$OSD_VERSION $osd_magic
cd $CWD
INFO "Done compiling OpenSubdiv-$OSD_VERSION!"
else
INFO "Own OpenSubdiv-$OSD_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-osd option."
fi
run_ldconfig "osd"
}
#### Build Blosc ####
# Point the shared builder globals at c-blosc's locations
# (versioned by OPENVDB_BLOSC_VERSION since blosc is an OpenVDB dependency).
_init_blosc() {
  _git=false
  _src="$SRC/c-blosc-$OPENVDB_BLOSC_VERSION"
  _inst="$INST/blosc-$OPENVDB_BLOSC_VERSION"
  _inst_shortcut="$INST/blosc"
}
# Re-target the shared globals at blosc, then wipe its dirs.
clean_BLOSC() { _init_blosc; _clean; }
# Build c-blosc (OpenVDB's compression dependency) and install under
# $INST/blosc-$OPENVDB_BLOSC_VERSION. A blosc rebuild forces an OpenVDB
# rebuild too. Uses globals set by _init_blosc.
compile_BLOSC() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, Blosc will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
blosc_magic=0
_init_blosc
# Clean install if needed!
magic_compile_check blosc-$OPENVDB_BLOSC_VERSION $blosc_magic
if [ $? -eq 1 -o "$OPENVDB_FORCE_REBUILD" = true ]; then
clean_BLOSC
# NOTE(review): redundant — clean_BLOSC's _clean presumably removes $_inst
# already; harmless belt-and-braces.
rm -rf $_inst
fi
if [ ! -d $_inst ]; then
INFO "Building Blosc-$OPENVDB_BLOSC_VERSION"
# Rebuild dependencies as well!
OPENVDB_FORCE_BUILD=true
OPENVDB_FORCE_REBUILD=true
prepare_opt
if [ ! -d $_src ]; then
INFO "Downloading Blosc-$OPENVDB_BLOSC_VERSION"
mkdir -p $SRC
download OPENVDB_BLOSC_SOURCE[@] $_src.tar.gz
INFO "Unpacking Blosc-$OPENVDB_BLOSC_VERSION"
tar -C $SRC -xf $_src.tar.gz
fi
cd $_src
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D BUILD_STATIC=OFF"
cmake_d="$cmake_d -D BUILD_TESTS=OFF"
cmake_d="$cmake_d -D BUILD_BENCHMARKS=OFF"
INFO "$cmake_d"
cmake $cmake_d ..
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Blosc-$OPENVDB_BLOSC_VERSION failed to compile, exiting"
exit 1
fi
cd $CWD
INFO "Done compiling Blosc-$OPENVDB_BLOSC_VERSION!"
else
INFO "Own Blosc-$OPENVDB_BLOSC_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib (and openvdb), use the --force-openvdb option."
fi
# NOTE(review): unlike the other compile_* helpers, the magic number is
# recorded even on the nothing-to-do path — confirm whether intentional.
magic_compile_set blosc-$OPENVDB_BLOSC_VERSION $blosc_magic
run_ldconfig "blosc"
}
#### Build OpenVDB ####
# Point the shared builder globals at OpenVDB's locations.
_init_openvdb() {
  _git=false
  _src="$SRC/openvdb-$OPENVDB_VERSION"
  _inst="$INST/openvdb-$OPENVDB_VERSION"
  _inst_shortcut="$INST/openvdb"
}
# Re-target the shared globals at OpenVDB, then wipe its dirs.
clean_OPENVDB() { _init_openvdb; _clean; }
# Build OpenVDB (after ensuring blosc is built) and install it under
# $INST/openvdb-$OPENVDB_VERSION. Note this one uses OpenVDB's own Makefile
# (make with DESTDIR etc.), not cmake. Uses globals set by _init_openvdb.
compile_OPENVDB() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, OpenVDB will not be compiled!"
return
fi
# Blosc is a hard dependency of our OpenVDB build.
compile_BLOSC
PRINT ""
# To be changed each time we make edits that would modify the compiled result!
openvdb_magic=1
_init_openvdb
# Clean install if needed!
magic_compile_check openvdb-$OPENVDB_VERSION $openvdb_magic
if [ $? -eq 1 -o "$OPENVDB_FORCE_REBUILD" = true ]; then
clean_OPENVDB
fi
if [ ! -d $_inst ]; then
INFO "Building OpenVDB-$OPENVDB_VERSION"
prepare_opt
# NOTE(review): the '-o true' makes this condition always true, so sources
# are re-downloaded/re-unpacked every run — presumably intentional, confirm.
if [ ! -d $_src -o true ]; then
mkdir -p $SRC
download OPENVDB_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking OpenVDB-$OPENVDB_VERSION"
tar -C $SRC -xf $_src.tar.gz
fi
cd $_src
#~ if [ "$OPENVDB_USE_REPO" = true ]; then
#~ git remote set-url origin ${OPENVDB_SOURCE_REPO[0]}
#~ # XXX For now, always update from latest repo...
#~ git pull --no-edit -X theirs origin $OPENVDB_SOURCE_REPO_BRANCH
#~ # Stick to same rev as windows' libs...
#~ git checkout $OPENVDB_SOURCE_REPO_UID
#~ git reset --hard
#~ fi
# Source builds here
cd openvdb
make_d="DESTDIR=$_inst"
make_d="$make_d HDSO=/usr"
if [ -d $INST/boost ]; then
make_d="$make_d BOOST_INCL_DIR=$INST/boost/include BOOST_LIB_DIR=$INST/boost/lib"
fi
# Prefer our own OpenEXR/ILMBase build over the system one when we made it.
if [ "$_with_built_openexr" = true ]; then
make_d="$make_d ILMBASE_INCL_DIR=$INST/openexr/include ILMBASE_LIB_DIR=$INST/openexr/lib"
make_d="$make_d EXR_INCL_DIR=$INST/openexr/include EXR_LIB_DIR=$INST/openexr/lib"
INFO "ILMBASE_HOME=$INST/openexr"
fi
if [ -d $INST/blosc ]; then
make_d="$make_d BLOSC_INCL_DIR=$INST/blosc/include BLOSC_LIB_DIR=$INST/blosc/lib"
fi
# Build without log4cplus, glfw, python module & docs
make_d="$make_d LOG4CPLUS_INCL_DIR= GLFW_INCL_DIR= PYTHON_VERSION= DOXYGEN="
make -j$THREADS lib $make_d install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenVDB-$OPENVDB_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set openvdb-$OPENVDB_VERSION $openvdb_magic
cd $CWD
INFO "Done compiling OpenVDB-$OPENVDB_VERSION!"
else
INFO "Own OpenVDB-$OPENVDB_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-openvdb option."
fi
run_ldconfig "openvdb"
}
#### Build Alembic ####
# Point the shared builder globals at Alembic's locations.
_init_alembic() {
  _git=false
  _src="$SRC/alembic-$ALEMBIC_VERSION"
  _inst="$INST/alembic-$ALEMBIC_VERSION"
  _inst_shortcut="$INST/alembic"
}
# Re-target the shared globals at Alembic, then wipe its dirs.
clean_ALEMBIC() { _init_alembic; _clean; }
# Build Alembic and install it under $INST/alembic-$ALEMBIC_VERSION.
# Unlike the other builders this configures in-source ('cmake ./').
# Uses globals set by _init_alembic.
compile_ALEMBIC() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, Alembic will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
alembic_magic=2
_init_alembic
# Clean install if needed!
magic_compile_check alembic-$ALEMBIC_VERSION $alembic_magic
if [ $? -eq 1 -o "$ALEMBIC_FORCE_REBUILD" = true ]; then
clean_ALEMBIC
fi
if [ ! -d $_inst ]; then
INFO "Building Alembic-$ALEMBIC_VERSION"
prepare_opt
# NOTE(review): the '-o true' makes this condition always true, so sources
# are re-downloaded/re-unpacked every run — presumably intentional, confirm.
if [ ! -d $_src -o true ]; then
mkdir -p $SRC
download ALEMBIC_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking Alembic-$ALEMBIC_VERSION"
tar -C $SRC -xf $_src.tar.gz
fi
cd $_src
cmake_d="-D CMAKE_INSTALL_PREFIX=$_inst"
# NOTE(review): the inner duplicate of this boost check looks redundant —
# the outer 'if' already guarantees $INST/boost exists; confirm upstream.
if [ -d $INST/boost ]; then
if [ -d $INST/boost ]; then
cmake_d="$cmake_d -D BOOST_ROOT=$INST/boost"
fi
cmake_d="$cmake_d -D USE_STATIC_BOOST=ON"
else
cmake_d="$cmake_d -D USE_STATIC_BOOST=OFF"
fi
# NOTE(review): all of the USE_* switches below are only appended when we
# built our own OpenEXR; with system OpenEXR, Alembic falls back to its
# cmake defaults (examples, HDF5, maya, ...) — verify this is intended.
if [ "$_with_built_openexr" = true ]; then
cmake_d="$cmake_d -D ILMBASE_ROOT=$INST/openexr"
cmake_d="$cmake_d -D USE_ARNOLD=OFF"
cmake_d="$cmake_d -D USE_BINARIES=OFF"
cmake_d="$cmake_d -D USE_EXAMPLES=OFF"
cmake_d="$cmake_d -D USE_HDF5=OFF"
cmake_d="$cmake_d -D USE_MAYA=OFF"
cmake_d="$cmake_d -D USE_PRMAN=OFF"
cmake_d="$cmake_d -D USE_PYALEMBIC=OFF"
cmake_d="$cmake_d -D USE_STATIC_HDF5=OFF"
cmake_d="$cmake_d -D ALEMBIC_ILMBASE_LINK_STATIC=OFF"
cmake_d="$cmake_d -D ALEMBIC_SHARED_LIBS=OFF"
INFO "ILMBASE_ROOT=$INST/openexr"
fi
cmake $cmake_d ./
make -j$THREADS install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "Alembic-$ALEMBIC_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set alembic-$ALEMBIC_VERSION $alembic_magic
cd $CWD
INFO "Done compiling Alembic-$ALEMBIC_VERSION!"
else
INFO "Own Alembic-$ALEMBIC_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-alembic option."
fi
run_ldconfig "alembic"
}
#### Build OpenCOLLADA ####
# Point the shared builder globals at OpenCOLLADA's locations.
_init_opencollada() {
  _git=true
  _src="$SRC/OpenCOLLADA-$OPENCOLLADA_VERSION"
  _inst="$INST/opencollada-$OPENCOLLADA_VERSION"
  _inst_shortcut="$INST/opencollada"
}
# Re-target the shared globals at OpenCOLLADA, then wipe its dirs.
clean_OpenCOLLADA() { _init_opencollada; _clean; }
# Build OpenCOLLADA (static, libxml backend) and install it under
# $INST/opencollada-$OPENCOLLADA_VERSION. Uses globals set by
# _init_opencollada. No run_ldconfig: the static lib needs no ld.so cache.
compile_OpenCOLLADA() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, OpenCOLLADA will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled results!
opencollada_magic=9
_init_opencollada
# Clean install if needed!
magic_compile_check opencollada-$OPENCOLLADA_VERSION $opencollada_magic
if [ $? -eq 1 -o "$OPENCOLLADA_FORCE_REBUILD" = true ]; then
clean_OpenCOLLADA
fi
if [ ! -d $_inst ]; then
INFO "Building OpenCOLLADA-$OPENCOLLADA_VERSION"
prepare_opt
if [ ! -d $_src ]; then
mkdir -p $SRC
if [ "$OPENCOLLADA_USE_REPO" = true ]; then
git clone $OPENCOLLADA_SOURCE_REPO $_src
else
download OPENCOLLADA_SOURCE[@] "$_src.tar.gz"
INFO "Unpacking OpenCOLLADA-$OPENCOLLADA_VERSION"
tar -C $SRC -xf $_src.tar.gz
fi
fi
cd $_src
if [ "$OPENCOLLADA_USE_REPO" = true ]; then
git pull origin $OPENCOLLADA_REPO_BRANCH
# Stick to same rev as windows' libs...
git checkout $OPENCOLLADA_REPO_UID
git reset --hard
fi
# Always refresh the whole build!
if [ -d build ]; then
rm -rf build
fi
mkdir build
cd build
cmake_d="-D CMAKE_BUILD_TYPE=Release"
cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
cmake_d="$cmake_d -D USE_EXPAT=OFF"
cmake_d="$cmake_d -D USE_LIBXML=ON"
# XXX Does not work!
# cmake_d="$cmake_d -D USE_STATIC=OFF"
cmake_d="$cmake_d -D USE_STATIC=ON"
cmake $cmake_d ../
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "OpenCOLLADA-$OPENCOLLADA_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set opencollada-$OPENCOLLADA_VERSION $opencollada_magic
cd $CWD
INFO "Done compiling OpenCOLLADA-$OPENCOLLADA_VERSION!"
else
INFO "Own OpenCOLLADA-$OPENCOLLADA_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-opencollada option."
fi
}
#### Build Embree ####
# Point the shared builder globals at Embree's locations.
_init_embree() {
  _git=true
  _src="$SRC/embree-$EMBREE_VERSION"
  _inst="$INST/embree-$EMBREE_VERSION"
  _inst_shortcut="$INST/embree"
}
# Re-target the shared globals at Embree, then wipe its dirs.
clean_Embree() { _init_embree; _clean; }
# Build Embree (static lib, internal tasking, up to AVX2) and install it
# under $INST/embree-$EMBREE_VERSION. Uses globals set by _init_embree.
compile_Embree() {
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, Embree will not be compiled!"
    return
  fi
  # To be changed each time we make edits that would modify the compiled results!
  embree_magic=9
  _init_embree
  # Clean install if needed!
  magic_compile_check embree-$EMBREE_VERSION $embree_magic
  if [ $? -eq 1 -o "$EMBREE_FORCE_REBUILD" = true ]; then
    clean_Embree
  fi
  if [ ! -d $_inst ]; then
    INFO "Building Embree-$EMBREE_VERSION"
    prepare_opt
    if [ ! -d $_src ]; then
      mkdir -p $SRC
      # FIX: was `[ "EMBREE_USE_REPO" = true ]` (missing '$'), which compared
      # the literal string and could never be true, making the clone path
      # unreachable; now matches the $EMBREE_USE_REPO test further down.
      if [ "$EMBREE_USE_REPO" = true ]; then
        git clone $EMBREE_SOURCE_REPO $_src
      else
        download EMBREE_SOURCE[@] "$_src.tar.gz"
        INFO "Unpacking Embree-$EMBREE_VERSION"
        tar -C $SRC -xf $_src.tar.gz
      fi
    fi
    cd $_src
    if [ "$EMBREE_USE_REPO" = true ]; then
      git pull origin $EMBREE_REPO_BRANCH
      # Stick to same rev as windows' libs...
      git checkout $EMBREE_REPO_UID
      git reset --hard
    fi
    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build
    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D EMBREE_ISPC_SUPPORT=OFF"
    cmake_d="$cmake_d -D EMBREE_TUTORIALS=OFF"
    cmake_d="$cmake_d -D EMBREE_STATIC_LIB=ON"
    cmake_d="$cmake_d -D EMBREE_RAY_MASK=ON"
    cmake_d="$cmake_d -D EMBREE_FILTER_FUNCTION=ON"
    cmake_d="$cmake_d -D EMBREE_BACKFACE_CULLING=OFF"
    cmake_d="$cmake_d -D EMBREE_TASKING_SYSTEM=INTERNAL"
    cmake_d="$cmake_d -D EMBREE_MAX_ISA=AVX2"
    cmake $cmake_d ../
    make -j$THREADS && make install
    make clean
    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "Embree-$EMBREE_VERSION failed to compile, exiting"
      exit 1
    fi
    # Record the magic number so the next run can skip this rebuild.
    magic_compile_set embree-$EMBREE_VERSION $embree_magic
    cd $CWD
    INFO "Done compiling Embree-$EMBREE_VERSION!"
  else
    INFO "Own Embree-$EMBREE_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-embree option."
  fi
}
#### Build OpenImageDenoise ####
# Point the shared builder globals at OpenImageDenoise's locations.
_init_oidn() {
  _git=true
  _src="$SRC/oidn-$OIDN_VERSION"
  _inst="$INST/oidn-$OIDN_VERSION"
  _inst_shortcut="$INST/oidn"
}
# Re-target the shared globals at OpenImageDenoise, then wipe its dirs.
clean_oidn() { _init_oidn; _clean; }
# Build OpenImageDenoise (static lib, no examples/tests) and install it
# under $INST/oidn-$OIDN_VERSION. Uses globals set by _init_oidn.
compile_OIDN() {
  if [ "$NO_BUILD" = true ]; then
    WARNING "--no-build enabled, OpenImageDenoise will not be compiled!"
    return
  fi
  # To be changed each time we make edits that would modify the compiled results!
  oidn_magic=9
  _init_oidn
  # Clean install if needed!
  magic_compile_check oidn-$OIDN_VERSION $oidn_magic
  if [ $? -eq 1 -o "$OIDN_FORCE_REBUILD" = true ]; then
    clean_oidn
  fi
  if [ ! -d $_inst ]; then
    INFO "Building OpenImageDenoise-$OIDN_VERSION"
    prepare_opt
    if [ ! -d $_src ]; then
      mkdir -p $SRC
      # FIX: was `[ "OIDN_USE_REPO" = true ]` (missing '$'), which compared
      # the literal string and could never be true, making the clone path
      # unreachable; now matches the $OIDN_USE_REPO test further down.
      if [ "$OIDN_USE_REPO" = true ]; then
        git clone $OIDN_SOURCE_REPO $_src
      else
        download OIDN_SOURCE[@] "$_src.tar.gz"
        INFO "Unpacking OpenImageDenoise-$OIDN_VERSION"
        tar -C $SRC -xf $_src.tar.gz
      fi
    fi
    cd $_src
    if [ "$OIDN_USE_REPO" = true ]; then
      git pull origin $OIDN_REPO_BRANCH
      # Stick to same rev as windows' libs...
      git checkout $OIDN_REPO_UID
      git reset --hard
    fi
    # Always refresh the whole build!
    if [ -d build ]; then
      rm -rf build
    fi
    mkdir build
    cd build
    cmake_d="-D CMAKE_BUILD_TYPE=Release"
    cmake_d="$cmake_d -D CMAKE_INSTALL_PREFIX=$_inst"
    cmake_d="$cmake_d -D WITH_EXAMPLE=OFF"
    cmake_d="$cmake_d -D WITH_TEST=OFF"
    cmake_d="$cmake_d -D OIDN_STATIC_LIB=ON"
    cmake $cmake_d ../
    make -j$THREADS && make install
    make clean
    if [ -d $_inst ]; then
      _create_inst_shortcut
    else
      ERROR "OpenImageDenoise-$OIDN_VERSION failed to compile, exiting"
      exit 1
    fi
    # Record the magic number so the next run can skip this rebuild.
    magic_compile_set oidn-$OIDN_VERSION $oidn_magic
    cd $CWD
    INFO "Done compiling OpenImageDenoise-$OIDN_VERSION!"
  else
    INFO "Own OpenImageDenoise-$OIDN_VERSION is up to date, nothing to do!"
    INFO "If you want to force rebuild of this lib, use the --force-oidn option."
  fi
  run_ldconfig "oidn"
}
#### Build FFMPEG ####
# Point the shared builder globals at ffmpeg's locations.
# Deliberately does not touch _git (ffmpeg is never fetched from a repo here).
_init_ffmpeg() {
  _src="$SRC/ffmpeg-$FFMPEG_VERSION"
  _inst="$INST/ffmpeg-$FFMPEG_VERSION"
  _inst_shortcut="$INST/ffmpeg"
}
# Re-target the shared globals at ffmpeg, then wipe its dirs.
clean_FFmpeg() { _init_ffmpeg; _clean; }
# Build a static ffmpeg with only the codecs whose dev packages were found
# earlier (VORBIS_USE, THEORA_USE, ... flags) and install it under
# $INST/ffmpeg-$FFMPEG_VERSION. Uses ffmpeg's own ./configure, not cmake.
compile_FFmpeg() {
if [ "$NO_BUILD" = true ]; then
WARNING "--no-build enabled, ffmpeg will not be compiled!"
return
fi
# To be changed each time we make edits that would modify the compiled result!
ffmpeg_magic=5
_init_ffmpeg
# Clean install if needed!
magic_compile_check ffmpeg-$FFMPEG_VERSION $ffmpeg_magic
if [ $? -eq 1 -o "$FFMPEG_FORCE_REBUILD" = true ]; then
clean_FFmpeg
fi
if [ ! -d $_inst ]; then
INFO "Building ffmpeg-$FFMPEG_VERSION"
prepare_opt
if [ ! -d $_src ]; then
INFO "Downloading ffmpeg-$FFMPEG_VERSION"
mkdir -p $SRC
download FFMPEG_SOURCE[@] "$_src.tar.bz2"
INFO "Unpacking ffmpeg-$FFMPEG_VERSION"
tar -C $SRC -xf $_src.tar.bz2
fi
cd $_src
# Accumulate --enable flags for each optional codec detected earlier.
extra=""
if [ "$VORBIS_USE" = true ]; then
extra="$extra --enable-libvorbis"
fi
if [ "$THEORA_USE" = true ]; then
extra="$extra --enable-libtheora"
fi
if [ "$XVID_USE" = true ]; then
extra="$extra --enable-libxvid"
fi
if [ "$X264_USE" = true ]; then
extra="$extra --enable-libx264"
fi
if [ "$VPX_USE" = true ]; then
extra="$extra --enable-libvpx"
fi
if [ "$MP3LAME_USE" = true ]; then
extra="$extra --enable-libmp3lame"
fi
if [ "$OPENJPEG_USE" = true ]; then
extra="$extra --enable-libopenjpeg"
fi
# --as-needed/-static-libgcc keep the resulting static libs self-contained.
./configure --cc="gcc -Wl,--as-needed" \
--extra-ldflags="-pthread -static-libgcc" \
--prefix=$_inst --enable-static \
--disable-ffplay --disable-doc \
--enable-gray \
--enable-avfilter --disable-vdpau \
--disable-bzlib --disable-libgsm --disable-libspeex \
--enable-pthreads --enable-zlib --enable-stripping --enable-runtime-cpudetect \
--disable-vaapi --disable-nonfree --enable-gpl \
--disable-postproc --disable-librtmp --disable-libopencore-amrnb \
--disable-libopencore-amrwb --disable-libdc1394 --disable-version3 --disable-outdev=sdl \
--disable-libxcb \
--disable-outdev=xv --disable-indev=sndio --disable-outdev=sndio \
--disable-outdev=alsa --disable-indev=sdl --disable-indev=alsa --disable-indev=jack \
--disable-indev=lavfi $extra
make -j$THREADS && make install
make clean
if [ -d $_inst ]; then
_create_inst_shortcut
else
ERROR "FFmpeg-$FFMPEG_VERSION failed to compile, exiting"
exit 1
fi
# Record the magic number so the next run can skip this rebuild.
magic_compile_set ffmpeg-$FFMPEG_VERSION $ffmpeg_magic
cd $CWD
INFO "Done compiling ffmpeg-$FFMPEG_VERSION!"
else
INFO "Own ffmpeg-$FFMPEG_VERSION is up to date, nothing to do!"
INFO "If you want to force rebuild of this lib, use the --force-ffmpeg option."
fi
}
#### Install on DEB-like ####
# Print the installed version of Debian package $1, stripped of any epoch
# prefix and of the packaging revision suffix (e.g. '2:1.2.3-4ubuntu1' -> '1.2.3').
# Note: '${Version}' is a dpkg-query format tag, not a shell expansion.
get_package_version_DEB() {
dpkg-query -W -f '${Version}' $1 | sed -r 's/([0-9]+:)?(([0-9]+\.?)+([0-9]+)).*/\2/'
}
# Return 0 when package $1 exists in the apt cache, 1 otherwise.
check_package_DEB() {
  r=$(apt-cache show "$1" | grep -c 'Package:')
  # grep -c always prints a count, so this numeric test is well-defined;
  # the test's own status (0 = found) is the function's return value.
  [ "$r" -ge 1 ]
}
# Return 0 when dpkg reports package $1 as properly installed, 1 otherwise.
check_package_installed_DEB() {
  r=$(dpkg-query -W -f='${Status}' "$1" | grep -c "install ok")
  # The numeric test's status (0 = installed) is the function's return value.
  [ "$r" -ge 1 ]
}
# Return 0 when the apt candidate version of package $1 matches version $2
# (via version_match), 1 otherwise — including when no candidate exists.
check_package_version_match_DEB() {
  # FIX: the epoch group was mandatory (`([0-9]+:)`), unlike the optional
  # `([0-9]+:)?` used by check_package_version_ge_DEB/check_package_version_ge_lt_DEB.
  # For epoch-less packages (the common case) the sed substitution never
  # matched, leaving the raw 'Candidate:' line in $v and failing the match.
  v=`apt-cache policy $1 | grep 'Candidate:' | sed -r 's/.*:\s*([0-9]+:)?(([0-9]+\.?)+).*/\2/'`
  if [ -z "$v" ]; then
    return 1
  fi
  version_match $v $2
  return $?
}
# Return 0 when the apt candidate version of package $1 is >= version $2
# (via version_ge), 1 otherwise — including when no candidate exists.
check_package_version_ge_DEB() {
  # Strip the optional epoch and the revision suffix from the candidate line.
  v=$(apt-cache policy "$1" | grep 'Candidate:' | sed -r 's/.*:\s*([0-9]+:)?(([0-9]+\.?)+).*/\2/')
  [ -n "$v" ] || return 1
  # version_ge's status becomes the function's return value.
  version_ge $v $2
}
# Return 0 when the apt candidate version of package $1 satisfies
# $2 <= version < $3 (via version_ge_lt), 1 otherwise — including when
# no candidate exists.
check_package_version_ge_lt_DEB() {
  # Strip the optional epoch and the revision suffix from the candidate line.
  v=$(apt-cache policy "$1" | grep 'Candidate:' | sed -r 's/.*:\s*([0-9]+:)?(([0-9]+\.?)+).*/\2/')
  [ -n "$v" ] || return 1
  # version_ge_lt's status becomes the function's return value.
  version_ge_lt $v $2 $3
}
# apt-get install the given package list (all args), honouring --no-sudo.
# Exits the whole script if apt-get fails.
install_packages_DEB() {
  # Guard clause: without sudo rights we can only tell the user what to do.
  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, impossible to run apt-get install for $@, you'll have to do it yourself..."
    return
  fi
  if ! $SUDO apt-get install -y $@; then
    ERROR "apt-get failed to install requested packages, exiting."
    exit 1
  fi
}
install_DEB() {
PRINT ""
INFO "Installing dependencies for DEB-based distribution"
PRINT ""
PRINT "`eval _echo "$COMMON_INFO"`"
PRINT ""
if [ "$NO_CONFIRM" = false ]; then
read -p "Do you want to continue (Y/n)?"
[ "$(echo ${REPLY:=Y} | tr [:upper:] [:lower:])" != "y" ] && exit
fi
if [ ! $SUDO ]; then
WARNING "--no-sudo enabled, impossible to run apt-get update, you'll have to do it yourself..."
else
$SUDO apt-get update
fi
# These libs should always be available in debian/ubuntu official repository...
VORBIS_DEV="libvorbis-dev"
OGG_DEV="libogg-dev"
THEORA_DEV="libtheora-dev"
_packages="gawk cmake cmake-curses-gui build-essential libjpeg-dev libpng-dev libtiff-dev \
git libfreetype6-dev libx11-dev flex bison libtbb-dev libxxf86vm-dev \
libxcursor-dev libxi-dev wget libsqlite3-dev libxrandr-dev libxinerama-dev \
libbz2-dev libncurses5-dev libssl-dev liblzma-dev libreadline-dev \
libopenal-dev libglew-dev yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV \
libsdl1.2-dev libfftw3-dev patch bzip2 libxml2-dev libtinyxml-dev libjemalloc-dev"
# libglewmx-dev (broken in deb testing currently...)
VORBIS_USE=true
OGG_USE=true
THEORA_USE=true
PRINT ""
# We need openjp2, libopenjpeg is an old version
OPENJPEG_DEV="libopenjp2-7-dev"
check_package_DEB $OPENJPEG_DEV
if [ $? -eq 0 ]; then
_packages="$_packages $OPENJPEG_DEV"
OPENJPEG_USE=true
fi
PRINT ""
# Some not-so-old distro (ubuntu 12.4) do not have it, do not fail in this case, just warn.
YAMLCPP_DEV="libyaml-cpp-dev"
check_package_DEB $YAMLCPP_DEV
if [ $? -eq 0 ]; then
_packages="$_packages $YAMLCPP_DEV"
else
PRINT ""
WARNING "libyaml-cpp-dev not found, you may have to install it by hand to get Blender compiling..."
PRINT ""
fi
PRINT ""
CLANG_FORMAT="clang-format"
check_package_version_ge_DEB $CLANG_FORMAT $CLANG_FORMAT_VERSION_MIN
if [ $? -eq 0 ]; then
_packages="$_packages $CLANG_FORMAT"
else
PRINT ""
WARNING "clang-format $CLANG_FORMAT_VERSION_MIN or higher not found, this is NOT needed to get Blender compiling..."
PRINT ""
fi
if [ "$WITH_JACK" = true ]; then
_packages="$_packages libspnav-dev"
# Only install jack if jack2 is not already installed!
JACK="libjack-dev"
JACK2="libjack-jackd2-dev"
check_package_installed_DEB $JACK2
if [ $? -eq 0 ]; then
_packages="$_packages $JACK2"
else
_packages="$_packages $JACK"
fi
fi
PRINT ""
install_packages_DEB $_packages
PRINT""
LIBSNDFILE_DEV="libsndfile1-dev"
check_package_DEB $LIBSNDFILE_DEV
if [ $? -eq 0 ]; then
install_packages_DEB $LIBSNDFILE_DEV
fi
PRINT ""
X264_DEV="libx264-dev"
check_package_version_ge_DEB $X264_DEV $X264_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB $X264_DEV
X264_USE=true
fi
if [ "$WITH_ALL" = true ]; then
PRINT ""
XVID_DEV="libxvidcore-dev"
check_package_DEB $XVID_DEV
if [ $? -eq 0 ]; then
install_packages_DEB $XVID_DEV
XVID_USE=true
fi
PRINT ""
MP3LAME_DEV="libmp3lame-dev"
check_package_DEB $MP3LAME_DEV
if [ $? -eq 0 ]; then
install_packages_DEB $MP3LAME_DEV
MP3LAME_USE=true
fi
PRINT ""
VPX_DEV="libvpx-dev"
check_package_version_ge_DEB $VPX_DEV $VPX_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB $VPX_DEV
VPX_USE=true
fi
fi
# Check cmake/glew versions and disable features for older distros.
# This is so Blender can at least compile.
PRINT ""
_cmake=`get_package_version_DEB cmake`
version_ge $_cmake "2.8.10"
if [ $? -eq 1 ]; then
version_ge $_cmake "2.8.8"
if [ $? -eq 1 ]; then
WARNING "OpenVDB and OpenCOLLADA disabled because cmake-$_cmake is not enough"
OPENVDB_SKIP=true
OPENCOLLADA_SKIP=true
else
WARNING "OpenVDB disabled because cmake-$_cmake is not enough"
OPENVDB_SKIP=true
fi
fi
PRINT ""
_glew=`get_package_version_DEB libglew-dev`
if [ -z $_glew ]; then
# Stupid virtual package in Ubuntu 12.04 doesn't show version number...
_glew=`apt-cache showpkg libglew-dev|tail -n1|awk '{print $2}'|sed 's/-.*//'`
fi
version_ge $_glew "1.9.0"
if [ $? -eq 1 ]; then
version_ge $_glew "1.7.0"
if [ $? -eq 1 ]; then
WARNING "OpenSubdiv disabled because GLEW-$_glew is not enough"
WARNING "Blender will not use system GLEW library"
OSD_SKIP=true
NO_SYSTEM_GLEW=true
else
WARNING "OpenSubdiv will compile with GLEW-$_glew but with limited capability"
WARNING "Blender will not use system GLEW library"
NO_SYSTEM_GLEW=true
fi
fi
PRINT ""
_do_compile_python=false
if [ "$PYTHON_SKIP" = true ]; then
WARNING "Skipping Python/NumPy installation, as requested..."
elif [ "$PYTHON_FORCE_BUILD" = true ]; then
INFO "Forced Python/NumPy building, as requested..."
_do_compile_python=true
else
check_package_DEB python$PYTHON_VERSION_MIN-dev
if [ $? -eq 0 ]; then
install_packages_DEB python$PYTHON_VERSION_MIN-dev
clean_Python
PRINT ""
if [ "$NUMPY_SKIP" = true ]; then
WARNING "Skipping NumPy installation, as requested..."
else
check_package_DEB python3-numpy
if [ $? -eq 0 ]; then
install_packages_DEB python3-numpy
else
WARNING "Sorry, using python package but no valid numpy package available!" \
" Use --build-numpy to force building of both Python and NumPy."
fi
fi
else
_do_compile_python=true
fi
fi
if $_do_compile_python; then
install_packages_DEB libffi-dev
compile_Python
PRINT ""
if [ "$NUMPY_SKIP" = true ]; then
WARNING "Skipping NumPy installation, as requested..."
else
compile_Numpy
fi
fi
PRINT ""
if [ "$BOOST_SKIP" = true ]; then
WARNING "Skipping Boost installation, as requested..."
elif [ "$BOOST_FORCE_BUILD" = true ]; then
INFO "Forced Boost building, as requested..."
compile_Boost
else
check_package_version_ge_DEB libboost-dev $BOOST_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB libboost-dev
boost_version=$(echo `get_package_version_DEB libboost-dev` | sed -r 's/^([0-9]+\.[0-9]+).*/\1/')
install_packages_DEB libboost-{filesystem,iostreams,locale,regex,system,thread,wave,program-options}$boost_version-dev
clean_Boost
else
compile_Boost
fi
fi
PRINT ""
if [ "$OCIO_SKIP" = true ]; then
WARNING "Skipping OpenColorIO installation, as requested..."
elif [ "$OCIO_FORCE_BUILD" = true ]; then
INFO "Forced OpenColorIO building, as requested..."
compile_OCIO
else
# XXX Always force build of own OCIO, until linux distro guys update their package to default libyaml-cpp ver (0.5)!
#check_package_version_ge_DEB libopencolorio-dev $OCIO_VERSION_MIN
#if [ $? -eq 0 ]; then
#install_packages_DEB libopencolorio-dev
#clean_OCIO
#else
compile_OCIO
#fi
fi
PRINT ""
if [ "$OPENEXR_SKIP" = true ]; then
WARNING "Skipping ILMBase/OpenEXR installation, as requested..."
elif [ "$OPENEXR_FORCE_BUILD" = true ]; then
INFO "Forced ILMBase/OpenEXR building, as requested..."
compile_OPENEXR
else
check_package_version_ge_DEB libopenexr-dev $OPENEXR_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB libopenexr-dev
OPENEXR_VERSION=`get_package_version_DEB libopenexr-dev`
ILMBASE_VERSION=$OPENEXR_VERSION
clean_OPENEXR
else
compile_OPENEXR
fi
fi
PRINT ""
if [ "$OIIO_SKIP" = true ]; then
WARNING "Skipping OpenImageIO installation, as requested..."
elif [ "$OIIO_FORCE_BUILD" = true ]; then
INFO "Forced OpenImageIO building, as requested..."
compile_OIIO
else
# XXX Debian Testing / Ubuntu 16.04 pulls in WAY too many deps (gtk2/opencv ?!) incl. OCIO build against libyaml-cpp0.3 so build for now...
#check_package_version_ge_lt_DEB libopenimageio-dev $OIIO_VERSION_MIN $OIIO_VERSION_MAX
#if [ $? -eq 0 -a "$_with_built_openexr" = false ]; then
# install_packages_DEB libopenimageio-dev
# clean_OIIO
#else
compile_OIIO
#fi
fi
PRINT ""
have_llvm=false
_do_compile_llvm=false
if [ "$LLVM_SKIP" = true ]; then
WARNING "Skipping LLVM installation, as requested (this also implies skipping OSL!)..."
OSL_SKIP=true
elif [ "$LLVM_FORCE_BUILD" = true ]; then
INFO "Forced LLVM building, as requested..."
_do_compile_llvm=true
else
check_package_DEB clang-$LLVM_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB llvm-$LLVM_VERSION_MIN-dev clang-$LLVM_VERSION_MIN
have_llvm=true
LLVM_VERSION_FOUND=$LLVM_VERSION_MIN
clean_LLVM
else
_do_compile_llvm=true
fi
fi
if [ "$_do_compile_llvm" = true ]; then
install_packages_DEB libffi-dev
# LLVM can't find the debian ffi header dir
_FFI_INCLUDE_DIR=`dpkg -L libffi-dev | grep -e ".*/ffi.h" | sed -r 's/(.*)\/ffi.h/\1/'`
PRINT ""
compile_LLVM
have_llvm=true
LLVM_VERSION_FOUND=$LLVM_VERSION
fi
PRINT ""
_do_compile_osl=false
if [ "$OSL_SKIP" = true ]; then
WARNING "Skipping OpenShadingLanguage installation, as requested..."
elif [ "$OSL_FORCE_BUILD" = true ]; then
INFO "Forced OpenShadingLanguage building, as requested..."
_do_compile_osl=true
else
# No package currently!
_do_compile_osl=true
fi
if [ "$_do_compile_osl" = true ]; then
if [ "$have_llvm" = true ]; then
PRINT ""
compile_OSL
else
WARNING "No LLVM available, cannot build OSL!"
fi
fi
PRINT ""
if [ "$OSD_SKIP" = true ]; then
WARNING "Skipping OpenSubdiv installation, as requested..."
elif [ "$OSD_FORCE_BUILD" = true ]; then
INFO "Forced OpenSubdiv building, as requested..."
compile_OSD
else
# No package currently!
PRINT ""
compile_OSD
fi
PRINT ""
if [ "$OPENVDB_SKIP" = true ]; then
WARNING "Skipping OpenVDB installation, as requested..."
elif [ "$OPENVDB_FORCE_BUILD" = true ]; then
INFO "Forced OpenVDB building, as requested..."
compile_OPENVDB
else
check_package_version_ge_DEB libopenvdb-dev $OPENVDB_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB libopenvdb-dev libblosc-dev
clean_OPENVDB
else
compile_OPENVDB
fi
fi
PRINT ""
if [ "$ALEMBIC_SKIP" = true ]; then
WARNING "Skipping Alembic installation, as requested..."
elif [ "$ALEMBIC_FORCE_BUILD" = true ]; then
INFO "Forced Alembic building, as requested..."
compile_ALEMBIC
else
# No package currently, only HDF5!
compile_ALEMBIC
fi
if [ "$WITH_OPENCOLLADA" = true ]; then
_do_compile_collada=false
PRINT ""
if [ "$OPENCOLLADA_SKIP" = true ]; then
WARNING "Skipping OpenCOLLADA installation, as requested..."
elif [ "$OPENCOLLADA_FORCE_BUILD" = true ]; then
INFO "Forced OpenCollada building, as requested..."
_do_compile_collada=true
else
# No package currently!
_do_compile_collada=true
fi
if [ "$_do_compile_collada" = true ]; then
install_packages_DEB libpcre3-dev
# Find path to libxml shared lib...
_XML2_LIB=`dpkg -L libxml2-dev | grep -e ".*/libxml2.so"`
# No package
PRINT ""
compile_OpenCOLLADA
fi
fi
if [ "$WITH_EMBREE" = true ]; then
_do_compile_embree=false
PRINT ""
if [ "$EMBREE_SKIP" = true ]; then
WARNING "Skipping Embree installation, as requested..."
elif [ "$EMBREE_FORCE_BUILD" = true ]; then
INFO "Forced Embree building, as requested..."
_do_compile_embree=true
else
# No package currently!
_do_compile_embree=true
fi
if [ "$_do_compile_embree" = true ]; then
compile_Embree
fi
fi
if [ "$WITH_OIDN" = true ]; then
_do_compile_oidn=false
PRINT ""
if [ "$OIDN_SKIP" = true ]; then
WARNING "Skipping OpenImgeDenoise installation, as requested..."
elif [ "$OIDN_FORCE_BUILD" = true ]; then
INFO "Forced OpenImageDenoise building, as requested..."
_do_compile_oidn=true
else
# No package currently!
_do_compile_oidn=true
fi
if [ "$_do_compile_oidn" = true ]; then
compile_OIDN
fi
fi
PRINT ""
if [ "$FFMPEG_SKIP" = true ]; then
WARNING "Skipping FFMpeg installation, as requested..."
elif [ "$FFMPEG_FORCE_BUILD" = true ]; then
INFO "Forced FFMpeg building, as requested..."
compile_FFmpeg
else
# XXX Debian Testing / Ubuntu 16.04 finally includes FFmpeg, so check as usual
check_package_DEB ffmpeg
if [ $? -eq 0 ]; then
check_package_version_ge_DEB ffmpeg $FFMPEG_VERSION_MIN
if [ $? -eq 0 ]; then
install_packages_DEB libavdevice-dev
clean_FFmpeg
else
compile_FFmpeg
fi
else
compile_FFmpeg
fi
fi
}
#### Install on RPM-like ####
# Detect which RPM-based distribution we are running on, setting $RPM to
# one of "RHEL", "FEDORA" or "SUSE". Leaves $RPM untouched on anything else.
rpm_flavour() {
  if [ -f /etc/redhat-release ]; then
    # A 6.x/7.x version in the release file means RHEL/CentOS;
    # any other redhat-release flavour is treated as Fedora.
    if grep -q '[6-7]\.' /etc/redhat-release; then
      RPM="RHEL"
    else
      RPM="FEDORA"
    fi
  elif [ -f /etc/SuSE-release ]; then
    RPM="SUSE"
  fi
}
# Print the numeric version of package $1 as reported by the distro's
# package manager (yum, dnf or zypper depending on $RPM).
get_package_version_RPM() {
  rpm_flavour
  case "$RPM" in
    "RHEL")
      yum info $1 | grep Version | tail -n 1 | sed -r 's/.*:\s+(([0-9]+\.?)+).*/\1/'
      ;;
    "FEDORA")
      dnf info $1 | grep Version | tail -n 1 | sed -r 's/.*:\s+(([0-9]+\.?)+).*/\1/'
      ;;
    "SUSE")
      zypper info $1 | grep Version | tail -n 1 | sed -r 's/.*:\s+(([0-9]+\.?)+).*/\1/'
      ;;
  esac
}
# Return 0 (success) when package $1 is known to the configured repositories,
# 1 otherwise.
check_package_RPM() {
  rpm_flavour
  # Default to "not found": if $RPM matched no flavour, $r would otherwise be
  # unset and the test below would be a syntax error ([ -ge 1 ]).
  r=0
  if [ "$RPM" = "RHEL" ]; then
    r=`yum info $1 | grep -c 'Summary'`
  elif [ "$RPM" = "FEDORA" ]; then
    r=`dnf info $1 | grep -c 'Summary'`
  elif [ "$RPM" = "SUSE" ]; then
    r=`zypper info $1 | grep -c 'Summary'`
  fi

  if [ $r -ge 1 ]; then
    return 0
  else
    return 1
  fi
}
# Succeed when the repository version of package $1 exactly matches $2.
check_package_version_match_RPM() {
  v=`get_package_version_RPM $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_match $v $2
}
# Succeed when the repository version of package $1 is >= $2.
check_package_version_ge_RPM() {
  v=`get_package_version_RPM $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_ge $v $2
}
# Succeed when the repository version of package $1 is >= $2 and < $3.
check_package_version_ge_lt_RPM() {
  v=`get_package_version_RPM $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_ge_lt $v $2 $3
}
# Install the given packages through the distro's package manager.
# Aborts the whole script if installation fails.
install_packages_RPM() {
  rpm_flavour
  if [ ! $SUDO ]; then
    # Consistent with install_packages_ARCH: when sudo is disabled we must
    # not attempt an unprivileged install, which would fail and exit the
    # whole script despite the "do it yourself" warning.
    WARNING "--no-sudo enabled, impossible to install $@, you'll have to do it yourself..."
  elif [ "$RPM" = "RHEL" ]; then
    $SUDO yum install -y $@
    if [ $? -ge 1 ]; then
      ERROR "yum failed to install requested packages, exiting."
      exit 1
    fi
  elif [ "$RPM" = "FEDORA" ]; then
    $SUDO dnf install -y $@
    if [ $? -ge 1 ]; then
      ERROR "dnf failed to install requested packages, exiting."
      exit 1
    fi
  elif [ "$RPM" = "SUSE" ]; then
    $SUDO zypper --non-interactive install --auto-agree-with-licenses $@
    if [ $? -ge 1 ]; then
      ERROR "zypper failed to install requested packages, exiting."
      exit 1
    fi
  fi
}
# Install all build dependencies on an RPM-based distribution (RHEL/CentOS,
# Fedora or openSUSE): enables third-party repos, installs distro packages
# when recent enough, and falls back to compile_* helpers otherwise.
install_RPM() {
  PRINT ""
  INFO "Installing dependencies for RPM-based distribution"
  PRINT ""
  PRINT "`eval _echo "$COMMON_INFO"`"
  PRINT ""

  if [ "$NO_CONFIRM" = false ]; then
    read -p "Do you want to continue (Y/n)?"
    [ "$(echo ${REPLY:=Y} | tr [:upper:] [:lower:])" != "y" ] && exit
  fi

  # Enable non-free repositories for all flavours
  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, impossible to install third party repositories, you'll have to do it yourself..."
  else
    rpm_flavour
    if [ "$RPM" = "FEDORA" ]; then
      _fedora_rel="`egrep "[0-9]{1,}" /etc/fedora-release -o`"
      $SUDO dnf -y install --nogpgcheck \
      http://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$_fedora_rel.noarch.rpm \
      http://download1.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$_fedora_rel.noarch.rpm

      $SUDO dnf -y update

    elif [ "$RPM" = "RHEL" ]; then
      if [ "`grep '[^.]6\.' /etc/redhat-release`" ]; then
        ERROR "Building with GCC 4.4 is not supported!"
        exit 1
      else
        $SUDO yum -y install --nogpgcheck \
        http://download.fedoraproject.org/pub/epel/7/$(uname -i)/e/epel-release-7-6.noarch.rpm \
        http://li.nux.ro/download/nux/dextop/el7/x86_64/nux-dextop-release-0-5.el7.nux.noarch.rpm

        $SUDO yum -y update
      fi

    elif [ "$RPM" = "SUSE" ]; then
      # Packman repo now includes name in link...
      _suse_rel="`grep -w VERSION /etc/os-release | sed 's/[^0-9.]*//g'`"
      _suse_name="`grep -w NAME /etc/os-release | gawk '{print $2}' | sed 's/\"//'`"
      if [ $_suse_name ]; then
        _suse_rel="${_suse_name}_${_suse_rel}"
      fi

      PRINT ""
      INFO "About to add 'packman' repository from http://packman.inode.at/suse/openSUSE_$_suse_rel/"
      INFO "This is only needed if you do not already have a packman repository enabled..."
      read -p "Do you want to add this repo (Y/n)?"
      if [ "$(echo ${REPLY:=Y} | tr [:upper:] [:lower:])" == "y" ]; then
        INFO " Installing packman..."
        $SUDO zypper ar -f -n packman http://ftp.gwdg.de/pub/linux/misc/packman/suse/openSUSE_$_suse_rel/ packman
        INFO " Done."
      else
        INFO " Skipping packman installation."
      fi
      $SUDO zypper --non-interactive --gpg-auto-import-keys update --auto-agree-with-licenses
    fi
  fi

  # These libs should always be available in fedora/suse official repository...
  OPENJPEG_DEV="openjpeg2-devel"
  VORBIS_DEV="libvorbis-devel"
  OGG_DEV="libogg-devel"
  THEORA_DEV="libtheora-devel"

  _packages="gcc gcc-c++ git make cmake tar bzip2 xz findutils flex bison \
  libtiff-devel libjpeg-devel libpng-devel sqlite-devel fftw-devel SDL-devel \
  libX11-devel libXi-devel libXcursor-devel libXrandr-devel libXinerama-devel \
  wget ncurses-devel readline-devel $OPENJPEG_DEV openal-soft-devel \
  glew-devel yasm $THEORA_DEV $VORBIS_DEV $OGG_DEV patch \
  libxml2-devel yaml-cpp-devel tinyxml-devel jemalloc-devel"

  OPENJPEG_USE=true
  VORBIS_USE=true
  OGG_USE=true
  THEORA_USE=true

  if [ "$RPM" = "FEDORA" -o "$RPM" = "RHEL" ]; then
    _packages="$_packages freetype-devel tbb-devel"

    if [ "$WITH_JACK" = true ]; then
      _packages="$_packages jack-audio-connection-kit-devel"
    fi

    PRINT ""
    install_packages_RPM $_packages

    PRINT ""
    X264_DEV="x264-devel"
    check_package_version_ge_RPM $X264_DEV $X264_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM $X264_DEV
      X264_USE=true
    fi

    if [ "$WITH_ALL" = true ]; then
      PRINT ""
      XVID_DEV="xvidcore-devel"
      check_package_RPM $XVID_DEV
      if [ $? -eq 0 ]; then
        install_packages_RPM $XVID_DEV
        XVID_USE=true
      fi

      PRINT ""
      MP3LAME_DEV="lame-devel"
      check_package_RPM $MP3LAME_DEV
      if [ $? -eq 0 ]; then
        install_packages_RPM $MP3LAME_DEV
        MP3LAME_USE=true
      fi
    fi

  elif [ "$RPM" = "SUSE" ]; then
    _packages="$_packages freetype2-devel"

    PRINT ""
    install_packages_RPM $_packages

    PRINT ""
    # Install TBB on openSUSE, from temporary repo
    check_package_RPM tbb-devel
    if [ $? -eq 0 ]; then
      install_packages_RPM tbb-devel
    else
      $SUDO zypper ar -f http://download.opensuse.org/repositories/devel:/libraries:/c_c++/openSUSE_$_suse_rel/devel:libraries:c_c++.repo
      $SUDO zypper -n --gpg-auto-import-keys install tbb-devel
      $SUDO zypper rr devel_libraries_c_c++
    fi

    PRINT ""
    X264_DEV="libx264-devel"
    check_package_version_ge_RPM $X264_DEV $X264_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM $X264_DEV
      X264_USE=true
    fi

    if [ "$WITH_ALL" = true ]; then
      PRINT ""
      XVID_DEV="libxvidcore-devel"
      check_package_RPM $XVID_DEV
      if [ $? -eq 0 ]; then
        install_packages_RPM $XVID_DEV
        XVID_USE=true
      fi

      PRINT ""
      MP3LAME_DEV="libmp3lame-devel"
      check_package_RPM $MP3LAME_DEV
      if [ $? -eq 0 ]; then
        install_packages_RPM $MP3LAME_DEV
        MP3LAME_USE=true
      fi
    fi
  fi

  PRINT ""  # typo fix: was 'PRINT""' (called PRINT with no argument)
  LIBSNDFILE_DEV="libsndfile-devel"
  check_package_RPM $LIBSNDFILE_DEV
  if [ $? -eq 0 ]; then
    install_packages_RPM $LIBSNDFILE_DEV
  fi

  if [ "$WITH_ALL" = true ]; then
    PRINT ""
    VPX_DEV="libvpx-devel"
    check_package_version_ge_RPM $VPX_DEV $VPX_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM $VPX_DEV
      VPX_USE=true
    fi
    PRINT ""
    install_packages_RPM libspnav-devel
  fi

  PRINT ""
  CLANG_FORMAT="clang" # Yeah, on fedora/suse clang-format is part of main clang package...
  check_package_version_ge_RPM $CLANG_FORMAT $CLANG_FORMAT_VERSION_MIN
  if [ $? -eq 0 ]; then
    install_packages_RPM $CLANG_FORMAT
  else
    PRINT ""
    WARNING "clang-format $CLANG_FORMAT_VERSION_MIN or higher not found, this is NOT needed to get Blender compiling..."
    PRINT ""
  fi

  PRINT ""
  _do_compile_python=false
  if [ "$PYTHON_SKIP" = true ]; then
    WARNING "Skipping Python installation, as requested..."
  elif [ "$PYTHON_FORCE_BUILD" = true ]; then
    INFO "Forced Python/NumPy building, as requested..."
    _do_compile_python=true
  else
    check_package_version_match_RPM python3-devel $PYTHON_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM python3-devel
      clean_Python
      PRINT ""
      if [ "$NUMPY_SKIP" = true ]; then
        WARNING "Skipping NumPy installation, as requested..."
      else
        check_package_version_ge_RPM python3-numpy $NUMPY_VERSION_MIN
        if [ $? -eq 0 ]; then
          install_packages_RPM python3-numpy
        else
          WARNING "Sorry, using python package but no valid numpy package available!" \
          " Use --build-numpy to force building of both Python and NumPy."
        fi
      fi
    else
      _do_compile_python=true
    fi
  fi

  if [ "$_do_compile_python" = true ]; then
    install_packages_RPM libffi-devel
    compile_Python
    PRINT ""
    if [ "$NUMPY_SKIP" = true ]; then
      WARNING "Skipping NumPy installation, as requested..."
    else
      compile_Numpy
    fi
  fi

  PRINT ""
  _do_compile_boost=false
  if [ "$BOOST_SKIP" = true ]; then
    WARNING "Skipping Boost installation, as requested..."
  elif [ "$BOOST_FORCE_BUILD" = true ]; then
    INFO "Forced Boost building, as requested..."
    _do_compile_boost=true
  else
    check_package_version_ge_RPM boost-devel $BOOST_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM boost-devel
      clean_Boost
    else
      _do_compile_boost=true
    fi
  fi

  if [ "$_do_compile_boost" = true ]; then
    if [ "$RPM" = "SUSE" ]; then
      install_packages_RPM gcc-fortran
    else
      install_packages_RPM libquadmath-devel bzip2-devel
    fi
    PRINT ""
    compile_Boost
  fi

  PRINT ""
  if [ "$OCIO_SKIP" = true ]; then
    WARNING "Skipping OpenColorIO installation, as requested..."
  elif [ "$OCIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenColorIO building, as requested..."
    compile_OCIO
  else
    if [ "$RPM" = "SUSE" ]; then
      check_package_version_ge_RPM OpenColorIO-devel $OCIO_VERSION_MIN
      if [ $? -eq 0 ]; then
        install_packages_RPM OpenColorIO-devel
        clean_OCIO
      else
        compile_OCIO
      fi
    # XXX Fedora/RHEL OCIO still depends on libyaml-cpp v0.3 even when system default is v0.5!
    else
      compile_OCIO
    fi
  fi

  PRINT ""
  if [ "$OPENEXR_SKIP" = true ]; then
    WARNING "Skipping ILMBase/OpenEXR installation, as requested..."
  elif [ "$OPENEXR_FORCE_BUILD" = true ]; then
    INFO "Forced ILMBase/OpenEXR building, as requested..."
    compile_OPENEXR
  else
    check_package_version_ge_RPM openexr-devel $OPENEXR_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM openexr-devel
      OPENEXR_VERSION=`get_package_version_RPM openexr-devel`
      ILMBASE_VERSION=$OPENEXR_VERSION
      clean_OPENEXR
    else
      compile_OPENEXR
    fi
  fi

  PRINT ""
  if [ "$OIIO_SKIP" = true ]; then
    WARNING "Skipping OpenImageIO installation, as requested..."
  elif [ "$OIIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenImageIO building, as requested..."
    compile_OIIO
  else
    # XXX RPM distros pulls in too much and depends on old libs, so better to build for now...
    #check_package_version_ge_lt_RPM OpenImageIO-devel $OIIO_VERSION_MIN $OIIO_VERSION_MAX
    #if [ $? -eq 0 -a $_with_built_openexr == false ]; then
    #  install_packages_RPM OpenImageIO-devel
    #  clean_OIIO
    #else
    compile_OIIO
    #fi
  fi

  PRINT ""
  have_llvm=false
  _do_compile_llvm=false
  if [ "$LLVM_SKIP" = true ]; then
    WARNING "Skipping LLVM installation, as requested (this also implies skipping OSL!)..."
    OSL_SKIP=true
  elif [ "$LLVM_FORCE_BUILD" = true ]; then
    INFO "Forced LLVM building, as requested..."
    _do_compile_llvm=true
  else
    if [ "$RPM" = "SUSE" ]; then
      CLANG_DEV="llvm-clang-devel"
    else
      CLANG_DEV="clang-devel"
    fi
    check_package_version_match_RPM $CLANG_DEV $LLVM_VERSION
    if [ $? -eq 0 ]; then
      install_packages_RPM llvm-devel $CLANG_DEV
      have_llvm=true
      LLVM_VERSION_FOUND=$LLVM_VERSION
      clean_LLVM
    else
      _do_compile_llvm=true
    fi
  fi

  if [ "$_do_compile_llvm" = true ]; then
    install_packages_RPM libffi-devel
    # LLVM can't find the fedora ffi header dir...
    _FFI_INCLUDE_DIR=`rpm -ql libffi-devel | grep -e ".*/ffi.h" | sed -r 's/(.*)\/ffi.h/\1/'`
    PRINT ""
    compile_LLVM
    have_llvm=true
    LLVM_VERSION_FOUND=$LLVM_VERSION
  fi

  PRINT ""
  _do_compile_osl=false
  if [ "$OSL_SKIP" = true ]; then
    WARNING "Skipping OpenShadingLanguage installation, as requested..."
  elif [ "$OSL_FORCE_BUILD" = true ]; then
    INFO "Forced OpenShadingLanguage building, as requested..."
    _do_compile_osl=true
  else
    # No package currently!
    _do_compile_osl=true
  fi

  if [ "$_do_compile_osl" = true ]; then
    if [ "$have_llvm" = true ]; then
      PRINT ""
      compile_OSL
    else
      WARNING "No LLVM available, cannot build OSL!"
    fi
  fi

  PRINT ""
  if [ "$OSD_SKIP" = true ]; then
    WARNING "Skipping OpenSubdiv installation, as requested..."
  elif [ "$OSD_FORCE_BUILD" = true ]; then
    INFO "Forced OpenSubdiv building, as requested..."
    compile_OSD
  else
    # No package currently!
    compile_OSD
  fi

  PRINT ""
  if [ "$OPENVDB_SKIP" = true ]; then
    WARNING "Skipping OpenVDB installation, as requested..."
  elif [ "$OPENVDB_FORCE_BUILD" = true ]; then
    INFO "Forced OpenVDB building, as requested..."
    compile_OPENVDB
  else
    # No package currently!
    compile_OPENVDB
  fi

  PRINT ""
  if [ "$ALEMBIC_SKIP" = true ]; then
    WARNING "Skipping Alembic installation, as requested..."
  elif [ "$ALEMBIC_FORCE_BUILD" = true ]; then
    INFO "Forced Alembic building, as requested..."
    compile_ALEMBIC
  else
    # No package currently!
    compile_ALEMBIC
  fi

  if [ "$WITH_OPENCOLLADA" = true ]; then
    PRINT ""
    _do_compile_collada=false
    if [ "$OPENCOLLADA_SKIP" = true ]; then
      WARNING "Skipping OpenCOLLADA installation, as requested..."
    elif [ "$OPENCOLLADA_FORCE_BUILD" = true ]; then
      INFO "Forced OpenCollada building, as requested..."
      _do_compile_collada=true
    else
      # No package...
      _do_compile_collada=true
    fi

    if [ "$_do_compile_collada" = true ]; then
      install_packages_RPM pcre-devel
      # Find path to libxml shared lib...
      _XML2_LIB=`rpm -ql libxml2-devel | grep -e ".*/libxml2.so"`
      PRINT ""
      compile_OpenCOLLADA
    fi
  fi

  if [ "$WITH_EMBREE" = true ]; then
    PRINT ""
    _do_compile_embree=false
    # Bug fix: this used to test $OPENCOLLADA_SKIP (copy-paste error), which
    # made --skip-embree a no-op here and --skip-opencollada skip Embree.
    if [ "$EMBREE_SKIP" = true ]; then
      WARNING "Skipping Embree installation, as requested..."
    elif [ "$EMBREE_FORCE_BUILD" = true ]; then
      INFO "Forced Embree building, as requested..."
      _do_compile_embree=true
    else
      # No package...
      _do_compile_embree=true
    fi

    if [ "$_do_compile_embree" = true ]; then
      compile_Embree
    fi
  fi

  if [ "$WITH_OIDN" = true ]; then
    _do_compile_oidn=false
    PRINT ""
    if [ "$OIDN_SKIP" = true ]; then
      WARNING "Skipping OpenImgeDenoise installation, as requested..."
    elif [ "$OIDN_FORCE_BUILD" = true ]; then
      INFO "Forced OpenImageDenoise building, as requested..."
      _do_compile_oidn=true
    else
      # No package currently!
      _do_compile_oidn=true
    fi

    if [ "$_do_compile_oidn" = true ]; then
      compile_OIDN
    fi
  fi

  PRINT ""
  if [ "$FFMPEG_SKIP" = true ]; then
    WARNING "Skipping FFMpeg installation, as requested..."
  elif [ "$FFMPEG_FORCE_BUILD" = true ]; then
    INFO "Forced FFMpeg building, as requested..."
    compile_FFmpeg
  else
    check_package_version_ge_RPM ffmpeg-devel $FFMPEG_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_RPM ffmpeg ffmpeg-devel
      clean_FFmpeg
    else
      compile_FFmpeg
    fi
  fi
}
#### Install on ARCH-like ####
# Print the numeric version of package $1 from the pacman sync database.
get_package_version_ARCH() {
  _version_line=`pacman -Si $1 | grep Version | tail -n 1`
  echo "$_version_line" | sed -r 's/.*:\s+?(([0-9]+\.?)+).*/\1/'
}
# Return 0 (success) when package $1 exists in the pacman repositories,
# 1 otherwise.
check_package_ARCH() {
  r=`pacman -Si $1 | grep -c 'Description'`
  # The test's own exit status is the function's return value.
  [ $r -ge 1 ]
}
# Succeed when the repository version of package $1 exactly matches $2.
check_package_version_match_ARCH() {
  v=`get_package_version_ARCH $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_match $v $2
}
# Succeed when the repository version of package $1 is >= $2.
check_package_version_ge_ARCH() {
  v=`get_package_version_ARCH $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_ge $v $2
}
# Succeed when the repository version of package $1 is >= $2 and < $3.
check_package_version_ge_lt_ARCH() {
  v=`get_package_version_ARCH $1`
  # An empty version means the package is unknown.
  if [ -z "$v" ]; then
    return 1
  fi
  version_ge_lt $v $2 $3
}
# Install the given packages with pacman; aborts the whole script on failure.
# With --no-sudo, only warns and leaves installation to the user.
install_packages_ARCH() {
  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, impossible to run pacman for $@, you'll have to do it yourself..."
    return
  fi
  $SUDO pacman -S --needed --noconfirm $@
  if [ $? -ge 1 ]; then
    ERROR "pacman failed to install requested packages, exiting."
    exit 1
  fi
}
# Install all build dependencies on an Arch-based distribution, using pacman
# packages when recent enough and compile_* helpers otherwise.
install_ARCH() {
  PRINT ""
  INFO "Installing dependencies for ARCH-based distribution"
  PRINT ""
  PRINT "`eval _echo "$COMMON_INFO"`"
  PRINT ""

  if [ "$NO_CONFIRM" = false ]; then
    read -p "Do you want to continue (Y/n)?"
    [ "$(echo ${REPLY:=Y} | tr [:upper:] [:lower:])" != "y" ] && exit
  fi

  # Check for sudo...
  if [ $SUDO ]; then
    if [ ! -x "/usr/bin/sudo" ]; then
      PRINT ""
      ERROR "This script requires sudo but it is not installed."
      PRINT "Please setup sudo according to:"
      PRINT "https://wiki.archlinux.org/index.php/Sudo"
      PRINT "and try again."
      PRINT ""
      exit
    fi
  fi

  if [ ! $SUDO ]; then
    WARNING "--no-sudo enabled, impossible to run pacman -Sy, you'll have to do it yourself..."
  else
    $SUDO pacman -Sy
  fi

  # These libs should always be available in arch official repository...
  OPENJPEG_DEV="openjpeg2"
  VORBIS_DEV="libvorbis"
  OGG_DEV="libogg"
  THEORA_DEV="libtheora"

  BASE_DEVEL="base-devel"

  # Avoid conflicts when gcc-multilib is installed
  pacman -Qi gcc-multilib &>/dev/null
  if [ $? -eq 0 ]; then
    BASE_DEVEL=`pacman -Sgq base-devel | sed -e 's/^gcc$/gcc-multilib/g' | paste -s -d' '`
  fi

  _packages="$BASE_DEVEL git cmake \
  libxi libxcursor libxrandr libxinerama glew libpng libtiff wget openal \
  $OPENJPEG_DEV $VORBIS_DEV $OGG_DEV $THEORA_DEV yasm sdl fftw intel-tbb \
  libxml2 yaml-cpp tinyxml python-requests jemalloc"

  OPENJPEG_USE=true
  VORBIS_USE=true
  OGG_USE=true
  THEORA_USE=true

  if [ "$WITH_ALL" = true ]; then
    _packages="$_packages libspnav"
  fi

  if [ "$WITH_JACK" = true ]; then
    _packages="$_packages jack"
  fi

  PRINT ""
  install_packages_ARCH $_packages

  PRINT ""  # typo fix: was 'PRINT""' (called PRINT with no argument)
  LIBSNDFILE_DEV="libsndfile"
  check_package_ARCH $LIBSNDFILE_DEV
  if [ $? -eq 0 ]; then
    install_packages_ARCH $LIBSNDFILE_DEV
  fi

  PRINT ""
  X264_DEV="x264"
  check_package_version_ge_ARCH $X264_DEV $X264_VERSION_MIN
  if [ $? -eq 0 ]; then
    install_packages_ARCH $X264_DEV
    X264_USE=true
  fi

  if [ "$WITH_ALL" = true ]; then
    PRINT ""
    XVID_DEV="xvidcore"
    check_package_ARCH $XVID_DEV
    if [ $? -eq 0 ]; then
      install_packages_ARCH $XVID_DEV
      XVID_USE=true
    fi

    PRINT ""
    MP3LAME_DEV="lame"
    check_package_ARCH $MP3LAME_DEV
    if [ $? -eq 0 ]; then
      install_packages_ARCH $MP3LAME_DEV
      MP3LAME_USE=true
    fi

    PRINT ""
    VPX_DEV="libvpx"
    check_package_version_ge_ARCH $VPX_DEV $VPX_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH $VPX_DEV
      VPX_USE=true
    fi
  fi

  PRINT ""
  CLANG_FORMAT="clang" # Yeah, on arch clang-format is part of main clang package...
  check_package_version_ge_ARCH $CLANG_FORMAT $CLANG_FORMAT_VERSION_MIN
  if [ $? -eq 0 ]; then
    install_packages_ARCH $CLANG_FORMAT
  else
    PRINT ""
    WARNING "clang-format $CLANG_FORMAT_VERSION_MIN or higher not found, this is NOT needed to get Blender compiling..."
    PRINT ""
  fi

  PRINT ""
  _do_compile_python=false
  if [ "$PYTHON_SKIP" = true ]; then
    WARNING "Skipping Python installation, as requested..."
  elif [ "$PYTHON_FORCE_BUILD" = true ]; then
    INFO "Forced Python/NumPy building, as requested..."
    _do_compile_python=true
  else
    check_package_version_ge_ARCH python $PYTHON_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH python
      clean_Python
      PRINT ""
      if [ "$NUMPY_SKIP" = true ]; then
        WARNING "Skipping NumPy installation, as requested..."
      else
        check_package_version_ge_ARCH python-numpy $NUMPY_VERSION_MIN
        if [ $? -eq 0 ]; then
          install_packages_ARCH python-numpy
        else
          WARNING "Sorry, using python package but no valid numpy package available!" \
          "Use --build-numpy to force building of both Python and NumPy."
        fi
      fi
    else
      _do_compile_python=true
    fi
  fi

  if [ "$_do_compile_python" = true ]; then
    install_packages_ARCH libffi
    compile_Python
    PRINT ""
    if [ "$NUMPY_SKIP" = true ]; then
      WARNING "Skipping NumPy installation, as requested..."
    else
      compile_Numpy
    fi
  fi

  PRINT ""
  if [ "$BOOST_SKIP" = true ]; then
    WARNING "Skipping Boost installation, as requested..."
  elif [ "$BOOST_FORCE_BUILD" = true ]; then
    INFO "Forced Boost building, as requested..."
    compile_Boost
  else
    check_package_version_ge_ARCH boost $BOOST_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH boost
      clean_Boost
    else
      compile_Boost
    fi
  fi

  PRINT ""
  if [ "$OCIO_SKIP" = true ]; then
    WARNING "Skipping OpenColorIO installation, as requested..."
  elif [ "$OCIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenColorIO building, as requested..."
    compile_OCIO
  else
    check_package_version_ge_ARCH opencolorio $OCIO_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH opencolorio
      clean_OCIO
    else
      compile_OCIO
    fi
  fi

  PRINT ""
  if [ "$OPENEXR_SKIP" = true ]; then
    WARNING "Skipping ILMBase/OpenEXR installation, as requested..."
  elif [ "$OPENEXR_FORCE_BUILD" = true ]; then
    INFO "Forced ILMBase/OpenEXR building, as requested..."
    compile_OPENEXR
  else
    check_package_version_ge_ARCH openexr $OPENEXR_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH openexr
      OPENEXR_VERSION=`get_package_version_ARCH openexr`
      ILMBASE_VERSION=$OPENEXR_VERSION
      clean_OPENEXR
    else
      compile_OPENEXR
    fi
  fi

  PRINT ""
  if [ "$OIIO_SKIP" = true ]; then
    WARNING "Skipping OpenImageIO installation, as requested..."
  elif [ "$OIIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenImageIO building, as requested..."
    compile_OIIO
  else
    check_package_version_ge_lt_ARCH openimageio $OIIO_VERSION_MIN $OIIO_VERSION_MAX
    if [ $? -eq 0 ]; then
      install_packages_ARCH openimageio
      clean_OIIO
    else
      compile_OIIO
    fi
  fi

  PRINT ""
  have_llvm=false
  _do_compile_llvm=false
  if [ "$LLVM_SKIP" = true ]; then
    WARNING "Skipping LLVM installation, as requested (this also implies skipping OSL!)..."
    OSL_SKIP=true
  elif [ "$LLVM_FORCE_BUILD" = true ]; then
    INFO "Forced LLVM building, as requested..."
    _do_compile_llvm=true
  else
    check_package_version_match_ARCH llvm $LLVM_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH llvm clang
      have_llvm=true
      LLVM_VERSION=`get_package_version_ARCH llvm`
      LLVM_VERSION_FOUND=$LLVM_VERSION
      clean_LLVM
    else
      _do_compile_llvm=true
    fi
  fi

  if [ "$_do_compile_llvm" = true ]; then
    install_packages_ARCH libffi
    # LLVM can't find the arch ffi header dir...
    _FFI_INCLUDE_DIR=`pacman -Ql libffi | grep -e ".*/ffi.h" | awk '{print $2}' | sed -r 's/(.*)\/ffi.h/\1/'`
    PRINT ""
    compile_LLVM
    have_llvm=true
    LLVM_VERSION_FOUND=$LLVM_VERSION
  fi

  PRINT ""
  _do_compile_osl=false
  if [ "$OSL_SKIP" = true ]; then
    WARNING "Skipping OpenShadingLanguage installation, as requested..."
  elif [ "$OSL_FORCE_BUILD" = true ]; then
    INFO "Forced OpenShadingLanguage building, as requested..."
    _do_compile_osl=true
  else
    # XXX Compile for now due to requirement of LLVM 3.4 ...
    #check_package_version_ge_ARCH openshadinglanguage $OSL_VERSION_MIN
    #if [ $? -eq 0 ]; then
    #  install_packages_ARCH openshadinglanguage
    #  clean_OSL
    #else
    _do_compile_osl=true
    #fi
  fi

  if [ "$_do_compile_osl" = true ]; then
    if [ "$have_llvm" = true ]; then
      PRINT ""
      compile_OSL
    else
      WARNING "No LLVM available, cannot build OSL!"
    fi
  fi

  PRINT ""
  if [ "$OSD_SKIP" = true ]; then
    WARNING "Skipping OpenSubdiv installation, as requested..."
  elif [ "$OSD_FORCE_BUILD" = true ]; then
    INFO "Forced OpenSubdiv building, as requested..."
    compile_OSD
  else
    check_package_version_ge_ARCH opensubdiv $OSD_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH opensubdiv
      clean_OSD
    else
      compile_OSD
    fi
  fi

  PRINT ""
  if [ "$OPENVDB_SKIP" = true ]; then
    WARNING "Skipping OpenVDB installation, as requested..."
  elif [ "$OPENVDB_FORCE_BUILD" = true ]; then
    INFO "Forced OpenVDB building, as requested..."
    compile_OPENVDB
  else
    check_package_version_ge_ARCH openvdb $OPENVDB_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH openvdb
      clean_OPENVDB
    else
      compile_OPENVDB
    fi
  fi

  PRINT ""
  if [ "$ALEMBIC_SKIP" = true ]; then
    WARNING "Skipping Alembic installation, as requested..."
  elif [ "$ALEMBIC_FORCE_BUILD" = true ]; then
    INFO "Forced Alembic building, as requested..."
    compile_ALEMBIC
  else
    # No package currently!
    compile_ALEMBIC
  fi

  if [ "$WITH_OPENCOLLADA" = true ]; then
    PRINT ""
    _do_compile_collada=false
    if [ "$OPENCOLLADA_SKIP" = true ]; then
      WARNING "Skipping OpenCOLLADA installation, as requested..."
    elif [ "$OPENCOLLADA_FORCE_BUILD" = true ]; then
      INFO "Forced OpenCollada building, as requested..."
      _do_compile_collada=true
    else
      check_package_ARCH opencollada
      if [ $? -eq 0 ]; then
        install_packages_ARCH opencollada
        clean_OpenCOLLADA
      else
        _do_compile_collada=true
      fi
    fi

    if [ "$_do_compile_collada" = true ]; then
      install_packages_ARCH pcre
      # Find path to libxml shared lib...
      _XML2_LIB=`pacman -Ql libxml2 | grep -e ".*/libxml2.so$" | gawk '{print $2}'`
      PRINT ""
      compile_OpenCOLLADA
    fi
  fi

  if [ "$WITH_EMBREE" = true ]; then
    PRINT ""
    _do_compile_embree=false
    if [ "$EMBREE_SKIP" = true ]; then
      WARNING "Skipping Embree installation, as requested..."
    elif [ "$EMBREE_FORCE_BUILD" = true ]; then
      INFO "Forced Embree building, as requested..."
      _do_compile_embree=true
    else
      check_package_ARCH embree
      if [ $? -eq 0 ]; then
        install_packages_ARCH embree
        clean_Embree
      else
        _do_compile_embree=true
      fi
    fi

    if [ "$_do_compile_embree" = true ]; then
      compile_Embree
    fi
  fi

  if [ "$WITH_OIDN" = true ]; then
    _do_compile_oidn=false
    PRINT ""
    if [ "$OIDN_SKIP" = true ]; then
      WARNING "Skipping OpenImgeDenoise installation, as requested..."
    elif [ "$OIDN_FORCE_BUILD" = true ]; then
      INFO "Forced OpenImageDenoise building, as requested..."
      _do_compile_oidn=true
    else
      # No package currently!
      _do_compile_oidn=true
    fi

    if [ "$_do_compile_oidn" = true ]; then
      compile_OIDN
    fi
  fi

  PRINT ""
  if [ "$FFMPEG_SKIP" = true ]; then
    WARNING "Skipping FFMpeg installation, as requested..."
  elif [ "$FFMPEG_FORCE_BUILD" = true ]; then
    INFO "Forced FFMpeg building, as requested..."
    compile_FFmpeg
  else
    check_package_version_ge_ARCH ffmpeg $FFMPEG_VERSION_MIN
    if [ $? -eq 0 ]; then
      install_packages_ARCH ffmpeg
      clean_FFmpeg
    else
      compile_FFmpeg
    fi
  fi
}
#### Install on other distro (very limited!) ####
# Fallback installer for unrecognized distributions: no packages are
# installed, only the '--build-foo' forced source builds are honoured.
install_OTHER() {
  PRINT ""
  WARNING "Attempt to build main dependencies for other linux distributions."
  PRINT ""
  PRINT "`eval _echo "$COMMON_INFO"`"
  PRINT ""

  ERROR "Failed to detect distribution type."
  PRINT ""
  PRINT "Your distribution is not supported by this script, you'll have to install dependencies and"
  PRINT "dev packages yourself. However, this script can still attempt to build main (complex) libraries for you,"
  PRINT "if you use '--build-foo' options (you can try '--build-all' one first)."
  PRINT ""
  PRINT "Quite obviously, it assumes dependencies from those libraries are already available, otherwise please"
  PRINT "install them (you can also use error messages printed out by build process to find missing libraries...)."
  PRINT ""
  PRINT "`eval _echo "$DEPS_COMMON_INFO"`"
  PRINT ""
  PRINT "`eval _echo "$DEPS_SPECIFIC_INFO"`"
  PRINT ""

  if [ "$NO_CONFIRM" = false ]; then
    read -p "Do you want to continue (Y/n)?"
    [ "$(echo ${REPLY:=Y} | tr [:upper:] [:lower:])" != "y" ] && exit
  fi

  PRINT ""
  _do_compile_python=false
  if [ "$PYTHON_SKIP" = true ]; then
    WARNING "Skipping Python/NumPy installation, as requested..."
  elif [ "$PYTHON_FORCE_BUILD" = true ]; then
    INFO "Forced Python/NumPy building, as requested..."
    _do_compile_python=true
  fi

  if [ "$_do_compile_python" = true ]; then
    compile_Python
    PRINT ""
    if [ "$NUMPY_SKIP" = true ]; then
      WARNING "Skipping NumPy installation, as requested..."
    else
      compile_Numpy
    fi
  fi

  PRINT ""
  if [ "$BOOST_SKIP" = true ]; then
    WARNING "Skipping Boost installation, as requested..."
  elif [ "$BOOST_FORCE_BUILD" = true ]; then
    INFO "Forced Boost building, as requested..."
    compile_Boost
  fi

  PRINT ""
  if [ "$OCIO_SKIP" = true ]; then
    WARNING "Skipping OpenColorIO installation, as requested..."
  elif [ "$OCIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenColorIO building, as requested..."
    compile_OCIO
  fi

  PRINT ""
  if [ "$OPENEXR_SKIP" = true ]; then
    WARNING "Skipping ILMBase/OpenEXR installation, as requested..."
  elif [ "$OPENEXR_FORCE_BUILD" = true ]; then
    INFO "Forced ILMBase/OpenEXR building, as requested..."
    compile_OPENEXR
  fi

  PRINT ""
  if [ "$OIIO_SKIP" = true ]; then
    WARNING "Skipping OpenImageIO installation, as requested..."
  elif [ "$OIIO_FORCE_BUILD" = true ]; then
    INFO "Forced OpenImageIO building, as requested..."
    compile_OIIO
  fi

  PRINT ""
  have_llvm=false
  _do_compile_llvm=false
  if [ "$LLVM_SKIP" = true ]; then
    WARNING "Skipping LLVM installation, as requested (this also implies skipping OSL!)..."
    # Consistency fix: the DEB/RPM/ARCH installers all set this here, and the
    # warning above already promises it; previously OSL was not skipped.
    OSL_SKIP=true
  elif [ "$LLVM_FORCE_BUILD" = true ]; then
    INFO "Forced LLVM building, as requested..."
    _do_compile_llvm=true
  fi

  if [ "$_do_compile_llvm" = true ]; then
    PRINT ""
    compile_LLVM
    have_llvm=true
    LLVM_VERSION_FOUND=$LLVM_VERSION
  fi

  PRINT ""
  _do_compile_osl=false
  if [ "$OSL_SKIP" = true ]; then
    WARNING "Skipping OpenShadingLanguage installation, as requested..."
  elif [ "$OSL_FORCE_BUILD" = true ]; then
    INFO "Forced OpenShadingLanguage building, as requested..."
    _do_compile_osl=true
  fi

  if [ "$_do_compile_osl" = true ]; then
    if [ "$have_llvm" = true ]; then
      PRINT ""
      compile_OSL
    else
      WARNING "No LLVM available, cannot build OSL!"
    fi
  fi

  PRINT ""
  _do_compile_osd=false
  if [ "$OSD_SKIP" = true ]; then
    WARNING "Skipping OpenSubdiv installation, as requested..."
  elif [ "$OSD_FORCE_BUILD" = true ]; then
    INFO "Forced OpenSubdiv building, as requested..."
    _do_compile_osd=true
  fi

  if [ "$_do_compile_osd" = true ]; then
    PRINT ""
    compile_OSD
  fi

  if [ "$WITH_OPENCOLLADA" = true ]; then
    _do_compile_collada=false
    PRINT ""
    if [ "$OPENCOLLADA_SKIP" = true ]; then
      WARNING "Skipping OpenCOLLADA installation, as requested..."
    elif [ "$OPENCOLLADA_FORCE_BUILD" = true ]; then
      INFO "Forced OpenCollada building, as requested..."
      _do_compile_collada=true
    fi

    if [ "$_do_compile_collada" = true ]; then
      PRINT ""
      compile_OpenCOLLADA
    fi
  fi

  if [ "$WITH_EMBREE" = true ]; then
    _do_compile_embree=false
    PRINT ""
    if [ "$EMBREE_SKIP" = true ]; then
      WARNING "Skipping Embree installation, as requested..."
    elif [ "$EMBREE_FORCE_BUILD" = true ]; then
      INFO "Forced Embree building, as requested..."
      _do_compile_embree=true
    fi

    if [ "$_do_compile_embree" = true ]; then
      PRINT ""
      compile_Embree
    fi
  fi

  if [ "$WITH_OIDN" = true ]; then
    _do_compile_oidn=false
    PRINT ""
    if [ "$OIDN_SKIP" = true ]; then
      WARNING "Skipping OpenImgeDenoise installation, as requested..."
    elif [ "$OIDN_FORCE_BUILD" = true ]; then
      INFO "Forced OpenImageDenoise building, as requested..."
      _do_compile_oidn=true
    else
      # No package currently!
      _do_compile_oidn=true
    fi

    if [ "$_do_compile_oidn" = true ]; then
      compile_OIDN
    fi
  fi

  PRINT ""
  if [ "$FFMPEG_SKIP" = true ]; then
    WARNING "Skipping FFMpeg installation, as requested..."
  elif [ "$FFMPEG_FORCE_BUILD" = true ]; then
    INFO "Forced FFMpeg building, as requested..."
    compile_FFmpeg
  fi
}
#### Printing User Info ####
# Print the library names (stripped of lib-prefix/.so-suffix) shipped by the
# dev packages listed in $_packages, joined with $_ffmpeg_list_sep (Debian/dpkg).
print_info_ffmpeglink_DEB() {
  dpkg -L $_packages | grep -e ".*\/lib[^\/]\+\.so" | gawk '{ printf(nlines ? "'"$_ffmpeg_list_sep"'%s" : "%s", gensub(/.*lib([^\/]+)\.so/, "\\1", "g", $0)); nlines++ }'
}
# Same as print_info_ffmpeglink_DEB, but queries the rpm database (Fedora/RHEL/SuSE).
print_info_ffmpeglink_RPM() {
  rpm -ql $_packages | grep -e ".*\/lib[^\/]\+\.so" | gawk '{ printf(nlines ? "'"$_ffmpeg_list_sep"'%s" : "%s", gensub(/.*lib([^\/]+)\.so/, "\\1", "g", $0)); nlines++ }'
}
# Same as print_info_ffmpeglink_DEB, but queries pacman (Arch Linux).
# Note the anchored `\.so$` here: pacman lists one file per line.
print_info_ffmpeglink_ARCH() {
  pacman -Ql $_packages | grep -e ".*\/lib[^\/]\+\.so$" | gawk '{ printf(nlines ? "'"$_ffmpeg_list_sep"'%s" : "%s", gensub(/.*lib([^\/]+)\.so/, "\\1", "g", $0)); nlines++ }'
}
# Build the list of extra libraries ffmpeg must link against, based on which
# optional codecs (*_USE flags) were enabled, and print it via the
# distro-specific helper above. Output is a single separator-joined list.
print_info_ffmpeglink() {
  # This func must only print a ';'-separated list of libs...
  if [ -z "$DISTRO" ]; then
    ERROR "Failed to detect distribution type"
    exit 1
  fi
  # Create list of packages from which to get libs names...
  _packages=""
  if [ "$THEORA_USE" = true ]; then
    _packages="$_packages $THEORA_DEV"
  fi
  if [ "$VORBIS_USE" = true ]; then
    _packages="$_packages $VORBIS_DEV"
  fi
  if [ "$OGG_USE" = true ]; then
    _packages="$_packages $OGG_DEV"
  fi
  if [ "$XVID_USE" = true ]; then
    _packages="$_packages $XVID_DEV"
  fi
  if [ "$VPX_USE" = true ]; then
    _packages="$_packages $VPX_DEV"
  fi
  if [ "$MP3LAME_USE" = true ]; then
    _packages="$_packages $MP3LAME_DEV"
  fi
  if [ "$X264_USE" = true ]; then
    _packages="$_packages $X264_DEV"
  fi
  if [ "$OPENJPEG_USE" = true ]; then
    _packages="$_packages $OPENJPEG_DEV"
  fi
  # Dispatch to the package-manager-specific lister.
  if [ "$DISTRO" = "DEB" ]; then
    print_info_ffmpeglink_DEB
  elif [ "$DISTRO" = "RPM" ]; then
    print_info_ffmpeglink_RPM
  elif [ "$DISTRO" = "ARCH" ]; then
    print_info_ffmpeglink_ARCH
  # XXX TODO!
  else
    PRINT "<Could not determine additional link libraries needed for ffmpeg, replace this by valid list of libs...>"
  fi
}
# Print a summary for the user: the command line that was run and the CMake
# flags (-D ...) matching what this script installed. Flags are accumulated
# in $_buildargs so they can also be echoed as a single `make`/`cmake` line
# at the end. $INST directories are only referenced when they exist, i.e.
# when the dependency was actually built by this script.
print_info() {
  PRINT ""
  PRINT ""
  PRINT "Ran with:"
  PRINT " install_deps.sh $COMMANDLINE"
  PRINT ""
  PRINT ""
  PRINT "If you're using CMake add this to your configuration flags:"
  # Start by clearing (-U) any cached values for the variables we set below.
  _buildargs="-U *SNDFILE* -U *PYTHON* -U *BOOST* -U *Boost*"
  _buildargs="$_buildargs -U *OPENCOLORIO* -U *OPENEXR* -U *OPENIMAGEIO* -U *LLVM* -U *CYCLES*"
  _buildargs="$_buildargs -U *OPENSUBDIV* -U *OPENVDB* -U *COLLADA* -U *FFMPEG* -U *ALEMBIC*"
  _1="-D WITH_CODEC_SNDFILE=ON"
  PRINT " $_1"
  _buildargs="$_buildargs $_1"
  _1="-D PYTHON_VERSION=$PYTHON_VERSION_MIN"
  PRINT " $_1"
  _buildargs="$_buildargs $_1"
  if [ -d $INST/python-$PYTHON_VERSION_MIN ]; then
    _1="-D PYTHON_ROOT_DIR=$INST/python-$PYTHON_VERSION_MIN"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
  fi
  if [ -d $INST/boost ]; then
    _1="-D BOOST_ROOT=$INST/boost"
    _2="-D Boost_NO_SYSTEM_PATHS=ON"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
  fi
  if [ "$OCIO_SKIP" = false ]; then
    _1="-D WITH_OPENCOLORIO=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/ocio ]; then
      _1="-D OPENCOLORIO_ROOT_DIR=$INST/ocio"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ -d $INST/openexr ]; then
    _1="-D OPENEXR_ROOT_DIR=$INST/openexr"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
  fi
  if [ -d $INST/oiio ]; then
    _1="-D WITH_OPENIMAGEIO=ON"
    _2="-D OPENIMAGEIO_ROOT_DIR=$INST/oiio"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
  fi
  # OSL needs LLVM; both are switched together.
  if [ "$OSL_SKIP" = false ]; then
    _1="-D WITH_CYCLES_OSL=ON"
    _2="-D WITH_LLVM=ON"
    _3="-D LLVM_VERSION=$LLVM_VERSION_FOUND"
    PRINT " $_1"
    PRINT " $_2"
    PRINT " $_3"
    _buildargs="$_buildargs $_1 $_2 $_3"
    if [ -d $INST/osl ]; then
      _1="-D OSL_ROOT_DIR=$INST/osl"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
    if [ -d $INST/llvm ]; then
      _1="-D LLVM_ROOT_DIR=$INST/llvm"
      _2="-D LLVM_STATIC=ON"
      PRINT " $_1"
      PRINT " $_2"
      _buildargs="$_buildargs $_1 $_2"
    fi
  else
    _1="-D WITH_CYCLES_OSL=OFF"
    _2="-D WITH_LLVM=OFF"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
  fi
  if [ "$OSD_SKIP" = false ]; then
    _1="-D WITH_OPENSUBDIV=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/osd ]; then
      _1="-D OPENSUBDIV_ROOT_DIR=$INST/osd"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$OPENVDB_SKIP" = false ]; then
    _1="-D WITH_OPENVDB=ON"
    _2="-D WITH_OPENVDB_BLOSC=ON"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
    if [ -d $INST/openvdb ]; then
      _1="-D OPENVDB_ROOT_DIR=$INST/openvdb"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
    if [ -d $INST/blosc ]; then
      _1="-D BLOSC_ROOT_DIR=$INST/blosc"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$WITH_OPENCOLLADA" = true ]; then
    _1="-D WITH_OPENCOLLADA=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/opencollada ]; then
      _1="-D OPENCOLLADA_ROOT_DIR=$INST/opencollada"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$WITH_EMBREE" = true ]; then
    _1="-D WITH_CYCLES_EMBREE=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/embree ]; then
      _1="-D EMBREE_ROOT_DIR=$INST/embree"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$WITH_OIDN" = true ]; then
    _1="-D WITH_OPENIMAGEDENOISE=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/oidn ]; then
      _1="-D OPENIMAGEDENOISE_ROOT_DIR=$INST/oidn"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$WITH_JACK" = true ]; then
    _1="-D WITH_JACK=ON"
    _2="-D WITH_JACK_DYNLOAD=ON"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
  fi
  if [ "$ALEMBIC_SKIP" = false ]; then
    _1="-D WITH_ALEMBIC=ON"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
    if [ -d $INST/alembic ]; then
      _1="-D ALEMBIC_ROOT_DIR=$INST/alembic"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  if [ "$NO_SYSTEM_GLEW" = true ]; then
    _1="-D WITH_SYSTEM_GLEW=OFF"
    PRINT " $_1"
    _buildargs="$_buildargs $_1"
  fi
  if [ "$FFMPEG_SKIP" = false ]; then
    _1="-D WITH_CODEC_FFMPEG=ON"
    # print_info_ffmpeglink appends the codec libs actually enabled above.
    _2="-D FFMPEG_LIBRARIES='avformat;avcodec;avutil;avdevice;swscale;swresample;lzma;rt;`print_info_ffmpeglink`'"
    PRINT " $_1"
    PRINT " $_2"
    _buildargs="$_buildargs $_1 $_2"
    if [ -d $INST/ffmpeg ]; then
      _1="-D FFMPEG=$INST/ffmpeg"
      PRINT " $_1"
      _buildargs="$_buildargs $_1"
    fi
  fi
  PRINT ""
  PRINT "Or even simpler, just run (in your blender-source dir):"
  PRINT " make -j$THREADS BUILD_CMAKE_ARGS=\"$_buildargs\""
  PRINT ""
  PRINT "Or in all your build directories:"
  PRINT " cmake $_buildargs ."
}
#### "Main" ####
# Detect distribution type used on this machine and run the matching installer.
if [ -f /etc/debian_version ]; then
  DISTRO="DEB"
  install_DEB
elif [ -f /etc/arch-release ]; then
  DISTRO="ARCH"
  install_ARCH
# BUG FIX: was `[ -f /etc/redhat-release -o /etc/SuSE-release ]` — the second
# operand was a bare non-empty string, which test(1) treats as always true, so
# every non-Debian/Arch system was classified as RPM and install_OTHER was
# unreachable. Also prefer `||` over the obsolescent `-o`.
elif [ -f /etc/redhat-release ] || [ -f /etc/SuSE-release ]; then
  DISTRO="RPM"
  install_RPM
else
  DISTRO="OTHER"
  install_OTHER
fi
# Show the summary and keep a copy for later reference.
print_info | tee "$INFO_PATH/BUILD_NOTES.txt"
PRINT ""
PRINT "This information has been written to $INFO_PATH/BUILD_NOTES.txt"
PRINT ""
# Switch back to user language.
# BUG FIX: was `LANG=LANG_BACK`, which assigned the literal string "LANG_BACK"
# instead of restoring the saved locale.
LANG=$LANG_BACK
export LANG
CXXFLAGS=$CXXFLAGS_BACK
export CXXFLAGS
|
<reponame>sthagen/markdown-it
// Join raw text tokens with the rest of the text
//
// This is set as a separate rule to provide an opportunity for plugins
// to run text replacements after text join, but before escape join.
//
// For example, `\:)` shouldn't be replaced with an emoji.
//
'use strict';
module.exports = function text_join(state) {
var j, l, tokens, curr, max, last,
blockTokens = state.tokens;
for (j = 0, l = blockTokens.length; j < l; j++) {
if (blockTokens[j].type !== 'inline') continue;
tokens = blockTokens[j].children;
max = tokens.length;
for (curr = 0; curr < max; curr++) {
if (tokens[curr].type === 'text_special') {
tokens[curr].type = 'text';
}
}
for (curr = last = 0; curr < max; curr++) {
if (tokens[curr].type === 'text' &&
curr + 1 < max &&
tokens[curr + 1].type === 'text') {
// collapse two adjacent text nodes
tokens[curr + 1].content = tokens[curr].content + tokens[curr + 1].content;
} else {
if (curr !== last) { tokens[last] = tokens[curr]; }
last++;
}
}
if (curr !== last) {
tokens.length = last;
}
}
};
|
package com.asadmshah.moviegur.injection.modules;
import com.asadmshah.moviegur.utils.ResourceSupplier;
import org.mockito.Mockito;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
@Module
public class MockResourceSupplierModule {
    /**
     * Provides a Mockito mock of {@link ResourceSupplier} as a singleton, so
     * tests can stub and verify resource access without real resources.
     */
    @Provides
    @Singleton
    public ResourceSupplier providesResourceSupplier() {
        return Mockito.mock(ResourceSupplier.class);
    }
}
|
<reponame>minJonRing/electron<gh_stars>0
let config = {};db = [];
for(let i = 1 ; i < 17 ; i++){
db.push({
logo:`assets/img/index/nav/${i}.png`,
company:`assets/img/index/list/${i}/company.png`,
desc:`assets/img/index/list/${i}/desc.png`,
img:`assets/img/index/list/${i}/img.png`,
})
}
config.db = db;
// export default db;
module.exports = config; |
#!/bin/bash
### USER PI AUTOSTART (LCD Display)
# this script gets started by the autologin of the pi user and
# and its output is gets displayed on the LCD or the RaspiBlitz
# Print CLI usage/help for this autostart script to stdout.
function usage() {
  echo -e "This script gets started by the autologin of the pi user and "
  echo -e "and its output is gets displayed on the LCD or the RaspiBlitz."
  echo -e ""
  echo -e "Usage: $0 [-h|--help] [-v*|--verbose] [-p|--pause STRING]"
  echo -e ""
  echo -e " -h, --help\t\tprint this help message"
  echo -e " -v, --verbose\t\tbe more verbose"
  echo -e " -p, --pause STRING\ttime in seconds to pause"
  echo -e ""
}
# Default Values
verbose=0
pause=12
# Parse command-line flags; -v may be stacked (-vvv) to raise verbosity.
while [[ "$1" == -* ]]; do
  case "$1" in
    -h|--help)
      usage
      exit 0
      ;;
    -v*)
      # stacked short flags: each extra 'v' bumps verbosity by one
      (( verbose += ${#1} - 1 ))
      ;;
    --verbose)
      (( verbose++ ))
      ;;
    -p|--pause)
      shift
      pause="$1"
      ;;
    --)
      shift
      break
      ;;
    *)
      echo "Unrecognized option $1."
      echo ""
      usage
      exit 1
      ;;
  esac
  shift
done
# pause must be numeric: it is later passed to `dialog --pause` and to -ne.
if ! [[ "$pause" =~ ^[[:digit:]]+$ ]]; then
  echo "pause must be a positive integer or 0." >&2
  exit 1
fi
# CONFIGFILE - configuration of RaspiBlitz
configFile="/mnt/hdd/raspiblitz.conf"
# INFOFILE - state data from bootstrap
infoFile="/home/admin/raspiblitz.info"
# check that user is pi
if [ "$USER" != "pi" ]; then
  echo "plz run as user pi --> su pi"
  exit 1
fi
# display a 10s startup time
source /home/admin/_version.info
if [ "$pause" -ne "0" ]; then
  dialog --pause " Starting RaspiBlitz v${codeVersion} ..." 8 58 ${pause}
fi
# DISPLAY LOOP
# Endless loop: each iteration re-reads the state files and shows one dialog
# screen, then sleeps and `continue`s. Only the final branch shows the normal
# status display.
chain=""
while :
do
  ###########################
  # CHECK BASIC DATA
  ###########################
  # get the local network IP to be displayed on the lCD
  source <(sudo /home/admin/config.scripts/internet.sh status)
  # waiting for IP in general
  if [ ${#localip} -eq 0 ]; then
    l1="Waiting for Network ...\n"
    l2="Not able to get local IP.\n"
    l3="LAN cable connected? WIFI lost?\n"
    dialog --backtitle "RaspiBlitz ${codeVersion}" --infobox "$l1$l2$l3" 5 40
    sleep 3
    continue
  fi
  # waiting for Internet connection
  if [ ${online} -eq 0 ]; then
    l1="Waiting for Internet ...\n"
    l2="Local Network seems OK but no Internet.\n"
    l3="Is router still online?\n"
    dialog --backtitle "RaspiBlitz ${codeVersion} ${localip}" --infobox "$l1$l2$l3" 5 45
    sleep 3
    continue
  fi
  # get config info if already available (with state value)
  source ${infoFile}
  configExists=$(ls ${configFile} 2>/dev/null | grep -c '.conf')
  if [ ${configExists} -eq 1 ]; then
    source ${configFile}
    source <(/home/admin/config.scripts/network.aliases.sh getvars lnd ${chain}net)
    shopt -s expand_aliases
    alias bitcoincli_alias="$bitcoincli_alias"
    alias lncli_alias="$lncli_alias"
    alias lightningcli_alias="$lightningcli_alias"
  fi
  # reboot info
  if [ "${state}" = "reboot" ]; then
    dialog --backtitle "RaspiBlitz ${codeVersion}" --infobox "Waiting for Reboot ..." 3 30
    sleep 20
    continue
  fi
  # shutdown info
  if [ "${state}" = "shutdown" ]; then
    dialog --backtitle "RaspiBlitz ${codeVersion}" --infobox "Waiting for Shutdown ..." 3 30
    sleep 20
    continue
  fi
  # waiting for DHCP in general
  if [ "${state}" = "noDHCP" ]; then
    l1="Waiting for DHCP ...\n"
    l2="Not able to get local IP.\n"
    l3="Check you router if constant.\n"
    dialog --backtitle "RaspiBlitz ${codeVersion} (${localip})" --infobox "$l1$l2$l3" 5 40
    sleep 1
    continue
  fi
  # if no information available from files - set default
  if [ ${#setupStep} -eq 0 ]; then
    setupStep=0
  fi
  # before setup even started
  if [ ${setupStep} -eq 0 ]; then
    # check for internet connection
    online=$(ping 1.0.0.1 -c 1 -W 2 | grep -c '1 received')
    if [ ${online} -eq 0 ]; then
      # re-test with other server
      online=$(ping 8.8.8.8 -c 1 -W 2 | grep -c '1 received')
    fi
    if [ ${online} -eq 0 ]; then
      # re-test with other server
      online=$(ping 208.67.222.222 -c 1 -W 2 | grep -c '1 received')
    fi
    if [ ${online} -eq 0 ]; then
      message="no internet connection"
    # when in presync - get more info on progress
    elif [ "${state}" = "presync" ]; then
      blockchaininfo="$(sudo -u root bitcoin-cli --conf=/home/admin/assets/bitcoin.conf getblockchaininfo 2>/dev/null)"
      message="starting"
      if [ ${#blockchaininfo} -gt 0 ]; then
        # show verification progress as a percentage
        message="$(echo "${blockchaininfo}" | jq -r '.verificationprogress')"
        message=$(echo $message | awk '{printf( "%.2f%%", 100 * $1)}')
      fi
    # when old data - improve message
    elif [ "${state}" = "sdtoosmall" ]; then
      message="SDCARD TOO SMALL - min 16GB"
    # when no HDD - improve message
    elif [ "${state}" = "noHDD" ]; then
      message="Connect external HDD/SSD"
    fi
    # setup process has not started yet
    l1="Login to your RaspiBlitz with:\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: raspiblitz\n"
    if [ "${state}" = "recovering" ]; then
      l1="Recovering please wait ..\n"
    fi
    # box width grows with the IP string so the ssh line fits
    boxwidth=$((${#localip} + 24))
    sleep 3
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) - ${message}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 5
    continue
  fi
  # check if recovering/upgrade is running
  if [ "${state}" = "recovering" ]; then
    if [ ${#message} -eq 0 ]; then
      message="Setup in Progress"
    fi
    l1="Upgrade/Recover/Provision\n"
    l2="---> ${message}\n"
    l3="Please keep running until reboot."
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) ${setupStep} ${localip}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  # if freshly recovered
  recoveredInfoExists=$(sudo ls /home/admin/recover.flag 2>/dev/null | grep -c '.flag')
  if [ ${recoveredInfoExists} -gt 0 ]; then
    l1="FINAL RECOVER LOGIN NEEDED:\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: raspiblitz\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state})" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  # if re-indexing
  if [ "${state}" = "reindex" ]; then
    l1="REINDEXING BLOCKCHAIN\n"
    l2="To monitor & detect finish:\n"
    l3="ssh admin@${localip}\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state})" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  # when setup is in progress - password has been changed
  if [ ${setupStep} -lt 100 ]; then
    l1="Login to your RaspiBlitz with:\n"
    l2="ssh admin@${localip}\n"
    l3="Use your Password A\n"
    boxwidth=$((${#localip} + 24))
    sleep 3
    dialog --backtitle "RaspiBlitz ${codeVersion} ${localip} - Welcome (${setupStep})" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 7
    continue
  fi
  ###########################
  # DISPLAY AFTER SETUP
  ###########################
  if [ "${state}" = "repair" ]; then
    l1="Repair Mode\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: PasswordA\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) ${setupStep} ${localip}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  if [ "${state}" = "reboot" ]; then
    l1="Reboot needed.\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: PasswordA\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) ${setupStep} ${localip}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  if [ "${state}" = "retorrent" ]; then
    l1="Repair Mode- TORRENT\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: PasswordA\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) ${setupStep} ${localip}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  if [ "${state}" = "recopy" ]; then
    l1="Repair Mode - COPY\n"
    l2="ssh admin@${localip}\n"
    l3="Use password: PasswordA\n"
    boxwidth=$((${#localip} + 28))
    dialog --backtitle "RaspiBlitz ${codeVersion} (${state}) ${setupStep} ${localip}" --infobox "$l1$l2$l3" 5 ${boxwidth}
    sleep 3
    continue
  fi
  if [ "${state}" = "copystation" ]; then
    l1="COPY STATION MODE\n"
    l2="${message}"
    dialog --backtitle "RaspiBlitz ${codeVersion} ${localip}" --infobox "$l1$l2" 6 56
    sleep 2
    continue
  fi
  # if LND is syncing or scanning
  lndSynced=$($lncli_alias getinfo 2>/dev/null | jq -r '.synced_to_chain' | grep -c true)
  if [ ${lndSynced} -eq 0 ]; then
    /home/admin/80scanLND.sh
    sleep 20
    continue
  fi
  # perform config check
  configCheck=$(/home/admin/config.scripts/blitz.configcheck.py)
  if [ $? -eq 0 ]; then
    configValid=1
    # echo "Config Valid!"
  else
    configValid=0
    # echo "Config Not Valid!"
    l1="POTENTIAL CONFIG ERROR FOUND\n"
    l2="ssh admin@${localip}\n"
    l3="use Password A\n"
    l4="Run on Terminal command: check"
    dialog --backtitle "RaspiBlitz ${codeVersion} cfg-err ${localip}" --infobox "$l1$l2$l3$l4" 6 50
    sleep 20
    continue
  fi
  # no special case - show status display
  /home/admin/00infoBlitz.sh
  sleep 5
done
|
<reponame>Luzifer/gallery<filename>storage_log.go
package main
import (
"bytes"
"io"
"io/ioutil"
log "github.com/sirupsen/logrus"
)
// logStorage is a stateless no-op storage backend that only logs operations.
type logStorage struct{}
// WriteFile takes the content of a file and writes it into the underlying
// storage system. This implementation only logs the request and discards
// the content, always reporting success.
func (l logStorage) WriteFile(filepath string, content io.Reader, contentType string) error {
	fields := log.Fields{
		"path":         filepath,
		"content_type": contentType,
	}
	log.WithFields(fields).Info("Write file")
	return nil
}
// ReadFile retrieves a file from the underlying storage, needs to return
// errFileNotFound when file is not present. This implementation logs the
// request and always reports the file as missing (with an empty reader).
func (l logStorage) ReadFile(filepath string) (io.ReadCloser, error) {
	fields := log.Fields{"path": filepath}
	log.WithFields(fields).Info("Read file")
	empty := ioutil.NopCloser(bytes.NewReader(nil))
	return empty, errFileNotFound
}
|
import { makeStyles } from "@material-ui/core";
import { Alert, AlertTitle } from "@material-ui/lab";
import { FC, useCallback, useEffect, useRef } from "react";
import { CSSTransition } from "react-transition-group";
import { ToastProps } from "./types";
// Fixed-position toast container: pinned 16px from the right edge, up to
// 600px wide (400px on small screens), animated with a 250ms ease-in.
const useStyles = makeStyles((theme) => ({
  toast: {
    right: "16px",
    position: "fixed",
    maxWidth: "600px",
    width: "100%",
    transition: "all 250ms ease-in",
    [theme.breakpoints.down("sm")]: {
      maxWidth: "400px",
    },
  },
}));
// Toast renders one auto-dismissing alert. The dismiss timer starts on mount,
// pauses while hovered, and restarts on mouse-leave; removal is delegated to
// the parent via `onRemove(id)`.
export const Toast: FC<ToastProps> = ({
  toast,
  onRemove,
  duration,
  style,
  ...props
}) => {
  const classes = useStyles();
  const timer = useRef<number>();
  const ref = useRef(null);
  // Latest onRemove, read by timer callbacks so they never call a stale prop.
  const removeHandler = useRef(onRemove);
  const { id, title, description, type } = toast;

  // BUG FIX: the ref was seeded once and never refreshed, so if the parent
  // passed a new `onRemove`, pending timers still invoked the old handler.
  useEffect(() => {
    removeHandler.current = onRemove;
  }, [onRemove]);

  const handleRemove = useCallback(
    () => removeHandler.current(id),
    [id] // refs are stable; only the toast id matters
  );

  // Pause auto-dismiss while the pointer is over the toast.
  const handleMouseEnter = () => {
    clearTimeout(timer.current);
  };

  // Restart the dismiss countdown when the pointer leaves.
  const handleMouseLeave = () => {
    if (timer.current) {
      clearTimeout(timer.current);
    }
    timer.current = window.setTimeout(() => {
      handleRemove();
    }, duration);
  };

  // Arm the initial dismiss timer; clear it on unmount or duration change.
  useEffect(() => {
    if (timer.current) {
      clearTimeout(timer.current);
    }
    timer.current = window.setTimeout(() => {
      handleRemove();
    }, duration);
    return () => {
      clearTimeout(timer.current);
    };
  }, [timer, duration, handleRemove]);

  return (
    <CSSTransition nodeRef={ref} timeout={250} style={style} {...props}>
      <div
        className={classes.toast}
        ref={ref}
        onMouseEnter={handleMouseEnter}
        onMouseLeave={handleMouseLeave}
      >
        <Alert severity={type}>
          <AlertTitle>{title}</AlertTitle>
          {description}
        </Alert>
      </div>
    </CSSTransition>
  );
};
|
from .notify import send_notification
from .sendEmbed import send_embedded_notification
from .isStaff import check_staff
def notify_staff(member_id: int, message: str, embed_data: dict):
    """Send an embedded notification if the member is staff.

    Non-staff callers trigger a plain "unauthorized" notification instead.
    Delegates the actual delivery to the notify/sendEmbed helpers.
    """
    if not check_staff(member_id):
        send_notification("Unauthorized access attempt detected.")
        return
    send_embedded_notification(message, embed_data)
<!DOCTYPE html>
<html lang="en">
<head>
  <title>Display Current Time</title>
</head>
<body>
  <h1>Current Time: <span id="time"></span></h1>
  <!-- Re-renders the local time string into #time once per second. -->
  <script>
    // Function to display current time
    function displayTime() {
      let time = new Date().toLocaleTimeString("en-US");
      document.getElementById("time").innerHTML = time;
    }
    setInterval(displayTime, 1000);
  </script>
</body>
</html>
#!/bin/sh
# Snap launcher for Node-RED; abort on any command failure.
set -e
# Announce the exact invocation (shows up in snap/service logs).
echo STARTING NODE-RED - $SNAP/bin/node-red $SNAP/settings.js $SNAP_USER_DATA
# Run Node-RED verbosely with snap-confined settings, user dir and flows file.
"$SNAP"/bin/node-red -v --settings "$SNAP/settings.js" --userDir "$SNAP_USER_DATA" "$SNAP_USER_DATA/flows.json"
|
/* CSS for mobile layout */
/* <= 600px: sidebar stacks above the content area (column flex). */
@media only screen and (max-width: 600px) {
  body {
    margin: 0;
    padding: 0;
  }
  .container {
    display: flex;
    flex-direction: column;
    font-family: sans-serif;
  }
  .sidebar {
    width: 100%;
    position: relative;
    margin-bottom: 20px;
  }
  .content-area {
    width: 100%;
  }
}
/* CSS for desktop layout */
/* >= 601px: fixed 200px sidebar beside the content area (row flex). */
@media screen and (min-width: 601px) {
  body {
    margin: 0;
    padding: 0;
  }
  .container {
    display: flex;
    font-family: sans-serif;
    flex-direction: row;
  }
  .sidebar {
    width: 200px;
    position: relative;
  }
  .content-area {
    /* content fills whatever the 200px sidebar leaves over */
    width: calc(100% - 200px);
  }
}
// Highlight the menu entry by adding the "active" CSS class.
// NOTE(review): assumes an element with id "menu-item" exists — this throws
// a TypeError if it does not; confirm the markup guarantees it.
document.getElementById("menu-item").classList.add("active");
import com.jacksonf.dto.Cliente;
import com.jacksonf.service.CrudDB;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Data-access object for the `clientes` table (columns: id, name, email).
 * All SQL uses prepared statements; SQLExceptions are caught and printed,
 * so failures surface as a no-op (writes) or a null result (reads).
 */
public class ClienteDAO {
    // DB helper that owns/supplies the JDBC Connection used by every query.
    private CrudDB crudDB;
    // Constructor to initialize the CrudDB instance
    public ClienteDAO(Connection connection) {
        this.crudDB = new CrudDB(connection);
    }
    // Method to create a new Cliente in the database
    public void createCliente(Cliente cliente) {
        String sql = "INSERT INTO clientes (id, name, email) VALUES (?, ?, ?)";
        try (PreparedStatement statement = crudDB.getConnection().prepareStatement(sql)) {
            statement.setInt(1, cliente.getId());
            statement.setString(2, cliente.getName());
            statement.setString(3, cliente.getEmail());
            statement.executeUpdate();
        } catch (SQLException e) {
            // best-effort: log and continue
            e.printStackTrace();
        }
    }
    // Method to retrieve a Cliente from the database by ID; returns null when
    // no row matches or the query fails.
    public Cliente getClienteById(int id) {
        String sql = "SELECT * FROM clientes WHERE id = ?";
        try (PreparedStatement statement = crudDB.getConnection().prepareStatement(sql)) {
            statement.setInt(1, id);
            try (ResultSet resultSet = statement.executeQuery()) {
                if (resultSet.next()) {
                    Cliente cliente = new Cliente();
                    cliente.setId(resultSet.getInt("id"));
                    cliente.setName(resultSet.getString("name"));
                    cliente.setEmail(resultSet.getString("email"));
                    return cliente;
                }
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }
    // Method to update an existing Cliente in the database (matched by id)
    public void updateCliente(Cliente cliente) {
        String sql = "UPDATE clientes SET name = ?, email = ? WHERE id = ?";
        try (PreparedStatement statement = crudDB.getConnection().prepareStatement(sql)) {
            statement.setString(1, cliente.getName());
            statement.setString(2, cliente.getEmail());
            statement.setInt(3, cliente.getId());
            statement.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    // Method to delete a Cliente from the database
    public void deleteCliente(int id) {
        String sql = "DELETE FROM clientes WHERE id = ?";
        try (PreparedStatement statement = crudDB.getConnection().prepareStatement(sql)) {
            statement.setInt(1, id);
            statement.executeUpdate();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}
#!/usr/bin/bash
# Train two coarse-target logistic-regression models with fixed
# hyper-parameters (C=0.0001, L2) and no grid search: one on pre-imputed
# data, one on not-pre-imputed data.
python -m nicu_los.src.modelling.logistic_regression --coarse-targets \
    --no-grid-search --model-name log_reg_coarse_gs_pre_imputed --pre-imputed \
    --C 0.0001 --regularizer l2
python -m nicu_los.src.modelling.logistic_regression --coarse-targets \
    --no-grid-search --model-name log_reg_coarse_gs_not_pre_imputed \
    --not-pre-imputed --C 0.0001 --regularizer l2
|
package com.java.study.redis.hyperloglog;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Hand-rolled HyperLogLog-style cardinality experiment: distributes random
 * values over k buckets, tracks the longest trailing-zero run per bucket,
 * and estimates the count from the harmonic mean of those runs.
 */
public class PfTest {

    /** Tracks the maximum trailing-zero-bit run seen among fed values. */
    static class BitKeeper {
        private int maxbits;

        /** Feed one value; remember the longest trailing-zero run so far. */
        public void random(long value) {
            int bits = lowZeros(value);
            if (bits > this.maxbits) {
                this.maxbits = bits;
            }
        }

        /** Number of trailing zero bits of value (checked up to 31). */
        private int lowZeros(long value) {
            int i = 1;
            for (; i < 32; i++) {
                if (value >> i << i != value) {
                    break;
                }
            }
            return i - 1;
        }
    }

    /** One experiment: n random 32-bit values hashed into k BitKeeper buckets. */
    static class Experiment {
        private int n;
        private int k;
        private BitKeeper[] keepers;

        public Experiment(int n) {
            this(n, 1024); // default bucket count
        }

        public Experiment(int n, int k) {
            this.n = n;
            this.k = k;
            this.keepers = new BitKeeper[k];
            for (int i = 0; i < k; i++) {
                keepers[i] = new BitKeeper();
            }
        }

        public void work() {
            for (int i = 0; i < this.n; i++) {
                long m = ThreadLocalRandom.current().nextLong(1L << 32);
                // BUG FIX: `(m & 0xfff000 >> 16)` parsed as `m & (0xfff000 >> 16)`
                // = `m & 0xff`, because `>>` binds tighter than `&` — so only 256
                // of the k buckets were ever selected. Mask bits 16..27 explicitly
                // so the modulo spreads values over all buckets.
                BitKeeper bitKeeper = keepers[(int) (((m & 0xfff0000) >> 16) % keepers.length)];
                bitKeeper.random(m);
            }
        }

        /** Harmonic-mean estimate of the distinct count across buckets. */
        public double estimate() {
            double submitsInverse = 0.0;
            for (BitKeeper keeper : keepers) {
                submitsInverse += 1.0 / (float) keeper.maxbits;
            }
            double avgBits = (float) keepers.length / submitsInverse;
            return Math.pow(2, avgBits) * this.k;
        }
    }

    public static void main(String[] args) {
        for (int i = 100000; i < 1000000; i += 10000) {
            Experiment experiment = new Experiment(i);
            experiment.work();
            double est = experiment.estimate();
            // BUG FIX: relative error was computed against the constant 1
            // (`Math.abs(est - 1) / i`) instead of the true count i.
            System.out.printf("%d %.2f %.2f\n", i, est, Math.abs(est - i) / i);
        }
    }
}
|
#!/bin/bash
dieharder -d 3 -g 53 -S 2817201488
|
import org.junit.jupiter.api.*;
import static org.junit.jupiter.api.Assertions.*;
/**
 * Statistical tests for PercolationStats on a 200x200 grid with 100 trials.
 * The simulation is randomized, so each assertion checks against a reference
 * value with a relative tolerance rather than exact equality (stddev gets a
 * wide 50% band since it varies much more between runs).
 */
public class PercolationStatsTest {
    // One shared simulation run for all assertions (expensive to recompute).
    private static PercolationStats percolationStats;
    @Test
    public void testMean() {
        double expected = 0.5929934999999997;
        assertEquals(expected, percolationStats.mean(), 0.01 * expected);
    }
    @BeforeAll
    public static void beforeAll() {
        percolationStats = new PercolationStats(200, 100);
    }
    @Test
    public void testStddev() {
        double expected = 0.00876990421552567;
        assertEquals(expected, percolationStats.stddev(), 0.5 * expected);
    }
    @Test
    public void testConfidenceLo() {
        double expected = 0.5912745987737567;
        assertEquals(expected, percolationStats.confidenceLo(), 0.01 * expected);
    }
    @Test
    public void testConfidenceHi() {
        double expected = 0.5947124012262428;
        assertEquals(expected, percolationStats.confidenceHi(), 0.01 * expected);
    }
}
|
import pandas as pd
def data_preprocessing_pipeline(training_data_file, testing_data_file,
                                train_results_file, test_results_file,
                                filtered_train_file='training_data_2.csv',
                                filtered_test_file='testing_data_2.csv'):
    """Drop low-confidence samples from train/test data and save the rest.

    Rows whose id appears in the results file with ``probability <= 0.5``
    are removed. Data files are headerless CSVs whose first column is the
    sample id; results files are CSVs with ``id`` and ``probability``
    columns.

    Generalized (backward-compatibly): the output paths, previously
    hard-coded, are now parameters defaulting to the original filenames.

    :param training_data_file: path to headerless training-data CSV
    :param testing_data_file: path to headerless testing-data CSV
    :param train_results_file: CSV with 'id' and 'probability' for train rows
    :param test_results_file: CSV with 'id' and 'probability' for test rows
    :param filtered_train_file: where to write the filtered training data
    :param filtered_test_file: where to write the filtered testing data
    """
    # Data files carry no header row; keep positional columns.
    training_data = pd.read_csv(training_data_file, header=None)
    testing_data = pd.read_csv(testing_data_file, header=None)
    # Results files have named columns.
    train_results = pd.read_csv(train_results_file)
    test_results = pd.read_csv(test_results_file)
    # Ids flagged with probability <= 0.5 are dropped from the data.
    remove_train_id = train_results.loc[train_results['probability'] <= 0.5, 'id']
    training_data_2 = training_data.loc[~training_data.iloc[:, 0].isin(remove_train_id)]
    remove_test_id = test_results.loc[test_results['probability'] <= 0.5, 'id']
    testing_data_2 = testing_data.loc[~testing_data.iloc[:, 0].isin(remove_test_id)]
    # Write without index/header so the layout matches the input files.
    training_data_2.to_csv(filtered_train_file, sep=',', index=False, header=None)
    testing_data_2.to_csv(filtered_test_file, sep=',', index=False, header=None)
<!-- Static demo table: one row per user (name, email, country). -->
<table>
  <thead>
    <tr>
      <th>Name</th>
      <th>Email</th>
      <th>Country</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <td>John Doe</td>
      <td>john@example.com</td>
      <td>USA</td>
    </tr>
    <tr>
      <td>Jane Doe</td>
      <td>jane@example.com</td>
      <td>UK</td>
    </tr>
    <tr>
      <td>Maxwell Math</td>
      <td>max@example.com</td>
      <td>Canada</td>
    </tr>
  </tbody>
</table>
<reponame>matom20/tuke
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "k.h"
#define MATRIX 4
// MAGIC FUNCTION from k.h
// Spawns a new tile ('A' or 'B', equal probability) on a uniformly random
// empty cell of the 4x4 board.
// NOTE(review): the do/while loops forever if no cell is ' ' — callers must
// guarantee at least one empty tile before calling.
void add_random_tile(GAME *game){
    int row, col;
    // find random, but empty tile
    do {
        row = rand() % 4;
        col = rand() % 4;
    } while(game->board[row][col] != ' ');
    // place to the random position 'A' or 'B' tile
    int tile = 'A' + (rand() % 2);
    game->board[row][col] = tile;
}
/**
 * Function finding charakters in string
 *
 * Linear scan for a single character in the first *size cells of text.
 * Improvement: returns directly on the first match instead of maintaining
 * a counter and ending with `if (count != 0) return true; else return false;`.
 *
 * @param text list of characters
 * @param find character finding in list (only *find, the first char, is used)
 * @param size number of cells of text to examine
 * @return true if the character occurs, false otherwise
 */
bool searchLetter(const char text[], const char *find, const int *size){
    for (int i = 0; i < *size; i++) {
        if (text[i] == *find) {
            return true;
        }
    }
    return false;
}
// MAGIC FUNCTION from k.h
// Draws the 4x4 board as an ASCII grid: a top border line, then one
// "| x | x | x | x |" row plus separator per board row.
void render(const GAME game) {
    int matrixInt = MATRIX;
    printf("\t+---+---+---+---+");
    // | A |   |   |   |
    for (int i = 0; i < matrixInt; i++) {
        printf("\n\t| ");
        for (int s = 0; s < matrixInt; s++) {
            printf("%c", game.board[i][s]);
            printf(" | ");
        }
        printf("\n");
        printf("\t+---+---+---+---+");
    }
}
// MAGIC FUNCTION from k.h
// The game is won as soon as any row of the board contains the 'K' tile.
bool is_game_won(const GAME game) {
    const int cols = MATRIX;
    for (int row = 0; row < MATRIX; row++) {
        if (searchLetter(game.board[row], "K", &cols)) {
            return true;
        }
    }
    return false;
}
// MAGIC FUNCTION from k.h
// A move is possible when the game is not yet won AND either (a) some cell is
// empty, or (b) two equal tiles are horizontally or vertically adjacent
// (i.e. could be merged).
bool is_move_possible(const GAME game) {
    int matrixInt = MATRIX;
    if (is_game_won(game)) return false;
    for (int i = 0; i < matrixInt; i++) {
        // (a) any empty cell in this row?
        if (searchLetter(game.board[i], " ", &matrixInt)) return true;
        // LETTER
        for (int s = 0; s < matrixInt; s++) {
            // (b) equal neighbour to the right or below (bounds-checked first)
            if ( (matrixInt-1 > s && (char)game.board[i][s] == (char)game.board[i][s+1]) || // CHECK RIGHT
                 (matrixInt-1 > i && (char)game.board[i][s] == (char)game.board[i+1][s]) // CHECK DOWN
               ) return true;
        }
    }
    return false;
}
/**
 * Function updating score for player
 * Adds 2^(tile index) points: merging into 'A' scores 2, 'B' scores 4, etc.
 * @param game reference to the game object
 * @param letter the tile letter that resulted from the merge
 */
void update_score(GAME *game, const char letter) {
    game->score += pow(2, (int)((int)letter - 'A' + 1));
}
/**
 * Function replace letter A A -> B
 * Merges equal adjacent tiles along one row/column in the move direction:
 * the surviving tile is incremented ('A'+'A' -> 'B') and the score updated.
 * Exactly one of dy/dx is non-zero; its sign selects scan direction.
 * @param game reference to the game object
 * @param i is column of matrix
 * @param dy movement in y-axe
 * @param dx movement in x-axe
 * @return true if at least one merge happened
 */
bool replace_matrix(GAME *game, int i, int *dy, int *dx) {
    int m, g, swapArray[10], d; bool retrn = false;
    int matrixInt = MATRIX;
    // CHECK FOR LOOP
    // Count how many cells of this line equal its first cell; if the whole
    // line (or half) matches, two merge passes are needed, otherwise one.
    m = g = 0;
    for (int s = 0; s < matrixInt; s++) {
        // swapArray[0..1] hold the (row, col) of the cell being inspected;
        // dy selects column-i traversal, dx selects row-i traversal.
        if (*dy != 0) { swapArray[0] = s; swapArray[1] = i; }
        else if(*dx != 0) { swapArray[0] = i; swapArray[1] = s; }
        if (s == 0) g = game->board[swapArray[0]][swapArray[1]];
        if (g == game->board[swapArray[0]][swapArray[1]]) m++;
    }
    if (m == 4 || m == 2) g = 2; else g = 1;
    // LOOP — g merge passes over the line
    for (int s = 0; s < g; s++) {
        // positive direction scans from the far end backwards
        if (*dy == 1 || *dx == 1) d = matrixInt-1;
        else d = 0;
        // REPLACE
        while( ( (*dy == -1 || *dx == -1) && d < matrixInt) || ( (*dy == 1 || *dx == 1) && d >= 0) ) { // LEFT, UP || RIGHT, DOWN
            // swapArray[0..1] = current cell, swapArray[2..3] = next cell in scan order
            if (*dy != 0) { swapArray[0] = d; swapArray[1] = i; swapArray[2] = d+1; swapArray[3] = i; }
            else if(*dx != 0) { swapArray[0] = i; swapArray[1] = d; swapArray[2] = i; swapArray[3] = d+1; }
            // merge when current is non-empty (32 == ' ') and equals its neighbour
            if ((int)game->board[swapArray[0]][swapArray[1]] != 32 && matrixInt-1 > d && (int)game->board[swapArray[0]][swapArray[1]] == (int)game->board[swapArray[2]][swapArray[3]]) {
                game->board[swapArray[0]][swapArray[1]] += 1;
                game->board[swapArray[2]][swapArray[3]] = ' ';
                update_score(game, game->board[swapArray[0]][swapArray[1]]);
                retrn = true;
                break;
            }
            if ((*dy == 1 || *dx == 1)) d--;
            else d++;
        }
    }
    return retrn;
}
/**
 * Move a movable tile from the source cell into the (blank) target cell.
 * swapArray[0..1] index the target, swapArray[3..4] the source; only
 * letters ('A' == 65 and above) count as movable tiles.
 * @param game reference to the game object
 * @param swapArray determined indexes of the target and source cells
 * @return true when a tile was moved
 */
bool update_matIf(GAME *game, int swapArray[]) {
	if ((int)game->board[swapArray[3]][swapArray[4]] < 65)
		return false; /* source is blank — nothing to slide */
	game->board[swapArray[0]][swapArray[1]] = game->board[swapArray[3]][swapArray[4]];
	game->board[swapArray[3]][swapArray[4]] = ' ';
	return true;
}
/**
 * Compacting pass: slides letters toward the move direction so blanks are
 * filled (e.g. "A _ B" becomes "A B _" when moving left).
 * @param game reference to the game object
 * @param i index of the column (dy != 0) or row (dx != 0) being walked
 * @param dy movement in y-axe (-1 up, 1 down, 0 none)
 * @param dx movement in x-axe (-1 left, 1 right, 0 none)
 * @return true when at least one letter was moved
 */
bool update_matrix_walk(GAME *game, int i, int *dy, int *dx) {
int matrixInt = MATRIX;
int swapArray[10]; bool retrn = false;
// ALL UPDATE
if (*dy == -1 || *dx == -1) // FOR LEFT, UP: scan from the low edge
for (int s = 0; s < matrixInt; s++) {
//printf("S: %d\n", s);
// swapArray[0..1] = destination cell s of column i (dy) or row i (dx).
if (*dy != 0) { swapArray[0] = s; swapArray[1] = i;}
else if(*dx != 0) { swapArray[0] = i; swapArray[1] = s;}
// UPDATE: destination is blank (32 == ' ') — pull the next tile into it.
if ((int)game->board[swapArray[0]][swapArray[1]] == 32) {
for (int d = s; d < matrixInt; d++) {
// swapArray[3..4] = candidate source cell d beyond the blank.
if (*dy != 0) { swapArray[3] = d; swapArray[4] = i; }
else if(*dx != 0) { swapArray[3] = i; swapArray[4] = d; }
if (update_matIf(game, swapArray)) { retrn = true; break; }
}
}
}
else if (*dy == 1 || *dx == 1) // FOR RIGHT, DOWN: scan from the high edge
for (int s = matrixInt-1; s >= 0; s--) {
if (*dy != 0) { swapArray[0] = s; swapArray[1] = i; }
else if(*dx != 0) { swapArray[0] = i; swapArray[1] = s; }
// UPDATE: destination is blank — pull the nearest tile before it.
if ((int)game->board[swapArray[0]][swapArray[1]] == 32) {
// Start index expression simplifies to s-1 (the cell just before s).
for (int d = matrixInt-1-((matrixInt-1)-s+1); d >= 0; d--) {
if (*dy != 0) { swapArray[3] = d; swapArray[4] = i; }
else if(*dx != 0) { swapArray[3] = i; swapArray[4] = d; }
if (update_matIf(game, swapArray)) { retrn = true; break; }
}
}
}
return retrn;
}
// MAGIC FUNCTION from k.h
/* Apply one move to the whole board: for every line, compact the tiles
 * toward the move direction, merge equal neighbours, then compact again.
 * Exactly one of dy/dx must be non-zero; returns true when the board
 * changed (slide or merge). */
bool update(GAME *game, int dy, int dx) {
	bool vertical = (dx == 0 && dy != 0);
	bool horizontal = (dx != 0 && dy == 0);
	if (!vertical && !horizontal)
		return false; /* reject diagonal or null moves */
	bool changed = false;
	int size = MATRIX;
	for (int line = 0; line < size; line++) {
		// UPDATE
		if (update_matrix_walk(game, line, &dy, &dx)) changed = true;
		// REPLACE
		if (replace_matrix(game, line, &dy, &dx)) changed = true;
		// AGAIN UPDATE: close the gaps opened by merges.
		update_matrix_walk(game, line, &dy, &dx);
	}
	return changed;
}
|
<reponame>shadowIdeas/shadowAPI3
#include "stdafx.h"
#include "Client.h"
// Connect to the named pipe "EBIP0" (retrying until the server exposes an
// instance), switch it to message mode, initialise the per-slot state and
// start the background communication thread.
Client::Client()
{
	// Poll every 20 ms until a pipe instance can be opened.
	while (!WaitNamedPipe(L"\\\\.\\pipe\\EBIP0", NMPWAIT_WAIT_FOREVER))
		Sleep(20);
	_pipe = CreateFile(L"\\\\.\\pipe\\EBIP0", GENERIC_WRITE | GENERIC_READ, 0, 0, OPEN_EXISTING, FILE_FLAG_OVERLAPPED, 0);
	// Message mode: each ReadFile delivers one complete server message.
	DWORD mode = PIPE_READMODE_MESSAGE;
	SetNamedPipeHandleState(_pipe, &mode, 0, 0);
	_freed = false;
	// Slot i signals its response via _events[i] and requests a write via
	// _communicationEvents[i+2]; indices 0/1 of _communicationEvents are
	// the read/write events owned by CommunicationThread.
	for (size_t i = 0; i < ARRAYSIZE(_events); i++)
	{
		_events[i] = CreateEvent(0, 1, 0, 0);
		_communicationEvents[i+2] = CreateEvent(0, 1, 0, 0);
		_used[i] = false;
		_out[i] = nullptr;
	}
	_running = true;
	_readThread = std::thread(&Client::CommunicationThread, this);
}
// Tear down the connection: close the pipe, stop and join the reader
// thread, then release per-slot handles via Free().
// NOTE(review): the pipe handle is closed while CommunicationThread may
// still be blocked on it, and _running is only sampled between waits —
// confirm this shutdown ordering is intentional.
Client::~Client()
{
	CloseHandle(_pipe);
	_running = false;
	if (_readThread.joinable())
		_readThread.join();
	Free();
}
// Allocate a fresh request message for the given packet type.
std::shared_ptr<ClientMessage> Client::CreateMessage(PacketIdentifier identifier)
{
	return std::make_shared<ClientMessage>(identifier);
}
void Client::Write(std::shared_ptr<ClientMessage> message)
{
if (_freed)
{
message->Invalidate();
return;
}
int id = -1;
{
std::lock_guard<std::mutex> guard(_idMutex);
for (size_t i = 0; i < ARRAYSIZE(_events); i++)
{
if (!_used[i])
{
_used[i] = true;
id = i;
break;
}
}
}
// Wait until a ID is free
while (id == -1)
{
Sleep(20);
if (_freed)
{
message->Invalidate();
return;
}
std::lock_guard<std::mutex> guard(_idMutex);
for (size_t i = 0; i < ARRAYSIZE(_events); i++)
{
if (!_used[i])
{
_used[i] = true;
id = i;
break;
}
}
}
SerializeableQueue queue;
queue.WriteInteger(id);
queue.WriteRawBytes(message->GetInput().GetData());
auto data = queue.GetData();
_communicationData[id] = data;
SetEvent(_communicationEvents[id+2]);
WaitForSingleObject(_events[id], INFINITE);
if (_freed)
{
message->Invalidate();
return;
}
auto output = _out[id];
message->SetResponse(_out[id]);
_out[id] = nullptr;
ResetEvent(_events[id]);
_used[id] = false;
}
// Round-trip helper: send `identifier` with no payload and return the
// integer reply, or -1 when the request was invalidated.
int Client::FastReadInteger(PacketIdentifier identifier)
{
	auto request = CreateMessage(identifier);
	Write(request);
	return request->IsInvalid() ? -1 : request->GetResponse()->ReadInteger();
}
// Round-trip helper: send `identifier` with no payload and return the
// float reply, or 0.0f when the request was invalidated.
float Client::FastReadFloat(PacketIdentifier identifier)
{
	auto request = CreateMessage(identifier);
	Write(request);
	return request->IsInvalid() ? 0.0f : request->GetResponse()->ReadFloat();
}
// Round-trip helper: send `identifier` with no payload and return the
// boolean reply, or false when the request was invalidated.
bool Client::FastReadBoolean(PacketIdentifier identifier)
{
	auto request = CreateMessage(identifier);
	Write(request);
	return request->IsInvalid() ? false : request->GetResponse()->ReadBoolean();
}
// Round-trip helper: send `identifier` with no payload and return the
// string reply, or an empty string when the request was invalidated.
std::wstring Client::FastReadString(PacketIdentifier identifier)
{
	auto request = CreateMessage(identifier);
	Write(request);
	return request->IsInvalid() ? std::wstring() : request->GetResponse()->ReadString();
}
// Fire a parameterless request and wait for its (ignored) completion.
void Client::FastWriteVoid(PacketIdentifier identifier)
{
	Write(CreateMessage(identifier));
}
// Send a request whose payload is a single integer; waits for completion.
void Client::FastWriteInteger(PacketIdentifier identifier, int i)
{
	auto request = CreateMessage(identifier);
	request->GetInput().WriteInteger(i);
	Write(request);
}
// Send a request whose payload is a single boolean; waits for completion.
void Client::FastWriteBoolean(PacketIdentifier identifier, bool b)
{
	auto request = CreateMessage(identifier);
	request->GetInput().WriteBoolean(b);
	Write(request);
}
// Send a request whose payload is a single string; waits for completion.
void Client::FastWriteString(PacketIdentifier identifier, const std::wstring & s)
{
	auto request = CreateMessage(identifier);
	request->GetInput().WriteString(s);
	Write(request);
}
// Background loop servicing the pipe. Waits on up to 64 events:
//   [0] overlapped read completed, [1] overlapped write completed,
//   [2..] per-slot "please write" requests raised by Write().
// Responses are routed back to the waiting Write() via _out/_events.
void Client::CommunicationThread()
{
	HANDLE readEvent = CreateEvent(0, 1, 0, 0);
	HANDLE writeEvent = CreateEvent(0, 1, 0, 0);
	OVERLAPPED overlappedRead = { 0 };
	OVERLAPPED overlappedWrite = { 0 };
	overlappedRead.hEvent = readEvent;
	overlappedWrite.hEvent = writeEvent;
	_communicationEvents[0] = readEvent;
	_communicationEvents[1] = writeEvent;
	BYTE buffer[512 * 128] = {};
	DWORD bytesRead = 0;
	// Kick off the first asynchronous read.
	ReadFile(_pipe, buffer, 512 * 128, 0, &overlappedRead);
	while (_running)
	{
		DWORD triggeredEvent = WaitForMultipleObjects(64, _communicationEvents, false, INFINITE);
		switch (triggeredEvent)
		{
		case WAIT_OBJECT_0: // read completed
		{
			GetOverlappedResult(_pipe, &overlappedRead, &bytesRead, true);
			ResetEvent(readEvent);
			if (bytesRead == 0)
			{
				if (GetLastError() == ERROR_MORE_DATA)
					continue;
				// Pipe closed/broken: release all waiters and stop.
				Free();
				return;
			}
			std::vector<BYTE> bytes;
			for (size_t i = 0; i < bytesRead; i++)
				bytes.push_back(buffer[i]);
			// The first integer of every server message is the slot id it answers.
			auto in = std::make_shared<SerializeableQueue>(bytes);
			int id = in->ReadInteger();
			_out[id] = in;
			SetEvent(_events[id]); // wake the Write() blocked on this slot
			ResetEvent(readEvent);
			ReadFile(_pipe, buffer, 512 * 128, 0, &overlappedRead);
		}
		break;
		case WAIT_OBJECT_0+1: // write completed: start the next pending request, if any
		{
			ResetEvent(writeEvent);
			for (size_t i = 0; i < 62; i++)
			{
				DWORD eventIndex = i+2;
				// Zero-timeout wait == "is this slot's request event signalled?"
				bool triggered = WaitForSingleObjectEx(_communicationEvents[eventIndex], 0, true) != WAIT_TIMEOUT;
				if (triggered)
				{
					auto data = _communicationData[i];
					WriteFile(_pipe, data.data(), data.size(), 0, &overlappedWrite);
					ResetEvent(_communicationEvents[eventIndex]);
					break;
				}
			}
		}
		// NOTE(review): control falls through into the empty WAIT_TIMEOUT
		// case below (harmless, ends in break) — confirm a `break` here
		// was not intended.
		case WAIT_TIMEOUT:
		{
		}
		break;
		case WAIT_FAILED:
		{
			// Error code is fetched but not acted upon; kept for debugging.
			auto error = GetLastError();
		}
		break;
		default: // one of the per-slot request events (index >= 2) fired
		{
			// Only start a new pipe write when no overlapped write is in flight.
			bool triggered = WaitForSingleObjectEx(writeEvent, 0, true) != WAIT_TIMEOUT;
			if (!triggered)
			{
				DWORD eventIndex = triggeredEvent;
				auto data = _communicationData[eventIndex - 2];
				WriteFile(_pipe, data.data(), data.size(), 0, &overlappedWrite);
				ResetEvent(_communicationEvents[eventIndex]);
			}
		}
		break;
		}
	}
	CloseHandle(readEvent);
	CloseHandle(writeEvent);
}
// Release every waiter and destroy the per-slot event handles.
// Idempotent: subsequent calls are no-ops once _freed is set.
void Client::Free()
{
	if (_freed)
		return;
	_freed = true;
	for (size_t i = 0; i < ARRAYSIZE(_events); i++)
	{
		SetEvent(_events[i]); // wake any Write() blocked on this slot
		CloseHandle(_events[i]);
		CloseHandle(_communicationEvents[i + 2]);
	}
}
import re
def parse_network_arguments(arguments):
    """Parse ``--key=value`` style CLI arguments into a dict.

    Values may be bare tokens or double-quoted strings; surrounding
    quotes are stripped from the stored value.

    :param arguments: the raw argument string, e.g.
        ``'--hostname-override="127.0.0.1" --address="0.0.0.0"'``
    :return: dict mapping flag names (without ``--``) to their values
    """
    result = {}
    # [\w-]+ (not \w+) so hyphenated flags like --hostname-override match;
    # the value is either a quoted string or any run of non-space chars.
    pattern = r'--([\w-]+)=(".*?"|\S+)'
    for key, value in re.findall(pattern, arguments):
        result[key] = value.strip('"')
    return result
# Demo: parse a kubelet-style argument string into a config dict.
input_arguments = '--hostname-override="127.0.0.1" --address="0.0.0.0"'
network_config = parse_network_arguments(input_arguments)
# Expected: {'hostname-override': '127.0.0.1', 'address': '0.0.0.0'}
# NOTE(review): the hyphenated key only appears if the parser's regex
# accepts '-' inside key names — verify parse_network_arguments does.
print(network_config)
#!/usr/bin/env bash
# Scan log files and collect unis that have active users every month.
# Usage: <script> <temp-file> — matching lines are appended to the temp file.
[[ -z ${1} ]] && echo 'Temp folder is not specified' && exit 1
TMP_FILE=${1}
# Repository root: one level above this script's directory.
MD="`dirname $(readlink -f ${0})`/.."
# NOTE(review): A_DIR appears unused in this script — confirm before removing.
A_DIR=${MD}/src/app
DATA_FOLDER="/www/faculty/it/bwHPC/SCRIPTS/bwUniCluster2/sacct_logs"
# Two-letter university prefixes to look for in the sacct logs.
PREFIXES='aa as es hf hk hn hr hs ht hu ro mn of'
declare -i START_YEAR="2020"
# End at yesterday's year so a run just after New Year sees complete data.
declare -i END_YEAR=`date -d 'yesterday' '+%Y'`
# Print "<year>\t\t<month>\t\t<prefix>" once if the given university prefix
# has a non-zero user count anywhere in the log file; otherwise print nothing.
# $1 year, $2 month, $3 university prefix, $4 path to the monthly log file
is_haw_active()
{
year=${1}
month=${2}
prefix=${3}
log_file=${4}
while read -r line; do
# Split the line on whitespace: second field is the account name
# (expected "<prefix>_<prefix>"), fourth field the active-user count.
read -ra ADDR <<< ${line}
if [[ ${ADDR[1]} == "${prefix}_${prefix}" ]] && [[ ${ADDR[3]} != "0" ]]; then
echo -e "${year}\t\t${month}\t\t${prefix}"
return
fi
done < ${log_file}
return
}
# Scan every available monthly log of one year — from March 2020 (first log)
# or January otherwise, up to end_month — and append findings to TMP_FILE.
# $1 year, $2 last month to scan (may carry a leading zero)
read_monthly_logs()
{
declare -i year=${1}
end_month=${2}
end_month="${end_month#'0'}" # remove prefix '0' so the numeric compare works
declare -i start_month="1"
# Logging only began in March 2020.
[[ ${year} -eq "2020" ]] && declare -i start_month="3"
while [[ ${start_month} -le ${end_month} ]]; do
month_str=${start_month}
# Log file names use zero-padded months (e.g. 2020-03.log).
[[ ${start_month} -lt 10 ]] && month_str="0${start_month}"
log_file="${DATA_FOLDER}/${year}-${month_str}.log"
# Skip months that have no log file.
[[ ! -f ${log_file} ]] && start_month=$(( ${start_month} + 1 )) && continue
for prefix in ${PREFIXES}; do
is_haw_active ${year} ${start_month} ${prefix} ${log_file} >> ${TMP_FILE}
done
start_month=$(( ${start_month} + 1 ))
done
}
# Walk every year from START_YEAR to END_YEAR (inclusive), appending the
# active-university lines for each available monthly log to TMP_FILE.
while [[ ${START_YEAR} -le ${END_YEAR} ]]; do
echo "... Writing temp data ${START_YEAR} into ${TMP_FILE}"
END_MONTH="12"
# For the current year, only scan up to yesterday's month.
[[ ${START_YEAR} -eq ${END_YEAR} ]] && END_MONTH=`date -d 'yesterday' '+%m'`
read_monthly_logs ${START_YEAR} ${END_MONTH}
START_YEAR=$(( ${START_YEAR} + 1 ))
done
#!/bin/bash
# Elastic Beanstalk hook: register nginx and PHP-FPM with healthd monitoring.
set -xe
# Track the nginx master process for proxy health reporting.
/opt/elasticbeanstalk/bin/healthd-track-pidfile --proxy nginx
# Track PHP-FPM 7.1 as the application process via its pidfile.
/opt/elasticbeanstalk/bin/healthd-track-pidfile --name application --location /var/run/php-fpm/php-fpm-7.1.pid
<reponame>temoctzin/radish
# -*- coding: utf-8 -*-
class Hero(object):
    """Value holder for a hero's given name, family name and alias."""

    def __init__(self, forename, surname, hero):
        # Public attribute names are part of the interface; keep them as-is.
        self.heroname = hero
        self.surname = surname
        self.forename = forename
|
<reponame>tchaik/tchaik<filename>cmd/tchaik/ui/js/src/components/TimeFormatter.js
"use strict";
import React from "react";
// Left-pad the decimal representation of `n` with zeros until it is at
// least `width` characters long; returns it unchanged if already longer.
function zeroPad(n, width) {
  const digits = String(n);
  const missing = width - digits.length;
  return missing > 0 ? "0".repeat(missing) + digits : digits;
}
// Format a duration in seconds as "H:MM:SS" (or "M:SS" below one hour).
// Fractional seconds are truncated.
function timeText(time) {
  let text = "";
  let minsPad = 0;
  // Math.floor instead of parseInt: parseInt coerces through a string,
  // which mis-parses numbers in exponential notation (e.g. 1e21 -> 1).
  let totalSeconds = Math.floor(time);
  const hours = Math.floor(totalSeconds / 3600);
  if (hours > 0) {
    text += hours + ":";
    totalSeconds %= 3600;
    minsPad = 2; // minutes need two digits once hours are shown
  }
  const mins = Math.floor(totalSeconds / 60);
  const secs = totalSeconds % 60;
  text += zeroPad(mins, minsPad) + ":" + zeroPad(secs, 2);
  return text;
}
const TimeFormatter = ({time, ...others}) => {
if (isNaN(time)) {
return null;
}
return <span {...others}>{timeText(time)}</span>;
}
export default TimeFormatter;
|
#!/bin/bash
# Example query adapted from the NIST SQL examples page:
# http://www.itl.nist.gov/div897/ctg/dm/sql_examples.htm
# Pipe the data produced by test4.1.bash into tssql, reading stdin as the
# table `-`: sub-zero January readings ordered numerically by rainfall.
./test4.1.bash | ../dist/build/tssql/tssql 'select * from `-` WHERE TEMP_C < 0 AND MONTH = 1 ORDER BY RAIN_C + 0'
<gh_stars>1-10
package com.atlassian.maven.plugins.sample;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.maven.repository.RepositorySystem;
/**
 * Maven mojo that replaces the project's main artifact with an identical
 * artifact whose classifier is forced to {@code fork}.
 */
@Mojo(name = "add-classifier")
public class AddClassifierMojo extends AbstractMojo {

    @Parameter(defaultValue = "${project}", required = true)
    private MavenProject project;

    @Component
    private RepositorySystem repositorySystem;

    public void execute() {
        final Artifact current = project.getArtifact();
        // Same groupId/artifactId/version/type — only the classifier changes.
        final Artifact forked = repositorySystem.createArtifactWithClassifier(
                current.getGroupId(),
                current.getArtifactId(),
                current.getVersion(),
                current.getType(),
                "fork");
        project.setArtifact(forked);
    }
}
|
<filename>src/main/java/at/joanneum/swd/esamvc/entity/Todo.java
package at.joanneum.swd.esamvc.entity;
import javax.persistence.*;
/** JPA entity representing a single todo item. */
@Entity
public class Todo {

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private int id;

    @Column
    private String name;

    @Column
    private Boolean done;

    @Column
    private String description;

    public Todo(String name, String description, Boolean done) {
        this.name = name;
        this.done = done;
        this.description = description;
    }

    /** Default constructor required by JPA; not for application use. */
    protected Todo() {
    }

    public int getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public Boolean getDone() {
        return done;
    }

    public void setDone(Boolean done) {
        this.done = done;
    }
}
|
#!/bin/bash
# Download and unpack one ImageNet synset tarball per WordNet id.
# Requires an image-net.org account (credentials embedded in the URL).
# go login
# http://www.image-net.org/login
#username lertlove
#password obodroid
mkdir -p ../heavystuffishere/testloadimagenet
cd ../heavystuffishere/testloadimagenet
OBJECT_LIST=('n01695060' 'n01729322' 'n03127747' 'n03701391')
echo "${OBJECT_LIST[*]}"
for wnid in "${OBJECT_LIST[@]}"; do
    mkdir -p "${wnid}"
    cd "${wnid}"
    # -O names the archive explicitly. Previously the raw URL (containing
    # '?' and '&') was passed unquoted to tar, so the shell split it into
    # a backgrounded `tar xvf synset?wnid=...` plus stray variable
    # assignments and the archive was never extracted.
    wget -O "${wnid}.tar" "http://www.image-net.org/download/synset?wnid=${wnid}&username=lertlove&accesskey=0b188567e69ce114b046082cb49cbd40f1d5e414&release=latest&src=stanford"
    tar xvf "${wnid}.tar"
    # Alternative without login: fetch the URL list and wget each image,
    # e.g. wget "http://image-net.org/api/text/imagenet.synset.geturls?wnid=${wnid}" -O "${wnid}.txt"
    # then: wget -i "${wnid}.txt"   (add timeouts/retries to skip dead links)
    cd ..
done
cd ../../src
# Also add CoNLL-2003 - mixup
# Script for training all models on MRPC, COLA, SST2-10% and CoNLL-2003 for mixup
# Pipeline: (1) train MSD/mixup ELECTRA models per task, (2) run uncertainty
# estimation over each trained model series, (3) aggregate UE metrics.
# Commented-out variants repeat each stage with spectral normalization (SN).
# train - MSD loss, no SN
HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=0.6 mixup.lam1\=0.05 mixup.lam2\=0.01 training.learning_rate\=5e-5 training.num_train_epochs\=11 training.per_device_train_batch_size\=64 +training.weight_decay\=0.01' task_configs=mrpc_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra/mrpc
HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=0.85 mixup.lam1\=0.2 mixup.lam2\=0.05 training.learning_rate\=3e-5 training.num_train_epochs\=12 training.per_device_train_batch_size\=32 +training.weight_decay\=0.1' task_configs=cola_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra/cola
HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=0.6 mixup.lam1\=0.006 mixup.lam2\=0.2 training.learning_rate\=2e-5 training.num_train_epochs\=11 training.per_device_train_batch_size\=32 +training.weight_decay\=0.1' task_configs=sst2_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra/sst2
# CoNLL
HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py script=run_conll2003.py task_configs=conll2003_mixup.yaml args='do_ue_estimate\=False data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False mixup.self_ensembling\=True mixup.omega\=0.55 mixup.lam1\=0.05 mixup.lam2\=1.0 training.learning_rate\=7e-5 training.num_train_epochs\=6 training.per_device_train_batch_size\=8 +training.weight_decay\=0.01' seeds=[23419,705525,4837,10671619,1084218,43] cuda_devices=[0] output_dir='../workdir/run_train_models/mixup_electra/conll'
# Uncomment if want to train MSD with SN
# train - MSD loss, SN
#HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=0.5 mixup.lam1\=0.2 mixup.lam2\=0.05 training.learning_rate\=7e-5 training.num_train_epochs\=3 training.per_device_train_batch_size\=16 +training.weight_decay\=0.01 +ue.use_spectralnorm\=True spectralnorm_layer\=last' task_configs=mrpc_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra_sn/mrpc
#HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=0.85 mixup.lam1\=0.2 mixup.lam2\=0.03 training.learning_rate\=2e-5 training.num_train_epochs\=14 training.per_device_train_batch_size\=32 +training.weight_decay\=0.1 +ue.use_spectralnorm\=True spectralnorm_layer\=last' task_configs=cola_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra_sn/cola
#HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] args='ue\=mc do_ue_estimate\=False ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.dropout_subs\=last ue.use_cache\=False mixup.self_ensembling\=True mixup.omega\=1.0 mixup.lam1\=0.002 mixup.lam2\=0.01 training.learning_rate\=7e-5 training.num_train_epochs\=3 training.per_device_train_batch_size\=16 +training.weight_decay\=0.01 +ue.use_spectralnorm\=True spectralnorm_layer\=last' task_configs=sst2_mixup.yaml output_dir=../workdir/run_train_models/mixup_electra_sn/sst2
# CoNLL
#HYDRA_CONFIG_PATH=../configs/run_train_models.yaml python ./run_train_models.py script=run_conll2003.py task_configs=conll2003_mixup.yaml args='do_ue_estimate\=False data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False mixup.self_ensembling\=True mixup.omega\=0.5 mixup.lam1\=0.001 mixup.lam2\=1.0 training.learning_rate\=5e-5 training.num_train_epochs\=13 training.per_device_train_batch_size\=16 +training.weight_decay\=0.01 ue.use_spectralnorm\=True spectralnorm_layer\=last' seeds=[23419,705525,4837,10671619,1084218,43] cuda_devices=[0] output_dir='../workdir/run_train_models/mixup_electra_sn/conll'
# MC maha
# Stage 2: uncertainty estimation (MSD) over each trained model series,
# once with dropout on all layers ("all") and once on the last layer only ("last").
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/mrpc_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/mrpc/models/mrpc_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False' output_dir='../workdir/run_glue_for_model_series/mixup_electra/mrpc/msd/all'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/mrpc_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/mrpc/models/mrpc_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra/mrpc/msd/last'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/cola_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/cola/models/cola_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False' output_dir='../workdir/run_glue_for_model_series/mixup_electra/cola/msd/all'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/cola_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/cola/models/cola_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra/cola/msd/last'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/sst2_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/sst2/models/sst2_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False' output_dir='../workdir/run_glue_for_model_series/mixup_electra/sst2/msd/all'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/sst2_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/sst2/models/sst2_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra/sst2/msd/last'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_ner_for_model_series.py config_path=../configs/conll2003_mixup.yaml model_series_dir='../workdir/run_train_models/mixup_electra/conll/models/conll2003_mixup/' args='ue\=msd ue.calibrate\=True ue.use_cache\=False do_ue_estimate\=True ue.ue_type\=msd ue.dropout_subs\=last data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False' cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] output_dir='../workdir/run_ner_for_model_series/mixup_electra/conll/mixup/last'
HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_ner_for_model_series.py config_path=../configs/conll2003_mixup.yaml model_series_dir='../workdir/run_train_models/mixup_electra/conll/models/conll2003_mixup/' args='ue\=msd ue.calibrate\=True ue.use_cache\=False do_ue_estimate\=True ue.ue_type\=msd ue.dropout_subs\=all data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False' cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] output_dir='../workdir/run_ner_for_model_series/mixup_electra/conll/mixup/all'
# Same with SN
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/mrpc_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/mrpc/models/mrpc_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/mrpc/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/mrpc_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/mrpc/models/mrpc_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/mrpc/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/cola_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/cola/models/cola_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/cola/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/cola_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/cola/models/cola_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/cola/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/sst2_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/sst2/models/sst2_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/sst2/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/sst2_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra_sn/sst2/models/sst2_mixup/ args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last ue.use_spectralnorm\=True spectralnorm_layer\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/sst2/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_ner_for_model_series.py config_path=../configs/conll2003_mixup.yaml model_series_dir='../workdir/run_train_models/mixup_electra_sn/conll/models/conll2003_mixup/' args='ue\=msd ue.calibrate\=True ue.use_cache\=False do_ue_estimate\=True ue.ue_type\=msd ue.dropout_subs\=last data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False ue.use_spectralnorm\=True spectralnorm_layer\=last' cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] output_dir='../workdir/run_ner_for_model_series/mixup_electra_sn/conll/mixup/last'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_ner_for_model_series.py config_path=../configs/conll2003_mixup.yaml model_series_dir='../workdir/run_train_models/mixup_electra_sn/conll/models/conll2003_mixup/' args='ue\=msd ue.calibrate\=True ue.use_cache\=False do_ue_estimate\=True ue.ue_type\=msd ue.dropout_subs\=all data.subsample_perc_val\=0.1 data.subsample_perc\=0.1 ue.use_selective\=False ue.use_spectralnorm\=True spectralnorm_layer\=last' cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] output_dir='../workdir/run_ner_for_model_series/mixup_electra_sn/conll/mixup/all'
# 20ng
# TODO: switch script in run_glue_for_model_series
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py script=run_newsgroup.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/20ng_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/20ng/models/20ng_mixup/ script=run_newsgroup.py args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False' output_dir='../workdir/run_glue_for_model_series/mixup_electra/20ng/mixup/all'
#HYDRA_CONFIG_PATH=../configs/run_glue_for_model_series.yaml python ./run_glue_for_model_series.py script=run_newsgroup.py cuda_devices=[0] seeds=[23419,705525,4837,10671619,1084218,43] config_path=../configs/20ng_mixup.yaml model_series_dir=../workdir/run_train_models/mixup_electra/20ng/models/20ng_mixup/ script=run_newsgroup.py args='ue\=msd do_ue_estimate\=True ue.use_selective\=False ue.calibrate\=True data.validation_subsample\=0.0 training\=electra_base ue.use_cache\=False ue.dropout_subs\=last' output_dir='../workdir/run_glue_for_model_series/mixup_electra/20ng/mixup/last'
# Finally, calc metrics
# Stage 3: aggregate UE metrics per task/config from the stage-2 results.
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py table_metrics=['table_f1_micro','table_f1_macro'] runs_dir='../workdir/run_glue_for_model_series/mixup_electra/mrpc/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/mrpc/msd/all'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra/mrpc/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/mrpc/msd/last'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra/cola/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/cola/msd/all'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra/cola/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/cola/msd/last'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra/sst2/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/sst2/msd/all'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra/sst2/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/sst2/msd/last'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics_ner.py runs_dir='../workdir/run_ner_for_model_series/mixup_electra/conll/mixup/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/conll/mixup/last'
HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics_ner.py runs_dir='../workdir/run_ner_for_model_series/mixup_electra/conll/mixup/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra/conll/mixup/all'
# Metrics for sn case
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py table_metrics=['table_f1_micro','table_f1_macro'] runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/mrpc/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/mrpc/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn_test/mrpc/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/mrpc/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/cola/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/cola/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/cola/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/cola/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/sst2/msd/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/sst2/msd/all'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics.py runs_dir='../workdir/run_glue_for_model_series/mixup_electra_sn/sst2/msd/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/sst2/msd/last'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics_ner.py runs_dir='../workdir/run_ner_for_model_series/mixup_electra_sn/conll/mixup/last/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/conll/mixup/last'
#HYDRA_CONFIG_PATH=../configs/run_calc_ues_metrics.yaml python ./run_calc_ues_metrics_ner.py runs_dir='../workdir/run_ner_for_model_series/mixup_electra_sn/conll/mixup/all/results' output_dir='../workdir/run_calc_ues_metrics/mixup_electra_sn/conll/mixup/all'
|
def factorial(num):
    """Return ``num!`` (the factorial of ``num``).

    Args:
        num: A non-negative integer.

    Returns:
        The product ``1 * 2 * ... * num``; ``1`` when ``num`` is 0 or 1.

    Raises:
        ValueError: If ``num`` is negative (the original silently returned 1,
            which is mathematically wrong).
    """
    if num < 0:
        raise ValueError("factorial() is not defined for negative numbers")
    # Renamed the accumulator: the original shadowed the function name.
    result = 1
    for i in range(2, num + 1):
        result *= i
    return result
#!/bin/bash
# Build the images with make, reclaim ownership of the build output
# (the build may run as root), then pigz-compress the *.img artifacts.
# -x traces every command; -e aborts on the first failure.
set -xe
make
sudo chown -R "$(id -u)":"$(id -g)" out
echo "Before compression:"
ls -lh out/
cd out
# BUG FIX: compress *before* printing the "After compression" label —
# previously the label was echoed before pigz had run, so the trace order lied.
pigz *.img
echo "After compression:"
ls -lh .
|
# Multicrop SimCLR pretraining on ImageNet-100 for 400 epochs.
#  - backbone resnet18; data read from /datasets/imagenet-100 via NVIDIA DALI (--dali)
#  - multi-GPU DDP (--gpus 0,1) with synced batchnorm and fp16 (--precision 16)
#  - SGD + LARS (bias/norm excluded from LARS adaptation), warmup-cosine LR
#    schedule, base lr 0.3, weight decay 1e-4, batch size 128 per worker group
#  - color-jitter augmentation (brightness/contrast/saturation 0.8, hue 0.2)
#  - multicrop: 2 crops at 224px plus 6 crops at 96px per image
#    (--crop_size 224 96 paired with --num_crops_per_aug 2 6)
#  - SimCLR objective: projector hidden dim 2048, temperature 0.1
#  - logs to Weights & Biases (project solo-learn, entity unitn-mhug) and
#    saves checkpoints
# NOTE(review): no comments can be placed between the continued lines below —
# a '#' line inside a '\'-continuation would comment out the remaining args.
python3 ../../../main_pretrain.py \
    --dataset imagenet100 \
    --backbone resnet18 \
    --data_dir /datasets \
    --train_dir imagenet-100/train \
    --val_dir imagenet-100/val \
    --max_epochs 400 \
    --gpus 0,1 \
    --accelerator gpu \
    --strategy ddp \
    --sync_batchnorm \
    --precision 16 \
    --optimizer sgd \
    --lars \
    --grad_clip_lars \
    --eta_lars 0.02 \
    --exclude_bias_n_norm \
    --scheduler warmup_cosine \
    --lr 0.3 \
    --weight_decay 1e-4 \
    --batch_size 128 \
    --num_workers 4 \
    --brightness 0.8 \
    --contrast 0.8 \
    --saturation 0.8 \
    --hue 0.2 \
    --crop_size 224 96 \
    --num_crops_per_aug 2 6 \
    --name multicrop-simclr-400ep-imagenet100 \
    --dali \
    --project solo-learn \
    --entity unitn-mhug \
    --wandb \
    --save_checkpoint \
    --method simclr \
    --proj_hidden_dim 2048 \
    --temperature 0.1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.