text stringlengths 1 1.05M |
|---|
/*
*
*/
package net.community.apps.common.test;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Dimension;
import java.awt.Image;
import java.awt.event.ActionEvent;
import java.io.File;
import java.util.List;
import javax.swing.ImageIcon;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JScrollPane;
import javax.swing.filechooser.FileNameExtensionFilter;
import net.community.chest.awt.image.AbstractImageReader;
import net.community.chest.awt.image.BMPReader;
import net.community.chest.awt.image.DefaultImageReader;
import net.community.chest.awt.image.ICOReader;
import net.community.chest.swing.component.label.DefaultLabelScroll;
import net.community.chest.swing.options.BaseOptionPane;
import net.community.chest.ui.components.combobox.ImagesComboBox;
import net.community.chest.ui.helpers.combobox.TypedComboBox;
import net.community.chest.ui.helpers.combobox.TypedComboBoxActionListener;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* <P>Displays loaded ICO files</P>
*
* @author <NAME>.
* @since Nov 13, 2008 1:41:49 PM
*/
public class TestFilesReaderFrame extends TestMainFrame {
    private static final long serialVersionUID = 5742449879087735812L;

    /** Preview label showing the currently selected image; created lazily in {@link #layoutComponent()}. */
    private JLabel _lblImage /* =null */;

    /**
     * Displays the given image in the preview label.
     * No-op until {@link #layoutComponent()} has created the label.
     *
     * @param img image to display (wrapped in an {@link ImageIcon})
     */
    protected void showImage (final Image img)
    {
        if (_lblImage != null)
            _lblImage.setIcon(new ImageIcon(img));
    }

    /** Combo box used to select one of the images loaded from the current file. */
    private TypedComboBox<Image> _imgSel /* =null */;

    /**
     * Re-populates the selection combo box with the loaded images and shows
     * the first non-null one. Clears and disables the combo box first, so a
     * null/empty list effectively resets the selector.
     *
     * @param il loaded images - may be null or empty; null entries are skipped
     */
    public void setImages (final List<? extends Image> il)
    {
        if (_imgSel != null)
        {
            _imgSel.removeAllItems();
            _imgSel.setEnabled(false);
        }

        final int numImages=(null == il) ? 0 : il.size();
        if ((numImages <= 0) || (null == _imgSel))
            return;

        int idx1=(-1);  // index (in il) of the first non-null image
        for (int i=0; i < numImages; i++)
        {
            final Image img=il.get(i);
            if (null == img)
                continue;   // skip missing entries; item text keeps the original index
            _imgSel.addItem(String.valueOf(i), img);
            if (idx1 < 0)
                idx1 = i;
        }

        if (idx1 >= 0)
        {
            // NOTE(review): idx1 is an index into il, but if null entries were
            // skipped above, combo-box positions no longer match list indices -
            // setSelectedIndex(idx1) may then select the wrong item; confirm.
            _imgSel.setSelectedIndex(idx1);
            showImage(il.get(idx1));
            _imgSel.setEnabled(true);
        }
    }

    /*
     * @see net.community.apps.common.BaseMainFrame#loadFile(java.io.File, java.lang.String, org.w3c.dom.Element)
     */
    @Override
    public void loadFile (File f, String cmd, Element dlgElement)
    {
        final String filePath=(null == f) ? null : f.getAbsolutePath();
        if ((null == filePath) || (filePath.length() <= 0))
            return; // nothing to load

        try
        {
            // Choose a reader by file type; ICO and BMP have dedicated readers,
            // everything else falls through to the default one.
            // NOTE(review): ICOReader/DefaultImageReader reuse shared DEFAULT
            // instances while a fresh BMPReader is allocated per call -
            // presumably BMPReader is stateful; confirm.
            final AbstractImageReader r;
            if (ICOReader.isIconFile(filePath))
                r = ICOReader.DEFAULT;
            else if (BMPReader.isBitmapFile(filePath))
                r = new BMPReader();
            else
                r = DefaultImageReader.DEFAULT;

            final List<? extends Image> il=r.readImages(filePath);
            final int numImages=(null == il) ? 0 : il.size();
            JOptionPane.showMessageDialog(this, "Loaded " + numImages + " images", "Images loaded", JOptionPane.INFORMATION_MESSAGE);
            setTitle(filePath);
            setImages(il);
        }
        catch(Exception e)
        {
            // Show the exception to the user instead of failing silently
            BaseOptionPane.showMessageDialog(this, e);
        }
    }

    /*
     * @see net.community.apps.common.BaseMainFrame#layoutComponent()
     */
    @Override
    public void layoutComponent () throws RuntimeException
    {
        super.layoutComponent();

        final Container ctPane=getContentPane();
        // Image selector at the top - selection changes update the preview
        if (null == _imgSel)
        {
            _imgSel = new ImagesComboBox.SimpleImagesComboBox();
            _imgSel.setOpaque(false);
            _imgSel.addActionListener(new TypedComboBoxActionListener<Image,ImagesComboBox<Image>>() {
                /*
                 * @see net.community.chest.ui.helpers.combobox.TypedComboBoxActionListener#handleSelectedItem(java.awt.event.ActionEvent, net.community.chest.ui.helpers.combobox.TypedComboBox, java.lang.String, java.lang.Object)
                 */
                @Override
                public void handleSelectedItem (ActionEvent e, ImagesComboBox<Image> cb, String text, Image value)
                {
                    showImage(value);
                }
            });
            ctPane.add(_imgSel, BorderLayout.NORTH);
        }

        // Scrollable preview label in the center
        if (null == _lblImage)
        {
            _lblImage = new JLabel();
            _lblImage.setPreferredSize(new Dimension(300, 300));

            final JScrollPane sp=new DefaultLabelScroll(_lblImage);
            sp.setOpaque(false);
            sp.getViewport().setOpaque(false);
            ctPane.add(sp, BorderLayout.CENTER);
        }
    }

    /*
     * @see net.community.apps.common.FilesLoadMainFrame#getFileChooser(org.w3c.dom.Element, java.lang.String, java.lang.Boolean)
     */
    @Override
    protected JFileChooser getFileChooser (
            final Element dlgElement, final String cmd, final Boolean isSaveDialog)
    {
        final JFileChooser fc=super.getFileChooser(dlgElement, cmd, isSaveDialog);
        // Restrict the chooser to the image types this frame can read
        if (fc != null)
            fc.setFileFilter(new FileNameExtensionFilter("Image files", ICOReader.ICO_SUFFIX, BMPReader.BMP_SUFFIX));
        return fc;
    }

    /**
     * Creates the frame; when exactly one argument is given it is treated
     * as a file path and loaded immediately.
     *
     * @param args command line arguments (optional single file path)
     * @throws Exception if frame construction or initial load fails
     */
    public TestFilesReaderFrame (String... args) throws Exception
    {
        super(args);
        if ((args != null) && (args.length == 1))
            loadFile(new File(args[0]), LOAD_CMD, null);
    }
}
|
import os
import json
from flask import Flask, request, Response

app = Flask(__name__)

# Shared secret used to verify that events really come from Slack.
SLACK_WEBHOOK_SECRET = os.environ.get('SLACK_WEBHOOK_SECRET')


@app.route('/listening', methods=['POST'])
def inbound():
    """Receive a Slack event callback, verify its token, and log the message.

    Always returns an empty 200 response (Slack retries on non-200).
    """
    slack_event = json.loads(request.data)
    # FIX: the original mixed Python-2-only `print "x"` statements with
    # print() calls; print-as-function works on both interpreters.
    print("--------REQUEST--------------")
    print(json.dumps(slack_event))
    print("_____________________________")
    if slack_event.get('token') == SLACK_WEBHOOK_SECRET:
        # FIX: guard against a missing 'event' key and against None fields -
        # the original `username + " in " + channel` raised TypeError then.
        event = slack_event.get('event') or {}
        channel = event.get('channel')
        username = event.get('user')
        text = event.get('text')
        inbound_message = "{} in {} says: {}".format(username, channel, text)
        print(inbound_message)
    else:
        print("Token received {} is not equal to {}".format(slack_event.get('token'), SLACK_WEBHOOK_SECRET))
    return Response(), 200


@app.route('/', methods=['GET'])
def test():
    """Health-check endpoint."""
    return Response('It works!')


if __name__ == "__main__":
    app.run(debug=True)
|
# Wrapper around the FastBill "invoice.*" API services.
class Invoice
  include HTTParty
  base_uri 'https://portal.fastbill.com'

  attr_accessor :id, :invoice_type, :customer_id, :customer_costcenter_id, :currency_code, :template_id, :intro_text, :invoice_number, :payed_date, :is_canceled, :invoice_date, :due_date, :delivery_date, :sub_total, :vat_total, :total, :vat_items, :invoice_items, :is_new, :eu_delivery

  # auth: HTTParty basic-auth hash (:username/:password), passed through to
  # every API request and to child objects.
  def initialize(auth = nil)
    @auth = auth
    @is_new = true
    # FIX: initialize the item collections so to_xml does not crash with
    # "undefined method `length' for nil" on a freshly built invoice.
    @vat_items = []
    @invoice_items = []
  end

  # Fetch invoices filtered by at most one of id / customer_id / year / month
  # (first non-nil argument wins, preserving the original elsif chain).
  # Returns an array of Invoice objects; empty when nothing matched.
  def get(id = nil, customer_id = nil, year = nil, month = nil)
    filter = ''
    if id
      filter = "<INVOICE_ID>#{id}</INVOICE_ID>"
    elsif customer_id
      filter = "<CUSTOMER_ID>#{customer_id}</CUSTOMER_ID>"
    elsif year
      filter = "<YEAR>#{year}</YEAR>"
    elsif month
      filter = "<MONTH>#{month}</MONTH>"
    end
    body = post_api('<?xml version="1.0" encoding="utf-8"?><FBAPI><SERVICE>invoice.get</SERVICE><FILTER>' + filter + '</FILTER></FBAPI>')
    container = body['FBAPI']["RESPONSE"]["INVOICES"]
    found = container.nil? ? nil : container["INVOICE"]
    invoices = []
    if found.is_a?(Hash)
      # A single invoice is returned as a bare hash, not a one-element array.
      inv = Invoice.new(@auth)
      inv.hydrate(found)
      invoices.push inv
    elsif found
      found.each do |invoice|
        inv = Invoice.new(@auth)
        inv.hydrate(invoice)
        invoices.push inv
      end
    end
    invoices
  end

  # Populate this instance from a parsed <INVOICE> response hash.
  def hydrate(body)
    @is_new = false
    @id = body["INVOICE_ID"]
    @invoice_type = body["INVOICE_TYPE"]
    @customer_id = body["CUSTOMER_ID"]
    @customer_costcenter_id = body["CUSTOMER_COSTCENTER_ID"]
    @currency_code = body["CURRENCY_CODE"]
    @template_id = body["TEMPLATE_ID"]
    @invoice_number = body["INVOICE_NUMBER"]
    # FIX: was stored in @introtext, which the :intro_text accessor never
    # exposed; now consistent with attr_accessor.
    @intro_text = body["INTROTEXT"]
    @payed_date = parse_date body["PAYED_DATE"]
    @is_canceled = body["IS_CANCELED"] == "1"
    @invoice_date = parse_date body["INVOICE_DATE"]
    # FIX: the due date was read from INVOICE_DATE instead of DUE_DATE.
    @due_date = parse_date body["DUE_DATE"]
    @delivery_date = parse_date body["DELIVERY_DATE"]
    @sub_total = body["SUB_TOTAL"]
    @vat_total = body["VAT_TOTAL"]
    @eu_delivery = body["EU_DELIVERY"] == "1"
    @total = body["TOTAL"]
    @vat_items = []
    (body["VAT_ITEMS"] || {}).each do |vat_item|
      @vat_items.push InvoiceVatItem.new vat_item.last
    end
    @invoice_items = []
    (body["ITEMS"] || {}).each do |item|
      begin
        i = InvoiceItem.new(@auth)
        i.hydrate(item.last)
        @invoice_items.push i
      rescue
        # Best-effort: skip items that fail to hydrate (original behavior).
      end
    end
  end

  # Parse a FastBill timestamp. Returns false for nil or the API's
  # "0000-00-00 00:00:00" placeholder (original contract preserved).
  def parse_date(date)
    if date != nil && date != "0000-00-00 00:00:00"
      Time.parse date
    else
      false
    end
  end

  # Serialize this invoice to the <INVOICE> XML fragment the API expects.
  # Only non-nil attributes are emitted.
  def to_xml
    xml = "<INVOICE>"
    xml << "<INVOICE_ID>#{@id}</INVOICE_ID>" unless @id.nil?
    xml << "<INVOICE_TYPE>#{@invoice_type}</INVOICE_TYPE>" unless @invoice_type.nil?
    xml << "<CUSTOMER_ID>#{@customer_id}</CUSTOMER_ID>" unless @customer_id.nil?
    xml << "<CUSTOMER_COSTCENTER_ID>#{@customer_costcenter_id}</CUSTOMER_COSTCENTER_ID>" unless @customer_costcenter_id.nil?
    xml << "<CURRENCY_CODE>#{@currency_code}</CURRENCY_CODE>" unless @currency_code.nil?
    xml << "<TEMPLATE_ID>#{@template_id}</TEMPLATE_ID>" unless @template_id.nil?
    xml << "<INVOICE_NUMBER>#{@invoice_number}</INVOICE_NUMBER>" unless @invoice_number.nil?
    xml << "<INTROTEXT>#{@intro_text}</INTROTEXT>" unless @intro_text.nil?
    unless @eu_delivery.nil?
      xml << "<EU_DELIVERY>#{@eu_delivery ? 1 : 0}</EU_DELIVERY>"
    end
    # FIX: PAYED_DATE used to be emitted twice (before IS_CANCELED and again
    # after DELIVERY_DATE); emit it exactly once.
    xml << "<PAYED_DATE>#{@payed_date.strftime("%Y-%m-%d")}</PAYED_DATE>" if @payed_date
    unless @is_canceled.nil?
      xml << "<IS_CANCELED>#{@is_canceled ? 1 : 0}</IS_CANCELED>"
    end
    # FIX: opening tag was misspelled <IVOICE_DATE>, producing malformed XML.
    xml << "<INVOICE_DATE>#{@invoice_date.strftime("%Y-%m-%d")}</INVOICE_DATE>" if @invoice_date
    xml << "<DUE_DATE>#{@due_date.strftime("%Y-%m-%d")}</DUE_DATE>" if @due_date
    xml << "<DELIVERY_DATE>#{@delivery_date.strftime("%Y-%m-%d")}</DELIVERY_DATE>" if @delivery_date
    xml << "<SUB_TOTAL>#{@sub_total}</SUB_TOTAL>" unless @sub_total.nil?
    xml << "<VAT_TOTAL>#{@vat_total}</VAT_TOTAL>" unless @vat_total.nil?
    xml << "<TOTAL>#{@total}</TOTAL>" unless @total.nil?
    unless @vat_items.nil? || @vat_items.empty?
      xml << "<VAT_ITEMS>"
      @vat_items.each { |vat_item| xml << vat_item.to_xml }
      xml << "</VAT_ITEMS>"
    end
    unless @invoice_items.nil? || @invoice_items.empty?
      xml << "<ITEMS>"
      @invoice_items.each { |item| xml << item.to_xml }
      xml << "</ITEMS>"
    end
    xml + "</INVOICE>"
  end

  # Mark the invoice as complete. Returns true on success.
  def complete
    simple_action('invoice.complete')
  end

  # Digitally sign the invoice. Returns true on success.
  def sign!
    simple_action('invoice.sign')
  end

  # E-mail the invoice. Does nothing (returns nil) when neither +to+ nor
  # +cc+ is given; otherwise returns true on success.
  # NOTE: +from+ is accepted for interface compatibility but unused,
  # matching the original implementation.
  def send_by_email(from = nil, to = nil, cc = nil, subject = nil, message = nil, confirmation = false)
    return if to.nil? && cc.nil?
    data = "<INVOICE_ID>#{@id}</INVOICE_ID>"
    data << "<RECIPIENT>"
    data << "<TO>#{to}</TO>" unless to.nil?
    data << "<CC>#{cc}</CC>" unless cc.nil?
    data << "</RECIPIENT>"
    data << "<SUBJECT>#{subject}</SUBJECT>" unless subject.nil?
    data << "<MESSAGE>#{message}</MESSAGE>" unless message.nil?
    data << "<CONFIRMATION>#{confirmation ? 1 : 0}</CONFIRMATION>"
    # FIX: the recipient/subject/message fragment used to be appended AFTER
    # </DATA> (and the invoice id was sent twice), producing a malformed
    # request body; everything now lives inside a single DATA element.
    body = post_api('<?xml version="1.0" encoding="utf-8"?><FBAPI><SERVICE>invoice.sendbyemail</SERVICE><DATA>' + data + '</DATA></FBAPI>')
    status_success?(body)
  end

  # Send the invoice by postal mail. Returns true on success.
  def send_by_ground_mail
    simple_action('invoice.sendbypost')
  end

  # Flag the invoice as paid. Returns true on success.
  def set_paid!
    simple_action('invoice.setpaid')
  end

  # Persist this invoice: invoice.create when new, invoice.update otherwise.
  # Returns self on success, false on failure.
  def safe!
    service = @is_new ? 'invoice.create' : 'invoice.update'
    body = post_api('<?xml version="1.0" encoding="utf-8"?><FBAPI><SERVICE>' + service + '</SERVICE><DATA>' + self.to_xml + '</DATA></FBAPI>')
    if status_success?(body)
      # FIX: the id was read from RESPONSE["STATUS"]["INVOICE_ID"], but
      # STATUS is the string "success" (String#[] always returned nil, so
      # @id was never set). The id lives directly under RESPONSE -
      # TODO confirm against the FastBill API reference.
      new_id = body['FBAPI']["RESPONSE"]["INVOICE_ID"]
      @id = new_id unless new_id.nil?
      @is_new = false
      self
    else
      false
    end
  end

  # Delete the invoice on the server. Returns true on success.
  def delete!
    simple_action('invoice.delete')
  end

  private

  # POST the given XML document to the FastBill endpoint and return the
  # response parsed into a hash.
  def post_api(request_body)
    options = {
      :basic_auth => @auth,
      :headers => {
        "Content-Type" => "application/xml"
      },
      :body => request_body
    }
    r = self.class.post('/api/0.1/api.php', options)
    Crack::XML.parse r.body
  end

  # True when the response STATUS element equals "success".
  def status_success?(body)
    !body['FBAPI']["RESPONSE"]["STATUS"].nil? && body['FBAPI']["RESPONSE"]["STATUS"] == "success"
  end

  # Run a service that only needs the invoice id; report success as boolean.
  def simple_action(service)
    body = post_api('<?xml version="1.0" encoding="utf-8"?><FBAPI><SERVICE>' + service + '</SERVICE><DATA><INVOICE_ID>' + @id + '</INVOICE_ID></DATA></FBAPI>')
    status_success?(body)
  end
end
#!/bin/bash
# Apache access-log analysis: answers a fixed set of questions by streaming
# the compressed log from the course server through standard text tools.
# NOTE(review): the log is re-downloaded for every question; caching it in a
# temp file would be much faster - left unchanged to keep behavior identical.
echo "Question 1: List the top 10 web sites from which requests came (non-404 status, external addresses looking in)."
echo "count website"
# Drop 404s, drop referrers pointing back at this host, drop empty ("-")
# referrers, then count occurrences of the referrer field ($11).
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | awk '$9 != 404 {print}' | awk '$11 ~ /^\"https?:\/\/users.csc.tntech.edu/ {next}{print}' | awk '$11 ~ /\"-\"/ {next}{print $11}' | sed '/^$/d' | sort | uniq -c | sort -nr | head -n 10
echo " "
echo "Question 2: List the top 10 local web pages requested (non-404 status)."
echo "count webpage"
# Non-404 requests counted by requested path ($7).
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | awk '$9 != 404 {print}' | awk '{print $7}' | sort | uniq -c | sort -nr | head -n 10
echo " "
echo "Question 3: List the top 10 web browsers used to access the site. It is not necessary to get fancy and parse out all of the browser string. Simply print out the information that is there. Display the percentage of all browser types that each line represents."
echo "percent browser"
# Total request count, used as the denominator for the percentages below.
n=`curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | wc -l`
# User-agent is everything from field 12 onward; count, then convert each
# count to a percentage of all requests.
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | cut -f12- -d " " | sed '/^$/d' | sort | uniq -c | sort -nr | head -n 10 | awk -v "n=$n" '{first = $1; $1 = ""; print first/n*100"%", $0}'
echo " "
echo "Question 4: List the number of 404 errors that were reported in the log."
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | awk '$9 == 404 {print}' | wc -l
echo " "
echo "Question 5: List the number of 500 errors that were reported in the log."
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | awk '$9 == 500 {print}' | wc -l
echo " "
echo "Question 6: What time of day is the site active? When is it quiet? "
echo "count hour"
# Field 4 is the timestamp "[dd/Mon/yyyy:HH:MM:SS"; the 2nd ':'-separated
# piece is the hour.
curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | awk '{print $4}' | cut -d':' -f 2 | sort | uniq -c | sort -nr | head -n 24
echo "From the result we found that the site is active during the day, and quiet during the night. 3:00 p.m. is the most active period."
echo " "
echo "Question 7: Is the traffic \"real\" or mostly the result of robots or automated processes?"
n=`curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | wc -l`
echo "We have" $n "requests in total. I have calculated the number of requests with some key words like 'robot', 'bot', 'crawler', 'spider', 'wanderer' in the User Agent field."
# Case-insensitive match of common crawler keywords in the user-agent field.
tmp=`curl -s http://users.csc.tntech.edu/~elbrown/access_log.bz2 | bunzip2 - | cut -f12- -d " " | grep -i -E "robot|bot|crawler|spider|wanderer" | wc -l`
echo "At least" $tmp "of them are generated by robots."
echo " "
|
#! /usr/bin/env ruby
# coding: utf-8
require "helper"
#require "test/unit"
#require "helper"
#require 'mageo.rb'
#require "mageo/vector.rb"
#require "mageo/polar2d.rb"
# Tests for Vector#to_p2d: conversion of a Cartesian 2-D vector to
# Mageo::Polar2D (r, theta) form.
class TC_Vector_Polar < Test::Unit::TestCase
  include Math
  # Absolute tolerance for floating-point comparisons.
  $tolerance = 10.0**(-10)
  def setup
    @v0 = Vector[ 1.0, 2.0, 3.0 ]       # 3-D: not convertible to polar 2-D
    @v1 = Vector[ 1.0, 1.0 ]            # 2-D: the valid case
    @v2 = Vector[ 1.0, 2.0, 3.0, 4.0 ]  # 4-D: not convertible
  end
  def test_to_p2d
    # Only exactly-2-D vectors convert; others raise Vector::SizeError.
    assert_raise( Vector::SizeError ){ @v0.to_p2d }
    assert_raise( Vector::SizeError ){ @v2.to_p2d }
    assert_equal( Mageo::Polar2D , @v1.to_p2d.class )
    assert_equal( Math::sqrt(2.0), @v1.to_p2d.r )
    assert_equal( 0.25*PI , @v1.to_p2d.theta )
    # theta sweeps the full circle [0, 2*PI); zero vector maps to theta = 0.
    assert_in_delta( 0.00*PI, Vector[ 0.0, 0.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 0.00*PI, Vector[ 2.0, 0.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 0.25*PI, Vector[ 2.0, 2.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 0.50*PI, Vector[ 0.0, 2.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 0.75*PI, Vector[ -2.0, 2.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 1.00*PI, Vector[ -2.0, 0.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 1.25*PI, Vector[ -2.0, -2.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 1.50*PI, Vector[ 0.0, -2.0 ].to_p2d.theta, $tolerance )
    assert_in_delta( 1.75*PI, Vector[ 2.0, -2.0 ].to_p2d.theta, $tolerance )
  end
end
# Tests for Mageo::Polar2D: conversion back to Cartesian, rotation
# (non-destructive and destructive), and theta normalization.
class TC_Polar2D < Test::Unit::TestCase
  # FIX: was `10**(-10)` (integer base -> Rational), inconsistent with the
  # Float tolerance `10.0**(-10)` used by TC_Vector_Polar in this file.
  $tolerance = 10.0**(-10)
  include Math
  def setup
    @p2d00 = Mageo::Polar2D.new( 0.0, 0.0*PI)
    @p2d01 = Mageo::Polar2D.new( 0.0, 2.0*PI)
    @p2d02 = Mageo::Polar2D.new( 2.0, 0.0*PI)
    @p2d03 = Mageo::Polar2D.new( 2.0, 0.1*PI)
  end
  def test_to_v
    assert_equal( Vector[ 0.0, 0.0 ], @p2d00.to_v )
    assert_equal( Vector[ 0.0, 0.0 ], @p2d01.to_v )
    assert_equal( Vector[ 2.0, 0.0 ], @p2d02.to_v )
    assert_equal( Vector[ 2.0*cos(0.1*PI), 2.0*sin(0.1*PI) ], @p2d03.to_v )
  end
  def test_rotate
    # rotate adds to theta and leaves r unchanged.
    assert_equal( Mageo::Polar2D, @p2d00.rotate( 0.2 * PI ).class )
    assert_in_delta( 0.0 , @p2d00.rotate( 0.2 * PI ).r , $tolerance )
    assert_in_delta( 0.2*PI, @p2d00.rotate( 0.2 * PI ).theta, $tolerance )
    assert_in_delta( 0.0 , @p2d01.rotate( 0.2 * PI ).r , $tolerance )
    assert_in_delta( 2.2*PI, @p2d01.rotate( 0.2 * PI ).theta, $tolerance )
    assert_in_delta( 2.0 , @p2d02.rotate( 0.2 * PI ).r , $tolerance )
    assert_in_delta( 0.2*PI, @p2d02.rotate( 0.2 * PI ).theta, $tolerance )
    assert_in_delta( 2.0 , @p2d03.rotate( 0.2 * PI ).r , $tolerance )
    assert_in_delta( 0.3*PI, @p2d03.rotate( 0.2 * PI ).theta, $tolerance )
    # Confirm the receivers were not modified (rotate is non-destructive).
    assert_equal( Vector[ 0.0, 0.0 ], @p2d00.to_v )
    assert_equal( Vector[ 0.0, 0.0 ], @p2d01.to_v )
    assert_equal( Vector[ 2.0, 0.0 ], @p2d02.to_v )
    assert_equal( Vector[ 2.0*cos(0.1*PI), 2.0*sin(0.1*PI) ], @p2d03.to_v )
  end
  def test_rotate!
    # rotate! mutates the receiver in place.
    @p2d00.rotate!( 0.2 * PI )
    @p2d01.rotate!( 0.2 * PI )
    @p2d02.rotate!( 0.2 * PI )
    @p2d03.rotate!( 0.2 * PI )
    assert_in_delta( 0.0 , @p2d00.r , $tolerance )
    assert_in_delta( 0.2*PI, @p2d00.theta, $tolerance )
    assert_in_delta( 0.0 , @p2d01.r , $tolerance )
    assert_in_delta( 2.2*PI, @p2d01.theta, $tolerance )
    assert_in_delta( 2.0 , @p2d02.r , $tolerance )
    assert_in_delta( 0.2*PI, @p2d02.theta, $tolerance )
    assert_in_delta( 2.0 , @p2d03.r , $tolerance )
    assert_in_delta( 0.3*PI, @p2d03.theta, $tolerance )
  end
  def test_minimize_theta!
    # minimize_theta! normalizes theta into [0, 2*PI), mutating the receiver.
    p2pA = Mageo::Polar2D.new( 2.0, -2.5*PI )
    p2pA.minimize_theta!
    assert_in_delta( 1.5*PI, p2pA.theta, $tolerance )
    p2pB = Mageo::Polar2D.new( 2.0, -0.5*PI )
    p2pB.minimize_theta!
    assert_in_delta( 1.5*PI, p2pB.theta, $tolerance )
    p2pC = Mageo::Polar2D.new( 2.0, 1.5*PI )
    p2pC.minimize_theta!
    assert_in_delta( 1.5*PI, p2pC.theta, $tolerance )
    p2pD = Mageo::Polar2D.new( 2.0, 3.5*PI )
    p2pD.minimize_theta!
    assert_in_delta( 1.5*PI, p2pD.theta, $tolerance )
    p2pE = Mageo::Polar2D.new( 2.0, 5.5*PI )
    p2pE.minimize_theta!
    assert_in_delta( 1.5*PI, p2pE.theta, $tolerance )
    p2pF = Mageo::Polar2D.new( 2.0, 4.5*PI )
    p2pF.minimize_theta!
    assert_in_delta( 0.5*PI, p2pF.theta, $tolerance )
  end
  def test_minimize_theta
    # Non-destructive variant returns a normalized copy.
    p2pA = Mageo::Polar2D.new( 2.0, -2.5*PI ).minimize_theta
    assert_in_delta( 1.5*PI, p2pA.theta, $tolerance )
    p2pB = Mageo::Polar2D.new( 2.0, -0.5*PI ).minimize_theta
    assert_in_delta( 1.5*PI, p2pB.theta, $tolerance )
    p2pC = Mageo::Polar2D.new( 2.0, 1.5*PI ).minimize_theta
    assert_in_delta( 1.5*PI, p2pC.theta, $tolerance )
    p2pD = Mageo::Polar2D.new( 2.0, 3.5*PI ).minimize_theta
    assert_in_delta( 1.5*PI, p2pD.theta, $tolerance )
    p2pE = Mageo::Polar2D.new( 2.0, 5.5*PI ).minimize_theta
    assert_in_delta( 1.5*PI, p2pE.theta, $tolerance )
    p2pF = Mageo::Polar2D.new( 2.0, 4.5*PI ).minimize_theta
    assert_in_delta( 0.5*PI, p2pF.theta, $tolerance )
  end
  def test_minimum_radian
    # Class-level helper with the same normalization as minimize_theta.
    assert_in_delta( 1.5*PI, Mageo::Polar2D.minimum_radian( -2.5*PI ), $tolerance )
    assert_in_delta( 1.5*PI, Mageo::Polar2D.minimum_radian( -0.5*PI ), $tolerance )
    assert_in_delta( 1.5*PI, Mageo::Polar2D.minimum_radian( 1.5*PI ), $tolerance )
    assert_in_delta( 1.5*PI, Mageo::Polar2D.minimum_radian( 3.5*PI ), $tolerance )
    assert_in_delta( 0.5*PI, Mageo::Polar2D.minimum_radian( 4.5*PI ), $tolerance )
    assert_in_delta( 1.5*PI, Mageo::Polar2D.minimum_radian( 5.5*PI ), $tolerance )
  end
end
|
def find_subarrays(arr):
    """Count contiguous subarrays of ``arr`` whose sum is divisible by 3.

    Uses prefix sums modulo 3: a subarray arr[i..j] sums to a multiple of 3
    exactly when the prefix sums before i and through j share the same
    residue, so we count pairs of equal residues in O(n) time.

    FIX: the original O(n^2) version never tested single-element subarrays
    (it only checked sums after adding a second element), so e.g. [3] and
    [9] were missed.

    :param arr: list of integers (may be empty)
    :return: number of contiguous subarrays with sum divisible by 3
    """
    # counts[m] = how many prefix sums seen so far have residue m;
    # the empty prefix (sum 0) is counted once.
    counts = [1, 0, 0]
    prefix_mod = 0
    total = 0
    for value in arr:
        prefix_mod = (prefix_mod + value) % 3
        # Every earlier prefix with the same residue closes one subarray here.
        total += counts[prefix_mod]
        counts[prefix_mod] += 1
    return total


# Main Program (guarded so importing this module has no side effects)
if __name__ == "__main__":
    arr = [1, 5, 2, 3, 9]
    result = find_subarrays(arr)
    print("Number of subarrays whose sum is divisible by 3: {:d}".format(result))
<filename>Algorithm/src/main/java/com/leetcode/Solution_75.java
package com.leetcode;
public class Solution_75 {
public void sortColors(int[] nums) {
}
private void swap(int[] ints, int i, int j) {
int tmp = ints[i];
ints[i] = ints[j];
ints[j] = tmp;
}
}
|
#!/bin/bash
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (C) 2019 - 2020 Intel Corporation.
# Directory containing this script, resolved to an absolute path.
basedir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
PROGNAME=`basename $0`
# Default path (to installation dir of memkind-tests RPM)
TEST_PATH="$basedir/"
# Gtest binaries
GTEST_BINARIES=(dax_kmem_test)
# Pytest files
PYTEST_FILES=(dax_kmem_env_var_test.py)
# Terminal color escape sequences used when emitting per-test results.
red=`tput setaf 1`
green=`tput setaf 2`
yellow=`tput setaf 3`
default=`tput sgr0`
# Overall exit status: set to the first non-zero test result (see main loop).
err=0
# Print usage/help text for this script.
function usage () {
cat <<EOF
Usage: $PROGNAME [-h]
OPTIONS
-h,
display script usage
EOF
}
# Echo all arguments to stdout; additionally append them to $LOG_FILE
# when that variable is set.
function emit() {
    if [ "$LOG_FILE" != "" ]; then
        echo "$@" 2>&1 | tee -a $LOG_FILE;
    else
        echo "$@"
    fi
}
# Resolve the given directory argument to an absolute path and echo it;
# exits via usage() when the argument is not an existing directory.
#
# FIX: the original declared `local PATH=$1`, shadowing the $PATH search
# path inside the function - any external command invoked from here would
# then fail with "command not found". Renamed the local variable.
function normalize_path {
    local DIR=$1
    if [ ! -d "$DIR" ];
    then
        echo "Not a directory: '$DIR'"
        usage
        exit 1
    fi
    # Prefix relative paths with the current working directory.
    if [[ $DIR != /* ]]; then
        DIR=`pwd`/$DIR
    fi
    echo "$DIR"
}
# List (as SKIPPED) every gtest and pytest whose fully-qualified name
# matches the pattern given in $1, without running anything.
function show_skipped_tests()
{
    SKIP_PATTERN=$1
    DEFAULT_IFS=$IFS
    # Search for gtests that match given pattern
    for i in ${!GTEST_BINARIES[*]}; do
        GTEST_BINARY_PATH=$TEST_PATH${GTEST_BINARIES[$i]}
        # --gtest_list_tests prints "Suite." lines followed by test names.
        for LINE in $($GTEST_BINARY_PATH --gtest_list_tests); do
            if [[ $LINE == *. ]]; then
                TEST_SUITE=$LINE;
            else
                if [[ "$TEST_SUITE$LINE" == *"$SKIP_PATTERN"* ]]; then
                    emit "$TEST_SUITE$LINE,${yellow}SKIPPED${default}"
                fi
            fi
        done
    done
    # Search for pytests that match given pattern
    for i in ${!PYTEST_FILES[*]}; do
        PTEST_BINARY_PATH=$TEST_PATH${PYTEST_FILES[$i]}
        # Split collected output on newlines only (test names may contain spaces).
        IFS=$'\n'
        for LINE in $(py.test $PTEST_BINARY_PATH --collect-only); do
            if [[ $LINE == *"<Class "* ]]; then
                # Extract the quoted name from "<Class 'Name'>" style lines.
                TEST_SUITE=$(sed "s/^.*'\(.*\)'.*$/\1/" <<< $LINE)
            elif [[ $LINE == *"<Function "* ]]; then
                LINE=$(sed "s/^.*'\(.*\)'.*$/\1/" <<< $LINE)
                if [[ "$TEST_SUITE.$LINE" == *"$SKIP_PATTERN"* ]]; then
                    emit "$TEST_SUITE.$LINE,${yellow}SKIPPED${default}"
                fi
            fi
        done
    done
    IFS=$DEFAULT_IFS
    emit ""
}
# Run a single gtest and emit "<name>,<RESULT>" (PASSED/FAILED/SKIPPED/CRASH).
#   $1 - printf-style command template (with %s for the gtest filter)
#   $2 - fully qualified test name
# Honors the $TEST_FILTER, $TEST_PREFIX and $CSV globals.
# Returns 0 for PASSED/SKIPPED, 1 otherwise.
function execute_gtest()
{
    ret_val=1
    TESTCMD=$1
    TEST=$2
    # Apply filter (if provided)
    if [ "$TEST_FILTER" != "" ]; then
        if [[ $TEST != $TEST_FILTER ]]; then
            return
        fi
    fi
    # Concatenate test command
    TESTCMD=$(printf "$TESTCMD" "$TEST""$SKIPPED_GTESTS""$RUN_DISABLED_GTEST")
    # And test prefix if applicable
    if [ "$TEST_PREFIX" != "" ]; then
        TESTCMD=$(printf "$TEST_PREFIX" "$TESTCMD")
    fi
    OUTPUT=`eval $TESTCMD`
    # Classify the gtest output: "OK ]" = pass, "FAILED ]" = fail,
    # "PASSED ] 0" = nothing ran (skipped); anything else means a crash.
    PATOK='.*OK ].*'
    PATFAILED='.*FAILED ].*'
    PATSKIPPED='.*PASSED ] 0.*'
    if [[ $OUTPUT =~ $PATOK ]]; then
        RESULT="$TEST,${green}PASSED${default}"
        ret_val=0
    elif [[ $OUTPUT =~ $PATFAILED ]]; then
        RESULT="$TEST,${red}FAILED${default}"
    elif [[ $OUTPUT =~ $PATSKIPPED ]]; then
        RESULT="$TEST,${yellow}SKIPPED${default}"
        ret_val=0
    else
        RESULT="$TEST,${red}CRASH${default}"
    fi
    # With CSV set, full output goes to the log and the result line to the CSV.
    if [ "$CSV" != "" ]; then
        emit "$OUTPUT"
        echo $RESULT >> $CSV
    else
        echo $RESULT
    fi
    return $ret_val
}
# Run a single pytest and emit "<suite>.<test>,<RESULT>".
#   $1 - printf-style command template (with %s for the -k expression)
#   $2 - test suite (class) name
#   $3 - test (function) name
# Honors the $TEST_FILTER, $TEST_PREFIX and $CSV globals.
# Returns 0 for PASSED/deselected, 1 otherwise.
function execute_pytest()
{
    ret=1
    TESTCMD=$1
    TEST_SUITE=$2
    TEST=$3
    # Apply filter (if provided)
    if [ "$TEST_FILTER" != "" ]; then
        if [[ $TEST_SUITE.$TEST != $TEST_FILTER ]]; then
            return
        fi
    fi
    # Concatenate test command
    TESTCMD=$(printf "$TESTCMD" "$TEST$SKIPPED_PYTESTS")
    # And test prefix if applicable
    if [ "$TEST_PREFIX" != "" ]; then
        TESTCMD=$(printf "$TEST_PREFIX" "$TESTCMD")
    fi
    OUTPUT=`eval $TESTCMD`
    # Classify the pytest summary line; "deselected" means the -k expression
    # excluded the test, which is silently treated as success.
    PATOK='.*1 passed.*'
    PATFAILED='.*1 failed.*'
    PATSKIPPED='.*deselected.*'
    if [[ $OUTPUT =~ $PATOK ]]; then
        RESULT="$TEST_SUITE.$TEST,${green}PASSED${default}"
        ret=0
    elif [[ $OUTPUT =~ $PATFAILED ]]; then
        RESULT="$TEST_SUITE.$TEST,${red}FAILED${default}"
    elif [[ $OUTPUT =~ $PATSKIPPED ]]; then
        return 0
    else
        RESULT="$TEST_SUITE.$TEST,${red}CRASH${default}"
    fi
    # With CSV set, full output goes to the log and the result line to the CSV.
    if [ "$CSV" != "" ]; then
        emit "$OUTPUT"
        echo $RESULT >> $CSV
    else
        echo $RESULT
    fi
    return $ret
}
#Check support for numa nodes (at least two)
function check_numa()
{
    # numactl prints one "node N ..." stanza per node; "node 1" exists only
    # on systems with two or more NUMA nodes.
    numactl --hardware | grep "^node 1" > /dev/null
    if [ $? -ne 0 ]; then
        echo "ERROR: $0 requires a NUMA enabled system with more than one node."
        exit 1
    fi
}
#Check automatic support for persistent memory NUMA node - simulate one if no one was found
function check_auto_dax_kmem_nodes()
{
    # Prefer the installed helper; fall back to a locally built one by
    # extending PATH, otherwise bail out.
    if [ ! -f /usr/bin/memkind-auto-dax-kmem-nodes ]; then
        if [ -x ./memkind-auto-dax-kmem-nodes ]; then
            export PATH=$PATH:$PWD
        else
            echo "Cannot find 'memkind-auto-dax-kmem-nodes' in $PWD. Did you run 'make'?"
            exit 1
        fi
    fi
    # $(...) captures the tool's stdout (the detected node list), NOT its
    # exit code - the original log line mislabeled it as a "code".
    ret=$(memkind-auto-dax-kmem-nodes)
    emit "The binary memkind-auto-dax-kmem-nodes printed '$ret'."
    # Empty output = no persistent-memory node found -> simulate one.
    if [[ $ret == "" ]]; then
        export MEMKIND_DAX_KMEM_NODES=1
    fi
}
#begin of main script
check_numa
check_auto_dax_kmem_nodes
OPTIND=1
while getopts ":h" opt; do
    case "$opt" in
        h)
            usage;
            exit 0;
            ;;
    esac
done
TEST_PATH=`normalize_path "$TEST_PATH"`
# Clear any remnants of previous execution(s)
rm -rf $CSV
rm -rf $LOG_FILE
# Run tests written in gtest
for i in ${!GTEST_BINARIES[*]}; do
    GTEST_BINARY_PATH=$TEST_PATH${GTEST_BINARIES[$i]}
    emit
    emit "### Processing gtest binary '$GTEST_BINARY_PATH' ###"
    # --gtest_list_tests prints "Suite." lines followed by test names.
    for LINE in $($GTEST_BINARY_PATH --gtest_list_tests); do
        if [[ $LINE == *. ]]; then
            TEST_SUITE=$LINE;
        else
            TEST_CMD="$GTEST_BINARY_PATH --gtest_filter=%s 2>&1"
            execute_gtest "$TEST_CMD" "$TEST_SUITE$LINE"
            ret=$?
            # Remember the first failure but keep running the remaining tests.
            if [ $err -eq 0 ]; then err=$ret; fi
        fi
    done
done
# Run tests written in pytest
for i in ${!PYTEST_FILES[*]}; do
    PTEST_BINARY_PATH=$TEST_PATH${PYTEST_FILES[$i]}
    emit
    emit "### Processing pytest file '$PTEST_BINARY_PATH' ###"
    # Split collected output on newlines only.
    IFS=$'\n'
    # --collect-only lists "<Class '...'>" and "<Function '...'>" entries.
    for LINE in $(py.test $PTEST_BINARY_PATH --collect-only); do
        if [[ $LINE == *"<Class "* ]]; then
            TEST_SUITE=$(sed "s/^.*'\(.*\)'.*$/\1/" <<< $LINE)
        elif [[ $LINE == *"<Function "* ]]; then
            LINE=$(sed "s/^.*'\(.*\)'.*$/\1/" <<< $LINE)
            TEST_CMD="py.test $PTEST_BINARY_PATH -k='%s' 2>&1"
            execute_pytest "$TEST_CMD" "$TEST_SUITE" "$LINE"
            ret=$?
            if [ $err -eq 0 ]; then err=$ret; fi
        fi
    done
done
exit $err
|
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=1>
# Using Iris to access NCEP CFSR 30-year Wave Hindcast
# NOTE(review): this is a Python 2 notebook export (bare `print` statements);
# it is not valid Python 3 as written.
# <codecell>
# <codecell>
from IPython.core.display import HTML
HTML('<iframe src=http://scitools.org.uk/iris/ width=800 height=350></iframe>')
# <codecell>
import numpy
import matplotlib.pyplot as plt
import datetime as dt
import iris
import iris.plot as iplt
import cartopy.crs as ccrs
# <codecell>
def time_near(cube, start):
    # Return the time coordinate value (in the cube's time units) closest
    # to the given datetime `start`.
    timevar = cube.coord('time')
    itime = timevar.nearest_neighbour_index(timevar.units.date2num(start))
    return timevar.points[itime]
# <codecell>
# DAP URL: 30 year East Coast wave hindcast (Wave Watch 3 driven by CFSR Winds)
#cubes = iris.load('http://geoport.whoi.edu/thredds/dodsC/fmrc/NCEP/ww3/cfsr/4m/best'); # 4 arc minute resolution
cubes = iris.load('http://geoport.whoi.edu/thredds/dodsC/fmrc/NCEP/ww3/cfsr/10m/best'); # 10 arc minute resolution
# <codecell>
print cubes
# <codecell>
# presumably cubes[0] is significant wave height - confirm against the dataset
hsig=cubes[0]
# <codecell>
# use contraints to select geographic subset and nearest time
mytime=dt.datetime(1991,10,31,12) #specified time... Nov 1, 1991 was the "Perfect Storm"
#mytime=dt.datetime.utcnow() # .... or now
# NOTE(review): `slice` shadows the Python builtin of the same name.
slice=hsig.extract(iris.Constraint(time=time_near(hsig,mytime),
                                   longitude=lambda cell: -71.5 < cell < -64.0,
                                   latitude=lambda cell: 40.0 < cell < 46.0))
# <codecell>
print slice
# <codecell>
# Attach a coordinate system and the aux coords GRIB output requires.
slice.coord(axis='X').coord_system=iris.coord_systems.GeogCS(654321)
slice.coord(axis='Y').coord_system=iris.coord_systems.GeogCS(654321)
slice.add_aux_coord(iris.coords.DimCoord(0, standard_name='forecast_period', units='hours'))
slice.add_aux_coord(iris.coords.DimCoord(0, "height", units="m"))
# <codecell>
slice.dim_coords[0]
# <codecell>
print[coord.name() for coord in slice.coords()]
# <codecell>
#save slice as grib2
iris.save(slice,'hsig.grib2')
|
#!/bin/sh
# Converts a relative path to an absolute path, with thanks to
# https://stackoverflow.com/questions/3572030/bash-script-absolute-path-with-os-x
# Resolve $1 to an absolute path: absolute inputs are echoed unchanged,
# relative inputs are anchored to $PWD after stripping a leading "./".
realpath() {
    if [[ $1 = /* ]]; then
        echo "$1"
    else
        echo "$PWD/${1#./}"
    fi
}
# Displays usage information on stderr and exits with status 1.
# The multi-line string below is emitted verbatim (including indentation).
usage() {
	echo "Usage: $0
	[-d <database>] the type of database to generate migrations for
	[-i <input directory>] the path to the input directory containing the modules/versions
	[-o <output directory>] the path to the output directory to write to
	[-v <version>] [optional] the version to be processed
	[-k <version scheme>] [optional] the scheme by which the versions abide
	[-m <module1, module2, ..., moduleN>] [optional] the names of the modules to be processed as a CSV string
	" 1>&2;
	exit 1;
}
# Parses the script args to set up the execution environment.
# Flags: -d database engine, -i input dir, -k version scheme,
# -m modules CSV, -o output dir, -v version. Anything else shows usage.
parseArgs() {
    while getopts :d:i:k:m:o:v:h option; do
        case "$option" in
            d)
                DATABASE_ENGINE="${OPTARG}"
                ;;
            i)
                INPUT_DIRECTORY=$OPTARG
                ;;
            k)
                VERSION_SCHEME=$OPTARG
                ;;
            m)
                REQUESTED_MODULES=${OPTARG}
                ;;
            o)
                OUTPUT_DIRECTORY=${OPTARG}
                ;;
            v)
                REQUESTED_VERSION=${OPTARG}
                ;;
            *)
                usage
                ;;
        esac
    done
}
# Validates the input args.
# DATABASE_ENGINE, INPUT_DIRECTORY and OUTPUT_DIRECTORY are mandatory; the
# input directory must exist and a version scheme must be set.
# NOTE(review): SCRIPT_SETTINGS_FILE is checked here but never assigned
# anywhere in this script -- confirm whether a flag for it was intended.
validateArgs() {
    if [ -z "${DATABASE_ENGINE}" ] || [ -z "${INPUT_DIRECTORY}" ] || [ -z "${OUTPUT_DIRECTORY}" ]; then
        usage
    fi
    # Quote expansions so empty or whitespace-containing values cannot
    # word-split and silently make these tests succeed.
    if [ -n "${SCRIPT_SETTINGS_FILE}" ] && [ ! -f "${SCRIPT_SETTINGS_FILE}" ]; then
        echo "Script settings not found: ${SCRIPT_SETTINGS_FILE}"
        usage
    fi
    if [ ! -d "${INPUT_DIRECTORY}" ]; then
        echo "Input directory not found: ${INPUT_DIRECTORY}"
        usage
    fi
    if [ -z "${VERSION_SCHEME}" ]; then
        echo "No version scheme provided"
        usage
    fi
}
# Calls on the jar file with the required arguments.
# Builds the CLI argument list from the parsed options and invokes the
# application jar with the previously resolved java executable.
invokeApplication() {
    # Add in the 3 core arguments.
    APPLICATION_ARGS="--input-directory-path $INPUT_DIRECTORY --output-directory-path $OUTPUT_DIRECTORY --database-engine $DATABASE_ENGINE"
    # Version: if not provided => run for the latest version.
    if [ -n "${REQUESTED_VERSION}" ]; then
        APPLICATION_ARGS="$APPLICATION_ARGS --version $REQUESTED_VERSION"
    fi
    # Module(s): if not provided => run for the default module.
    if [ -n "${REQUESTED_MODULES}" ]; then
        APPLICATION_ARGS="$APPLICATION_ARGS --modules $REQUESTED_MODULES"
    fi
    # Settings file: only pass the override when the file actually exists.
    # BUGFIX: the test was previously inverted ("! -f"), which passed a
    # non-existent config file to the application and skipped a real one.
    if [ -f "${OVERRIDE_CONFIG_FILE}" ]; then
        APPLICATION_ARGS="$APPLICATION_ARGS --config-file-override $OVERRIDE_CONFIG_FILE"
    fi
    # Version scheme.
    APPLICATION_ARGS="$APPLICATION_ARGS --version-scheme $VERSION_SCHEME"
    # Run directly; eval was unnecessary and would re-parse the command line.
    "$JAVA_EXE" -jar "$APPLICATION_JAR_FILE" $APPLICATION_ARGS
}
# Sets up the environment for running the application.
# Locates the application jar under APP_LIB_DIRECTORY and resolves the java
# executable, preferring $JAVA_HOME/bin/java when JAVA_HOME is set.
setupEnvironment() {
    # Find the main jar file (last match wins, as before).
    for f in `find "$APP_LIB_DIRECTORY" -name "$APP_JAR_NAME_PATTERN"`; do
        APPLICATION_JAR_FILE=$f
    done
    if [ -z "$APPLICATION_JAR_FILE" ]; then
        echo "Application jar file ($APP_JAR_NAME_PATTERN) not found in $APP_LIB_DIRECTORY"
        exit 1
    fi
    # Now find the java exe.
    # BUGFIX: `env | grep JAVA_HOME` captured the whole "JAVA_HOME=/path"
    # line, yielding a bogus "JAVA_HOME=/path/bin/java" executable path.
    # Use the environment variable itself instead.
    if [ -z "$JAVA_HOME" ]; then
        JAVA_EXE=`which java`
    else
        JAVA_EXE="$JAVA_HOME/bin/java"
    fi
    if [ ! -f "$JAVA_EXE" ]; then
        echo "No java executable found"
        exit 1
    fi
}
# The name pattern of the jar containing our main class.
APP_JAR_NAME_PATTERN="db-migration-cli*.jar"
# The directory the script resides in.
APP_SCRIPT_DIRECTORY=`realpath $0 | xargs dirname`
# The base directory where the application resides.
APP_BASE_DIRECTORY=`dirname $APP_SCRIPT_DIRECTORY | xargs dirname`
# The conf directory.
APP_CONF_DIRECTORY="$APP_BASE_DIRECTORY/conf"
# The lib directory.
APP_LIB_DIRECTORY="$APP_BASE_DIRECTORY/libs"
# The path to the override config file.
OVERRIDE_CONFIG_FILE="$APP_CONF_DIRECTORY/config.yaml"
# The full path to the main jar file (resolved in setupEnvironment).
APPLICATION_JAR_FILE=""
# The java executable (resolved in setupEnvironment).
JAVA_EXE="java"
# The modules to be processed.
REQUESTED_MODULES=""
# The version of the module(s) to be processed.
REQUESTED_VERSION=""
# The scheme by which the versions abide.
VERSION_SCHEME="default"
# The input directory where the modules/version reside.
INPUT_DIRECTORY=""
# The output directory to write the output to.
OUTPUT_DIRECTORY=""
# The database engine to generate the migration scripts for.
DATABASE_ENGINE=""
# Main!
# BUGFIX: pass "$@" (not $*) so arguments containing whitespace survive
# word splitting on the way into getopts.
parseArgs "$@"
validateArgs
setupEnvironment
invokeApplication
|
-- phpMyAdmin SQL Dump
-- version 4.7.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation Time: Nov 22, 2017 at 03:38 AM
-- Server version: 10.1.26-MariaDB
-- PHP Version: 7.1.8
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `cnpoo`
--
-- --------------------------------------------------------
--
-- Table structure for table `clases`
--
CREATE TABLE `clases` (
`id` int(11) NOT NULL,
`semana` varchar(2) NOT NULL,
`objetivo` text NOT NULL,
`padre` int(11) NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `clases`
--
INSERT INTO `clases` (`id`, `semana`, `objetivo`, `padre`) VALUES
(1, '01', 'Presentacion de la Asignatura', 0),
(2, '01', 'Practica de HTML', 1),
(3, '01', 'Practica de JavaScript', 1),
(4, '01', 'Practica de Css', 1),
(5, '02', 'Curso de Sun Java Studio Creator', 0),
(6, '02', 'Sesion 01', 5),
(7, '02', 'Sesion 02', 5),
(8, '03', 'Mer del Sistema', 0),
(9, '03', 'Diseno Detallado', 8);
-- --------------------------------------------------------
--
-- Table structure for table `estudiantes`
--
CREATE TABLE `estudiantes` (
`id` int(11) NOT NULL,
`cedula` varchar(10) NOT NULL DEFAULT '',
`na` varchar(100) NOT NULL DEFAULT '',
`email` varchar(250) NOT NULL DEFAULT '',
`celular` varchar(20) NOT NULL DEFAULT ''
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `estudiantes`
--
INSERT INTO `estudiantes` (`id`, `cedula`, `na`, `email`, `celular`) VALUES
(1, '21000000', '<NAME>', '<EMAIL>', '04145541207'),
(2, '22000000', '<NAME>', '<EMAIL>', '04184589511'),
(6, '12', '12', '12', '11');
-- --------------------------------------------------------
--
-- Table structure for table `mensajes`
--
CREATE TABLE `mensajes` (
`id` int(11) NOT NULL,
`nro` varchar(6) NOT NULL DEFAULT '',
`descripcion` varchar(250) NOT NULL DEFAULT '',
`vinculo` varchar(250) NOT NULL DEFAULT ''
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `mensajes`
--
INSERT INTO `mensajes` (`id`, `nro`, `descripcion`, `vinculo`) VALUES
(1, '01', 'Bienvenidos al Curso nivelatorio de POO', 'inicio.php');
-- --------------------------------------------------------
--
-- Table structure for table `migrations`
--
CREATE TABLE `migrations` (
`id` int(10) UNSIGNED NOT NULL,
`migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`batch` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2017_06_20_184101_create_notas_table', 1);
-- --------------------------------------------------------
--
-- Table structure for table `notas`
--
CREATE TABLE `notas` (
`id` int(10) UNSIGNED NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `productos`
--
CREATE TABLE `productos` (
`code` int(10) NOT NULL,
`name` text NOT NULL,
`Stock` int(5) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- --------------------------------------------------------
--
-- Table structure for table `usuarios`
--
CREATE TABLE `usuarios` (
`id` int(11) NOT NULL,
`usuario` varchar(10) NOT NULL DEFAULT '',
`clave` varchar(10) NOT NULL DEFAULT '',
`nivel` char(2) NOT NULL DEFAULT '',
`created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`updated_at` timestamp NOT NULL DEFAULT '0000-00-00 00:00:00'
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
--
-- Dumping data for table `usuarios`
--
INSERT INTO `usuarios` (`id`, `usuario`, `clave`, `nivel`, `created_at`, `updated_at`) VALUES
(1, '21000000', 'yiyi', '2', '2017-11-20 03:14:47', '2017-11-20 08:14:47'),
(5, '22000000', 'camila', '2', '2016-08-26 21:21:24', '2016-08-26 21:21:24');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `clases`
--
ALTER TABLE `clases`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `estudiantes`
--
ALTER TABLE `estudiantes`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `mensajes`
--
ALTER TABLE `mensajes`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `notas`
--
ALTER TABLE `notas`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `usuarios`
--
ALTER TABLE `usuarios`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `clases`
--
ALTER TABLE `clases`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `estudiantes`
--
ALTER TABLE `estudiantes`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `mensajes`
--
ALTER TABLE `mensajes`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `notas`
--
ALTER TABLE `notas`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `usuarios`
--
ALTER TABLE `usuarios`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
-- Commit the transaction opened at the top of the dump (previously fused
-- onto the end of the ALTER line by the export tool).
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
|
# Load the shared library-symlink helper functions into this shell.
source ./library-symlinks.sh
# Remove the 'fixtures' and 'messaging' library symlinks created earlier.
remove_lib_symlinks 'fixtures' 'messaging'
|
package com.github.shimmerjordan.common.basic.utils.excel.exception;
/**
* @author shimmerjordan
* @date 2021/05/09 21:47
*/
/**
 * Runtime exception thrown when Excel import/export processing fails.
 */
public class ExcelException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * @param msg a human-readable description of the failure
     */
    public ExcelException(String msg) {
        super(msg);
    }

    /**
     * Preserves the underlying cause so the original stack trace is kept.
     *
     * @param msg   a human-readable description of the failure
     * @param cause the exception that triggered this one
     */
    public ExcelException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
|
#!/bin/bash
#
# this script is used to configure a VPS to run SS(go) and kcptun with tcp_bbr enabled
#
# NOTE: tcp_bbr needs kernel version >= 4.9.?
# tested OS: Ubuntu 16.04 LTS (x64) (KVM)
#
# NOTE(review): must be run as root -- it appends to /etc/sysctl.conf and
# /etc/security/limits.conf and installs packages with apt.
#
###### var
CUR_DIR=`pwd`            # remembered so we can cd back at the end
GO_DIR=~/go              # used as GOPATH below
SS_DIR=~/ss              # shadowsocks config/scripts directory
SS_PORT=8388             # shadowsocks listen port
SS_PASSWORD=666666       # CHANGEME: default shadowsocks password
KCPTUN_DIR=~/kcptun      # kcptun scripts directory
KCPTUN_PORT=4000         # kcptun listen port
#
###### BBR
#
# Enable the BBR congestion-control algorithm by appending to sysctl.conf;
# `sysctl -p` applies the new settings immediately.
echo "" >> /etc/sysctl.conf
echo "# BBR" >> /etc/sysctl.conf
echo "net.core.default_qdisc = fq" >> /etc/sysctl.conf
echo "net.ipv4.tcp_congestion_control = bbr" >> /etc/sysctl.conf
sysctl -p
#
###### GO
#
# install golang
apt update
apt install -y golang-go git
# set go env (mkdir -p: do not fail or print an error if it already exists)
mkdir -p $GO_DIR
export GOPATH=$GO_DIR
export PATH=$GOPATH/bin:$PATH
#
###### SS
#
# install SS(go)
go get github.com/shadowsocks/shadowsocks-go/cmd/shadowsocks-server
# configure/scripts
mkdir -p $SS_DIR
cd $SS_DIR
# config.json -- written with a heredoc instead of an echo chain;
# rm -f (not -rf) is sufficient for a plain file
rm -f config.json
cat > config.json <<EOF
{
 "server_port": $SS_PORT,
 "password": "$SS_PASSWORD"
}
EOF
# restart.sh -- backticks and \$PATH are escaped so they are written literally
rm -f restart.sh
cat > restart.sh <<EOF
#!/bin/bash
kill \`pidof shadowsocks-server\`
export PATH=$GO_DIR/bin:\$PATH
shadowsocks-server start > log.txt &
EOF
chmod +x restart.sh
# optimize
# refer to: https://shadowsocks.org/en/config/advanced.html
# NOTE: we use bbr but not hybla for 'net.ipv4.tcp_congestion_control'
cat >> /etc/security/limits.conf <<'EOF'

# SS
* soft nofile 51200
* hard nofile 51200
root soft nofile 51200
root hard nofile 51200
EOF
ulimit -n 51200
cat >> /etc/sysctl.conf <<'EOF'

# SS
fs.file-max = 51200
net.core.rmem_max = 67108864
net.core.wmem_max = 67108864
net.core.netdev_max_backlog = 250000
net.core.somaxconn = 4096
net.ipv4.tcp_syncookies = 1
net.ipv4.tcp_tw_reuse = 1
net.ipv4.tcp_tw_recycle = 0
net.ipv4.tcp_fin_timeout = 30
net.ipv4.tcp_keepalive_time = 1200
net.ipv4.ip_local_port_range = 10000 65000
net.ipv4.tcp_max_syn_backlog = 8192
net.ipv4.tcp_max_tw_buckets = 5000
net.ipv4.tcp_fastopen = 3
net.ipv4.tcp_mem = 25600 51200 102400
net.ipv4.tcp_rmem = 4096 87380 67108864
net.ipv4.tcp_wmem = 4096 65536 67108864
net.ipv4.tcp_mtu_probing = 1
EOF
#echo "net.ipv4.tcp_congestion_control = hybla" >> /etc/sysctl.conf
sysctl -p
# run SS
shadowsocks-server start > log.txt &
#
###### kcptun
#
# install kcptun
go get -u github.com/xtaci/kcptun/server
# rename the generic "server" binary so it cannot clash with other tools
mv $GOPATH/bin/server $GOPATH/bin/kcptun_server
# configure/scripts
mkdir -p $KCPTUN_DIR
cd $KCPTUN_DIR
# restart.sh -- backticks and \$PATH are escaped so they are written literally
rm -f restart.sh
cat > restart.sh <<EOF
#!/bin/bash
kill \`pidof kcptun_server\`
export PATH=$GO_DIR/bin:\$PATH
kcptun_server -t "127.0.0.1:$SS_PORT" -l ":$KCPTUN_PORT" -mode fast2 > /dev/null 2>/dev/null &
EOF
chmod +x restart.sh
# run kcptun
kcptun_server -t "127.0.0.1:$SS_PORT" -l ":$KCPTUN_PORT" -mode fast2 > /dev/null 2>/dev/null &
# reset dir
cd $CUR_DIR
echo
echo "DONE!!!"
echo
|
#!/bin/bash
# Test script run by jenkins
# NOTE(review): the pipe into tee masks `go test`'s exit status; the build
# outcome relies on go2xunit's -fail flag detecting failures in the output.
outfile=gotest.out
go test -v | tee $outfile
# Convert the go test output to xUnit XML for Jenkins; -fail makes
# go2xunit exit non-zero when any test failed.
go2xunit -fail -input $outfile -output tests.xml
|
//if (typeof (Storage) !== "undefined") {
//    //localStorage.setItem("vega_datasets", "20d83c0b22924853972b0dde79cf0b98");
//} else {
//    console.log("Sorry, your browser does not support Web Storage...");
//}
"use strict";
// Legacy bootstrap: define VegaLiteScript as a renderer function only if it
// has not already been defined as a function. (The IIFE at the bottom of
// this file attaches additional helpers to it afterwards.)
if ((typeof (VegaLiteScript) !== typeof (Function))) {
    // iD: DOM id suffix, variableName: kernel variable holding the data,
    // vegalite_spec: Vega-Lite spec object, dataDims: {rows, columns}.
    VegaLiteScript = (iD, variableName, vegalite_spec, dataDims) => {
        // Isolated RequireJS context so the vega module names cannot clash
        // with other libraries loaded by the host page.
        let vega_require = requirejs.config({
            context: "vega",
            paths: {
                "d3-color": "https://d3js.org/d3-color.v1.min",
                "vega": "https://cdn.jsdelivr.net/npm/vega?noext",
                "vega-lite": "https://cdn.jsdelivr.net/npm/vega-lite?noext",
                "vega-embed": "https://cdn.jsdelivr.net/npm/vega-embed?noext",
                "vega-webgl": "https://unpkg.com/vega-webgl-renderer/build/vega-webgl-renderer"
            },
            map: {
                '*': { 'vega-scenegraph': "vega" }
            }
        });
let create2dArray = function(rows, columns) {
return [...Array(rows).keys()].map(i => new Float32Array(columns));
};
let copyDataToBuffer = function(id, csharpVariable) {
const rows = dataDims.rows;
const columns = dataDims.columns;
if (rows === 0 || columns === 0) {
return csharpVariable;
}
const vis_element = document.getElementById(`vis-${id}`);
const canvas = vis_element.firstElementChild;
const gl = canvas.getContext("webgl");
const data = create2dArray(rows, columns);
const buffers = new Array(rows);
for (var i = 0; i < csharpVariable.length; ++i) {
const obj = csharpVariable[i];
var col_index = 0;
for (const key in obj) {
data[i][col_index++] = obj[key];
}
buffers[i] = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffers[i]);
gl.bufferData(gl.ARRAY_BUFFER, data[i], gl.STATIC_DRAW);
}
console.log("gl Buffer Enabled.");
notebookScript[id].buffer = data;
return data;
};
let updateViewDataId = function(view, id, csharpVariable) {
try {
const data = copyDataToBuffer(id, csharpVariable);
view.data(variableName, /*data*/csharpVariable);
} catch (err) {
console.log(err);
}
};
let renderVegaLite = function(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) {
const vlSpec = vegalite_spec;
//const opt = {
// renderer: "webgl",
// logLevel: vegaEmbed.Info
//};
const vgSpec = vegaLite.compile(vlSpec).spec;
var view = new vega.View(vega.parse(vgSpec)).initialize("#vis-" + `${iD}`)
.renderer('webgl')
.run();
return vegaEmbed("#vis-" + `${iD}`, vlSpec, opt);
};
        // Load the vega stack, fetch the data from the kernel, render the
        // chart, then push the fetched data into the resulting view.
        vega_require(["d3-color", "vega", "vega-lite", "vega-embed", "vega-webgl"],
            function(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) {
                // NOTE(review): `interactive` is supplied by the .NET
                // Interactive host at runtime, not defined in this file.
                interactive.csharp.getVariable(variableName).then(function(csharpVariable) {
                    renderVegaLite(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl).then(function(result) {
                        //vega.sceneFromJSON(csharpVariable);
                        updateViewDataId(result.view, iD, csharpVariable);
                    });
                });
            });
    };
    // Entry point used by generated notebook cells.
    function runVegaLite(id, dataName, rows, columns, vegalite_spec) {
        // Simple value object describing the data's dimensions.
        class DataDim {
            constructor(rows, columns) {
                this.rows = rows;
                this.columns = columns;
            }
        }
        const dataDims = new DataDim(rows, columns);
        const spec = vegalite_spec;
        VegaLiteScript(id, dataName, spec, dataDims);
    }
};
// Hoisted declaration: by the time the IIFE below runs, the assignment in
// the block above has already made VegaLiteScript a function.
var VegaLiteScript;
// Attach helper entry points to VegaLiteScript (module-pattern IIFE).
!function(global) {
    // Bootstrap RequireJS if the host page does not already provide it.
    // NOTE(review): the injected <script> loads asynchronously, yet
    // global.requirejs.config is called synchronously below -- if requirejs
    // were genuinely missing this would still throw. Confirm the host
    // always provides RequireJS in practice.
    if ((typeof (global.requirejs) !== typeof (Function)) ||
        (typeof (global.requirejs.config) !== typeof (Function))) {
        var script = document.createElement("script");
        script.setAttribute("type", "text/javascript");
        script.setAttribute("src", "https://cdnjs.cloudflare.com/ajax/libs/require.js/2.3.6/require.min.js");
        document.getElementsByTagName("head")[0].appendChild(script);
    }
    // Isolated RequireJS context for the vega module family.
    let vega_require = global.requirejs.config({
        context: "vega",
        paths: {
            "d3-color": "https://d3js.org/d3-color.v1.min",
            "vega": "https://cdn.jsdelivr.net/npm/vega?noext",
            "vega-lite": "https://cdn.jsdelivr.net/npm/vega-lite?noext",
            "vega-embed": "https://cdn.jsdelivr.net/npm/vega-embed?noext",
            "vega-webgl": "https://unpkg.com/vega-webgl-renderer/build/vega-webgl-renderer"
        },
        map: {
            '*': { 'vega-scenegraph': "vega" }
        }
    });
function create2dArray(rows, columns) {
return [...Array(rows).keys()].map(i => new Float32Array(columns));
}
function copyDataToBuffer(id, csharpVariable, dataDims) {
const rows = dataDims.rows;
const columns = dataDims.columns;
if (rows === 0 || columns === 0) {
return csharpVariable;
}
const vis_element = document.getElementById(`vis-${id}`);
const canvas = vis_element.firstElementChild;
const gl = canvas.getContext("webgl");
const data = create2dArray(rows, columns);
const buffers = new Array(rows);
for (var i = 0; i < csharpVariable.length; ++i) {
const obj = csharpVariable[i];
var col_index = 0;
for (const key in obj) {
data[i][col_index++] = obj[key];
}
buffers[i] = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffers[i]);
gl.bufferData(gl.ARRAY_BUFFER, data[i], gl.STATIC_DRAW);
}
console.log("gl Buffer Enabled.");
return data;
}
function updateViewDataId(view, id, variableName, csharpVariable, dataDims) {
try {
const data = copyDataToBuffer(id, csharpVariable, dataDims);
view.data(variableName, data);
} catch (err) {
console.log(err);
}
}
function renderVegaLiteWebgl(id, vegalite_spec) {
return (d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) => {
const vlSpec = vegalite_spec;
const opt = {
renderer: "webgl",
logLevel: vegaEmbed.Info
};
return vegaEmbed("#vis-" + `${id}`, vlSpec, opt);
};
}
function renderVegaLiteSvg(id, vegalite_spec) {
return (d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) => {
const vlSpec = vegalite_spec;
const opt = {
renderer: "svg",
logLevel: vegaEmbed.Info
};
return vegaEmbed("#vis-" + `${id}`, vlSpec, opt);
};
}
function requireVegaLiteWebgl(id, vegalite_spec, variableName, rows, columns) {
class DataDim {
constructor(rows, columns) {
this.rows = rows;
this.columns = columns;
}
}
const dataDims = new DataDim(rows, columns);
vega_require(["d3-color", "vega", "vega-lite", "vega-embed", "vega-webgl"],
function(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) {
interactive.csharp.getVariable(variableName).then(function(csharpVariable) {
renderVegaLiteWebgl(id, vegalite_spec)(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl).then(function(result) {
updateViewDataId(result.view, id, variableName, csharpVariable, dataDims);
});
});
});
}
VegaLiteScript.requireVegaLiteWebgl = requireVegaLiteWebgl;
    // Renders `vegalite_spec` into #vis-<id> with the SVG renderer; no
    // kernel data is pushed afterwards (the spec's data is inline).
    function requireVegaLiteSvg(id, vegalite_spec) {
        vega_require(["d3-color", "vega", "vega-lite", "vega-embed", "vega-webgl"],
            function(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl) {
                renderVegaLiteSvg(id, vegalite_spec)(d3Color, vega, vegaLite, vegaEmbed, vegaWebgl).then();
            });
    }
    VegaLiteScript.requireVegaLiteSvg = requireVegaLiteSvg;
}(this);
def filter_noisy_data(signal, low=0.0, high=5.0):
    """
    Filters out noisy data points from a time series signal.

    A point is kept when it lies inside the inclusive range [low, high];
    everything else is treated as noise and dropped. The defaults match
    the previous hard-coded range of [0, 5].

    Args:
        signal: an iterable of floats representing a time series signal.
        low: lower bound of the accepted range (default 0.0).
        high: upper bound of the accepted range (default 5.0).

    Returns:
        filtered_data: a list of floats containing the filtered signals.
    """
    return [point for point in signal if low <= point <= high]
<reponame>bowlofstew/blockchain-samples
/*
Copyright (c) 2016 IBM Corporation and other Contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
Contributors:
<NAME> - Initial Contribution
*/
// ************************************
// Rules for Contract
// KL 16 Feb 2016 Initial rules package for contract v2.8
// KL 22 Feb 2016 Add compliance calculation
// KL 09 Mar 2016 Logging replaces printf for v3.1
// KL 12 Mar 2016 Conversion to externally present as alert names
// KL 29 Mar 2016 Fixed subtle bug in OVERTEMP discovered while
// documenting the rules engine for 3.0.5
// ************************************
package main
import (
//"time"
// "strconv"
)
// executeRules runs every active rule against the asset state in the
// receiver, refreshing the alert status in place. It returns true when the
// asset is NON-compliant (at least one alert is active), false otherwise.
func (a *ArgsMap) executeRules(alerts *AlertStatus) (bool) {
	log.Debugf("Executing rules input: %v", *alerts)
	var internal = (*alerts).asAlertStatusInternal()
	// rule 1 -- Create and mod time check
	internal.timeCheck(a)
	// rule 2 -- RPM check: low-speed running risks overheating.
	// NOTE(review): this comment historically said "20% or below" but
	// rpmCheck triggers at <= 30% -- confirm the intended threshold.
	internal.rpmCheck(a)
	// rule 3 -- HVAC Check. If the HVAC is not running, that is an alert scenario
	//internal.hvacCheck(a)
	// now transform internal back to external in order to give the contract the
	// appropriate JSON to send externally
	*alerts = internal.asAlertStatus()
	log.Debugf("Executing rules output: %v", *alerts)
	// set compliance true means out of compliance
	compliant := internal.calculateContractCompliance(a)
	// returns true if anything at all is active (i.e. NOT compliant)
	return !compliant
}
//***********************************
//** RULES **
//***********************************
// timeCheck raises AlertsTIMEERROR when create_date is later than
// last_mod_date (an impossible ordering) and clears it when both fields are
// present and consistent. When either field is missing the alert status is
// deliberately left untouched.
func (alerts *AlertStatusInternal) timeCheck (a *ArgsMap) {
	//var createTime time.Time
	//var modTime time.Time
	/*
	now := time.Now()
	unixNano := now.UnixNano()
	umillisec := unixNano / 1000000 */
	crTime, found := getObject(*a, "create_date")
	mdTime, found2 := getObject(*a, "last_mod_date")
	if found && found2 {
		//modTime= time.Unix(0, msInt*int64(time.Millisecond))
		// Both timestamps arrive as float64 epoch values via the JSON map.
		if crTime.(float64) > mdTime.(float64) {
			alerts.raiseAlert(AlertsTIMEERROR)
			return
		}
		alerts.clearAlert(AlertsTIMEERROR)
	}
}
// TODO(review): original note said this needs to be modified so that, for a
// motor asset, this check is called first -- confirm the required ordering.
//
// rpmCheck raises AlertsRPMERROR when the current rpm is at or below 30% of
// max_rpm (running a motor at low speed risks overheating) and clears it
// otherwise, including when either field is absent.
// NOTE(review): the rule-2 comment in executeRules says "20% or below" but
// the code below checks <= 30 -- confirm which threshold is intended.
func (alerts *AlertStatusInternal) rpmCheck (a *ArgsMap) {
	//Reference : http://www.vfds.in/be-aware-of-vfd-running-in-low-speed-frequency-655982.html
	maxRPM, found := getObject(*a, "max_rpm")
	if found {
		curRPM, found2 := getObject(*a, "rpm")
		if found2 {
			percRPM := (curRPM.(float64)/maxRPM.(float64))*100
			if percRPM <=30 {
				alerts.raiseAlert(AlertsRPMERROR)
				return
			}
		}
	}
	alerts.clearAlert(AlertsRPMERROR)
}
/*
func (alerts *AlertStatusInternal) hvacCheck (a *ArgsMap) {
hvacMode, found := getObject(*a, "hvac_mode")
if found {
tgtTemp, found2 := getObject(*a, "target_temperature_c")
if found2 {
ambTemp, found3 := getObject(*a, "ambient_temperature_c")
if found3 {
if (ambTemp.(float64) >tgtTemp.(float64) && hvacMode =="heat") {
alerts.raiseAlert(AlertsHVACOVERHEAT)
return
}
alerts.clearAlert(AlertsHVACOVERHEAT)
if (ambTemp.(float64) <tgtTemp.(float64) && hvacMode =="cool") {
alerts.raiseAlert(AlertsHVACOVERCOOL)
return
}
alerts.clearAlert(AlertsHVACOVERCOOL)
}
}
}
alerts.clearAlert(AlertsHVACOVERHEAT)
alerts.clearAlert(AlertsHVACOVERCOOL)
}
*/
//***********************************
//** COMPLIANCE **
//***********************************
// calculateContractCompliance reports whether the asset complies with the
// contract: true exactly when no alerts are currently active.
func (alerts *AlertStatusInternal) calculateContractCompliance (a *ArgsMap) (bool) {
	// a simplistic calculation for this particular contract, but has access
	// to the entire state object and can thus have at it
	// compliant is no alerts active
	return alerts.NoAlertsActive()
	// NOTE: There could still be a "cleared" alert, so don't go
	// deleting the alerts from the ledger just on this status.
}
from enum import Enum
from typing import NamedTuple, Optional
from pyrsistent import pmap, pvector
from pyrsistent.typing import PVector, PMap
from typeit import type_constructor
class AppConfig(NamedTuple):
    # One mounted application: its name, URL mount point, and an optional
    # sequence of setup directives (immutable pyrsistent structures).
    name: str
    url_prefix: str
    setup: PVector[PMap] = pvector([])
class Session(NamedTuple):
    # HTTP session cookie settings.
    cookie_name: str
    cookie_secure: bool
    cookie_httponly: bool
class Redis(NamedTuple):
    # Redis connection settings with local-development defaults.
    host: str = '127.0.0.1'
    port: int = 6379
    db: int = 0
    min_connections: int = 1
    max_connections: int = 10
class Postgresql(NamedTuple):
    # PostgreSQL connection settings with local-development defaults.
    user: str = 'solo'
    dbname: str = 'solo'
    password: str = '<PASSWORD>'
    host: str = '127.0.0.1'
    port: int = 5432
    min_connections: int = 1
    max_connections: int = 10
class EventLoopType(Enum):
    # Which event-loop implementation the server runs under.
    ASYNCIO = 'asyncio'
    UVLOOP = 'uvloop'
class Server(NamedTuple):
    # HTTP server settings.
    public_uri: str = 'http://127.0.0.1:8000'
    host: str = '127.0.0.1'
    port: int = 8000
    keep_alive: bool = True
    keep_alive_timeout: int = 30
    # asyncio/uvloop
    event_loop: EventLoopType = EventLoopType.ASYNCIO
class Testing(NamedTuple):
    docker_pull: bool = True
    """ Pull images from registry if they are not available locally yet
    """
class Config(NamedTuple):
    # Root configuration object constructed by typeit's mk_config below.
    server: Server
    session: Session
    apps: PVector[AppConfig] = pvector([])
    logging: PMap = pmap({'version': 1})
    debug: bool = True
    postgresql: Postgresql = Postgresql()
    redis: Redis = Redis()
    testing: Testing = Testing()
# typeit derives a parser (mk_config) and a serialiser (dict_config) pair
# for the Config structure.
mk_config, dict_config = type_constructor ^ Config
|
<filename>src/test/java/org/olat/modules/immunityproof/ImmunityProofDaoTest.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.immunityproof;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

import java.util.Date;
import java.util.List;
import java.util.Random;

import org.junit.Before;
import org.junit.Test;
import org.olat.core.commons.persistence.DB;
import org.olat.core.id.Identity;
import org.olat.core.util.DateUtils;
import org.olat.modules.immunityproof.manager.ImmunityProofDAO;
import org.olat.test.JunitTestHelper;
import org.olat.test.OlatTestCase;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Initial date: 16.09.2021<br>
*
* @author aboeckle, <EMAIL>, http://www.frentix.com
*/
public class ImmunityProofDaoTest extends OlatTestCase {

	private Identity id1;
	private Identity id2;

	@Autowired
	private DB dbInstance;
	@Autowired
	private ImmunityProofDAO immunityProofDAO;

	@Before
	public void setUpTestCase() {
		// Two fresh identities per test so the assertions below cannot
		// collide with data left behind by other test classes.
		Random random = new Random();
		id1 = JunitTestHelper.createAndPersistIdentityAsUser(String.valueOf(random.nextInt()));
		id2 = JunitTestHelper.createAndPersistIdentityAsUser(String.valueOf(random.nextInt()));
	}

	@Test
	public void shouldCreateImmunityProof() {
		ImmunityProof proof = immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		// Use assertNotNull/assertNull instead of assertTrue(x != null)
		// throughout this class for clearer failure messages.
		assertNotNull(proof.getKey());
	}

	@Test
	public void shouldGetProofByIdentity() {
		immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		ImmunityProof loadedProof1 = immunityProofDAO.getImmunitiyProof(id1);
		ImmunityProof loadedProof2 = immunityProofDAO.getImmunitiyProof(id2);

		assertNotNull(loadedProof1);
		assertNull(loadedProof2);
	}

	@Test
	public void shouldGetAll() {
		immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		immunityProofDAO.createImmunityProof(id2, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		List<ImmunityProof> proofs = immunityProofDAO.getAllCertificates();

		// ">=" because other tests may have persisted proofs as well.
		assertTrue(proofs.size() >= 2);
	}

	@Test
	public void shouldGetCount() {
		immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		immunityProofDAO.createImmunityProof(id2, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		long count = immunityProofDAO.getCount();

		assertTrue(count >= 2);
	}

	@Test
	public void shouldDeleteImmunityProofByIdentity() {
		immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		immunityProofDAO.createImmunityProof(id2, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		immunityProofDAO.deleteImmunityProof(id1);

		ImmunityProof proof1 = immunityProofDAO.getImmunitiyProof(id1);
		ImmunityProof proof2 = immunityProofDAO.getImmunitiyProof(id2);

		assertNull(proof1);
		assertNotNull(proof2);
	}

	@Test
	public void shouldDeleteImmunityProof() {
		ImmunityProof proof1 = immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		ImmunityProof proof2 = immunityProofDAO.createImmunityProof(id2, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		immunityProofDAO.deleteImmunityProof(proof1);

		proof1 = immunityProofDAO.getImmunitiyProof(id1);
		proof2 = immunityProofDAO.getImmunitiyProof(id2);

		assertNull(proof1);
		assertNotNull(proof2);
	}

	@Test
	public void shouldPruneProofs() {
		// One proof older than the prune cutoff (-26 days), one newer.
		ImmunityProof proof1 = immunityProofDAO.createImmunityProof(id1, DateUtils.addDays(new Date(), -36), true, true);
		ImmunityProof proof2 = immunityProofDAO.createImmunityProof(id2, DateUtils.addDays(new Date(), -18), true, true);
		dbInstance.commitAndCloseSession();

		immunityProofDAO.pruneImmunityProofs(DateUtils.addDays(new Date(), -26));
		dbInstance.commitAndCloseSession();

		List<ImmunityProof> allProofs = immunityProofDAO.getAllCertificates();

		assertFalse(allProofs.contains(proof1));
		assertTrue(allProofs.contains(proof2));
	}

	@Test
	public void shouldDeleteAllProofs() {
		ImmunityProof proof1 = immunityProofDAO.createImmunityProof(id1, new Date(), true, true);
		ImmunityProof proof2 = immunityProofDAO.createImmunityProof(id2, new Date(), true, true);
		dbInstance.commitAndCloseSession();

		immunityProofDAO.deleteAllImmunityProofs();
		dbInstance.commitAndCloseSession();

		List<ImmunityProof> allProofs = immunityProofDAO.getAllCertificates();

		assertFalse(allProofs.contains(proof1));
		assertFalse(allProofs.contains(proof2));
	}
}
|
<filename>scrapydd/schedule.py
"""
Schedule job status and periodical jobs.
"""
import uuid
import logging
import datetime
import json
from abc import ABC
from typing import List
from apscheduler.schedulers.tornado import TornadoScheduler
from apscheduler.executors.pool import ThreadPoolExecutor
from apscheduler.triggers.cron import CronTrigger
from tornado.ioloop import IOLoop, PeriodicCallback
from sqlalchemy import distinct, desc, func
from six import string_types, ensure_str
import chardet
from .models import Session, Trigger, Spider, Project, SpiderExecutionQueue
from .models import HistoricalJob, session_scope, SpiderSettings, Node
from .exceptions import NodeNotFound, InvalidCronExpression, JobRunning
from .config import Config
from .mail import MailSender
from .storage import ProjectStorage
LOGGER = logging.getLogger(__name__)
# Job lifecycle status codes stored on SpiderExecutionQueue.status and
# HistoricalJob.status (see reset_timeout_job / job_finished for transitions).
JOB_STATUS_PENDING = 0   # queued, waiting for a node to pick it up
JOB_STATUS_RUNNING = 1   # assigned to a node and executing
JOB_STATUS_SUCCESS = 2   # completed without errors
JOB_STATUS_FAIL = 3      # completed with errors (or killed on timeout)
JOB_STATUS_WARNING = 4   # completed, but warnings found in the log
JOB_STATUS_STOPPING = 5  # timed out; node has been asked to stop it
JOB_STATUS_CANCEL = 6    # cancelled via cancel_task()
class JobNotFound(Exception):
    """Raised when a requested job id is not present in the execution queue."""
    pass
class InvalidJobStatus(Exception):
    """Raised when an operation is not allowed for the job's current status."""
    pass
def generate_job_id():
    """
    Generate a unique job id.

    :return: a 32-character lowercase hex string (a random UUID4).
    """
    return uuid.uuid4().hex
class JobObserver(ABC):
    """Observer interface notified whenever a job is moved into history."""

    def on_job_finished(self, job: HistoricalJob):
        """Called after ``job`` has been persisted as a HistoricalJob."""
        pass
class SchedulerManager():
    """Coordinates cron triggers, the spider execution queue and job history."""

    # NOTE(review): class-level mutable list is shared by all instances —
    # looks intentional for a singleton-style manager; confirm before
    # creating multiple SchedulerManager objects.
    _job_observers: List[JobObserver] = []
    def __init__(self, config=None, syncobj=None, scheduler=None):
        """
        :param config: scrapydd ``Config``; a default one is built when omitted.
        :param syncobj: optional cluster-sync object used to mirror schedule
            add/remove events across nodes.
        :param scheduler: optional APScheduler instance (useful for tests);
            by default a TornadoScheduler with a 20-thread pool is created.
        """
        if config is None:
            config = Config()
        self.config = config
        self.project_storage_dir = config.get('project_storage_dir')
        if scheduler:
            self.scheduler = scheduler
        else:
            executors = {
                'default': ThreadPoolExecutor(20),
            }
            self.scheduler = TornadoScheduler(executors=executors)
        self.poll_task_queue_callback = None
        self.pool_task_queue_interval = 10
        self.ioloop = IOLoop.instance()
        # periodic maintenance: trim job history every minute, reset stuck
        # jobs every 10 seconds (started in init())
        self.clear_finished_jobs_callback = PeriodicCallback(self.clear_finished_jobs, 60*1000)
        self.reset_timeout_job_callback = PeriodicCallback(self.reset_timeout_job, 10*1000)
        self.sync_obj = syncobj
        if syncobj is not None:
            # subscribe to schedule changes made by other cluster nodes
            self.sync_obj.set_on_remove_schedule_job(self.on_cluster_remove_scheduling_job)
            self.sync_obj.set_on_add_schedule_job(self.on_cluster_add_scheduling_job)
def init(self):
session = Session()
self._transfer_complete_jobs(session)
# init triggers
triggers = session.query(Trigger)
for trigger in triggers:
try:
self.add_job(trigger.id, trigger.cron_pattern)
except InvalidCronExpression:
LOGGER.warning('Trigger %d,%s cannot be added ',
(trigger.id, trigger.cron_pattern))
session.close()
self.clear_finished_jobs_callback.start()
self.reset_timeout_job_callback.start()
    def _transfer_complete_jobs(self, session):
        """Move queue entries already in a terminal state (SUCCESS=2 / FAIL=3)
        into the HistoricalJob table; called once at startup."""
        # move completed jobs into history
        for job in session.query(SpiderExecutionQueue)\
                .filter(SpiderExecutionQueue.status.in_((2, 3))):
            historical_job = HistoricalJob()
            historical_job.id = job.id
            historical_job.spider_id = job.spider_id
            historical_job.project_name = job.project_name
            historical_job.spider_name = job.spider_name
            historical_job.fire_time = job.fire_time
            historical_job.start_time = job.start_time
            # the last update time of a finished job is its completion time
            historical_job.complete_time = job.update_time
            historical_job.status = job.status
            session.delete(job)
            session.add(historical_job)
        session.commit()
def build_cron_trigger(self, cron):
cron_parts = cron.split(' ')
if len(cron_parts) != 5:
raise InvalidCronExpression()
try:
crontrigger = CronTrigger(minute=cron_parts[0],
hour=cron_parts[1],
day=cron_parts[2],
month=cron_parts[3],
day_of_week=cron_parts[4],
)
return crontrigger
except ValueError:
raise InvalidCronExpression()
def add_job(self, trigger_id, cron):
LOGGER.debug('adding trigger %s %s' % (trigger_id, cron))
crontrigger = self.build_cron_trigger(cron)
job = self.scheduler.add_job(func=self.trigger_fired,
trigger=crontrigger,
kwargs={'trigger_id': trigger_id},
id=str(trigger_id),
replace_existing=True)
if self.sync_obj:
self.ioloop.call_later(0, self.sync_obj.add_schedule_job, trigger_id)
    def on_cluster_remove_scheduling_job(self, job_id):
        """Cluster callback: another node removed a trigger; drop it from the
        local scheduler if present."""
        LOGGER.debug('on_cluster_remove_scheduling_job')
        if self.scheduler.get_job(job_id):
            self.scheduler.remove_job(job_id)
def on_cluster_add_scheduling_job(self, trigger_id):
LOGGER.debug('on_cluster_add_scheduling_job')
with session_scope() as session:
trigger = session.query(Trigger).get(trigger_id)
if trigger is None:
return
crontrigger = self.build_cron_trigger(trigger.cron_pattern)
job = self.scheduler.add_job(func=self.trigger_fired, trigger=crontrigger,
kwargs={'trigger_id': trigger_id},
id=str(trigger_id), replace_existing=True)
def trigger_fired(self, trigger_id):
with session_scope() as session:
trigger = session.query(Trigger).filter_by(id=trigger_id).first()
if not trigger:
LOGGER.error('Trigger %s not found.' % trigger_id)
return
spider = session.query(Spider).filter_by(id=trigger.spider_id).first()
if not spider:
LOGGER.error('Spider %s not found' % spider.name)
return
project = session.query(Project).filter_by(id=spider.project_id).first()
if not project:
LOGGER.error('Project %s not found' % project.name)
return
try:
self.add_spider_task(session, spider)
except JobRunning:
LOGGER.info('Job for spider %s.%s already reach the '
'concurrency limit' % (project.name, spider.name))
    def add_schedule(self, project, spider, cron):
        """Persist a new cron trigger for ``spider`` and register it with the
        scheduler; a no-op when an identical pattern already exists.

        ``project`` is not used in the body — presumably kept for interface
        symmetry with other schedule APIs; confirm before removing.
        """
        with session_scope() as session:
            triggers = session.query(Trigger)\
                .filter(Trigger.spider_id == spider.id)
            found = False
            for trigger in triggers:
                if trigger.cron_pattern == cron:
                    found = True
                    break
            if not found:
                # create a cron_trigger for just validating; raises
                # InvalidCronExpression before anything is persisted
                cron_trigger = self.build_cron_trigger(cron)
                trigger = Trigger()
                trigger.spider_id = spider.id
                trigger.cron_pattern = cron
                session.add(trigger)
                session.commit()
                self.add_job(trigger.id, cron)
    def add_spider_task(self, session, spider, settings=None):
        """Enqueue one run of ``spider`` using the caller's ``session``.

        :param settings: optional dict stored as JSON on the queue entry.
        :return: the committed SpiderExecutionQueue entry.

        NOTE(review): trigger_fired() catches JobRunning around this call,
        but no concurrency check is visible here — confirm where/if it is
        actually raised.
        """
        executing = SpiderExecutionQueue()
        # copy the spider's 'tag' setting so only matching nodes pick it up
        spider_tag_vo = session.query(SpiderSettings)\
            .filter_by(spider_id=spider.id, setting_key='tag').first()
        spider_tag = spider_tag_vo.value if spider_tag_vo else None
        jobid = generate_job_id()
        executing.id = jobid
        executing.spider_id = spider.id
        executing.project_name = spider.project.name
        executing.spider_name = spider.name
        executing.fire_time = datetime.datetime.now()
        executing.update_time = datetime.datetime.now()
        executing.tag = spider_tag
        if settings:
            executing.settings = json.dumps(settings)
        session.add(executing)
        session.commit()
        return executing
    def add_task(self, project_name, spider_name, settings=None):
        """Enqueue one run of a spider addressed by project and spider name.

        Largely duplicates add_spider_task(), but opens its own session and
        refreshes the returned row so it is usable after the session closes.

        NOTE(review): assumes both names exist — a missing project raises
        AttributeError on ``project.id``; confirm callers validate first.

        :param settings: optional dict stored as JSON on the queue entry.
        :return: the committed, refreshed SpiderExecutionQueue entry.
        """
        with session_scope() as session:
            project = session.query(Project)\
                .filter(Project.name == project_name).first()
            spider = session.query(Spider)\
                .filter(Spider.name == spider_name,
                        Spider.project_id == project.id).first()
            executing = SpiderExecutionQueue()
            # copy the spider's 'tag' setting so only matching nodes pick it up
            spider_tag_vo = session.query(SpiderSettings)\
                .filter_by(spider_id=spider.id, setting_key='tag').first()
            spider_tag = spider_tag_vo.value if spider_tag_vo else None
            jobid = generate_job_id()
            executing.id = jobid
            executing.spider_id = spider.id
            executing.project_name = project.name
            executing.spider_name = spider.name
            executing.fire_time = datetime.datetime.now()
            executing.update_time = datetime.datetime.now()
            executing.tag = spider_tag
            if settings:
                executing.settings = json.dumps(settings)
            session.add(executing)
            session.commit()
            session.refresh(executing)
            return executing
def cancel_task(self, job_id):
with session_scope() as session:
job = session.query(SpiderExecutionQueue).get(job_id)
if not job:
raise JobNotFound()
if job.status not in (JOB_STATUS_PENDING,
JOB_STATUS_RUNNING):
raise InvalidJobStatus('Invliad status.')
job.status = JOB_STATUS_CANCEL
job.update_time = datetime.datetime.now()
historical_job = HistoricalJob()
historical_job.id = job.id
historical_job.spider_id = job.spider_id
historical_job.project_name = job.project_name
historical_job.spider_name = job.spider_name
historical_job.fire_time = job.fire_time
historical_job.start_time = job.start_time
historical_job.complete_time = job.update_time
historical_job.status = job.status
session.delete(job)
session.add(historical_job)
session.commit()
session.refresh(historical_job)
    def on_node_expired(self, node_id):
        """A node stopped heartbeating: return all of its RUNNING jobs to the
        pending state so another node can pick them up."""
        session = Session()
        for job in session.query(SpiderExecutionQueue)\
                .filter(SpiderExecutionQueue.node_id == node_id,
                        SpiderExecutionQueue.status == 1):
            # reset to PENDING and clear all node-assignment bookkeeping
            job.status = 0
            job.update_time = datetime.datetime.now()
            job.start_time = None
            job.pid = None
            job.node_id = None
            session.add(job)
        session.commit()
        session.close()
    def jobs(self, session):
        """Return ``(pending, running, finished)`` job lists; ``finished`` is
        capped at the 100 most recently completed HistoricalJob rows."""
        pending = list(session.query(SpiderExecutionQueue)
                       .filter_by(status=JOB_STATUS_PENDING))
        running = list(session.query(SpiderExecutionQueue)
                       .filter_by(status=JOB_STATUS_RUNNING))
        finished = list(session.query(HistoricalJob)
                        .order_by(desc(HistoricalJob.complete_time))
                        .slice(0, 100))
        return pending, running, finished
    def job_start(self, jobid, pid):
        """Record a job's start time and worker pid; the first report wins,
        later calls only refresh ``update_time``.

        NOTE(review): assumes ``jobid`` exists — a missing row would raise
        AttributeError below; confirm callers guarantee this.
        """
        with session_scope() as session:
            job = session.query(SpiderExecutionQueue).filter_by(id=jobid).first()
            if job.start_time is None:
                job.start_time = datetime.datetime.now()
            job.update_time = datetime.datetime.now()
            if job.pid is None and pid:
                job.pid = pid
            session.add(job)
            session.commit()
            # NOTE(review): close() inside session_scope() looks redundant —
            # the context manager presumably closes the session; confirm.
            session.close()
def _regular_agent_tags(self, agent_tags):
if agent_tags is None:
return None
if isinstance(agent_tags, string_types):
return agent_tags.split(',')
return agent_tags
def get_next_task(self, node_id):
"""
Get next task for node_id, if exists, update
the job status, track it with node_id.
:param node_id:
node_id
:return:
the running job
"""
with session_scope() as session:
node = session.query(Node).filter(Node.id == node_id).first()
if not node:
raise NodeNotFound()
node_tags = node.tags
next_task = self._get_next_task(session, node_tags)
if not next_task:
return None
now = self._now()
next_task.start_time = now
next_task.update_time = now
next_task.node_id = node_id
next_task.status = JOB_STATUS_RUNNING
session.add(next_task)
session.commit()
session.refresh(next_task)
return next_task
    def _get_next_task(self, session, agent_tags):
        """Pick the oldest pending job whose spider is below its concurrency
        limit and whose tags match ``agent_tags``.

        :param agent_tags: normalized node tag list (or None).
        :return: a SpiderExecutionQueue row, or None when nothing matches.
        """
        # result = session.query(func.)
        # obj, func.avg(obj.value).label("value_avg")
        # ).group_by(
        # func.strftime('%s', obj.date)
        # ).all()
        # Oldest pending (status=0) entry per spider, ordered by fire time.
        result = session.execute("""
            select * from spider_execution_queue
            join (select min(fire_time) as fire_time, spider_id
                from spider_execution_queue
                where status=0
                group by spider_id
                ) as a
            on spider_execution_queue.fire_time = a.fire_time
            and spider_execution_queue.spider_id = a.spider_id
            order by a.fire_time
            """)
        for job in session.query(SpiderExecutionQueue).instances(result):
            # hard-coded: at most one RUNNING job per spider at a time
            spider_max_concurrency = 1
            spider_concurrency = session.query(
                func.count(SpiderExecutionQueue.id)
            ) \
                .filter(
                    SpiderExecutionQueue.status == JOB_STATUS_RUNNING,
                    SpiderExecutionQueue.spider_id == job.spider_id,
                ) \
                .scalar() or 0
            if spider_concurrency >= spider_max_concurrency:
                continue
            spider_tags = self.get_spider_tags(job.spider, session)
            if self._match_tags(spider_tags, agent_tags):
                return job
        return None
def _match_tags(self, spider_tags, node_tags):
# both empty
if not spider_tags and not node_tags:
return True
# one empty and one not
if not spider_tags or not node_tags:
return False
for spider_tag in spider_tags:
if spider_tag not in node_tags:
return False
return True
def get_spider_tags(self, spider, session):
tags_setting = session.query(SpiderSettings) \
.filter(SpiderSettings.setting_key == 'tag',
SpiderSettings.spider_id == spider.id).first()
if not tags_setting:
return []
if not tags_setting.value:
return []
return [x for x in tags_setting.value.split(',') if x]
    def has_task(self, node_id):
        """Return True when some pending job matches the given node's tags.

        :raises NodeNotFound: when ``node_id`` does not exist.
        """
        with session_scope() as session:
            node = session.query(Node).filter(Node.id == node_id).first()
            if node is None:
                raise NodeNotFound()
            node_tags = self._regular_agent_tags(node.tags)
            next_task = self._get_next_task(session, node_tags)
            return next_task is not None
    def jobs_running(self, node_id, job_ids):
        '''
        Update running jobs for node.
        If any job status is wrong, let node kill it
        :param node_id: reporting node
        :param job_ids: job ids the node claims to be running
        :return: (job_id) to kill
        '''
        jobs_to_kill = []
        with session_scope() as session:
            for job_id in job_ids:
                job = session.query(SpiderExecutionQueue).filter(
                    SpiderExecutionQueue.id == job_id).first()
                if job:
                    # adopt jobs that lost their node assignment
                    if job.node_id is None:
                        job.node_id = node_id
                    # owned by another node, or no longer RUNNING -> kill
                    if job.node_id != node_id or \
                            job.status != 1:
                        jobs_to_kill.append(job.id)
                    else:
                        # heartbeat: mark the job as recently seen
                        job.update_time = self._now()
                        session.add(job)
                else:
                    # unknown job id -> tell the node to kill it
                    jobs_to_kill.append(job_id)
            session.commit()
        return jobs_to_kill
    def job_finished(self, job, log_file=None, items_file=None):
        """Archive a finished job: scan its log for item counts and
        errors/warnings, store log/items via ProjectStorage, move the queue
        entry into HistoricalJob, notify observers and (optionally) mail on
        failure.

        :param job: queue entry whose ``status`` must be SUCCESS or FAIL.
        :param log_file: optional binary file object with the crawl log.
        :param items_file: optional file object with the scraped items.
        :return: the persisted HistoricalJob.
        :raises Exception: when ``job.status`` is not SUCCESS/FAIL.
        """
        session = Session()
        if job.status not in (JOB_STATUS_SUCCESS, JOB_STATUS_FAIL):
            raise Exception('Invalid status.')
        # re-fetch the row in this session, then re-apply the caller's status
        job_status = job.status
        job = session.query(SpiderExecutionQueue).filter_by(id=job.id).first()
        job.status = job_status
        job.update_time = datetime.datetime.now()
        project_storage = ProjectStorage(self.project_storage_dir,
                                         job.spider.project)
        historical_job = HistoricalJob()
        historical_job.id = job.id
        historical_job.spider_id = job.spider_id
        historical_job.project_name = job.project_name
        historical_job.spider_name = job.spider_name
        historical_job.fire_time = job.fire_time
        historical_job.start_time = job.start_time
        historical_job.complete_time = job.update_time
        historical_job.status = job.status
        if log_file:
            #historical_job.log_file = log_file
            # Parse Scrapy's end-of-run stats dump out of the raw log.
            import re
            items_crawled_pattern = re.compile(r"\'item_scraped_count\': (\d+),")
            error_log_pattern = re.compile(r"\'log_count/ERROR\': (\d+),")
            warning_log_pattern = re.compile(r"\'log_count/WARNING\': (\d+),")
            log_file.seek(0)
            log_raw = log_file.read()
            # logs may arrive in arbitrary encodings; sniff before decoding
            log_encoding = chardet.detect(log_raw)['encoding']
            try:
                log_content = ensure_str(log_raw, log_encoding)
                m = items_crawled_pattern.search(log_content)
                if m:
                    historical_job.items_count = int(m.group(1))
                # demote a "successful" run that logged ERRORs to FAIL,
                # then one that logged WARNINGs to WARNING
                m = error_log_pattern.search(log_content)
                if m and historical_job.status == JOB_STATUS_SUCCESS:
                    historical_job.status = JOB_STATUS_FAIL
                m = warning_log_pattern.search(log_content)
                if m and historical_job.status == JOB_STATUS_SUCCESS:
                    historical_job.status = JOB_STATUS_WARNING
                log_file.seek(0)
            except (UnicodeDecodeError, TypeError):
                # use TypeError when detected log_encoding be null.
                LOGGER.warning('Cannot read unicode in log file.')
                log_file.seek(0)
        #if items_file:
        #    historical_job.items_file = items_file
        if items_file:
            items_file.seek(0)
        project_storage.put_job_data(job, log_file, items_file)
        session.delete(job)
        session.add(historical_job)
        session.commit()
        session.refresh(historical_job)
        self.notify_job_finished(historical_job)
        # send mail
        if historical_job.status == JOB_STATUS_FAIL:
            self.try_send_job_failed_mail(historical_job)
        session.close()
        return historical_job
    def _now(self):
        """Current local time; kept as a method so it can be stubbed in tests."""
        return datetime.datetime.now()
    def try_send_job_failed_mail(self, job):
        """Best-effort failure notification: send a mail when the
        'job_fail_send_mail' config flag is set; never raises."""
        LOGGER.debug('try_send_job_failed_mail')
        job_fail_send_mail = self.config.getboolean('job_fail_send_mail')
        if job_fail_send_mail:
            try:
                mail_sender = MailSender(self.config)
                subject = 'scrapydd job failed'
                to_address = self.config.get('job_fail_mail_receiver')
                content = 'bot:%s \r\nspider:%s \r\n job_id:%s \r\n' % (job.spider.project.name,
                                                                        job.spider_name,
                                                                        job.id)
                mail_sender.send(to_addresses=to_address, subject=subject, content=content)
            except Exception as e:
                # deliberate broad catch: mail failure must not break job flow
                LOGGER.error('Error when sending job_fail mail %s' % e)
    def clear_finished_jobs(self):
        """Periodic task: keep only the newest 100 history rows per spider,
        deleting older rows together with their stored log/item files."""
        job_history_limit_each_spider = 100
        with session_scope() as session:
            spiders = list(session.query(distinct(HistoricalJob.spider_id)))
            for row in spiders:
                spider_id = row[0]
                # NOTE(review): inner session_scope shadows the outer
                # `session`; each spider's cleanup runs in its own session.
                with session_scope() as session:
                    over_limitation_jobs = session.query(HistoricalJob)\
                        .filter_by(spider_id=spider_id)\
                        .order_by(desc(HistoricalJob.complete_time))\
                        .slice(job_history_limit_each_spider, 1000)\
                        .all()
                    for over_limitation_job in over_limitation_jobs:
                        self._remove_histical_job(over_limitation_job)
    def reset_timeout_job(self):
        """Periodic task handling stuck jobs:

        * RUNNING past its per-spider 'timeout' setting (default 3600s)
          -> STOPPING (node is asked to stop it);
        * RUNNING but not heartbeating for over a minute -> back to PENDING;
        * STOPPING for more than KILL_TIMEOUT seconds -> archived as FAIL.
        """
        KILL_TIMEOUT = 120
        now = self._now()
        with session_scope() as session:
            for job in session.query(SpiderExecutionQueue)\
                    .filter(SpiderExecutionQueue.status == JOB_STATUS_RUNNING):
                # check job time_out expire start
                # the next status is STOPPING then ERROR
                spider = session.query(Spider).get(job.spider_id)
                job_timeout_setting = session.query(SpiderSettings)\
                    .filter_by(spider_id=spider.id,
                               setting_key='timeout').first()
                job_timeout = int(job_timeout_setting.value) \
                    if job_timeout_setting else 3600
                if now > job.start_time + \
                        datetime.timedelta(seconds=job_timeout):
                    job.status = JOB_STATUS_STOPPING
                    job.update_time = self._now()
                    session.add(job)
                    LOGGER.info('Job %s is running timeout, stopping.', job.id)
                    session.commit()
                    continue
                # expire in not updated in a update_timeout.
                # may be node error, restart it
                # the status is PENDING
                if now > job.update_time + datetime.timedelta(minutes=1):
                    job.status = JOB_STATUS_PENDING
                    job.pid = None
                    job.node_id = None
                    job.update_time = self._now()
                    session.add(job)
                    session.commit()
                    LOGGER.info('Job %s is update timeout, reset.', job.id)
                    continue
            for job in session.query(SpiderExecutionQueue)\
                    .filter(SpiderExecutionQueue.status.in_([JOB_STATUS_STOPPING])):
                if (datetime.datetime.now() - job.start_time).seconds > KILL_TIMEOUT:
                    # job is running too long, should be killed
                    historical_job = HistoricalJob()
                    historical_job.id = job.id
                    historical_job.spider_id = job.spider_id
                    historical_job.project_name = job.project_name
                    historical_job.spider_name = job.spider_name
                    historical_job.fire_time = job.fire_time
                    historical_job.start_time = job.start_time
                    historical_job.complete_time = job.update_time
                    historical_job.status = 3
                    session.delete(job)
                    session.add(historical_job)
                    LOGGER.info('Job %s is timeout, killed.' % job.id)
            session.commit()
    def _remove_histical_job(self, job):
        '''
        Delete one history row together with its stored log/item files.
        (The method name keeps its historical typo; renaming would break
        existing callers.)
        @type job: HistoricalJob
        '''
        with session_scope() as session:
            # re-fetch in this session so delete() works on an attached row
            job = session.query(HistoricalJob).filter(HistoricalJob.id == job.id).first()
            spider = job.spider
            project = spider.project
            project_storage_dir = self.config.get('project_storage_dir')
            project_storage = ProjectStorage(project_storage_dir, project)
            project_storage.delete_job_data(job)
            session.delete(job)
            session.commit()
def remove_schedule(self, spider, trigger_id):
with session_scope() as session:
trigger = session.query(Trigger)\
.filter_by(spider_id=spider.id, id=trigger_id).first()
session.delete(trigger)
if self.scheduler.get_job(str(trigger_id)):
self.scheduler.remove_job(str(trigger.id))
if self.sync_obj:
LOGGER.info('remove_schedule')
self.sync_obj.remove_schedule_job(trigger.id)
    def attach_job_observer(self, observer: JobObserver):
        """Register an observer to be notified when jobs finish."""
        LOGGER.debug('adding observer')
        self._job_observers.append(observer)

    def detach_job_observer(self, observer: JobObserver):
        """Unregister a previously attached observer."""
        LOGGER.debug('deattch')
        self._job_observers.remove(observer)

    def notify_job_finished(self, job):
        """Fan a finished job out to every attached observer."""
        for observer in self._job_observers:
            observer.on_job_finished(job)
def build_scheduler() -> TornadoScheduler:
    """Create a TornadoScheduler backed by a 20-thread pool executor."""
    return TornadoScheduler(executors={'default': ThreadPoolExecutor(20)})
|
import React from 'react'
import { Button } from 'mdbreact'
class VideoManager extends React.Component {
constructor (props) {
super(props)
this.state = {
video: null
}
}
render () {
if (this.props.user) {
if (this.props.user.admin) {
return (
<div>
<select
className="form-control"
onChange={event => {
this.setState({
video: this.props.videoList.find(
video => video._id === event.target.value
)
})
}}
>
<option>Choose a video</option>
{this.props.videoList.map(video => {
return (
<option value={video._id} key={video._id}>
{video.fileName}
</option>
)
})}
</select>
<br />
<EditForm video={this.state.video} />
</div>
)
} else if (this.props.user.hasOwnProperty('videos')) {
return (
<div>
<select
className="form-control"
onChange={event => {
this.setState({
video: this.props.videoList.find(
video => video._id === event.target.value
)
})
}}
>
<option>Choose a video</option>
{this.props.videoList.map(video => {
return this.props.user.videos.includes(video._id) ? (
<option value={video._id} key={video._id}>
{video.fileName}
</option>
) : null
})}
</select>
<br />
<EditForm video={this.state.video} />
</div>
)
}
} else {
return null
}
}
}
// Generic key/value editor for a video document: renders one input per
// (possibly nested) field and PATCHes the accumulated changes to the API.
class EditForm extends React.Component {
  constructor (props) {
    super(props)
    this.EditForm = this.EditForm.bind(this)
    this.renderObject = this.renderObject.bind(this)
    this.patchChanges = this.patchChanges.bind(this)
    this.state = {
      changes: {} // field-path -> new value, sent by patchChanges()
    }
    // _id of the video the pending changes belong to; reset on video switch
    this.id = 0
  }

  // Builds a flat array of <h6> label / <input> pairs for each primitive
  // field of `video`, recursing into nested objects via renderObject().
  EditForm (video) {
    let inputArray = []
    for (let key in video) {
      if (typeof video[key] !== 'object') {
        inputArray.push(<h6>{key}</h6>)
        inputArray.push(
          <input
            className="form-control"
            key={video._id + '.' + video[key]}
            id={key}
            name={key}
            placeholder={key}
            defaultValue={video[key]}
          />
        )
      } else {
        // nested object or array: render its leaves with dotted-path ids
        let resultArray = this.renderObject(video[key], key, video._id)
        inputArray = [...inputArray, ...resultArray]
      }
    }
    return inputArray
  }

  // Recursively renders the leaves of a nested object. Numeric keys (array
  // indices) get an <h3> heading, named keys an <h5>. Each input's id is the
  // dotted path (e.g. "meta.tags.0") so onChange can record it.
  renderObject (object, objectName, id, resultArray = []) {
    for (let key in object) {
      if (typeof object[key] === 'object') {
        if (!isNaN(key)) {
          resultArray.push(
            <h3 key={objectName + '.' + key}>{objectName + key}</h3>
          )
        } else {
          resultArray.push(<h5 key={objectName + '.' + key}>{key}</h5>)
        }
        this.renderObject(object[key], objectName + '.' + key, id, resultArray)
        resultArray.push(<br />)
        resultArray.push(<br />)
      } else {
        resultArray.push(<h6>{key}</h6>)
        resultArray.push(
          <input
            className="form-control"
            id={objectName + '.' + key}
            key={id + objectName + '.' + key + '.' + object[key]}
            name={objectName + '.' + key}
            placeholder={key}
            defaultValue={object[key]}
          />
        )
      }
    }
    return resultArray
  }

  // PATCH the accumulated changes for the currently selected video.
  // NOTE(review): fire-and-forget — the response is not checked and
  // this.state.changes is not cleared afterwards; confirm that's intended.
  patchChanges () {
    if (this.props.video) {
      fetch('https://video.laaksonen.me/api/videos/' + this.props.video._id, {
        method: 'PATCH',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify(this.state.changes)
      })
    }
  }

  render () {
    return (
      <div>
        <form
          onChange={event => {
            // record the edited field under its dotted-path id; events only
            // fire when inputs exist, i.e. when a video is selected
            let key = event.target.id
            let value = event.target.value
            let object
            if (this.id === this.props.video._id) {
              // still editing the same video: extend the pending change set
              object = Object.assign({}, this.state.changes)
            } else {
              // switched videos: start a fresh change set
              object = {}
              this.id = this.props.video._id
            }
            object[key] = value
            this.setState({ changes: object })
          }}
        >
          {this.props.video ? this.EditForm(this.props.video) : null}
        </form>
        <Button onClick={() => this.patchChanges()}>Save</Button>
      </div>
    )
  }
}
export default VideoManager
|
#model.py
from django.db import models
class Book(models.Model):
    """A book with a title and the name of its publisher."""
    title = models.CharField(max_length=200)
    publisher = models.CharField(max_length=200)
#views.py
from django.shortcuts import render, get_object_or_404
from books.models import Book
def book_list(request):
    """Render the list page with all books."""
    books = Book.objects.all()
    return render(request, 'books/book_list.html', {'books': books})
def book_detail(request, pk):
    """Render the detail page for a single book.

    FIX: use get_object_or_404 so a missing primary key yields an HTTP 404
    instead of an unhandled Book.DoesNotExist (HTTP 500).
    """
    book = get_object_or_404(Book, pk=pk)
    return render(request, 'books/book_detail.html', {'book': book})
#urls.py
from django.urls import path
from books.views import book_list, book_detail
# URL routes for the books app: list page and per-book detail page.
urlpatterns = [
    path('books/', book_list, name='book_list'),
    path('books/<int:pk>/', book_detail, name='book_detail'),
]
#!/bin/bash
# Run a single dieharder randomness test with a fixed seed for
# reproducibility: -d 9 selects the test, -g 39 the generator,
# -S 503490415 the RNG seed (see dieharder(1) for the numeric ids).
dieharder -d 9 -g 39 -S 503490415
|
# IDIOM: a shallow copy of an iterable is spelled list(L), not an
# element-by-element comprehension.
L1 = list(L)
|
#!/bin/sh
# Run "dep lik:commit" from the config directory next to this script,
# forwarding all arguments.
# FIX: use $(...) with a quoted "$0" so paths containing spaces work, and
# abort if cd fails instead of running the command in the wrong directory.
cd "$(dirname "$0")/config" || exit 1
dep lik:commit "$@"
import refreshToken from "./refreshToken";
// Re-export refreshToken under a single default object so consumers can
// import this module's helpers from one place.
export default {
  refreshToken,
};
package info.fetter.logstashforwarder;
/*
* Copyright 2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
/**
 * A log event: a mutable bag of string-keyed fields whose values are
 * strings or longs.
 */
public class Event {
    /** Backing store for the event's fields. */
    private Map<String,Object> keyValues = new HashMap<String,Object>(10);

    /** Creates an empty event. */
    public Event() {
    }

    /**
     * Copy constructor. A {@code null} argument yields an empty event.
     *
     * @param event event whose fields are copied (may be null)
     */
    public Event(Event event) {
        if(event != null) {
            keyValues.putAll(event.keyValues);
        }
    }

    /**
     * Creates an event pre-populated from string fields.
     *
     * @param fields key/value pairs to add
     * @throws UnsupportedEncodingException propagated from addField
     */
    public Event(Map<String,String> fields) throws UnsupportedEncodingException {
        // IDIOM FIX: iterate entrySet() instead of keySet()+get(), avoiding a
        // second hash lookup per key.
        for(Map.Entry<String,String> field : fields.entrySet()) {
            addField(field.getKey(), field.getValue());
        }
    }

    /**
     * Adds (or replaces) a string field.
     *
     * @return this event, for chaining
     * @throws UnsupportedEncodingException never thrown here; kept in the
     *         signature for source compatibility with existing callers
     */
    public Event addField(String key, String value) throws UnsupportedEncodingException {
        keyValues.put(key, value);
        return this;
    }

    /**
     * Adds (or replaces) a numeric field (auto-boxed to {@link Long}).
     *
     * @return this event, for chaining
     * @throws UnsupportedEncodingException never thrown here; kept in the
     *         signature for source compatibility with existing callers
     */
    public Event addField(String key, long value) throws UnsupportedEncodingException {
        keyValues.put(key, value);
        return this;
    }

    /** @return the live backing map of this event's fields */
    public Map<String,Object> getKeyValues() {
        return keyValues;
    }

    /** @return the value stored under {@code fieldName}, or null if absent */
    public Object getValue(String fieldName) {
        return keyValues.get(fieldName);
    }
}
|
<!DOCTYPE html>
<html>
<head>
<style>
div{
position:relative;
width:400px;
height:400px;
background:#CCC;
padding:10px;
}
/*Create a style for the ball by assigning css properties of position, width, height, border and background-color*/
#animation_ball{
position:absolute;
width:20px;
height:20px;
border-radius:20px;
background-color:white;
}
</style>
<script>
// Create variable to define initial position and number of steps
var position=0;
var step=1;
// Function to move the box
// Function to move the ball one animation step (called every 30 ms)
function moveBall() {
    // Get the ball element (looked up each tick; fine for a single element)
    var ball = document.getElementById("animation_ball");
    // Move the ball horizontally by the current step
    position = position + step;
    ball.style.left = position + 'px';
    // Past the right edge: reverse direction (returns at half speed)
    if(position >200) {
        step=-0.5;
    }
    // Past the left edge: move right again at full speed
    if(position <0) {
        step=1;
    }
}
// Call the moveBall() function every 30 milliseconds
setInterval(moveBall, 30);
</script>
</head>
<body>
<div>
<div id="animation_ball"></div>
</div>
</body>
</html> |
#!/bin/bash
###-- this file mainly install applications for liuyc's devices --###

###-- update and upgrade --###
sudo apt-get update
sudo apt-get upgrade

###-- install vim --###
sudo apt-get install vim-gnome # (with clipboard function)
# put "set clipboard=unnamed" in your .vimrc and restart your vim sessions.

###-- install snapd --###
# after Ubuntu 16.04 snapd is included
# sudo apt update
# sudo apt install snapd

###-- chromium --###
sudo snap install chromium

###-- visual studio code --###
sudo snap install vscode --classic

###-- slack --###
sudo snap install slack --classic

###-- LaTex (Texmaker) --###
sudo apt-get install texlive-full
sudo apt-get install texmaker

###--LaTeX equation on LibreOffice Impress--###
###-----------------------------------------###
#https://extensions.libreoffice.org/extensions/texmaths-1
#Note: restart after adding the extension

###-- mendeley --###
#https://www.mendeley.com/download-desktop/Linux/#downloading
#sudo apt-get install mendeleydesktop #update automatically

###-- minecraft --###
sudo snap install minecraft

###-- chinese text input --###
sudo apt-get install ibus-chewing # Chewing (新酷音) input method

###-- f.lux --###
# f.lux makes your computer screen look like the room you're in, all the time.
sudo add-apt-repository ppa:nathan-renniewaldock/flux
sudo apt-get update
sudo apt-get install fluxgui
|
# Cancel all of user adbadre's Slurm jobs whose squeue line contains "759"
# anywhere (e.g. job ids starting with 759): take the first column (job id)
# and pass each one to scancel.
squeue -u adbadre | grep 759 | awk '{print $1}' | xargs -n 1 scancel
|
#!/usr/bin/env bash
# CI driver script: shared setup, then run_* entry points below.
set -ex
# Prefer user-local installs when present
if [ -d "$HOME/.local/bin" ]; then
  export PATH="$HOME/.local/bin:$PATH"
fi
SRC_ROOT=${SRC_ROOT:-"${PWD}"}
PYTHON=${PYTHON:-"python3"}
# Inside a virtualenv the interpreter is simply "python"
if [ "x${VIRTUAL_ENV}" != "x" ]; then
  PYTHON="python"
fi
# Temp dirs/files created along the way — collected here
# (NOTE(review): cleanup of TEMP_DIRS is not visible in this part of the file)
TEMP_DIRS=()
# Copy temporary fixes to a temporary directory in case we change branches
TEMPFIX="$(mktemp -d)"
TEMP_DIRS+=("${TEMPFIX}")
cp -r ${SRC_ROOT}/scripts/tempfix/* "${TEMPFIX}/"
python_version="$(${PYTHON} -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')"
# Run the unittest suite in the current plugin's examples/ directory,
# installing its requirements first; no-op when the directory is absent.
function run_plugin_examples() {
  if [ ! -d "${SRC_ROOT}/${PLUGIN}/examples" ]; then
    return
  fi
  cd "${SRC_ROOT}/${PLUGIN}/examples"
  if [ -f "requirements.txt" ]; then
    "${PYTHON}" -m pip install -r requirements.txt
  fi
  "${PYTHON}" -m unittest discover -v
  cd "${SRC_ROOT}/${PLUGIN}"
}
# Run unit tests under coverage and fail the build when any test skipped,
# errored, failed, or a coroutine was left un-awaited.
# Args: optional unittest arguments; defaults to "discover -v".
test_no_skips() {
  # Log skipped tests to file
  check_skips="$(mktemp)"
  TEMP_DIRS+=("${check_skips}")
  # Run all if nothing given
  if [ "x$@" == "x" ]; then
    UNITTEST_ARGS="discover -v"
  else
    UNITTEST_ARGS=$@
  fi
  # Run with coverage; tee output so it can be grepped afterwards
  TEST_DOCS=1 "${PYTHON}" -u -m coverage run -m unittest $UNITTEST_ARGS 2>&1 | tee "${check_skips}"
  "${PYTHON}" -m coverage report -m
  # Fail if any coroutines were not awaited
  unawaited=$(grep -nE 'coroutine .* was never awaited' "${check_skips}" | wc -l)
  if [ "$unawaited" -ne 0 ]; then
    echo "Found un-awaited coroutines" >&2
    exit 1
  fi
  # Fail if any tests were skipped or errored (unittest summary is the
  # last output line, e.g. "OK (skipped=2)")
  skipped=$(tail -n 1 "${check_skips}" | grep -E '(skipped=[0-9]+)' | wc -l)
  if [ "$skipped" -ne 0 ]; then
    echo "Tests were skipped" >&2
    exit 1
  fi
  errors=$(grep -E '(errors=[0-9]+)' "${check_skips}" | wc -l)
  if [ "$errors" -ne 0 ]; then
    echo "Tests errored" >&2
    exit 1
  fi
  failures=$(grep -E '(failures=[0-9]+)' "${check_skips}" | wc -l)
  if [ "$failures" -ne 0 ]; then
    echo "Tests failed" >&2
    exit 1
  fi
}
# Install and test one plugin directory; when called with "." (the repo
# root), additionally run the plugin-creation smoke tests, install every
# plugin and — on release branches under GitHub Actions — cut a release.
function run_plugin() {
  export PLUGIN="${1}"
  cd "${SRC_ROOT}/${PLUGIN}"
  # Install plugin
  "${PYTHON}" -m pip install -U -e .[dev]
  if [ "x${PLUGIN}" != "x." ]; then
    # Test ensuring no tests were skipped
    test_no_skips
    # Run examples if they exist and we aren't at the root
    run_plugin_examples
  else
    # If we are at the root. Install plugsin and run various integration tests
    # Run the tests but not the long documentation consoletests
    "${PYTHON}" -u -m unittest discover -v
    # Try running create command
    plugin_creation_dir="$(mktemp -d)"
    TEMP_DIRS+=("${plugin_creation_dir}")
    cd "${plugin_creation_dir}"
    # Plugins we know how to make
    PLUGINS=(\
      "model" \
      "operations" \
      "service" \
      "source" \
      "config")
    for plugin in ${PLUGINS[@]}; do
      # scaffold, install and test a throwaway plugin of each kind
      dffml service dev create "${plugin}" "ci-test-${plugin}"
      cd "ci-test-${plugin}"
      "${PYTHON}" -m pip install -U .
      "${PYTHON}" -m unittest discover -v
      cd "${plugin_creation_dir}"
    done
    # Install all the plugins so examples can use them
    "${PYTHON}" -m dffml service dev install
    # Run the examples
    run_plugin_examples
    # Test ensuring no tests were skipped
    test_no_skips
  fi
  cd "${SRC_ROOT}"
  # Report installed versions of packages
  "${PYTHON}" -m pip freeze
  # On a version-tagged branch (x.y.z) under GitHub Actions, make a release
  if [[ "x${GITHUB_ACTIONS}" == "xtrue" ]] && \
     [[ "x${GITHUB_REF}" =~ xrefs/heads/[a-zA-Z0-9]*\.[a-zA-Z0-9]*\.[a-zA-Z0-9]* ]]; then
    git status
    dffml service dev release "${PLUGIN}"
  fi
}
# Run the consoletest for one documentation page: maps a docs/ path like
# docs/foo/bar.rst to test method test_foo_bar and runs it via test_no_skips.
function run_consoletest() {
  # strip "docs/" prefix, turn path separators into '_', drop ".rst"
  export PLUGIN="${1/docs\//}"
  export PLUGIN="${PLUGIN//\//_}"
  export PLUGIN="${PLUGIN/\.rst/}"
  cd "${SRC_ROOT}"
  # Log tests to file
  test_log="$(mktemp)"
  TEMP_DIRS+=("${test_log}")
  # Install base package with testing and development utilities
  "${PYTHON}" -m pip install -U -e ".[dev]"
  test_no_skips -v "tests.docs.test_consoletest.TestDocs.test_${PLUGIN}"
  cd "${SRC_ROOT}"
  git status
}
# Ensure pull requests add at least one line to CHANGELOG.md.
function run_changelog() {
  # Only run this check on pull requests
  if [ "x$GITHUB_EVENT_NAME" != "xpull_request" ]; then
    exit 0
  fi
  # Ensure the number of lines added in the changelog is not 0
  added_to_changelog=$(git diff origin/master --numstat -- CHANGELOG.md \
    | awk '{print $1}')
  if [ "x$added_to_changelog" == "x" ] || [ "$added_to_changelog" -eq 0 ]; then
    echo "No changes to CHANGELOG.md" >&2
    exit 1
  fi
}
# Fail the build when any .py/.rst/.md file contains trailing whitespace.
function run_whitespace() {
  export whitespace=$(mktemp -u)
  function rmtempfile () {
    rm -f "$whitespace"
  }
  trap rmtempfile EXIT
  # BUG FIX: without \( \), find's -a binds tighter than -o, so the -exec
  # applied only to the final '-name *.md' alternative — the .py and .rst
  # patterns were never grepped.
  # BUG FIX: '2>&1 > file' sends stderr to the terminal, not the file; the
  # order must be '> file 2>&1' to capture both streams.
  find . -type f \( -name '*.py' -o -name '*.rst' -o -name '*.md' \) \
    -exec grep -EHn " +$" {} \; > "$whitespace" 2>&1
  lines=$(wc -l < "$whitespace")
  if [ "$lines" -ne 0 ]; then
    echo "Trailing whitespace found" >&2
    cat "${whitespace}" >&2
    exit 1
  fi
}
# Check formatting: Python via black, JavaScript via js-beautify diff.
function run_style() {
  black --check "${SRC_ROOT}"
  for filename in $(git ls-files \*.js); do
    # FIX: the loop body contained corrupted "$(unknown)" placeholders; it
    # must reference the loop variable "$filename".
    echo "Checking JavaScript file '${filename}'"
    # non-empty diff (file not already beautified) fails the build via set -e
    diff <(js-beautify -n -s 2 "${filename}") "${filename}"
  done
}
# Lint commit messages on every branch except master.
function run_commit(){
  # third field of refs/heads/<branch>
  BRANCH="$(echo $GITHUB_REF | cut -d'/' -f 3)"
  echo "On Branch: ${BRANCH}"
  if [[ "$BRANCH" != "master" ]]; then
    dffml service dev lint commits
  fi
}
# Fail when the import linter modified any tracked file (i.e. it found and
# removed unused imports); a clean working tree means all imports are used.
function run_imports(){
  dffml service dev lint imports
  if [[ -z $(git status -s) ]]
  then
    echo "Yay ! No unused imports found"
  else
    echo "There maybe unused imports in the following files:"
    # NOTE(review): grep "M" matches an M anywhere in the status line, not
    # just the modified-flag column — confirm that is acceptable here
    git status -s | grep "M" | awk '{print $2}'
    exit 1
  fi
}
function run_docs() {
  # Builds the combined documentation site (master docs nested under the
  # last release's docs, plus the webui build) and force-pushes it to the
  # gh-pages branch. Requires network access; pushing additionally needs
  # the base32-encoded SSH_DFFML_GH_PAGES deploy key in the environment.
  export GIT_SSH_COMMAND='ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
  cd "${SRC_ROOT}"
  "${PYTHON}" -m pip install --prefix=~/.local -U -e "${SRC_ROOT}[dev]"
  "${PYTHON}" -m dffml service dev install -user
  # Most recent non-rc VERSION value ever committed to dffml/version.py.
  last_release=$(git log -p -- dffml/version.py \
    | grep \+VERSION \
    | grep -v rc \
    | sed -e 's/.* = "//g' -e 's/"//g' \
    | head -n 1)
  # Fail if there are any changes to the Git repo
  changes=$(git status --porcelain | wc -l)
  if [ "$changes" -ne 0 ]; then
    echo "Running docs.py resulted in changes to the Git repo" >&2
    echo "Need to run dffml service dev docs and commit changes" >&2
    exit 1
  fi
  # Make master docs
  master_docs="$(mktemp -d)"
  TEMP_DIRS+=("${master_docs}")
  rm -rf pages
  dffml service dev docs || ./scripts/docs.sh
  mv pages "${master_docs}/html"
  # Make last release docs
  release_docs="$(mktemp -d)"
  TEMP_DIRS+=("${release_docs}")
  rm -rf pages
  git clean -fdx
  git reset --hard HEAD
  echo "Checking out last release ${last_release}"
  git checkout "${last_release}"
  git clean -fdx
  git reset --hard HEAD
  # Uninstall dffml
  "${PYTHON}" -m pip uninstall -y dffml
  # Remove .local to force install of correct dependency versions
  rm -rf ~/.local
  "${PYTHON}" -m pip install --prefix=~/.local -U -e "${SRC_ROOT}[dev]"
  "${PYTHON}" -m dffml service dev install -user
  dffml service dev docs || ./scripts/docs.sh
  mv pages "${release_docs}/html"
  # Graft the existing gh-pages history onto the freshly built release
  # docs so the final force-push retains the branch's commit history.
  git clone https://github.com/intel/dffml -b gh-pages \
    "${release_docs}/old-gh-pages-branch"
  mv "${release_docs}/old-gh-pages-branch/.git" "${release_docs}/html/"
  mv "${master_docs}/html" "${release_docs}/html/master"
  # Make webui
  git clone https://github.com/intel/dffml -b webui "${release_docs}/webui"
  cd "${release_docs}/webui/service/webui/webui"
  yarn install
  yarn build
  mv build/ "${release_docs}/html/master/webui"
  cd "${release_docs}/html"
  git config user.name 'John Andersen'
  git config user.email 'johnandersenpdx@gmail.com'
  git add -A
  git commit -sam "docs: $(date)"
  # Don't push docs unless we're running on master
  if [ "x${GITHUB_ACTIONS}" == "xtrue" ] && [ "x${GITHUB_REF}" != "xrefs/heads/master" ]; then
    return
  fi
  # Materialize the deploy key from the base32-encoded secret and point
  # git's SSH at it for the push below.
  ssh_key_dir="$(mktemp -d)"
  TEMP_DIRS+=("${ssh_key_dir}")
  mkdir -p ~/.ssh
  chmod 700 ~/.ssh
  "${PYTHON}" -c "import pathlib, base64, os; keyfile = pathlib.Path(\"${ssh_key_dir}/github\").absolute(); keyfile.write_bytes(b''); keyfile.chmod(0o600); keyfile.write_bytes(base64.b32decode(os.environ['SSH_DFFML_GH_PAGES']))"
  ssh-keygen -y -f "${ssh_key_dir}/github" > "${ssh_key_dir}/github.pub"
  export GIT_SSH_COMMAND="${GIT_SSH_COMMAND} -o IdentityFile=${ssh_key_dir}/github"
  git remote set-url origin git@github.com:intel/dffml
  git push -f
  cd -
  # Restore the checkout to master for any follow-on CI steps.
  git reset --hard HEAD
  git checkout master
}
function run_lines() {
  # Verify docs' literalinclude line references are still in sync.
  "${PYTHON}" "./scripts/check_literalincludes.py"
}
function run_container() {
  # Build the image, then smoke-test the CLI inside it with two commands.
  docker build --build-arg DFFML_RELEASE=master -t intelotc/dffml .
  for cmd in "version" "service dev entrypoints list dffml.model"; do
    # Intentionally unquoted so the command string word-splits into args.
    docker run --rm intelotc/dffml ${cmd}
  done
}
function cleanup_temp_dirs() {
  # Honor NO_RM_TEMP for debugging: leave temporary files in place.
  if [ "x${NO_RM_TEMP}" != "x" ]; then
    return
  fi
  # Quote the array expansion so a path containing whitespace is not
  # word-split into multiple rm arguments (the original was unquoted).
  for temp_dir in "${TEMP_DIRS[@]}"; do
    rm -rf "${temp_dir}"
  done
}
# Clean up temporary directories on exit
trap cleanup_temp_dirs EXIT

# Dispatch on the first argument. A directory argument means "run the
# test suite for that plugin"; anything else is an error.
case "${1}" in
  changelog)   run_changelog ;;
  whitespace)  run_whitespace ;;
  style)       run_style ;;
  commit)      run_commit ;;
  import)      run_imports ;;
  docs)        run_docs ;;
  lines)       run_lines ;;
  container)   run_container ;;
  consoletest) run_consoletest "${2}" ;;
  *)
    if [ -d "${1}" ]; then
      run_plugin "${1}"
    else
      # Error goes to stderr; the original '2>&1' redirected stderr to
      # stdout instead of sending the message to stderr.
      echo "Not sure what to do" >&2
      exit 1
    fi
    ;;
esac
|
#!/bin/bash
# Build the gateway image and optionally tag/push it to ${DESTINATION}
# (defaults to docker.io) when --push is passed anywhere on the command
# line.
VERSION=1.13.2.r0
DESTINATION=${DESTINATION:-docker.io}
IMAGE=barkbay/gateway:${VERSION}
docker build . -t "${IMAGE}"
# "$*" joins all arguments; the glob *--push* matches it as a substring.
# (The original unquoted $@ with the redundant **--push** pattern relied
# on the same substring match.)
if [[ "$*" == *--push* ]]
then
  docker tag "${IMAGE}" "${DESTINATION}/${IMAGE}"
  docker push "${DESTINATION}/${IMAGE}"
fi
|
module.exports = function(sails, _){
return {
sendRequest: require('supertest')(sails.hooks.http.app) // setup our "browser" instance
};
};
|
#!/bin/sh
### General options
### –- specify queue -- (gpuv100/gpua100) --
#BSUB -q gpuv100
### -- set the job Name --
# Shell-side copy of the job name; the scheduler itself reads #BSUB -J.
JOB="danet"
#BSUB -J "danet"
### -- ask for number of cores (default: 1) --
#BSUB -n 24
#BSUB -R "span[ptile=24]"
### -- Select the resources: 2 GPUs in exclusive process mode --
#BSUB -gpu "num=2:mode=exclusive_process"
### -- specify gpu memory
#BSUB -R "select[gpu32gb]"
### -- set walltime limit: hh:mm -- maximum 24 hours for GPU-queues right now
#BSUB -W 24:00
# request 10GB of system-memory
#BSUB -R "rusage[mem=10GB]"
### -- set the email address --
# please uncomment the following line and put in your e-mail address,
# if you want to receive e-mail notifications on a non-default address
##BSUB -u s210203@student.dtu.dk
### -- send notification at start --
##BSUB -B
### -- send notification at completion--
##BSUB -N
### -- Specify the output and error file. %J is the job-id --
### -- -o and -e mean append, -oo and -eo mean overwrite --
#BSUB -o gpu_%J.out
#BSUB -e gpu_%J.err
# -- end of LSF options --
# Log the allocated GPUs at job start.
nvidia-smi
|
#!/bin/bash
########################################################################################################################
# Run this to tar the static cloud directory to logs/cloud_copy/static_cloud_copy_<timestamp>.tar.gz so that it can be
# copied to the hosting server.
########################################################################################################################
set -e # Stop script if any command fails
ORIGINAL_WORKING_DIR="$(pwd)"
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
source "${SCRIPT_DIR}/server_config_vars.sh"
cd "${PROJECT_DIR}" || exit 1
CLOUD_ARCHIVE_DIR="${PROJECT_LOG_DIR}/static_cloud_archives"
if [ ! -d "${CLOUD_ARCHIVE_DIR}" ]; then
  mkdir -p "${CLOUD_ARCHIVE_DIR}"
fi
cd "${PROJECT_STATIC_CLOUD_DIR}" || exit 1
ARCHIVE_PATH="${CLOUD_ARCHIVE_DIR}/static_cloud_${FILENAME_DATETIME_NOW_SUFFIX}.tar.gz"
# Stream NUL-delimited paths straight into tar instead of building a
# quoted string for xargs: filenames containing quotes, spaces or
# newlines no longer break the archive step.
find -L . -mindepth 1 '(' -type f -regextype posix-extended -regex '^.*\.(jpg|jpeg|png|ttf|woff|js|css|xml|txt|pdf|json|ico)$' ')' -printf "%P\0" \
  | tar --null -czvf "${ARCHIVE_PATH}" --mode=644 -T -
cd "${PROJECT_DIR}" || exit 1
cd "${ORIGINAL_WORKING_DIR}" || exit 1
<reponame>achamberland/vWallet<gh_stars>10-100
import IContact from './IContact'
import { computed, action, observable } from 'mobx'
export default class Contact implements IContact {
  @observable name: string
  @observable address: string

  // Rebuild a Contact from a plain JSON object, tolerating a missing
  // object or missing fields (they fall back to empty strings).
  static dezerializeJSON(object: any): Contact {
    const contact = new Contact()
    if (object) {
      if (object.name) contact.name = object.name
      if (object.address) contact.address = object.address
    }
    return contact
  }

  constructor(name?: string, address?: string) {
    this.name = name || ''
    this.address = address || ''
  }

  // Stable identifier: base64 of address concatenated with name.
  @computed
  get id() {
    return btoa(this.address + this.name)
  }

  @computed
  get fullname() {
    return `${this.name}`
  }

  @action
  setName(name: string) {
    this.name = name
  }

  @action
  setAddress(address: string) {
    this.address = address
  }

  // Avatar label: at most the first two words of the name.
  getShortenedName(): string {
    const words = this.name.split(' ')
    return words.length >= 2 ? `${words[0]} ${words[1]}` : this.name
  }
}
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.floody.service;
import com.google.api.services.drive.Drive;
import com.google.api.services.drive.model.Permission;
import com.google.common.flogger.GoogleLogger;
import com.google.common.flogger.StackSize;
import java.io.IOException;
import java.util.function.Predicate;
/**
 * Checks a user's role on a Google Sheets file via the Drive permissions API.
 *
 * <p>Roles are cumulative: asking for a role also accepts every more
 * privileged role (e.g. READER is satisfied by commenter/writer/owner).
 */
public class UserSpreadsheetRoleChecker {

  /** Drive roles ordered from most to least privileged. */
  public enum SpreadsheetRole {
    OWNER,
    EDITOR,
    COMMENTER,
    READER
  }

  private static final GoogleLogger logger = GoogleLogger.forEnclosingClass();

  private final Drive driveService;
  private final String userEmail;

  public UserSpreadsheetRoleChecker(Drive driveService, String userEmail) {
    this.driveService = driveService;
    this.userEmail = userEmail;
  }

  /** Checks if user has editor or owner permission on the spreadsheet. */
  public boolean isUserEditor(String spreadsheetId) {
    return new UserSheetAccessVerifier(spreadsheetId)
        .checkUserPermission(buildPermissionPredicate(SpreadsheetRole.EDITOR));
  }

  /** Checks if the user has any access (reader or better) to the spreadsheet. */
  public boolean isUserAccess(String spreadsheetId) {
    return new UserSheetAccessVerifier(spreadsheetId)
        .checkUserPermission(buildPermissionPredicate(SpreadsheetRole.READER));
  }

  /**
   * Builds a predicate that matches the given role or any more privileged
   * Drive role string.
   *
   * @param role the minimum role required
   * @return predicate over Drive {@link Permission} objects
   */
  public static Predicate<Permission> buildPermissionPredicate(SpreadsheetRole role) {
    Predicate<Permission> permissionPredicate = (Permission permission) -> false;
    // Deliberate cumulative fall-through: each case ORs in its own role
    // and then falls into the next (more privileged) one.
    switch (role) {
      case READER:
        permissionPredicate = permissionPredicate.or(buildPermissionFor("reader"));
        // fall through
      case COMMENTER:
        permissionPredicate = permissionPredicate.or(buildPermissionFor("commenter"));
        // fall through
      case EDITOR:
        permissionPredicate = permissionPredicate.or(buildPermissionFor("writer"));
        // fall through
      case OWNER:
        permissionPredicate = permissionPredicate.or(buildPermissionFor("owner"));
    }
    return permissionPredicate;
  }

  private static Predicate<Permission> buildPermissionFor(String role) {
    // Compare with the literal on the left so a permission carrying a
    // null role simply fails to match instead of throwing an NPE.
    return (Permission permission) -> role.equalsIgnoreCase(permission.getRole());
  }

  private final class UserSheetAccessVerifier {
    private final String spreadsheetId;

    public UserSheetAccessVerifier(String spreadsheetId) {
      this.spreadsheetId = spreadsheetId;
    }

    /**
     * Pages through the file's permissions looking for one that belongs to
     * {@code userEmail} and satisfies the predicate. Returns {@code false}
     * on API errors (logged with a short stack trace).
     */
    private boolean checkUserPermission(Predicate<Permission> permissionPredicate) {
      String nextPageToken = null;
      try {
        do {
          var listResponse =
              driveService
                  .permissions()
                  .list(spreadsheetId)
                  .setPageToken(nextPageToken)
                  .setFields("permissions(emailAddress, role)")
                  .execute();
          nextPageToken = listResponse.getNextPageToken();
          var userHasRole =
              listResponse.getPermissions().stream()
                  // Permissions granted to "anyone" or a whole domain have no
                  // email address; compare with the known-non-null userEmail
                  // on the left to avoid an NPE on such entries.
                  .filter(permission -> userEmail.equals(permission.getEmailAddress()))
                  .anyMatch(permissionPredicate);
          if (userHasRole) {
            return true;
          }
        } while (nextPageToken != null);
      } catch (IOException ioException) {
        logger.atWarning().withStackTrace(StackSize.SMALL).log(
            "Error accessing permissions for %s", spreadsheetId);
      }
      return false;
    }
  }
}
|
# Build the list 3..10 by offsetting each value of range(2, 10) by one.
array = [i + 1 for i in range(2, 10)]
print(array)  # [3, 4, 5, 6, 7, 8, 9, 10]
def isValid(s: str) -> bool:
    """Return True if every bracket in ``s`` is matched and properly nested.

    Supports the three bracket pairs (), [] and {}. An empty string is valid.
    """
    closer_to_opener = {')': '(', ']': '[', '}': '{'}
    open_stack = []
    for ch in s:
        if ch in closer_to_opener:
            # A closer must match the most recent unmatched opener.
            if not open_stack or open_stack.pop() != closer_to_opener[ch]:
                return False
        else:
            open_stack.append(ch)
    # Valid only if every opener was consumed by a matching closer.
    return not open_stack
# Full sweep: every benchmark app on both the clean and the modified
# kernel, 20 runs each. Iteration order matches the original explicit
# command list: per app, clean first, then modified.
for app in mcf omnetpp xz gcc cc nibble bfs pr; do
    for kernel in clean modified; do
        ./launch_exp.py --experiment_tag asplos21_ae --kernel "${kernel}" --app "${app}" --num_experiments 20 --result_dir "$(pwd)/results"
    done
done
|
import { childrenSettled } from './scheduler';
import Sprite from './sprite';
import { Transition } from './transition';
const spriteContext = new WeakMap();
// Drives `transition` against `context` to completion, then waits for
// any animations started during the transition to settle. `yield*`
// delegates so the transition's own yields flow through to the
// scheduler unchanged.
export function* runToCompletion(
  context: TransitionContext,
  transition: Transition,
) {
  yield* transition(context);
  yield childrenSettled();
}
// Immutable view of one animation transition handed to user transition
// functions. Sprites accessed through the getters are lazily "prepared"
// (at most once each) and registered in the module-level WeakMap so
// Motions can recover their context via `forSprite`.
export default class TransitionContext {
  // Reverse lookup: the context a sprite was last prepared under.
  // Only populated for sprites that have passed through _prepareSprites.
  static forSprite(sprite: Sprite): TransitionContext {
    return spriteContext.get(sprite);
  }

  // Tracks which sprites have already been run through prepareSprite,
  // so the hook fires at most once per sprite.
  private _prepared: Set<Sprite> = new Set();

  // Optional hook applied to each sprite exactly once before it is
  // exposed to the user's transition.
  prepareSprite: ((sprite: Sprite) => Sprite) | undefined;

  constructor(
    private _duration: number,
    private _insertedSprites: Sprite[],
    private _keptSprites: Sprite[],
    private _removedSprites: Sprite[],
    private _sentSprites: Sprite[],
    private _receivedSprites: Sprite[],
    private _beacons: { [name: string]: Sprite },
    readonly onMotionStart: (sprite: Sprite) => void,
    readonly onMotionEnd: (sprite: Sprite) => void,
  ) {}

  // the following things are all accessors in order to make them
  // read-only, and to let us tell which classes of sprites a user's
  // transition is actually using.
  get duration() {
    return this._duration;
  }
  get insertedSprites() {
    return this._prepareSprites(this._insertedSprites);
  }
  get keptSprites() {
    return this._prepareSprites(this._keptSprites);
  }
  get removedSprites() {
    return this._prepareSprites(this._removedSprites);
  }
  get sentSprites() {
    return this._prepareSprites(this._sentSprites);
  }
  get receivedSprites() {
    return this._prepareSprites(this._receivedSprites);
  }
  get beacons() {
    return this._beacons;
  }

  // Registers each sprite in the WeakMap and (if a prepareSprite hook is
  // set) maps each not-yet-prepared sprite through it once.
  private _prepareSprites(sprites: Sprite[]): Sprite[] {
    // Link them up, so that users can conveniently pass sprites
    // around to Motions without also passing the transition context.
    sprites.forEach(sprite => {
      spriteContext.set(sprite, this);
    });
    if (!this.prepareSprite) {
      return sprites;
    }
    return sprites.map(sprite => {
      if (!this._prepared.has(sprite)) {
        this._prepared.add(sprite);
        sprite = this.prepareSprite!(sprite);
      }
      return sprite;
    });
  }
}
|
'use strict';
var defaultEngine = require('../lib/engine').defaultEngine;
var inMemoryEngine = require('../lib/engine').inMemoryEngine;
var assert = require('proclaim');
// Skip the "supported" tests when it's not actually supported
var xdescribe = window.localStorage ? describe : describe.skip;
describe('localStorage', function() {
	var engine;

	// Exercises the full storage-engine contract (set/get/key/length/
	// remove/clear) against whichever engine the enclosing suite
	// assigned to `engine`. Shared by both suites so the two
	// implementations are verified identically (the bodies were
	// previously duplicated).
	function assertEngineWorks() {
		engine.setItem('test-key', 'abc');
		assert.strictEqual(engine.getItem('test-key'), 'abc');
		assert.strictEqual(engine.length, 1);
		assert.strictEqual(engine.key(0), 'test-key');
		engine.removeItem('test-key');
		assert.strictEqual(engine.getItem('test-key'), null);
		assert.strictEqual(engine.length, 0);
		engine.setItem('test-key', 'abc');
		engine.clear();
		assert.strictEqual(engine.length, 0);
	}

	xdescribe('when supported', function() {
		beforeEach(function() {
			engine = defaultEngine;
			engine.clear();
		});
		it('should function', function() {
			assertEngineWorks();
		});
	});

	describe('when not supported', function() {
		beforeEach(function() {
			engine = inMemoryEngine;
			engine.clear();
		});
		it('should function', function() {
			assertEngineWorks();
		});
	});
});
|
#!/bin/bash
set -e

# Download and unpack a pinned Mutserve release from GitHub, mark the
# binary executable, and clean up the archive.
NAME="Mutserve"
VERSION="v2.0.0-rc12"
GITHUB_USER="seppinho"
GITHUB_REPO="mutserve"
EXECUTABLE="mutserve"
ZIP="mutserve.zip"
INSTALLER_URL="https://github.com/${GITHUB_USER}/${GITHUB_REPO}/releases/download/${VERSION}/${ZIP}"

echo "Installing ${NAME} ${VERSION}..."
echo "Downloading ${NAME} from ${INSTALLER_URL}..."
curl -fL "${INSTALLER_URL}" -o "${ZIP}"
unzip "./${ZIP}"
chmod +x "./${EXECUTABLE}"
rm "./${ZIP}"

echo ""
GREEN='\033[0;32m'
NC='\033[0m'
echo -e "${GREEN}${NAME} ${VERSION} installation completed. Have fun!${NC}"
echo ""
|
-- Number of orders placed in each calendar month, ordered January..December.
SELECT COUNT(*) AS num_orders,
       EXTRACT(MONTH FROM orders.order_date) AS month
FROM orders
GROUP BY EXTRACT(MONTH FROM orders.order_date)
ORDER BY EXTRACT(MONTH FROM orders.order_date)
import random
class Player:
    """A game participant identified by name, with a running score."""

    def __init__(self, name):
        self.name = name
        # Cumulative score, starts at zero.
        self.score = 0

    def roll_die(self):
        """Return one uniform roll of a standard six-sided die (1..6)."""
        return random.randint(1, 6)

    def update_score(self, points):
        """Add ``points`` to the running score."""
        self.score = self.score + points
class Game:
    """A two-player dice game played over a fixed number of rounds."""

    def __init__(self, player1_name, player2_name, rounds):
        self.player1 = Player(player1_name)
        self.player2 = Player(player2_name)
        self.rounds = rounds

    def play_round(self):
        """Roll once for each player (player1 first) and bank both rolls."""
        roll_one = self.player1.roll_die()
        roll_two = self.player2.roll_die()
        self.player1.update_score(roll_one)
        self.player2.update_score(roll_two)

    def determine_winner(self):
        """Play all rounds, then report the winner (or a tie)."""
        for _ in range(self.rounds):
            self.play_round()
        if self.player1.score > self.player2.score:
            return f"{self.player1.name} wins with a score of {self.player1.score}"
        if self.player2.score > self.player1.score:
            return f"{self.player2.name} wins with a score of {self.player2.score}"
        return "It's a tie!"
# Example usage: Alice and Bob play five rounds; the result string
# names the winner (or reports a tie).
game = Game("Alice", "Bob", 5)
winner = game.determine_winner()
print(winner)
<filename>src/main/java/com/searchbox/framework/model/SearchboxEntity.java
/*******************************************************************************
* Copyright Searchbox - http://www.searchbox.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.searchbox.framework.model;
import java.util.SortedSet;
import java.util.TreeSet;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.OneToMany;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.annotations.LazyCollection;
import org.hibernate.annotations.LazyCollectionOption;
import org.hibernate.annotations.SortNatural;
/**
 * A searchbox: the slug-addressable, top-level container that groups
 * {@link PresetEntity} instances and carries display metadata
 * (name, logo, alias, description) plus a privacy level.
 */
@Entity
public class SearchboxEntity extends BaseEntity<Long>
  implements Comparable<SearchboxEntity>{

  /** Visibility levels; the string form is the externally exposed value. */
  enum Privacy {
    PUBLIC("public"),
    PRIVATE("private"),
    SOCIAL("registration");
    private final String stringValue;
    private Privacy(final String s) { stringValue = s; }
    @Override
    public String toString() { return stringValue; }
  }

  /** URL-friendly unique identifier for this searchbox. */
  @Column(unique = true)
  private String slug;

  private Privacy privacy = Privacy.PUBLIC;

  private String name;

  private String logo = "assets/images/searchbox-logo-big.png";

  private String alias;

  private String description;

  /** Presets owned by this searchbox, kept in natural (position) order. */
  @OneToMany(mappedBy = "searchbox",
      orphanRemoval = true,
      cascade = CascadeType.ALL,
      fetch = FetchType.LAZY)
  @LazyCollection(LazyCollectionOption.TRUE)
  @SortNatural
  private SortedSet<PresetEntity> presets;

  public SearchboxEntity() {
    this.presets = new TreeSet<>();
  }

  public Privacy getPrivacy() {
    return privacy;
  }

  public void setPrivacy(Privacy privacy) {
    this.privacy = privacy;
  }

  public String getLogo() {
    return logo;
  }

  public SearchboxEntity setLogo(String logo) {
    this.logo = logo;
    return this;
  }

  public String getSlug() {
    return slug;
  }

  public SearchboxEntity setSlug(String slug) {
    this.slug = slug;
    return this;
  }

  public String getName() {
    return name;
  }

  public SearchboxEntity setName(String name) {
    this.name = name;
    return this;
  }

  public String getAlias() {
    return alias;
  }

  public SearchboxEntity setAlias(String alias) {
    this.alias = alias;
    return this;
  }

  public String getDescription() {
    return description;
  }

  public SearchboxEntity setDescription(String description) {
    this.description = description;
    return this;
  }

  public SortedSet<PresetEntity> getPresets() {
    return presets;
  }

  public void setPresets(SortedSet<PresetEntity> presets) {
    this.presets = presets;
  }

  /**
   * Orders searchboxes alphabetically by name.
   * NOTE(review): assumes name is non-null by the time entities are
   * compared — a null name would NPE here; confirm against usage.
   */
  @Override
  public int compareTo(SearchboxEntity o) {
    return this.getName().compareTo(o.getName());
  }

  /** Creates a new preset attached to this searchbox, positioned last. */
  public PresetEntity newPreset() {
    return new PresetEntity()
        .setSearchbox(this)
        .setPosition(this.getPresets().size() + 1);
  }

  @Override
  public String toString() {
    return "SearchboxEntity [slug=" + slug + ", name=" + name + "]";
  }
}
|
#!/bin/bash
# GATK3 pre-processing pipeline for one sample (hg38): read-group
# assignment -> duplicate marking -> indel realignment -> base-quality
# recalibration. Stages are chained with '&&' so a failure stops the run.
sampleName="SRR3083850"
# Site-specific tool and resource locations.
picard="/fh/fast/sun_w/bin/picard.jar"
gatk="/fh/fast/sun_w/bin/GenomeAnalysisTK-3.6/GenomeAnalysisTK.jar"
gatkBundle="/fh/fast/sun_w/research/data/GATK_bundle/hg38"
projDir="/fh/fast/sun_w/research/Immuno/data/Hugo_2016"
javaTmpDir="/fh/fast/sun_w/tmp4java/"
reference="/fh/fast/sun_w/research/data/human/hg38/Homo_sapiens_assembly38.fasta"
# Environment-modules: load Java for the Picard/GATK jars below.
ml java
# Add read groups, sort by coordinate, save as a bam file and index the bam file
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${picard} AddOrReplaceReadGroups \
  INPUT=${projDir}/sams/${sampleName}.sam \
  OUTPUT=${projDir}/bams/${sampleName}_sorted_rg.bam \
  SORT_ORDER=coordinate \
  CREATE_INDEX=true \
  ID=${sampleName}.LANE001 SM=${sampleName} LB=${sampleName} \
  PL=ILLUMINA PU=${sampleName} &&
#
# Mark duplicated reads
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${picard} MarkDuplicates \
  I=${projDir}/bams/${sampleName}_sorted_rg.bam \
  O=${projDir}/bams/${sampleName}_sorted_dedup.bam \
  M=${projDir}/bams/${sampleName}_sorted_dedup_metric.txt \
  ASSUME_SORT_ORDER=coordinate \
  CREATE_INDEX=true &&
#
# RealignerTargetCreator: find intervals needing indel realignment
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${gatk} \
  -T RealignerTargetCreator \
  -R ${reference} \
  -I ${projDir}/bams/${sampleName}_sorted_dedup.bam \
  -o ${projDir}/bams/${sampleName}_sorted_dedup.bam.intervals \
  -known ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
  -known ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz &&
#
# Realign around the intervals found above
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${gatk} \
  -T IndelRealigner \
  -R ${reference} \
  -I ${projDir}/bams/${sampleName}_sorted_dedup.bam \
  -targetIntervals ${projDir}/bams/${sampleName}_sorted_dedup.bam.intervals \
  -o ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
  -known ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
  -known ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz \
  --filter_bases_not_stored &&
#
# Build index
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${picard} BuildBamIndex \
  INPUT=${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam &&
#
# Base quality recalibration: create table
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${gatk} \
  -T BaseRecalibrator \
  -R ${reference} \
  -knownSites ${gatkBundle}/dbsnp_146.hg38.vcf.gz \
  -knownSites ${gatkBundle}/Homo_sapiens_assembly38.known_indels.vcf.gz \
  -knownSites ${gatkBundle}/Mills_and_1000G_gold_standard.indels.hg38.vcf.gz \
  -I ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
  -o ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recal_table.txt &&
#
# Base quality recalibration: apply the table
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${gatk} \
  -T PrintReads \
  -R ${reference} \
  -I ${projDir}/bams/${sampleName}_sorted_dedup_realigned.bam \
  -BQSR ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recal_table.txt \
  -o ${projDir}/bams/${sampleName}_sorted_dedup_realigned_recaled.bam &&
#
# Build index for the final recalibrated BAM
java -Xmx7g -Djava.io.tmpdir=${javaTmpDir} \
  -jar ${picard} BuildBamIndex \
  INPUT=${projDir}/bams/${sampleName}_sorted_dedup_realigned_recaled.bam
|
package fwcd.fructose.genetic.operators;
import java.util.Arrays;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
/**
 * Mutates float genomes by scaling randomly chosen genes with a Gaussian
 * sample, then clamping the result to [lowerBound, upperBound].
 */
public class GaussianFloatMutator implements Mutator<float[]> {
	private static final long serialVersionUID = 23465873645873645L;
	private final float upperBound;
	private final float lowerBound;
	private final float multiplier;
	private final float bias;

	/** Creates an unbounded mutator with a multiplier of 5 and no bias. */
	public GaussianFloatMutator() {
		this(Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, 5F, 0F);
	}

	public GaussianFloatMutator(float lowerBound, float upperBound, float multiplier, float bias) {
		this.upperBound = upperBound;
		this.lowerBound = lowerBound;
		this.multiplier = multiplier;
		this.bias = bias;
	}

	/** Per-gene mutation chance: on average one gene mutates per genome. */
	private float probability(float[] genes) {
		return 1F / genes.length;
	}

	@Override
	public float[] mutate(float[] genes) {
		float[] mutated = Arrays.copyOf(genes, genes.length);
		mutateInPlace(mutated);
		return mutated;
	}

	@Override
	public void mutateInPlace(float[] genes) {
		Random random = ThreadLocalRandom.current();
		float chance = probability(genes);

		for (int i = 0; i < genes.length; i++) {
			if (random.nextFloat() >= chance) {
				continue;
			}
			// NOTE(review): the gene is *scaled* by the Gaussian sample rather
			// than offset by it — a sample near zero wipes the gene out.
			// Confirm the multiplicative form is intended.
			float value = (genes[i] * (float) random.nextGaussian() * multiplier) + bias;
			genes[i] = Math.min(upperBound, Math.max(lowerBound, value));
		}
	}
}
|
// VectorMath.java
public class VectorMath {
public static double[] addVectors(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions must be the same for addition");
}
double[] result = new double[vector1.length];
for (int i = 0; i < vector1.length; i++) {
result[i] = vector1[i] + vector2[i];
}
return result;
}
public static double[] subtractVectors(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions must be the same for subtraction");
}
double[] result = new double[vector1.length];
for (int i = 0; i < vector1.length; i++) {
result[i] = vector1[i] - vector2[i];
}
return result;
}
public static double dotProduct(double[] vector1, double[] vector2) {
if (vector1.length != vector2.length) {
throw new IllegalArgumentException("Vector dimensions must be the same for dot product");
}
double result = 0;
for (int i = 0; i < vector1.length; i++) {
result += vector1[i] * vector2[i];
}
return result;
}
public static double[] crossProduct(double[] vector1, double[] vector2) {
if (vector1.length != 3 || vector2.length != 3) {
throw new IllegalArgumentException("Cross product is only defined for 3D vectors");
}
double[] result = new double[3];
result[0] = vector1[1] * vector2[2] - vector1[2] * vector2[1];
result[1] = vector1[2] * vector2[0] - vector1[0] * vector2[2];
result[2] = vector1[0] * vector2[1] - vector1[1] * vector2[0];
return result;
}
public static double[] scalarMultiply(double[] vector, double scalar) {
double[] result = new double[vector.length];
for (int i = 0; i < vector.length; i++) {
result[i] = vector[i] * scalar;
}
return result;
}
}
// Kinematics.java
/** Constant-acceleration kinematics formulas. */
public class Kinematics {
    /** Static utility class; not instantiable. */
    private Kinematics() {
    }

    /** Displacement s = v0*t + a*t^2 / 2. */
    public static double displacement(double initialVelocity, double acceleration, double time) {
        return initialVelocity * time + 0.5 * acceleration * time * time;
    }

    /** Final velocity v = v0 + a*t. */
    public static double finalVelocity(double initialVelocity, double acceleration, double time) {
        return initialVelocity + acceleration * time;
    }

    /** Mean of initial and final velocity (valid under constant acceleration). */
    public static double averageVelocity(double initialVelocity, double finalVelocity) {
        return (initialVelocity + finalVelocity) / 2;
    }
}
// Dynamics.java
/** Newton's-second-law helpers (F = m*a and its rearrangements). */
public class Dynamics {
    /** Static utility class; not instantiable. */
    private Dynamics() {
    }

    /** Force F = m*a. */
    public static double force(double mass, double acceleration) {
        return mass * acceleration;
    }

    /** Mass m = F/a; rejects zero acceleration to avoid division by zero. */
    public static double mass(double force, double acceleration) {
        if (acceleration == 0) {
            throw new IllegalArgumentException("Acceleration cannot be zero for mass calculation");
        }
        return force / acceleration;
    }

    /** Acceleration a = F/m; rejects zero mass to avoid division by zero. */
    public static double acceleration(double force, double mass) {
        if (mass == 0) {
            throw new IllegalArgumentException("Mass cannot be zero for acceleration calculation");
        }
        return force / mass;
    }
}
#!/bin/bash
# Copyright (c) [2012]-[2021] Shanghai Yitu Technology Co., Ltd.
#
# This source code is licensed under the Clear BSD License
# LICENSE file in the root directory of this file
# All rights reserved.

# First argument is the number of processes per node; every remaining
# argument is forwarded to main.py untouched.
NUM_PROC="$1"
shift
python -m torch.distributed.launch --nproc_per_node="$NUM_PROC" main.py "$@"
|
#!/bin/bash
# Print command-line help for vpnip. Note: $USER in the second line is
# deliberately inside double quotes so it expands to the current default
# account name at call time.
Usage() {
	echo "vpnip: [--time] [--src_ip] <user> [-help]"
	echo " Returns the IP address in which the <user> (default is $USER) is currently connected via VPN"
	echo " -t|--time : Returns the time of the last connection"
	echo " -s|--src_ip: Returns the Source IP address of the connection"
	echo ""
	echo " -h|--help : Displays this message"
}
TIME="NO"
SRC_IP="NO"
USER=`whoami`
USER_SET="NO"
# Parse flags; the first non-flag argument is taken as the user name.
while [[ $# -gt 0 ]]; do
	switch="$1"
	#echo "Switch=$switch"
	case $switch in
	-t*|--time)
		TIME="YES"
		shift
		;;
	-s*|--src_ip)
		SRC_IP="YES"
		shift
		;;
	-h*|--help)
		Usage
		exit 0
		;;
	*)
		if [[ "$USER_SET" == "YES" ]]; then
			echo "User already set to [$USER]."
			synologset1 sys err 0x90020001 $USER
			# Portable failure status ('exit -1' is non-standard; -1 wraps to 255).
			exit 255
		fi
		USER="$1"
		USER_SET="YES"
		shift
		;;
	esac
done
synologset1 sys info 0x90020002 $USER $USER_SET $TIME $SRC_IP
sleep 1s
#note time in table is based on the linux epoch of 12/31/1969 at 1600 Pacific Time
#this represents 8 hours before 1/1/1970 at midnight
#echo "Finding VPN Connection infor for user [$USER] Explicit User? [$USER_SET] Report Time? [$TIME] Report Source IP? [$SRC_IP]"
LAST_CONNECT=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select id from synovpn_log_tb where user='$USER' AND event like 'Connected%' order by id desc limit 1;"`
NUM_CONNECT=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select count(*) from synovpn_log_tb where user='$USER' AND event like 'Connected%';"`
LAST_DISCONNECT=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select id from synovpn_log_tb where user='$USER' AND event like 'Disconnected%' order by id desc limit 1;"`
NUM_DISCONNECT=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select count(*) from synovpn_log_tb where user='$USER' AND event like 'Disconnected%';"`
#echo "CONNECT=$LAST_CONNECT"
#echo "NUM_CONNECT=$NUM_CONNECT"
#echo "DISCONNECT=$LAST_DISCONNECT"
#echo "NUM_DISCONNECT=$NUM_DISCONNECT"
# Row ids and counts are integers, so compare them numerically with -gt.
# The original used [[ a > b ]], which is a *string* comparison and
# misorders numbers of different lengths (e.g. "9" > "10" is true).
# Empty results (user never connected) default to 0.
if [[ "${NUM_DISCONNECT:-0}" -gt "${NUM_CONNECT:-0}" && "${LAST_DISCONNECT:-0}" -gt "${LAST_CONNECT:-0}" ]]; then
	synologset1 sys info 0x90020003 $USER
	echo "Disconnected"
else
	VPN_IP=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select event from synovpn_log_tb where ID='$LAST_CONNECT';" | sed s/.*as// | sed s/.$// | tr -d ' []'`
	synologset1 sys info 0x90020004 $USER $VPN_IP
	if [[ "$SRC_IP" == "YES" ]]; then
		VPN_SRC=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select event from synovpn_log_tb where ID='$LAST_CONNECT';" | sed s/.*from// | sed s/as.*// | tr -d ' []'`
		synologset1 sys info 0x90020005 $USER $VPN_SRC
		VPN_SRC=,${VPN_SRC}
	fi
	if [[ "$TIME" == "YES" ]]; then
		VPN_TIME=`sqlite3 /usr/syno/etc/packages/VPNCenter/synovpnlog.db "select time from synovpn_log_tb where ID='$LAST_CONNECT';"`
		synologset1 sys info 0x90020006 $USER $VPN_TIME
		VPN_TIME=,${VPN_TIME}
	fi
	echo ${VPN_IP}${VPN_SRC}${VPN_TIME}
fi
exit 0
|
<reponame>digirati-co-uk/taxonomy-manager
package com.digirati.taxman.rest.server.infrastructure.config;
import com.digirati.taxman.common.rdf.RdfModelFactory;
import com.digirati.taxman.rest.server.taxonomy.identity.ConceptIdResolver;
import com.digirati.taxman.rest.server.taxonomy.identity.ConceptSchemeIdResolver;
import com.digirati.taxman.rest.server.taxonomy.identity.CollectionUriResolver;
import com.digirati.taxman.rest.server.taxonomy.identity.ProjectIdResolver;
import com.digirati.taxman.rest.server.taxonomy.mapper.ProjectListingMapper;
import com.digirati.taxman.rest.server.taxonomy.mapper.ProjectMapper;
import com.digirati.taxman.rest.server.taxonomy.mapper.SearchResultsMapper;
import com.digirati.taxman.rest.server.taxonomy.mapper.ConceptMapper;
import com.digirati.taxman.rest.server.taxonomy.mapper.ConceptSchemeMapper;
import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Produces;
import javax.inject.Inject;
/**
 * CDI configuration bean that produces the taxonomy mapper instances,
 * assembling each from the injected identity resolvers and the shared
 * {@link RdfModelFactory}.
 */
@ApplicationScoped
public class TaxonomyConfig {

    @Inject
    RdfModelFactory modelFactory;

    @Inject
    ConceptIdResolver conceptIdResolver;

    @Inject
    CollectionUriResolver collectionUriResolver;

    @Inject
    ConceptSchemeIdResolver conceptSchemeIdResolver;

    @Inject
    ProjectIdResolver projectIdResolver;

    /** Produces the mapper for individual concept resources. */
    @Produces
    ConceptMapper conceptMapper() {
        return new ConceptMapper(conceptIdResolver, modelFactory);
    }

    /** Produces the mapper for search-result collections. */
    @Produces
    SearchResultsMapper searchResultsMapper() {
        return new SearchResultsMapper(conceptIdResolver, collectionUriResolver, modelFactory);
    }

    /**
     * Produces the mapper for concept schemes.
     * NOTE(review): this calls {@code conceptMapper()} directly, so the scheme
     * mapper gets a fresh instance rather than the CDI-managed producer bean —
     * confirm this is intentional.
     */
    @Produces
    ConceptSchemeMapper conceptSchemeMapper() {
        return new ConceptSchemeMapper(conceptSchemeIdResolver, conceptIdResolver, modelFactory, conceptMapper());
    }

    /** Produces the mapper for single projects. */
    @Produces
    ProjectMapper projectMapper() {
        return new ProjectMapper(projectIdResolver, conceptSchemeIdResolver, modelFactory);
    }

    /** Produces the mapper for project listings. */
    @Produces
    ProjectListingMapper projectListingMapper() {
        return new ProjectListingMapper(projectIdResolver, collectionUriResolver, modelFactory);
    }
}
|
<gh_stars>0
'use strict';
const assert = require('assert');
const NamespaceMixin = require('../lib/namespace')(class {});
const fields = [
'complete',
'scope',
'unscoped',
'packageNamespace',
'id',
'instanceId',
'methods',
'generatorHint',
'flags',
'optional',
'instanceId',
'semver',
'versionedHint'
];
// True when `namespace` matches `expected` on every tracked field.
// deepStrictEqual is used so array-valued fields (e.g. `methods`) compare
// by value; a mismatch throws with a descriptive message.
const equalsNamespace = (namespace, expected) => {
  for (const field of fields) {
    assert.deepStrictEqual(
      namespace[field], expected[field],
      `Field ${field} differs: ${namespace[field]} === ${expected[field]}`
    );
  }
  return true;
};
// Exercises YeomanNamespace parsing: scope (@scope/), generator (:name),
// instance id (#id), flags (?), semver ranges (@range) and chained
// methods (+method).
describe('Namespace', () => {
  const namespace = new NamespaceMixin();

  describe('#isNamespace()', () => {
    it('returns true if a YeomanNamespace is passed', () => {
      assert(namespace.isNamespace(namespace.requireNamespace('foo-bar')));
    });
  });

  describe('#requireNamespace()', () => {
    it('returns namespace', () => {
      const parsed = namespace.requireNamespace('foo-bar');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar'
      }));
    });

    it('returns namespace with scope', () => {
      const parsed = namespace.requireNamespace('@scope/foo-bar');
      assert(equalsNamespace(parsed, {
        complete: '@scope/foo-bar',
        scope: '@scope',
        unscoped: 'foo-bar',
        generatorHint: '@scope/generator-foo-bar',
        versionedHint: '@scope/generator-foo-bar',
        namespace: '@scope/foo-bar',
        id: '@scope/foo-bar',
        packageNamespace: '@scope/foo-bar'
      }));
    });

    it('returns namespace with scope and generator', () => {
      const parsed = namespace.requireNamespace('@scope/foo-bar:app');
      assert(equalsNamespace(parsed, {
        complete: '@scope/foo-bar:app',
        scope: '@scope',
        unscoped: 'foo-bar',
        generatorHint: '@scope/generator-foo-bar',
        versionedHint: '@scope/generator-foo-bar',
        namespace: '@scope/foo-bar:app',
        id: '@scope/foo-bar:app',
        packageNamespace: '@scope/foo-bar',
        generator: 'app'
      }));
    });

    it('returns namespace with generator', () => {
      const parsed = namespace.requireNamespace('foo-bar:app');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar:app',
        unscoped: 'foo-bar',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar:app',
        id: 'foo-bar:app',
        packageNamespace: 'foo-bar',
        generator: 'app'
      }));
    });

    it('returns namespace with id', () => {
      const parsed = namespace.requireNamespace('foo-bar#1');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar#1',
        unscoped: 'foo-bar',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        id: 'foo-bar#1',
        instanceId: '1',
        packageNamespace: 'foo-bar'
      }));
    });

    it('returns namespace with generator and id', () => {
      const parsed = namespace.requireNamespace('foo-bar:app#1');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar:app#1',
        unscoped: 'foo-bar',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar:app',
        id: 'foo-bar:app#1',
        instanceId: '1',
        packageNamespace: 'foo-bar',
        generator: 'app'
      }));
    });

    it('returns namespace with scope, generator, id and optional', () => {
      const parsed = namespace.requireNamespace('@scope/foo-bar:app#1?');
      assert(equalsNamespace(parsed, {
        complete: '@scope/foo-bar:app#1?',
        scope: '@scope',
        unscoped: 'foo-bar',
        generatorHint: '@scope/generator-foo-bar',
        versionedHint: '@scope/generator-foo-bar',
        namespace: '@scope/foo-bar:app',
        id: '@scope/foo-bar:app#1',
        instanceId: '1',
        packageNamespace: '@scope/foo-bar',
        generator: 'app',
        flags: '?',
        optional: true
      }));
    });

    it('throws exception with namespace with scope, generator, id and invalid flags', () => {
      assert.throws(() => namespace.requireNamespace('@scope/foo-bar:app#1!$'));
    });

    it('returns namespace with scope, multiples generator and id', () => {
      const parsed = namespace.requireNamespace('@scope/foo-bar:app:client#1');
      assert(equalsNamespace(parsed, {
        complete: '@scope/foo-bar:app:client#1',
        scope: '@scope',
        unscoped: 'foo-bar',
        generatorHint: '@scope/generator-foo-bar',
        versionedHint: '@scope/generator-foo-bar',
        namespace: '@scope/foo-bar:app:client',
        id: '@scope/foo-bar:app:client#1',
        instanceId: '1',
        packageNamespace: '@scope/foo-bar',
        generator: 'app:client'
      }));
    });

    // Semver ranges appended with @ are captured verbatim in `semver` and
    // quoted inside `versionedHint`.
    it('returns with semver', () => {
      const complete = 'foo-bar@1.0.0-beta+exp.sha.5114f85';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@"1.0.0-beta+exp.sha.5114f85"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '1.0.0-beta+exp.sha.5114f85'
      }));
    });

    it('returns with semver +', () => {
      const complete = 'foo-bar@1.0.0-beta+exp.sha.5114f85';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@"1.0.0-beta+exp.sha.5114f85"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '1.0.0-beta+exp.sha.5114f85'
      }));
    });

    it('returns with semver ^', () => {
      const complete = 'foo-bar@^1.0.4';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@"^1.0.4"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '^1.0.4'
      }));
    });

    it('returns with semver *', () => {
      const complete = 'foo-bar@*';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@"*"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '*'
      }));
    });

    it('semver space', () => {
      const complete = 'foo-bar@1.0.0 - 1.2.0';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@"1.0.0 - 1.2.0"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '1.0.0 - 1.2.0'
      }));
    });

    it('returns with semver <=>', () => {
      const complete = 'foo-bar@>=1.2.3 <2.0.0';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@">=1.2.3 <2.0.0"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        semver: '>=1.2.3 <2.0.0'
      }));
    });

    it('returns with semver and instanceId', () => {
      // Note the closing '@' before '#1': the range is delimited on both sides.
      const complete = 'foo-bar@>=1.2.3 <2.0.0@#1';
      const parsed = namespace.requireNamespace(complete);
      assert(equalsNamespace(parsed, {
        complete,
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar@">=1.2.3 <2.0.0"',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar#1',
        instanceId: '1',
        packageNamespace: 'foo-bar',
        semver: '>=1.2.3 <2.0.0'
      }));
    });

    // Methods chained with '+' are collected, in order, into `methods`.
    it('returns method update', () => {
      const parsed = namespace.requireNamespace('foo-bar+update');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar+update',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        methods: ['update']
      }));
    });

    it('returns method update and done', () => {
      const parsed = namespace.requireNamespace('foo-bar+update+done');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar+update+done',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        methods: ['update', 'done']
      }));
    });

    it('accepts upper case methods', () => {
      const parsed = namespace.requireNamespace('foo-bar+UPDATE+done');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar+UPDATE+done',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar',
        packageNamespace: 'foo-bar',
        methods: ['UPDATE', 'done']
      }));
    });

    it('returns instanceId with methods update and done', () => {
      const parsed = namespace.requireNamespace('foo-bar#foo+update+done');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar#foo+update+done',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar#foo',
        instanceId: 'foo',
        packageNamespace: 'foo-bar',
        methods: ['update', 'done']
      }));
    });

    it('returns instanceId *', () => {
      const parsed = namespace.requireNamespace('foo-bar#*');
      assert(equalsNamespace(parsed, {
        complete: 'foo-bar#*',
        generatorHint: 'generator-foo-bar',
        versionedHint: 'generator-foo-bar',
        namespace: 'foo-bar',
        unscoped: 'foo-bar',
        id: 'foo-bar#*',
        instanceId: '*',
        packageNamespace: 'foo-bar'
      }));
    });
  });
});
|
<html>
<body>
  <!-- Simple greeting form: name + age, shown back via alert() on submit. -->
  <form>
    <input type="text" id="name" placeholder="Name">
    <!-- fixed: attributes were fused together (type="text"id="age") -->
    <input type="text" id="age" placeholder="Age">
    <button type="button" onclick="showAlert()">Submit</button>
  </form>
  <script>
    // Read both fields and display the greeting dialog.
    function showAlert() {
      const name = document.getElementById('name').value;
      const age = document.getElementById('age').value;
      alert(`Hello ${name} you are ${age} years old!`);
    }
  </script>
</body>
</html>
#!/bin/bash
set -e -o pipefail
readonly PACKAGE_NAME="abduct"
# Run a command once per file (under the current directory) whose name
# matches the given case-insensitive glob.
# Usage: each_iname <glob> <command...>
function each_iname {
    local iname=${1}; shift
    find * -type f -iname "${iname}" | while read filename; do
        # NOTE(review): the original invoked "$(unknown)" here, which runs a
        # nonexistent command; the loop variable was clearly intended.
        "$@" "${filename}"
    done
}
# Run all static checks: reST docs render cleanly, packaging metadata is
# valid, and the package passes flake8/pyflakes/pylint.
function static_analysis {
    # --exit-status=2 makes rst2html fail on warnings as well as errors.
    each_iname "*.rst" rst2html.py --exit-status=2 > /dev/null
    python setup.py check --strict --restructuredtext --metadata
    flake8 setup.py "${PACKAGE_NAME}"
    pyflakes setup.py "${PACKAGE_NAME}"
    pylint --rcfile=.pylintrc "${PACKAGE_NAME}"
}
# Run the package's nose test-suite with doctests enabled and coverage
# restricted to the package itself.
function unit_test {
    nosetests \
        --with-doctest \
        --doctest-options="+NORMALIZE_WHITESPACE" \
        --with-coverage \
        --cover-tests \
        --cover-inclusive \
        --cover-package="${PACKAGE_NAME}" \
        "${PACKAGE_NAME}"
}
# Entry point: optionally run static analysis first (--static-analysis),
# then always run the unit tests. set -e aborts on the first failure.
function main {
    if [ "${1}" == "--static-analysis" ]; then
        static_analysis
    fi
    unit_test
}
# Execute main only when the script is run directly, not when sourced.
if [ "${BASH_SOURCE[0]}" == "${0}" ]; then
    main "$@"
fi
|
<reponame>jwplayer/jw-showcase-lib
/**
* Copyright 2017 Longtail Ad Solutions Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language
* governing permissions and limitations under the License.
**/
(function () {

    angular
        .module('jwShowcase.core')
        .run(registerListener);

    registerListener.$inject = ['$rootScope', '$q', '$location', 'appStore'];

    /**
     * Restores the page scroll offset after every ui-router state change.
     * A state may set `scrollTop: 'last'` to restore the cached offset for
     * the current path, or a fixed number; anything else scrolls to the top.
     */
    function registerListener ($rootScope, $q, $location, appStore) {

        $rootScope.$on('$stateChangeSuccess', function (event, toState) {

            // Use the public accessor instead of the private `$$path`
            // property (double-$ members are internal AngularJS API).
            var path = $location.path();

            // Defer via a resolved promise so the scroll happens after the
            // current digest, once the new view has rendered.
            $q.resolve().then(function () {

                var scrollTop = 0;

                if (toState.scrollTop === 'last' && angular.isNumber(appStore.scrollTopCache[path])) {
                    scrollTop = appStore.scrollTopCache[path];
                }
                else if (angular.isNumber(toState.scrollTop)) {
                    scrollTop = toState.scrollTop;
                }

                document.body.scrollTop = scrollTop;
            });
        });
    }

}());
|
#!/bin/sh
# Run prospector over the target given in $1, write a JSON report, feed it
# to /github.py for annotation, and exit with prospector's own status.
# NOTE(review): `set -a` auto-exports all variables and `-x` traces commands;
# confirm both are intended (neither affects correctness here).
set -ax

# json data file
json_file="/tmp/prospector.json"

# get prospector version
prospector --version

# run prospector, save exit code
# $1 is deliberately unquoted so multiple options/paths can be passed as one
# argument (hence the shellcheck 2086 suppression).
# shellcheck disable=2086
prospector $1 -o json:"$json_file" -o text
exit_code=$?

# analyze json
python /github.py "$json_file"

exit $exit_code
|
#!/bin/bash

## VARIABLES
SERIAL_NUMBER="XXXXX-XXXXX-XXXXX-XXXXX-XXXXX"
COMPANY_NAME="Travelex"
FUSION="/Applications/VMware-Fusion.app"

## DO NOT EDIT THIS SECTION
PRODUCT_VERSION="7.1"
PRODUCT_NAME="VMware Fusion for Mac OS"

# NOTE(review): 777 on the preferences tree is very permissive; consider a
# tighter mode if only admin users need write access.
sudo chmod -R 777 "/Library/Preferences/VMware Fusion"

# Register the serial number only when Fusion is actually installed.
# Fixed: the original used `[ ! -f $FUSION ]`, which is wrong twice over —
# an .app bundle is a directory (so -f never matched), and the `!` inverted
# the intent, attempting to license only when Fusion was MISSING.
if [ -d "$FUSION" ]
then "$FUSION/Contents/Library/licenses/vmware-licenseTool" enter "$SERIAL_NUMBER" "" "$COMPANY_NAME" "$PRODUCT_VERSION" "$PRODUCT_NAME" ""
fi
exit
package public

import "embed"

// Public embeds every file in this directory tree so the compiled binary
// can serve the assets without touching the filesystem.
//
//go:embed *
var Public embed.FS

// Earlier, more granular embeds kept for reference:
////go:embed index.html
//var Index embed.FS
//
////go:embed assets/**
//var Assets embed.FS
|
//
// Document.h
// Table Tool
//
// Created by <NAME> on 06.07.15.
// Copyright (c) 2015 Egger Apps. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "CSVConfiguration.h"
#import "TTFormatViewController.h"
// Document model for a CSV table: holds the parsed rows, the active CSV
// configuration, and the table-editing actions exposed to the UI.
@interface Document : NSDocument <NSTableViewDataSource, NSTableViewDelegate, TTFormatViewControllerDelegate>

// Parsed CSV rows backing the table view.
@property NSMutableArray *data;
// NOTE(review): presumably the largest column count seen across rows,
// used to size the table — confirm in Document.m.
@property long maxColumnNumber;
// Row index at which the next incremental search begins.
@property int searchStartIndex;
// Active CSV parsing/writing configuration.
@property CSVConfiguration *csvConfig;

// Interface Builder outlets for the main window's views and toolbar items.
@property IBOutlet NSTableView *tableView;
@property IBOutlet NSSplitView *splitView;
@property (strong) IBOutlet NSButton *toolBarButtonDeleteColumn;
@property (strong) IBOutlet NSSegmentedControl *toolBarButtonsAddColumn;
@property (strong) IBOutlet NSSegmentedControl *toolBarButtonsAddRow;
@property (strong) IBOutlet NSToolbarItem *toolbarItemAddColumn;
@property (strong) IBOutlet NSToolbarItem *toolbarItemAddRow;
@property (strong) IBOutlet NSButton *toolBarButtonDeleteRow;
@property (strong) IBOutlet NSButton *toolBarButtonDuplicateRow;
@property (strong) IBOutlet NSToolbarItem *toolbarItemDeleteColumn;
@property (strong) IBOutlet NSToolbarItem *toolbarItemDeleteRow;

// Row/column editing actions (IBActions are wired to toolbar controls;
// the plain methods back the segmented controls' menu items).
-(IBAction)addColumn:(id)sender;
-(IBAction)addRow:(id)sender;
-(void)addRowAbove:(id)sender;
-(void)addRowBelow:(id)sender;
-(void)addColumnLeft:(id)sender;
-(void)addColumnRight:(id)sender;
-(IBAction)deleteRow:(id)sender;
-(IBAction)deleteColumn:(id)sender;
// Export the current table using the active CSV configuration.
-(IBAction)exportFile:(id)sender;
// Incremental search driven by the window's search field.
-(IBAction)searchWith:(NSSearchField *)sender;
// TTFormatViewControllerDelegate callback: the CSV format settings changed.
-(void)configurationChangedForFormatViewController:(TTFormatViewController *)formatViewController;
@end
|
"""Leetcode 104. Maximum Depth of Binary Tree
Easy
URL: https://leetcode.com/problems/maximum-depth-of-binary-tree/
Given a binary tree, find its maximum depth.
The maximum depth is the number of nodes along the longest path from the
root node down to the farthest leaf node.
Note: A leaf is a node with no children.
Example:
Given binary tree [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
return its depth = 3.
"""
# Definition for a binary tree node.
class TreeNode(object):
    """Binary tree node: a value plus optional left/right children."""

    def __init__(self, val):
        self.val = val
        # Children start absent; callers attach them directly.
        self.left = None
        self.right = None
class SolutionDFSRecur(object):
    def maxDepth(self, root):
        """Return the maximum depth of a binary tree via recursive DFS.

        :type root: TreeNode
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(logn) for balanced tree; O(n) for single sided tree.
        """
        if root is None:
            return 0
        # Depth of a node is one more than its deeper subtree.
        return 1 + max(self.maxDepth(root.left), self.maxDepth(root.right))
class SolutionLevelBFSIter(object):
    def maxDepth(self, root):
        """Return the maximum depth of a binary tree via level-order BFS.

        :type root: TreeNode
        :rtype: int

        Time complexity: O(n).
        Space complexity: O(logn) for balanced tree; O(n) for single sided tree.
        """
        from collections import deque

        if root is None:
            return 0

        depth = 0
        level = deque([root])
        # Process one full level per iteration; the number of iterations
        # is exactly the tree's depth.
        while level:
            depth += 1
            next_level = deque()
            for node in level:
                if node.left is not None:
                    next_level.append(node.left)
                if node.right is not None:
                    next_level.append(node.right)
            level = next_level
        return depth
def main():
    """Build a sample tree and print its depth with both solutions.

    Tree (maximum depth is 4, not 3 as the original comment claimed —
    the extra node under 15 adds a fourth level):
          3
         / \
        9  20
       /   / \
      6   15  7
          /
         15
    """
    root = TreeNode(3)
    root.left = TreeNode(9)
    root.right = TreeNode(20)
    root.left.left = TreeNode(6)
    root.right.left = TreeNode(15)
    root.right.right = TreeNode(7)
    root.right.left.left = TreeNode(15)

    # print() calls: the original used Python 2 print statements, which are
    # a SyntaxError on Python 3; the call form behaves the same on both.
    print(SolutionDFSRecur().maxDepth(root))
    print(SolutionLevelBFSIter().maxDepth(root))


if __name__ == '__main__':
    main()
|
#!/bin/sh
# Copyright 2017 Marc-Antoine Ruel. All Rights Reserved. Use of this
# source code is governed by a BSD-style license that can be found in the
# LICENSE file.
# Run as:
# curl -sSL https://raw.githubusercontent.com/maruel/bin_pub/master/setup_scripts/install_authorized_keys.sh
# | bash
# curl -sSL https://goo.gl/cWtNmx | bash
#
# Setup maruel's public key.
set -eu

# TODO(maruel): Disallow running as root.

# mkdir -p is a no-op when the directory exists, so the previous explicit
# existence check was redundant; quoting $HOME guards against spaces.
mkdir -p "$HOME/.ssh"
echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPuXx13VbQQGbYEPAw5UIAbKmoMs45/HD/bsjXDR6WtQ Agent Jean' >> "$HOME/.ssh/authorized_keys"
|
import { ADD_FLASH_MESSAGE, DELETE_FLASH_MESSAGE } from 'actions'
// Flash-message reducer: appends the payload on ADD_FLASH_MESSAGE, drops
// the matching id on DELETE_FLASH_MESSAGE, and otherwise returns the state
// unchanged (never mutating the existing array).
export default (state = [], action) => {
  if (action.type === ADD_FLASH_MESSAGE) {
    return state.concat(action.message)
  }
  if (action.type === DELETE_FLASH_MESSAGE) {
    return state.filter((message) => message.id !== action.id)
  }
  return state
}
|
<gh_stars>0
package org.ednovo.gooru.core.exception;
import org.springframework.security.authentication.AccountStatusException;
/**
 * Thrown during authentication when the user account exists but has not yet
 * been confirmed; extends Spring Security's {@link AccountStatusException}
 * so it is handled like other account-state failures.
 */
public class UserNotConfirmedException extends AccountStatusException {

    private static final long serialVersionUID = 1L;

    /**
     * @param msg detail message explaining why authentication was rejected
     */
    public UserNotConfirmedException(String msg) {
        super(msg);
    }
}
|
package logrus
import (
"os"
"testing"
"github.com/sirupsen/logrus"
"github.com/micro/go-micro/v2/logger"
)
// TestName verifies the logger reports "logrus" as its implementation name.
func TestName(t *testing.T) {
	l := NewLogger()
	if l.String() != "logrus" {
		t.Errorf("error: name expected 'logrus' actual: %s", l.String())
	}
	t.Logf("testing logger name: %s", l.String())
}
// TestWithFields smoke-tests Log/Logf through the package-level default
// logger writing to stdout (output is not asserted).
func TestWithFields(t *testing.T) {
	logger.DefaultLogger = NewLogger(logger.WithOutput(os.Stdout))

	logger.Log(logger.InfoLevel, "testing: Info")
	logger.Logf(logger.InfoLevel, "testing: %s", "Infof")
}
// TestJSON smoke-tests logging with the logrus JSON formatter installed.
func TestJSON(t *testing.T) {
	logger.DefaultLogger = NewLogger(WithJSONFormatter(&logrus.JSONFormatter{}))

	logger.Logf(logger.InfoLevel, "test logf: %s", "name")
}
// TestSetLevel checks that re-initialising with a higher level suppresses
// debug output (first debug line should appear, second should not).
func TestSetLevel(t *testing.T) {
	logger.DefaultLogger = NewLogger()

	logger.Init(logger.WithLevel(logger.DebugLevel))
	logger.Logf(logger.DebugLevel, "test show debug: %s", "debug msg")

	logger.Init(logger.WithLevel(logger.InfoLevel))
	logger.Logf(logger.DebugLevel, "test non-show debug: %s", "debug msg")
}
// TestWithReportCaller smoke-tests the caller-reporting option.
func TestWithReportCaller(t *testing.T) {
	logger.DefaultLogger = NewLogger(ReportCaller())

	logger.Logf(logger.InfoLevel, "testing: %s", "WithReportCaller")
}
|
<reponame>AlexandreMPDias/win-scripts<gh_stars>0
// Script entries considered deprecated.
// NOTE(review): the list mixes plain strings and RegExp objects (/android$/,
// /^git/), so consumers must handle both (equality AND .test()).
const deprecated = [
  /android$/,
  'envs',
  'home',
  'restart',
  'toUpperCase',
  'begin',
  'expoRun',
  'hznt',
  'rollback',
  'west',
  'cmake_help',
  'gclone',
  'jz',
  'rum',
  'yumm',
  'cmdmp3win',
  /^git/,
  'jzon',
  'rumAdm',
  'deathnote',
  'lars',
  'sample',
  'easy',
  'migrate',
  'strlen',
];

module.exports = {
  deprecated
}
<filename>bg_atlasapi/config.py<gh_stars>10-100
import configparser
from pathlib import Path
from pkg_resources import resource_filename
import click
CONFIG_FILENAME = "bg_config.conf"
CONFIG_PATH = Path(resource_filename("bg_atlasapi", CONFIG_FILENAME))
# 2 level dictionary for sections and values:
DEFAULT_PATH = Path.home() / ".brainglobe"
TEMPLATE_CONF_DICT = {
"default_dirs": {
"brainglobe_dir": DEFAULT_PATH,
"interm_download_dir": DEFAULT_PATH,
}
}
def write_default_config(path=CONFIG_PATH, template=TEMPLATE_CONF_DICT):
    """Write configuration file at first repo usage. In this way,
    we don't need to keep a confusing template config file in the repo.

    Parameters
    ----------
    path : Path object
        Path of the config file (optional).
    template : dict
        Template of the config file to be written (optional).
    """
    parser = configparser.ConfigParser()
    # Each top-level template key becomes a config section.
    for section, values in template.items():
        parser[section] = values

    with open(path, "w") as destination:
        parser.write(destination)
def read_config(path=CONFIG_PATH):
    """Read BrainGlobe config, creating it with defaults if missing.

    Parameters
    ----------
    path : Path object
        Path of the config file (optional).

    Returns
    -------
    ConfigParser object
        brainglobe configuration
    """
    # If no config file exists yet, write the default one at the requested
    # location. (Fixed: the original called write_default_config() with no
    # argument, which always wrote to CONFIG_PATH — reading a custom,
    # not-yet-existing path then returned an empty parser.)
    if not path.exists():
        write_default_config(path)

    conf = configparser.ConfigParser()
    conf.read(path)

    return conf
def write_config_value(key, val, path=CONFIG_PATH):
    """Write a new value in the config file. To make things simple, ignore
    sections and look directly for matching parameters names.

    Parameters
    ----------
    key : str
        Name of the parameter to configure.
    val :
        New value.
    path : Path object
        Path of the config file (optional).
    """
    conf = configparser.ConfigParser()
    conf.read(path)
    for sect_name, sect_dict in conf.items():
        if key in sect_dict.keys():
            conf[sect_name][key] = str(val)

    # Fixed: write back to the same file that was read — the original opened
    # the module-level CONFIG_PATH (ignoring a custom `path`) and did so
    # inside the loop, rewriting the file once per section.
    with open(path, "w") as f:
        conf.write(f)
def get_brainglobe_dir():
    """Return brainglobe default directory.

    Reads the configured ``brainglobe_dir`` entry from the config file
    (creating the file with defaults on first use).

    Returns
    -------
    Path object
        default BrainGlobe directory with atlases
    """
    conf = read_config()
    return Path(conf["default_dirs"]["brainglobe_dir"])
def cli_modify_config(key=0, value=0, show=False):
    """CLI helper: set `key` to `value` in the config, then print it.

    Parameters
    ----------
    key : str
        Name of the parameter to configure (ignored when `show` is True).
    value :
        New value; for ``*dir`` keys it must be a path whose parent exists.
    show : bool
        When True, only display the current configuration.
    """
    # Ensure that we choose valid paths for default directory. The path does
    # not have to exist yet, but the parent must be valid:
    if not show:
        if key[-3:] == "dir":
            path = Path(value)
            # (Removed a leftover debug echo that printed the bare result of
            # path.parent.exists() to the console.)
            if not path.parent.exists():
                click.echo(
                    f"{value} is not a valid path. Path must be a valid path string, and its parent must exist!"
                )
                return
        write_config_value(key, value)

    click.echo(_print_config())
def _print_config():
    """Render the current configuration as an indented, multi-line string."""
    lines = []
    for section, content in read_config().items():
        lines.append(f"[{section}]\n")
        for key, value in content.items():
            lines.append(f"\t{key}: {value}\n")
    return "".join(lines)
|
# Publish client-side attributes update. Replace $THINGSBOARD_EDGE_HOST_NAME and $ACCESS_TOKEN with corresponding values.
# NOTE(review): the JSON payload is piped on stdin while -m "" is left empty;
# presumably -s tells mqtt-cli to read the message from stdin — confirm
# against the mqtt-cli pub documentation.
cat new-attributes-values.json | mqtt pub -d -h "$THINGSBOARD_EDGE_HOST_NAME" -t "v1/devices/me/attributes" -u '$ACCESS_TOKEN' -s -m ""
# For example, $THINGSBOARD_EDGE_HOST_NAME reference localhost, $ACCESS_TOKEN is ABC123:
cat new-attributes-values.json | mqtt pub -d -h "localhost" -t "v1/devices/me/attributes" -u 'ABC123' -s -m ""
<filename>backend/app/serializers/listing_serializer.rb
# Serializes a Listing for the API: basic attributes plus its associated
# days and owning user.
class ListingSerializer < ActiveModel::Serializer
  attributes :id, :location, :summary, :price

  has_many :days
  belongs_to :user
end
# Feed the pre-generated job list to GNU parallel, at most 6 jobs at a time.
parallel --jobs 6 < ./results/exp_6t_5i_n35/run-3/sea_mem_5n_6t_6d_1000f_617m_5i/jobs/jobs_n2.txt
|
<filename>Labs/Lab03-jQuery/CountriesTable/initialize-table.js<gh_stars>0
// Seeds the countries table, wires the create button, and manages the
// per-row [Up]/[Down]/[Delete] links.
function initializeTable() {
    addCountry("Bulgaria", "Sofia");
    addCountry("Germany", "Berlin");
    addCountry("Russia", "Moscow");
    $("#createLink").on("click", createCountry);
    manageLinks();

    // Append one table row with its three action links.
    // Fixed: the generated anchors all reused id="createLink" (duplicating
    // the create button's id on every row); the ids served no purpose —
    // the links are bound by handler and selected by text — so they were
    // removed.
    function addCountry(country, capital) {
        $("<tr>").appendTo($("#countriesTable"))
            .append(`<td>${country}</td>`)
            .append(`<td>${capital}</td>`)
            .append($("<td>")
                .append($(`<a href="#">[Up]</a>`)
                    .on("click", moveUp))
                .append($(`<a href="#">[Down]</a>`)
                    .on("click", moveDown))
                .append($(`<a href="#">[Delete]</a>`)
                    .on("click", deleteRow)));
        manageLinks();
    }

    // Read the two input fields, add the row, then clear the inputs.
    function createCountry() {
        let country = $("#newCountryText");
        let capital = $("#newCapitalText");
        addCountry(country.val(), capital.val());
        country.val("");
        capital.val("");
        manageLinks();
    }

    function moveUp() {
        $(this).parent().parent().insertBefore($(this).parent().parent().prev());
        manageLinks();
    }

    function moveDown() {
        $(this).parent().parent().insertAfter($(this).parent().parent().next());
        manageLinks();
    }

    function deleteRow() {
        $(this).parent().parent().remove();
        manageLinks();
    }

    // Show all links, then hide [Up] on the first data row and [Down] on the
    // last. NOTE(review): tr:eq(2) implies the first two <tr> are header
    // rows — confirm against the page markup.
    function manageLinks() {
        $("#countriesTable tr a").css("display", "inline");
        $("#countriesTable tr:eq(2) a:contains('Up')").css("display", "none");
        $("#countriesTable tr:last a:contains('Down')").css("display", "none");
    }
}
#!/bin/sh
# Build first (sourced so build.sh's environment/tags carry over), then push
# every tag (-a) of the image.
. build.sh
docker push -a biodranik/nginx-letsencrypt
|
def num_to_roman(num):
    """Convert a positive integer to its Roman-numeral string.

    Uses standard subtractive notation (4 -> "IV", 900 -> "CM").
    Non-positive inputs yield "" (matching the original's behavior).
    """
    # Guard first: divmod floors toward -inf, so without this a negative
    # input would produce garbage instead of the original's "".
    if num <= 0:
        return ""

    ints = [1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1]
    romans = ["M", "CM", "D", "CD", "C", "XC", "L", "XL", "X", "IX", "V", "IV", "I"]

    result = ""
    # Greedily take as many of each value as fit, largest first.
    for value, symbol in zip(ints, romans):
        count, num = divmod(num, value)
        result += symbol * count
    return result
<reponame>mlj888/atc
// Table/model definition for the `contracts` table.
module.exports = {
  table: 'contracts',
  // Upper bound on rows kept in the in-memory cache.
  maxCached: 100,
  // Column definitions consumed by the ORM layer.
  tableFields: [
    { name: 'id', type: 'Number', primary_key: true },
    { name: 'tid', type: 'String', length: 64, not_null: true, unique: true },
    { name: 'name', type: 'String', length: 32, not_null: true, unique: true },
    { name: 'address', type: 'String', length: 50, unique: true },
    { name: 'ownerId', type: 'String', length: 50, not_null: true, index: true },
    { name: 'consumeOwnerEnergy', type: 'Number', not_null: true, default: 0 },
    { name: 'desc', type: 'String', length: 255 },
    { name: 'version', type: 'String', length: 32 },
    { name: 'vmVersion', type: 'String', length: 32 },
    { name: 'state', type: 'Number', default: 0 },
    { name: 'code', type: 'Text', not_null: true },
    { name: 'metadata', type: 'Json', not_null: true },
    { name: 'timestamp', type: 'Number', not_null: true }
  ]
}
|
<reponame>oueya1479/OpenOLAT
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.video;
import java.util.Date;
import org.olat.core.id.CreateInfo;
import org.olat.resource.OLATResource;
/**
* Represents the metadata of a transcoded video file
*
* Initial date: 19.01.2017<br>
* @author fkiefer, <EMAIL>, http://www.frentix.com
*
*/
public interface VideoMeta extends CreateInfo {

	/** Container format name for MP4 files. */
	public static final String FORMAT_MP4 = "mp4";

	/**
	 * @return key, the database identifier
	 */
	public Long getKey();

	/**
	 * @return The video resource of the master video
	 */
	public OLATResource getVideoResource();

	/**
	 * @return The URL of an external video
	 */
	public String getUrl();

	/**
	 * @param url The URL of an external video
	 */
	public void setUrl(String url);

	/**
	 * @return width of transcoded video in pixel
	 */
	public int getWidth();

	/**
	 * @param width of video in pixel after transcoding
	 */
	public void setWidth(int width);

	/**
	 * @return height of transcoded video in pixel
	 */
	public int getHeight();

	/**
	 * @param height of video in pixel after transcoding
	 */
	public void setHeight(int height);

	/**
	 * @return the video file size in bytes
	 */
	public long getSize();

	/**
	 * @param size the file size (bytes) of the transcoded video
	 */
	public void setSize(long size);

	/**
	 * @return the transcoding format, e.g. mp4
	 */
	public VideoFormat getVideoFormat();

	/**
	 * @param format the transcoding format, e.g. mp4
	 */
	public void setVideoFormat(VideoFormat format);

	/**
	 * Gets the length of the video as string.
	 *
	 * @return the length
	 */
	public String getLength();

	/**
	 * Sets the length.
	 *
	 * @param length the new length
	 */
	public void setLength(String length);

	/**
	 * Sets the creation date.
	 *
	 * @param creationdate the new creation date
	 */
	public void setCreationDate(Date creationdate);

	/**
	 * Sets the video resource.
	 *
	 * @param videoResource the new video resource
	 */
	public void setVideoResource(OLATResource videoResource);

	/** @return whether downloading the video is allowed */
	public boolean isDownloadEnabled();

	/** @param downloadEnabled whether downloading the video is allowed */
	public void setDownloadEnabled(boolean downloadEnabled);
}
|
def removeFirstOccurrence(str, ndl):
    """Return `str` with the first occurrence of `ndl` removed.

    If `ndl` does not occur, `str` is returned unchanged. An empty needle
    matches at position 0 and removes nothing.

    (Parameter names shadow the builtin `str`; kept for interface
    compatibility with existing callers.)
    """
    # The original had an `elif not ndl and str:` branch, but it was
    # unreachable: `'' in s` is True for every string, so an empty needle
    # always takes the first branch (replace('', '', 1) is a no-op).
    if ndl in str:
        return str.replace(ndl, '', 1)
    return str
'use strict';

/**
 * Minimal calculator "class" in pre-ES6 constructor-function style.
 * @constructor
 */
function CalculatorClass() {
}

/**
 * Adds two numbers.
 * Uses a function expression rather than an arrow function: methods placed
 * on a prototype should not be arrows, so that `this` refers to the instance
 * if the method is ever extended to use it.
 *
 * @param {number} a first addend
 * @param {number} b second addend
 * @returns {number} the sum a + b
 */
CalculatorClass.prototype.sum = function (a, b) {
    return a + b;
};

module.exports = CalculatorClass;
#!/bin/bash
#
# Bitnami Odoo library
# shellcheck disable=SC1091
# Load generic libraries
. /opt/bitnami/scripts/libfs.sh
. /opt/bitnami/scripts/libos.sh
. /opt/bitnami/scripts/libnet.sh
. /opt/bitnami/scripts/libfile.sh
. /opt/bitnami/scripts/libvalidations.sh
. /opt/bitnami/scripts/libpersistence.sh
. /opt/bitnami/scripts/libservice.sh
# Load database library
if [[ -f /opt/bitnami/scripts/libpostgresqlclient.sh ]]; then
. /opt/bitnami/scripts/libpostgresqlclient.sh
elif [[ -f /opt/bitnami/scripts/libpostgresql.sh ]]; then
. /opt/bitnami/scripts/libpostgresql.sh
fi
########################
# Validate settings in ODOO_* env vars
# Globals:
# ODOO_*
# Arguments:
# None
# Returns:
# 0 if the validation succeeded, 1 otherwise
#########################
odoo_validate() {
    debug "Validating settings in ODOO_* environment variables..."
    local error_code=0

    # Auxiliary functions
    # Report a validation error and remember that validation failed
    print_validation_error() {
        error "$1"
        error_code=1
    }
    # NOTE(review): defined but not used below; presumably kept for parity
    # with other Bitnami library scripts
    check_empty_value() {
        if is_empty_value "${!1}"; then
            print_validation_error "${1} must be set"
        fi
    }
    check_yes_no_value() {
        if ! is_yes_no_value "${!1}" && ! is_true_false_value "${!1}"; then
            print_validation_error "The allowed values for ${1} are: yes no"
        fi
    }
    # $1 = variable name, $2 = space-separated list of allowed values
    check_multi_value() {
        if [[ " ${2} " != *" ${!1} "* ]]; then
            print_validation_error "The allowed values for ${1} are: ${2}"
        fi
    }
    # Only warns (does not fail validation) on unresolved hostnames
    check_resolved_hostname() {
        if ! is_hostname_resolved "$1"; then
            warn "Hostname ${1} could not be resolved, this could lead to connection issues"
        fi
    }
    check_valid_port() {
        local port_var="${1:?missing port variable}"
        local err
        if ! err="$(validate_port "${!port_var}")"; then
            print_validation_error "An invalid port was specified in the environment variable ${port_var}: ${err}."
        fi
    }

    # Validate user inputs
    check_yes_no_value "ODOO_SKIP_BOOTSTRAP"
    check_yes_no_value "ODOO_LOAD_DEMO_DATA"
    check_valid_port "ODOO_PORT_NUMBER"
    check_valid_port "ODOO_LONGPOLLING_PORT_NUMBER"
    # Database host/port are optional; only validate them when provided
    ! is_empty_value "$ODOO_DATABASE_HOST" && check_resolved_hostname "$ODOO_DATABASE_HOST"
    ! is_empty_value "$ODOO_DATABASE_PORT_NUMBER" && check_valid_port "ODOO_DATABASE_PORT_NUMBER"
    [[ -n "${WITHOUT_DEMO:-}" ]] && warn "The WITHOUT_DEMO environment variable has been deprecated in favor of ODOO_LOAD_DEMO_DATA=yes. Support for it may be removed in a future release."

    # Validate credentials
    if is_boolean_yes "${ALLOW_EMPTY_PASSWORD:-}"; then
        warn "You set the environment variable ALLOW_EMPTY_PASSWORD=${ALLOW_EMPTY_PASSWORD:-}. For safety reasons, do not use this flag in a production environment."
    else
        for empty_env_var in "ODOO_DATABASE_PASSWORD" "ODOO_PASSWORD"; do
            is_empty_value "${!empty_env_var}" && print_validation_error "The ${empty_env_var} environment variable is empty or not set. Set the environment variable ALLOW_EMPTY_PASSWORD=yes to allow a blank password. This is only recommended for development environments."
        done
    fi

    # Validate SMTP credentials
    if ! is_empty_value "$ODOO_SMTP_HOST"; then
        # Missing SMTP user/password only warns; missing/invalid port is an error
        for empty_env_var in "ODOO_SMTP_USER" "ODOO_SMTP_PASSWORD"; do
            is_empty_value "${!empty_env_var}" && warn "The ${empty_env_var} environment variable is empty or not set."
        done
        is_empty_value "$ODOO_SMTP_PORT_NUMBER" && print_validation_error "The ODOO_SMTP_PORT_NUMBER environment variable is empty or not set."
        ! is_empty_value "$ODOO_SMTP_PORT_NUMBER" && check_valid_port "ODOO_SMTP_PORT_NUMBER"
        ! is_empty_value "$ODOO_SMTP_PROTOCOL" && check_multi_value "ODOO_SMTP_PROTOCOL" "ssl tls"
    fi

    return "$error_code"
}
########################
# Ensure Odoo is initialized
# Globals:
# ODOO_*
# Arguments:
# None
# Returns:
# None
#########################
odoo_initialize() {
    # Check if Odoo has already been initialized and persisted in a previous run
    local -r app_name="odoo"
    if ! is_app_initialized "$app_name"; then
        # Common argument list for every remote PostgreSQL call below
        local -a db_execute_args=("$ODOO_DATABASE_HOST" "$ODOO_DATABASE_PORT_NUMBER" "$ODOO_DATABASE_NAME" "$ODOO_DATABASE_USER" "$ODOO_DATABASE_PASSWORD")

        # Ensure Odoo persisted directories exist (i.e. when a volume has been mounted to /bitnami)
        info "Ensuring Odoo directories exist"
        ensure_dir_exists "$ODOO_VOLUME_DIR"
        # Use daemon:root ownership for compatibility when running as a non-root user
        am_i_root && configure_permissions_ownership "$ODOO_VOLUME_DIR" -d "775" -f "664" -u "$ODOO_DAEMON_USER" -g "root"

        info "Trying to connect to the database server"
        odoo_wait_for_postgresql_connection "${db_execute_args[@]}"

        # Odoo requires the database to be owned by the same database user used by the application
        # If not, the DB will simply not appear in the list of DBs and Odoo will force you to create a new one
        # Refer to function 'list_dbs' in 'service/db.py'
        info "Validating database owner"
        local db_owner_result
        db_owner_result="$(postgresql_remote_execute_print_output "${db_execute_args[@]}" <<< "SELECT u.usename FROM pg_database d JOIN pg_user u ON (d.datdba = u.usesysid) WHERE d.datname = '${ODOO_DATABASE_NAME}' AND u.usename = '${ODOO_DATABASE_USER}';")"
        # "(0 rows)" in the psql output means the ownership query matched nothing
        if [[ "$db_owner_result" = *"(0 rows)"* ]]; then
            error "The database '${ODOO_DATABASE_NAME}' is not owned by database user '${ODOO_DATABASE_USER}'. This is required for the Odoo application to be able to use this database."
            return 1
        fi

        info "Generating configuration file"
        local template_dir="${BITNAMI_ROOT_DIR}/scripts/odoo/bitnami-templates"
        render-template "${template_dir}/odoo.conf.tpl" > "$ODOO_CONF_FILE"
        if ! is_empty_value "$ODOO_SMTP_HOST"; then
            info "Configuring SMTP"
            odoo_conf_set "smtp_server" "$ODOO_SMTP_HOST"
            odoo_conf_set "smtp_port" "$ODOO_SMTP_PORT_NUMBER"
            [[ "$ODOO_SMTP_PROTOCOL" = "ssl" || "$ODOO_SMTP_PROTOCOL" = "tls" ]] && odoo_conf_set "smtp_ssl" "True"
            odoo_conf_set "smtp_user" "$ODOO_SMTP_USER"
            odoo_conf_set "smtp_password" "$ODOO_SMTP_PASSWORD"
        fi

        if ! is_boolean_yes "$ODOO_SKIP_BOOTSTRAP"; then
            info "Installing modules"
            local -a init_args=("--init=all")
            # Disable demo data import if specified by the user
            if [[ -n "${WITHOUT_DEMO:-}" ]]; then
                # Support for legacy WITHOUT_DEMO environment variable, this may be removed in the future
                init_args+=("--without-demo=${WITHOUT_DEMO}")
            elif ! is_boolean_yes "$ODOO_LOAD_DEMO_DATA"; then
                init_args+=("--without-demo=all")
            fi
            odoo_execute "${init_args[@]}"

            info "Updating admin user credentials"
            postgresql_remote_execute "${db_execute_args[@]}" <<< "UPDATE res_users SET login = '${ODOO_EMAIL}', password = '${ODOO_PASSWORD}' WHERE login = 'admin'"
        else
            info "An already initialized Odoo database was provided, configuration will be skipped"
            # Odoo stores a cache of the full path to cached .css/.js files in the filesystem
            # However when reinstalling with ODOO_SKIP_BOOTSTRAP, no filesystem is mounted
            # So we need to clear the assets or if none of the .css/.js will load properly
            info "Clearing assets cache from the database"
            postgresql_remote_execute "${db_execute_args[@]}" <<< "DELETE FROM ir_attachment WHERE url LIKE '/web/content/%';"
            if ! is_boolean_yes "$ODOO_SKIP_UPDATE"; then
                info "Updating modules"
                odoo_execute --update=all
            fi
        fi

        info "Persisting Odoo installation"
        persist_app "$app_name" "$ODOO_DATA_TO_PERSIST"
    else
        # Fix to make upgrades from old images work
        # Before, we were persisting 'odoo-server.conf' dir instead of 'conf/odoo.conf', causing errors when restoring persisted data
        # TODO: Remove this block in a future release
        if [[ ! -e "${ODOO_VOLUME_DIR}/conf" && -e "${ODOO_VOLUME_DIR}/odoo-server.conf" ]]; then
            warn "Detected legacy configuration file ${ODOO_VOLUME_DIR}/odoo-server.conf in volume"
            warn "Creating ${ODOO_VOLUME_DIR}/conf/odoo.conf symlink pointing to ${ODOO_VOLUME_DIR}/odoo-server.conf"
            mkdir -p "${ODOO_VOLUME_DIR}/conf"
            ln -s "${ODOO_VOLUME_DIR}/odoo-server.conf" "${ODOO_VOLUME_DIR}/conf/odoo.conf"
        fi

        info "Restoring persisted Odoo installation"
        restore_persisted_app "$app_name" "$ODOO_DATA_TO_PERSIST"

        info "Trying to connect to the database server"
        # Read the connection settings back from the restored configuration file
        local db_host db_port db_name db_user db_pass
        db_host="$(odoo_conf_get "db_host")"
        db_port="$(odoo_conf_get "db_port")"
        db_name="$(odoo_conf_get "db_name")"
        db_user="$(odoo_conf_get "db_user")"
        db_pass="$(odoo_conf_get "db_password")"
        odoo_wait_for_postgresql_connection "$db_host" "$db_port" "$db_name" "$db_user" "$db_pass"

        if ! is_boolean_yes "$ODOO_SKIP_UPDATE"; then
            info "Updating modules"
            odoo_execute --update=all
        fi
    fi

    # Avoid exit code of previous commands to affect the result of this function
    true
}
########################
# Add or modify an entry in the Odoo configuration file
# Globals:
# ODOO_*
# Arguments:
# $1 - Variable name
# $2 - Value to assign to the variable
# Returns:
# None
#########################
odoo_conf_set() {
    local -r key="${1:?key missing}"
    local -r value="${2:?value missing}"
    debug "Setting ${key} to '${value}' in Odoo configuration"
    # Sanitize key (sed does not support fixed string substitutions)
    # The optional "(;\s*)?" group also matches commented-out entries so
    # that setting a key re-enables it
    local sanitized_pattern
    sanitized_pattern="^\s*(;\s*)?$(sed 's/[]\[^$.*/]/\\&/g' <<< "$key")\s*=.*"
    local entry="${key} = ${value}"
    # Check if the configuration exists in the file
    if grep -q -E "$sanitized_pattern" "$ODOO_CONF_FILE"; then
        # It exists, so replace the line
        replace_in_file "$ODOO_CONF_FILE" "$sanitized_pattern" "$entry"
    else
        # It doesn't exist, so append to the end of the file
        cat >> "$ODOO_CONF_FILE" <<< "$entry"
    fi
}
########################
# Get an entry from the Odoo configuration file
# Globals:
# ODOO_*
# Arguments:
# $1 - Variable name
# Returns:
# None
#########################
odoo_conf_get() {
    local -r key="${1:?key missing}"
    debug "Getting ${key} from Odoo configuration"
    # Sanitize key (sed does not support fixed string substitutions)
    local sanitized_pattern
    sanitized_pattern="^\s*(;\s*)?$(sed 's/[]\[^$.*/]/\\&/g' <<< "$key")\s*=(.*)"
    # Extract the value part (capture group 2) and strip quotes and spaces
    grep -E "$sanitized_pattern" "$ODOO_CONF_FILE" | sed -E "s|${sanitized_pattern}|\2|" | tr -d "\"' "
}
########################
# Wait until the database is accessible with the currently-known credentials
# Globals:
# *
# Arguments:
# $1 - database host
# $2 - database port
# $3 - database name
# $4 - database username
# $5 - database user password (optional)
# Returns:
# true if the database connection succeeded, false otherwise
#########################
odoo_wait_for_postgresql_connection() {
    local -r db_host="${1:?missing database host}"
    local -r db_port="${2:?missing database port}"
    local -r db_name="${3:?missing database name}"
    local -r db_user="${4:?missing database user}"
    local -r db_pass="${5:-}"
    # Trivial query used as a connectivity probe
    check_postgresql_connection() {
        echo "SELECT 1" | postgresql_remote_execute "$db_host" "$db_port" "$db_name" "$db_user" "$db_pass"
    }
    # retry_while re-runs the probe until it succeeds or gives up
    if ! retry_while "check_postgresql_connection"; then
        error "Could not connect to the database"
        return 1
    fi
}
########################
# Execute a command using the 'odoo' CLI
# Globals:
#   ODOO_*
# Arguments:
#   $@ - additional arguments to pass to the 'odoo' command
# Returns:
#   None
#########################
odoo_execute() {
    # Define 'odoo' cmdline arguments
    local -a cmd=("${ODOO_BIN_DIR}/odoo")
    # Drop privileges to the daemon user when running as root
    am_i_root && cmd=("gosu" "$ODOO_DAEMON_USER" "${cmd[@]}")
    # Ensure the logfile is not populated with init info and no service is left running
    debug_execute "${cmd[@]}" --config="$ODOO_CONF_FILE" --logfile= --pidfile= --stop-after-init "$@"
}
########################
# Check if Odoo is running
# Arguments:
# None
# Returns:
# Boolean
#########################
# Check if Odoo is running by inspecting the PID file.
# Returns success (0) when a live PID is found, failure otherwise.
is_odoo_running() {
    # 'local' added: the original leaked 'pid' into the caller's scope
    local pid
    pid="$(get_pid_from_file "$ODOO_PID_FILE")"
    if [[ -n "$pid" ]]; then
        is_service_running "$pid"
    else
        false
    fi
}
########################
# Check if Odoo is not running
# Arguments:
# None
# Returns:
# Boolean
#########################
# Check if Odoo is NOT running (logical inverse of is_odoo_running).
is_odoo_not_running() {
    if is_odoo_running; then
        return 1
    fi
    return 0
}
########################
# Stop Odoo
# Arguments:
# None
# Returns:
# None
#########################
# Stop Odoo via its PID file. A no-op (status 0) when it is not running.
odoo_stop() {
    if is_odoo_running; then
        stop_service_using_pid "$ODOO_PID_FILE"
    fi
}
|
package main.fieldRemoved;
public class FieldRemoved {
    // NOTE(review): judging by the package name (fieldRemoved), this looks
    // like a test fixture for field-removal comparisons; only these two
    // fields are expected to remain — confirm against the test harness.
    public int fieldStay;

    public static int staticFieldStay;
}
|
#!/bin/bash
#
# Fetch Google Analytics Pageviews reporting cache
# and save as '{{ site.baseurl }}/assets/data/pageviews.json'
#
# Requirement:
#   - jq
#   - wget
#
# v2.0
# https://github.com/cotes2020/jekyll-theme-chirpy
# © 2019 Cotes Chung
# MIT Licensed

set -eu

# Project root: two levels above this script's real location.
# All expansions are quoted so paths with spaces do not word-split.
WORK_DIR=$(dirname "$(dirname "$(realpath "$0")")")

URL_FILE="${WORK_DIR}{{ site.baseurl }}/assets/data/proxy.json"
PV_CACHE="${WORK_DIR}{{ site.baseurl }}/assets/data/pageviews.json"

# Read the proxy endpoint from the committed proxy.json
PROXY_URL=$(jq -r '.proxyUrl' "$URL_FILE")

wget "$PROXY_URL" -O "$PV_CACHE"
|
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
# MIT License
#
# Copyright (c) 2021 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
from disent.data.groundtruth.base import Hdf5PreprocessedGroundTruthData
# ========================================================================= #
# dataset_dsprites #
# ========================================================================= #
class DSpritesData(Hdf5PreprocessedGroundTruthData):
    """
    DSprites Dataset
    - beta-VAE: Learning Basic Visual Concepts with a Constrained Variational BaseFramework
      (https://github.com/deepmind/dsprites-dataset)

    Files:
    - direct npz: https://raw.githubusercontent.com/deepmind/dsprites-dataset/master/dsprites_ndarray_co1sh3sc6or40x32y32_64x64.npz
      approx 2.5 GB loaded into memory
    - direct hdf5: https://raw.githubusercontent.com/deepmind/dsprites-dataset/master/dsprites_ndarray_co1sh3sc6or40x32y32_64x64.hdf5
      default chunk size is (23040, 2, 4), dataset is (737280, 64, 64) uint8.

    # reference implementation: https://github.com/google-research/disentanglement_lib/blob/master/disentanglement_lib/data/ground_truth/dsprites.py
    """

    # Ground-truth factors of variation and the number of values each takes
    # (their product must equal the dataset length).
    factor_names = ('shape', 'scale', 'orientation', 'position_x', 'position_y')
    factor_sizes = (3, 6, 40, 32, 32)  # TOTAL: 737280
    observation_shape = (64, 64, 1)  # TODO: reference implementation has colour variants
    dataset_url = 'https://raw.githubusercontent.com/deepmind/dsprites-dataset/master/dsprites_ndarray_co1sh3sc6or40x32y32_64x64.hdf5'
    # name of the image dataset inside the downloaded hdf5 file
    hdf5_name = 'imgs'
    # minimum chunk size, no compression but good for random accesses
    hdf5_chunk_size = (1, 64, 64)

    def __init__(self, data_dir='data/dataset/dsprites', in_memory=False, force_download=False, force_preprocess=False):
        # All download/preprocess/caching behaviour is delegated to the base class.
        super().__init__(data_dir=data_dir, in_memory=in_memory, force_download=force_download, force_preprocess=force_preprocess)

    def __getitem__(self, idx):
        # Rescale observations to the usual 0-255 range.
        return super().__getitem__(idx) * 255  # for some reason uint8 is used as datatype, but only in range 0-1
# ========================================================================= #
# END #
# ========================================================================= #
if __name__ == '__main__':
    from tqdm import tqdm
    # Smoke test: iterate once over the full dataset to exercise download,
    # preprocessing and item access (progress shown via tqdm).
    for dat in tqdm(DSpritesData(in_memory=True, force_preprocess=True)):
        pass
|
<reponame>smagill/opensphere-desktop
package io.opensphere.core.mgrs;
import io.opensphere.core.model.GeographicBoundingBox;
import io.opensphere.core.model.GeographicPosition;
/** Class to hold generic grid information. */
public class GenericGrid
{
    /** Bounding box enclosing this grid. */
    private GeographicBoundingBox myBoundingBox;

    /** Geographic position at the center of the grid. */
    private GeographicPosition myCenterPosition;

    /** North-east corner of the grid. */
    private GeographicPosition myNEPos;

    /** North-west corner of the grid. */
    private GeographicPosition myNWPos;

    /** South-east corner of the grid. */
    private GeographicPosition mySEPos;

    /** South-west corner of the grid. */
    private GeographicPosition mySWPos;

    /** Easting value at the south-west corner. */
    private double mySWEasting;

    /** Northing value at the south-west corner. */
    private double mySWNorthing;

    /** Default constructor. */
    public GenericGrid()
    {
    }

    /**
     * Get the bounding box of this grid.
     *
     * @return The bounding box of this grid.
     */
    public GeographicBoundingBox getBoundingBox()
    {
        return myBoundingBox;
    }

    /**
     * Get the center position.
     *
     * @return The center geographic position of grid.
     */
    public GeographicPosition getCenterPosition()
    {
        return myCenterPosition;
    }

    /**
     * Get the north-east corner.
     *
     * @return The north east geographic position.
     */
    public GeographicPosition getNEPos()
    {
        return myNEPos;
    }

    /**
     * Get the north-west corner.
     *
     * @return The north west geographic position.
     */
    public GeographicPosition getNWPos()
    {
        return myNWPos;
    }

    /**
     * Get the south-east corner.
     *
     * @return The south east geographic position.
     */
    public GeographicPosition getSEPos()
    {
        return mySEPos;
    }

    /**
     * Get the south-west easting.
     *
     * @return The south west easting value.
     */
    public double getSWEasting()
    {
        return mySWEasting;
    }

    /**
     * Get the south-west northing.
     *
     * @return The south west northing value.
     */
    public double getSWNorthing()
    {
        return mySWNorthing;
    }

    /**
     * Get the south-west corner.
     *
     * @return The south west geographic position.
     */
    public GeographicPosition getSWPos()
    {
        return mySWPos;
    }

    /**
     * Set the bounding box.
     *
     * @param boundingBox The new bounding box.
     */
    public void setBoundingBox(GeographicBoundingBox boundingBox)
    {
        myBoundingBox = boundingBox;
    }

    /**
     * Set the center position.
     *
     * @param centerPosition The center geographic position of grid.
     */
    public void setCenterPosition(GeographicPosition centerPosition)
    {
        myCenterPosition = centerPosition;
    }

    /**
     * Set the north-east corner.
     *
     * @param nePos The north east geographic position.
     */
    public void setNEPos(GeographicPosition nePos)
    {
        myNEPos = nePos;
    }

    /**
     * Set the north-west corner.
     *
     * @param nwPos The north west geographic position.
     */
    public void setNWPos(GeographicPosition nwPos)
    {
        myNWPos = nwPos;
    }

    /**
     * Set the south-east corner.
     *
     * @param sePos The south east geographic position.
     */
    public void setSEPos(GeographicPosition sePos)
    {
        mySEPos = sePos;
    }

    /**
     * Set the south-west easting.
     *
     * @param swEasting The south west easting value.
     */
    public void setSWEasting(double swEasting)
    {
        mySWEasting = swEasting;
    }

    /**
     * Set the south-west northing.
     *
     * @param swNorthing The south west northing value.
     */
    public void setSWNorthing(double swNorthing)
    {
        mySWNorthing = swNorthing;
    }

    /**
     * Set the south-west corner.
     *
     * @param swPos The south west geographic position.
     */
    public void setSWPos(GeographicPosition swPos)
    {
        mySWPos = swPos;
    }
}
|
# Using Options
An option is an argument that changes the script behavior.
Options are not often used in scripts.
getopts is used to deal with options.
#!/bin/bash
#script that creates users using preferred options
#Usage: use -a to add a home directory
#       use -b to make the user member of group 100
#       use -c to specify a custom shell. This option is followed by a shell name
while getopts "abc:" opt # the colon behind c means that c takes an argument. $opt is the variable that is evaluated in the case statement below
do
case $opt in #This defines the different options. Option A, B ,C
a) VAR1=-m ;;
b) VAR2="-g 100" ;; #It is advisable to use quotations for the entire value because of the space between the assigned characters
c) VAR3="-s $OPTARG";; #OPTARG is a built-in variable that holds the argument supplied to the current option, i.e. the word that follows -c
*) echo 'usage: makeuser [-a] [-b] [-c shell] username'
esac #The case is closed here
done #The while loop is closed here
echo the current arguments are set to $* # $* is All the values that have been used
shift $(( OPTIND - 1 )) #OPTIND is the option index; shifting by OPTIND-1 discards the processed options
#After the shift, only the non-option arguments remain in $1, $2, ...
echo now the current arguments are set to $*
echo useradd $VAR1 $VAR2 $VAR3 $1 # $1 is the first argument that is not an option
exit 0
'''
How to run
***********
#makeuser -a -b lisa
the current arguments are set to -a -b lisa
now the current arguments are set to lisa
useradd -m -g 100 lisa
#makeuser -a -b -c /bin/bash lisa
Results:
the current arguments are set to -a -b -c /bin/bash lisa
now the current arguments are set to lisa
useradd -m -g 100 -s /bin/bash lisa
'''
Functions
*********
Functions are useful if code is repeated frequently
Functions must be defined before they can be used
It is good practice to define all functions at the beginning of a script
syntax approach 1:
function help # Start with the word function and the name of the function
{
echo this is how you do it #Then you define the code within the given function
}
'''
Functions must be defined before they are used.
It is advisable to define the functions at the beginning of the script, but it is not absolutely necessary.
Start the script with the list of all the variables and a list of all the functions.
'''
Syntax approach 2:
help () #Define the name of the function with curly braces then include the function contents within the function as expected
{
echo this is how you do it
}
Using Functions: Simple Example
#!/bin/bash
noarg()
{
echo you have not provided an argument
echo when using this script, you need to specify a filename
exit 2
}
if [ -z $1 ]; then
noarg
else
file $1
fi
exit 0
'''
running this
# easyfunction #This will throw up an error saying that an argument was not given
#easyfunction blah #This will return an error saying that the blah is not a filename of that the file cannot be found in the directory
#bash -x easyfunction bash #This will debug the function line by line and display the result logs on the screen
#easyfunction /etc/hosts #Result: /etc/hosts: ASCII text
'''
Working with Arrays:
*******************
Note that Bash also follows zero-based indexing, just as Python and Java do
An array is a string variable that can hold multiple values
Although appreciated by some, using arrays can often be avoided because modern bash can contain multiple values in a variable as well
The amount of values that can be kept in arrays is higher, which makes them useful in some cases anyway
The downside is that Bash arrays are relatively complicated
**
Using Arrays example:
names=(linda lisa laura lori)
names[0]=linda
names[1]=lisa
names[2]=laura
names[3]=lori
echo ${names[2]}
echo ${names[@]} #This will show everything in the array
echo ${#names[@]} #The hash symbol will count everything in the variable. the response here will be 4
echo ${names} #note that this will only collect the first value from the array
variable assignment:
*******************
names[4]=lucy #This will assign the argument lucy to the names array
Defining/Creating Menu Interfaces:
***********************************
The select statement can be used to create a menu Interface
select DIR in /bin /usr /etc
In this, DIR is the variable that will be filled with the selected choice, and /bin /usr /etc are presented as numbered menu options
Notice the use of break in select, without it the script would run forever
Menu Interfaces Example
#!/bin/bash
# demo script that shows making menues with select
echo 'Select a directory: ' #Remember this is between single quotes
select DIR in /bin /usr /etc
do
#only continue if the user has selected something
if [ -n $DIR ]
then
DIR=$DIR
echo you have selected $DIR
export DIR
break
else
echo invalid choice
fi
done
Example 2
**********
#!/bin/bash
#Sample Administration Menu
echo 'select a task: '
select TASK in 'Check mounts' 'Check disk space' 'Check Memory usage'
do
case $REPLY in
1) TASK=mount;;
2) TASK="df -h";;
3) TASK="free -m";;
*) echo ERROR && exit 2;;
esac
if [ -n "$TASK" ]
then
clear
$TASK
break
else
echo INVALID CHOICE && exit 3
fi
done
Using Trap
**********
Trap can be used to redefine signals
Useful to disallow Ctrl-C or other ways of killing a script
Consult man 7 signal for a list of available signals
checking the manpages
man 7 signal
Example with trap
#!/bin/bash
# The uninterruptable script
trap "echo 'Forget it bro!' " INT
trap "logger 'Who tried to kill me?' " KILL
trap "logger 'Getting nasty huh?' " TERM
while true
do
true
done
Exercise 6
**********
Create a user helpdesk. Write a menu script that is started automatically when the user logs in.
The menu script should never be terminated, unless the user logs out (which is a menu option as well).
From this menu, make at least the following options available:
(1) Reset Password,
(2) Show disk usage,
(3) Ping a host,
(4) Log out
Bonus Question:
modify this script and relate configuration so that the user can use sudo to set passwords for other users as well
Exercise 6 Solution
*******************
#!/bin/bash
while true ; do #We want a script that will never end
trap "echo NOPE" INT #this is where the user is trying to ctrl-c his way out of the script . The response will be no
pinghost () #The function ping host
{
echo which host \do you want to pinghost
read HOSTNAME
ping -c 1 $HOSTNAME
}
echo 'Select option: '
select TASK in 'change password' 'monitor disk space' 'ping a host' 'logout'
do
case $REPLY in
1) TASK=passwd;;
2) TASK="df -h";;
3) TASK=pinghost;;
4) TASK=exit;;
esac
if [ -n "$TASK" ] #note: test the variable "$TASK", not the literal word "task" (a literal is always non-empty, so the check would never fail)
then
$TASK
break
else
echo invalid choice #, try again
fi
done
done
'''
The double semicolon is also useful as it leaves no ambiguity in the code.
It is required as it is used at the end of each clause as required by the bash syntax in order to parse the command correctly.
It is only used in case constructs to indicate that the end of an alternative.
'''
|
public class DuplicateRemove {

    /**
     * Returns the input with repeated characters removed, keeping only the
     * first occurrence of each character (spaces included).
     *
     * @param str input string, must be non-null
     * @return string containing each distinct character of {@code str} once,
     *         in first-appearance order
     */
    public static String removeDuplicates(String str) {
        Set<Character> seen = new HashSet<>();
        StringBuilder out = new StringBuilder(str.length());
        for (int i = 0; i < str.length(); i++) {
            char c = str.charAt(i);
            // Set.add returns true only the first time a character is seen
            if (seen.add(c)) {
                out.append(c);
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        String str = "hello hello world";
        System.out.println(removeDuplicates(str));
    }
}
def generate_course_html(courses):
    """Render a list of course dicts as concatenated "course box" HTML.

    Each course must provide the keys ``image_path``, ``title`` and
    ``description``.  Returns a single string; an empty list yields "".
    Note: values are interpolated verbatim (no HTML escaping).
    """
    blocks = []
    for course in courses:
        blocks.append(
            '<div class="course-box">\n'
            ' <div class="course-thumb">\n'
            f' <img src="{course["image_path"]}" alt="{course["title"]}">\n'
            ' </div>\n'
            ' <div class="course-details">\n'
            f' <h3>{course["title"]}</h3>\n'
            f' <p>{course["description"]}</p>\n'
            ' </div>\n'
            '</div>\n'
        )
    return ''.join(blocks)
<filename>async-retry/src/main/java/ca/bc/jx/kafka/retry/worker/NonBlockingKafkaConsumerConfigure.java
package ca.bc.jx.kafka.retry.worker;
import ca.bc.jx.kafka.retry.domain.RetryProperties;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Import;
import java.util.ArrayList;
/**
 * Spring configuration entry point for the non-blocking Kafka retry consumers.
 * Importing this class also registers {@link ConsumerManager}.
 * Package-private constructor (via Lombok) discourages manual instantiation.
 */
@Configuration
@EnableAspectJAutoProxy
@Import(ConsumerManager.class)
@NoArgsConstructor(access = AccessLevel.PACKAGE)
public class NonBlockingKafkaConsumerConfigure {

    /**
     * Binds the {@code nonblocking.kafka-consumers} configuration list onto a
     * list of {@link RetryProperties}, one entry per configured consumer.
     */
    @Configuration
    @ConfigurationProperties(prefix = "nonblocking.kafka-consumers")
    static class ConsumerPropertyList extends ArrayList<RetryProperties> {
    }
}
|
<filename>html/js/jess/util/Util.js
define(["require", "exports"], function (require, exports) {
    "use strict";
    // Small static utility holder (compiled-TypeScript style AMD module).
    var Util = (function () {
        function Util() {
        }
        // Produce a random 4-hex-digit chunk. The "1 +" guarantees a value in
        // [0x10000, 0x20000), so toString(16) always yields 5 digits and
        // substring(1) keeps exactly 4 (preserving leading zeros).
        Util.s4 = function () {
            var n = Math.floor((1 + Math.random()) * 0x10000);
            return n.toString(16).substring(1);
        };
        // Random id in 8-4-4-4-12 hex layout (GUID-shaped, not RFC-4122
        // compliant: no version/variant bits are set).
        Util.guid = function () {
            var parts = [
                this.s4() + this.s4(),
                this.s4(),
                this.s4(),
                this.s4(),
                this.s4() + this.s4() + this.s4()
            ];
            return parts.join('-');
        };
        return Util;
    }());
    exports.Util = Util;
});
|
<gh_stars>1-10
import React from "react";
import styled from "styled-components";
import { Chip } from "@mui/material";
import { useLanguage } from "containers/LanguageProvider";
// Chip marking "new" entries in the update log (success color).
export const NewChip = () => {
    const { userLanguage, pageString } = useLanguage();

    return (
        <StyledNewChip
            label={pageString.index.updateLog.new}
            $lang={userLanguage}
        />
    );
};

// Shared base style for all update-log chips. English labels get a fixed
// width (3.8rem) so the chips line up; other languages size to content.
export const StyledChip = styled(Chip)`
    height: auto;
    width: ${({ $lang }) => ($lang === "en" ? "3.8rem" : "auto")};
    color: ${({ theme }) => theme.colors.onPrimary};
    font-size: small;
    > span {
        padding: 0 0.4rem;
    }
    margin-right: 0.4rem;
`;

const StyledNewChip = styled(StyledChip)`
    background-color: ${({ theme }) => theme.colors.success};
`;

// Chip marking "fix" entries in the update log (error color).
export const FixChip = () => {
    const { userLanguage, pageString } = useLanguage();

    return (
        <StyledFixChip
            label={pageString.index.updateLog.fix}
            $lang={userLanguage}
        />
    );
};

const StyledFixChip = styled(StyledChip)`
    background-color: ${({ theme }) => theme.colors.error};
`;

// Chip marking "change" entries in the update log (blue color).
export const ChangeChip = () => {
    const { userLanguage, pageString } = useLanguage();

    return (
        <StyledChangeChip
            label={pageString.index.updateLog.change}
            $lang={userLanguage}
        />
    );
};

const StyledChangeChip = styled(StyledChip)`
    background-color: ${({ theme }) => theme.colors.blue};
`;
|
#!/bin/dash
# Build a Wake-on-LAN "magic packet" for the MAC address in $ETHER and
# write the raw bytes to $WAKEPACKET (both come from config.sh).
. ./config.sh

# Strip the colons from the MAC address: AA:BB:CC:DD:EE:FF -> AABBCCDDEEFF
# ($(...) used instead of legacy backticks; expansions quoted)
ETHER2=$(echo "$ETHER" | sed "s/://g")
# A magic packet is 6 bytes of 0xFF followed by the MAC repeated 16 times
ETHER3="${ETHER2}${ETHER2}${ETHER2}${ETHER2}"
ETHER4="FFFFFFFFFFFF${ETHER3}${ETHER3}${ETHER3}${ETHER3}"
# Convert the hex string to raw bytes
echo "${ETHER4}" | xxd -r -p > "$WAKEPACKET"
|
/*
* Copyright (c) 2019-2021. <NAME> and others.
* https://github.com/mfvanek/pg-index-health
*
* This file is a part of "pg-index-health" - a Java library for
* analyzing and maintaining indexes health in PostgreSQL databases.
*
* Licensed under the Apache License 2.0
*/
package io.github.mfvanek.pg.statistics.maintenance;
import io.github.mfvanek.pg.connection.HostAware;
import io.github.mfvanek.pg.statistics.StatisticsAware;
import java.time.OffsetDateTime;
import java.util.Optional;
import javax.annotation.Nonnull;
/**
* An entry point for managing statistics on the specified host.
*
* @author <NAME>
* @see HostAware
*/
public interface StatisticsMaintenanceOnHost extends StatisticsAware, HostAware {

    /**
     * Resets all statistics counters for the current database on current host to zero.
     * For more information, see https://www.postgresql.org/docs/current/monitoring-stats.html
     */
    @Override
    void resetStatistics();

    /**
     * Gets time at which database statistics were last reset on current host.
     *
     * @return {@code Optional} holding the time at which database statistics
     *         were last reset, or an empty {@code Optional} when no reset
     *         time is available
     */
    @Override
    @Nonnull
    Optional<OffsetDateTime> getLastStatsResetTimestamp();
}
|
// Spanish (es) day-of-week names, Sunday-first:
// `wide` = full names, `abbr` = three-letter abbreviations.
module.exports = {
    wide: ['Domingo', 'Lunes', 'Martes', 'Miércoles', 'Jueves', 'Viernes', 'Sábado'],
    abbr: ['Dom', 'Lun', 'Mar', 'Mié', 'Jue', 'Vie', 'Sáb'],
};
|
package br.edu.fatecfranca.list4.e2;
import java.util.ArrayList;
public class Bus {

    /** Passenger counter, incremented by {@link #addPassenger(Passenger)}.
     *  NOTE(review): the constructor accepts an initial value independent of
     *  the (empty) passenger list, so this may disagree with
     *  {@code passengers.size()} — confirm intended semantics. */
    private int passengerQuantity;
    /** Identification number of the bus, e.g. "1020-10". */
    private String number;
    /** Passengers registered on this bus. */
    private ArrayList<Passenger> passengers;
    /** Global registry of every bus created so far. */
    private static ArrayList<Bus> buses = new ArrayList<Bus>();

    /**
     * Creates a bus and registers it in the global registry.
     *
     * @param passengerQuantity initial passenger count
     * @param number bus identification number
     */
    public Bus(int passengerQuantity, String number) {
        this.passengerQuantity = passengerQuantity;
        this.number = number;
        this.passengers = new ArrayList<Passenger>();
        Bus.addBus(this);
    }

    /** Creates a bus with defaults: 0 passengers, number "1020-10". */
    public Bus() {
        this(0, "1020-10");
    }

    /**
     * Adds a bus to the global registry.
     *
     * @param bus the bus to register
     */
    public static void addBus(Bus bus) {
        buses.add(bus);
    }

    /** @return the global registry of buses */
    public static ArrayList<Bus> getBuses() {
        return buses;
    }

    /**
     * Finds a bus by its identification number.
     *
     * @param number the number to look up
     * @return the first matching bus, or {@code null} if none matches
     */
    public static Bus getBus(String number) {
        for (Bus bus : buses) {
            if (bus.getNumber().equals(number)) {
                return bus;
            }
        }
        return null;
    }

    /** @return the current passenger count */
    public int getPassengerQuantity() {
        return this.passengerQuantity;
    }

    /** @return the bus identification number */
    public String getNumber() {
        return this.number;
    }

    /** @param number the new bus identification number */
    public void setNumber(String number) {
        this.number = number;
    }

    /** @return the list of passengers on this bus */
    public ArrayList<Passenger> getPassengers() {
        return this.passengers;
    }

    /**
     * Creates a passenger and adds it to this bus.
     *
     * @param place seat/place of the passenger
     * @param name passenger name
     */
    public void createPassenger(int place, String name) {
        Passenger passenger = new Passenger(place, name);
        addPassenger(passenger);
    }

    /**
     * Adds a passenger and increments the passenger counter.
     *
     * @param passenger the passenger to add
     */
    public void addPassenger(Passenger passenger) {
        this.passengerQuantity += 1;
        this.passengers.add(passenger);
    }

    /**
     * Lists all passengers, one per line.
     *
     * @return concatenation of each passenger's {@code show()} output,
     *         each followed by a newline
     */
    public String emitPassengers() {
        // StringBuilder avoids O(n^2) string concatenation in the loop
        StringBuilder list = new StringBuilder();
        for (Passenger passenger : this.passengers) {
            list.append(passenger.show()).append("\n");
        }
        return list.toString();
    }

    /** @return a one-line description of this bus */
    public String show() {
        return "Bus Number: " + this.getNumber() + "; Quantity of Passengers: " + this.getPassengerQuantity();
    }

    /**
     * Finds a passenger on this bus by name.
     *
     * @param name the name to look up
     * @return the first matching passenger, or {@code null} if none matches
     */
    public Passenger getPassenger(String name) {
        for (Passenger passenger : passengers) {
            if (passenger.getName().equals(name)) {
                return passenger;
            }
        }
        return null;
    }
}
|
import { React, useEffect, useState } from 'react';
//utilities
import { makeStyles, useTheme } from '@material-ui/core/styles';
//components
import { Grid, Grow, Typography, Paper, Fade, Collapse } from '@material-ui/core'
// Fallback background size for weapons without a per-weapon override.
const stockImageSize = "250px";
// Keys are weapon uuids (presumably Valorant weapon uuids — the trailing
// comments name each weapon; confirm against the data source).
const backup = { //unused backups which had true sizing instead of scaling with percentages
    //first num = base width, second num = number to add for larger width
    "29a0cfab-485b-f5d5-779a-b59f85e204a8": ["100px", "20px"], //classic
    "42da8ccc-40d5-affc-beec-15aa47b42eda": ["130px", "30px"], //shorty
    "44d4e95c-4157-0037-81b2-17841bf2e8e3": ["100px", "10px"], //frenzy
    "1baa85b4-4c70-1284-64bb-6481dfc3bb4e": ["140px", "35px"], //ghost
    "e336c6b8-418d-9340-d77f-7a9e4cfe0702": ["140px", "20px"], //sheriff
    "f7e1b454-4ad4-1063-ec0a-159e56b58941": ["195px", "20px"], //stinger
    "462080d1-4035-2937-7c09-27aa2a5c27a7": ["200px", "20px"], //spectre
    "910be174-449b-c412-ab22-d0873436b21b": ["240px", "40px"], //bucky
    "ec845bf4-4f79-ddda-a3da-0db3774b2794": ["240px", "30px"], //judge
    "ae3de142-4d85-2547-dd26-4e90bed35cf7": ["240px", "20px"], //bulldog
    "4ade7faa-4cf1-8376-95ef-39884480959b": ["240px", "60px"], //guardian
    "ee8e8d15-496b-07ac-e5f6-8fae5d4c7b1a": ["250px", "30px"], //phantom
    "9c82e19d-4575-0200-1a81-3eacf00cf872": ["240px", "30px"], //vandal
    "c4883e50-4494-202c-3ec3-6b8a9284f00b": ["250px", "70px"], //marshal
    "a03b24d3-4319-996d-0f8c-94bbfba1dfc7": ["240px", "100px"], //operator
    "55d8a0f4-4274-ca67-fe2c-06ab45efdf58": ["260px", "80px"], //ares
    "63e6c2b6-4a8e-869c-3d4c-e38355226584": ["270px", "40px"], //odin
    "2f59173c-4bed-b6c3-2191-dea9b58be9c7": ["auto", "20px"], //melee
}
// Per-weapon CSS background-size overrides, keyed by weapon uuid. Each value
// is a one-element array holding the background-size string (e.g. "45% auto");
// the array is coerced to its string form when used as a CSS value in Weapon.
const scaleOverrides = {
    "29a0cfab-485b-f5d5-779a-b59f85e204a8": ["45% auto",], //classic
    "42da8ccc-40d5-affc-beec-15aa47b42eda": ["60% auto",], //shorty
    "44d4e95c-4157-0037-81b2-17841bf2e8e3": ["45% auto",], //frenzy
    "1baa85b4-4c70-1284-64bb-6481dfc3bb4e": ["65% auto",], //ghost
    "e336c6b8-418d-9340-d77f-7a9e4cfe0702": ["60% auto",], //sheriff
    "f7e1b454-4ad4-1063-ec0a-159e56b58941": ["52% auto",], //stinger
    "462080d1-4035-2937-7c09-27aa2a5c27a7": ["55% auto",], //spectre
    "910be174-449b-c412-ab22-d0873436b21b": ["75% auto",], //bucky
    "ec845bf4-4f79-ddda-a3da-0db3774b2794": ["65% auto",], //judge
    "ae3de142-4d85-2547-dd26-4e90bed35cf7": ["65% auto",], //bulldog
    "4ade7faa-4cf1-8376-95ef-39884480959b": ["75% auto",], //guardian
    "ee8e8d15-496b-07ac-e5f6-8fae5d4c7b1a": ["73% auto",], //phantom
    "9c82e19d-4575-0200-1a81-3eacf00cf872": ["65% auto",], //vandal
    "c4883e50-4494-202c-3ec3-6b8a9284f00b": ["80% auto",], //marshal
    "a03b24d3-4319-996d-0f8c-94bbfba1dfc7": ["80% auto",], //operator
    "55d8a0f4-4274-ca67-fe2c-06ab45efdf58": ["80% auto",], //ares
    "63e6c2b6-4a8e-869c-3d4c-e38355226584": ["80% auto",], //odin
    "2f59173c-4bed-b6c3-2191-dea9b58be9c7": ["80% auto",], //melee
}
// JSS styles for the Weapon tile. Layout note: dataContainer (and the disabled
// bottomGradient) use top:"-100%" to stack on top of the weaponImage layer
// rather than below it, with zIndex ordering the layers.
const useStyles = makeStyles((theme) => ({
    "@global": {
        // "fadeOut" actually spins from -360deg back to 0deg (name is legacy).
        "@keyframes fadeOut": {
            "0%": {
                transform: "rotate(-360deg)"
            },
            "100%": {
                transform: "rotate(0deg)"
            }
        }
    },
    weaponContainerVideo: {
        position: "absolute",
        objectFit: "cover",
        width: "auto",
        height: "auto",
    },
    // Outer card; highlights its border with the theme primary color on hover.
    weaponPaper: {
        flexDirection: "row",
        position: "relative",
        width: "100%",
        height: "100%",
        alignItems: "center",
        justifyContent: "center",
        background: "transparent",
        transition: ".5s ease !important",
        "&:hover": {
            border: `1px ${theme.palette.primary.main} solid`
        },
    },
    // Skin image rendered as a CSS background (position/size set inline).
    weaponImage: {
        zIndex: 1,
        width: "100%",
        height: "100%",
        position: "relative",
        backgroundPosition: "center",
        backgroundRepeat: "no-repeat",
        background: "transparent",
        // transition: ".25s ease !important",
        backfaceVisibility: "hidden",
    },
    bottomGradient: {
        //background: "linear-gradient(to bottom, rgba(0,0,0,0) 60%,rgba(255,255,255,.15) 100%)",
        zIndex: 5,
        width: "100%",
        height: "100%",
        top: "-100%",
    },
    // Overlay holding labels and the buddy image, stacked over the weapon image.
    dataContainer: {
        width: "100%",
        height: "100%",
        display: "flex",
        position: "relative",
        top: "-100%"
    },
    textContainer: {
        display: "flex",
        flexDirection: "column",
        width: "100%",
        height: "100%",
        alignItems: "center",
        justifyContent: "flex-end",
        backgroundPosition: "center",
        overflow: "visible",
        paddingLeft: "12px",
        paddingBottom: "8px",
        zIndex: 2,
    },
    buddyContainer: {
        display: "flex",
        maxWidth: "100px",
        height: "100%",
        position: "relative",
        right: 0,
        bottom: 7,
        zIndex: 2,
    },
    buddyImage: {
        width: "100%",
        height: "auto",
        objectFit: "contain",
        position: "relative",
        alignSelf: "flex-end",
    },
    weaponLabelHolder: {
        display: "flex",
        width: "80%",
        height: "25px",
        position: "relative",
        alignSelf: "flex-start",
    },
    skinLabelHolder: {
        width: "80%",
        alignSelf: "flex-start",
        position: "relative",
    },
    weaponLabel: {
        textAlign: "left",
        width: "100%",
        flexGrow: 1,
        position: "relative",
        textOverflow: "ellipsis",
        bottom: 0,
    },
}));
function Weapon(props) {
const classes = useStyles();
const theme = useTheme();
var db = false;
const [isUpdatingImage, setUpdatingImage] = useState(true);
const [isUpdatingBuddy, setUpdatingBuddy] = useState(false);
const [skinData, updateSkinData] = useState({});
const [showSkinName, updateSkinNameVisibility] = useState(false);
const [weaponImage, setImage] = useState("");
const favorite = props.data !== undefined ? props.data.favorite : "";
const locked = props.data !== undefined ? props.data.locked : "";
useEffect(() => {
if (props.data !== undefined) {
var comparisonTarget = skinData !== null ? skinData.skin_image : ""
if (db === false && props.data.skin_image !== comparisonTarget) {
setUpdatingImage(true)
setTimeout(() => {
setImage(props.data.skin_image)
updateSkinData(props.data);
setUpdatingImage(false);
}, 300)
}
//update buddy
if (props.data.buddy_name !== skinData.buddy_name) {
setUpdatingBuddy(true);
setTimeout(() => {
updateSkinData(props.data);
setUpdatingBuddy(false);
}, 300);
}
}
}, [props.data]);
function onHover() {
updateSkinNameVisibility(true);
};
function offHover() {
updateSkinNameVisibility(false);
};
function select() {
props.weaponEditorCallback(props.uuid);
}
return (
<Fade in style={{ transitionDelay: '500ms' }}>
<Paper
className={classes.weaponPaper}
variant="outlined"
onMouseEnter={onHover}
onMouseLeave={offHover}
onMouseDown={select}
>
<Fade in={!isUpdatingImage}>
<div
className={classes.weaponImage}
style={{
//backgroundPosition: props.uuid === "2f59173c-4bed-b6c3-2191-dea9b58be9c7" ? "50% 35%" : (!props.useLargeWeaponImage ? "50% 40%" : "50% 50%"),
backgroundPosition: "50% 50%",
backgroundImage: skinData !== {} ? `url(${weaponImage})` : `url("https://media.valorant-api.com/weapons/${props.uuid}/displayicon.png")`,
backgroundSize: props.uuid !== "2f59173c-4bed-b6c3-2191-dea9b58be9c7" ? (scaleOverrides[props.uuid]) : "auto 75%",
//props.uuid !== "2f59173c-4bed-b6c3-2191-dea9b58be9c7" ? (!props.useLargeWeaponImage ? `${props.uuid in scaleOverrides ? scaleOverrides[props.uuid][0] : stockImageSize} auto` : `calc(${scaleOverrides[props.uuid][0]} + ${scaleOverrides[props.uuid][1]}) auto`) : "auto 80%",
}}
/>
</Fade>
{/* <div className={classes.bottomGradient} /> */}
<div className={classes.dataContainer}>
<div className={classes.textContainer}>
<div className={classes.weaponLabelHolder}>
<Typography className={classes.weaponLabel} variant="overline">{locked ? "🔒 " : null}{props.displayName}</Typography>
</div>
<div className={classes.skinLabelHolder}>
<Collapse in={showSkinName}>
<Typography className={classes.weaponLabel} variant="body1" style={{ marginBottom: "4px" }}>{favorite ? "❤ " : null}{skinData.skin_name}</Typography>
</Collapse>
</div>
</div>
<Grow in>
<div className={classes.buddyContainer} style={{ width: props.isSidearm ? "20%" : "14%" }}>
{props.uuid !== "2f59173c-4bed-b6c3-2191-dea9b58be9c7" ?
<img alt={skinData.buddy_name} className={classes.buddyImage} src={skinData.buddy_image !== "" ? skinData.buddy_image : null} />
: <img alt="" src="" />
}
</div>
</Grow>
</div>
</Paper>
</Fade>
)
}
export default Weapon |
# Evaluation run for the 512+512+512-N-VB language-model checkpoint on the
# WikiText-103 validation set, with the shuffled nouns-and-verbs (first third)
# augmentation and last-sixth eval — see run_language_modeling.py for the
# exact semantics of each flag.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB/512+512+512-shuffled-N-VB-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_and_verbs_first_third_sixth --eval_function last_sixth_eval
<filename>src/util/action/wrapper_action/ParserWrapperAction.js
const Action = require('../JobAction').AbstractJobAction
const DecorationError = require('../error/errors').ActionDecorationError
const XMLParser = require('../../parser/XmlDataParser').XmlDataParser
const XMLParseError = require('../../parser/XmlParserError').DataNotFoundError
// Job action that wraps an XmlDataParser subclass: each perform() call
// instantiates a fresh parser for the incoming XML and returns parsed JSON.
class ParserWrapperAction extends Action {
    /**
     * @param xmlParserType parser class (constructor) to instantiate per call.
     *        The guard compares `typeof` values, which reduces to requiring a
     *        function/class (typeof XMLParser is 'function' here).
     * @param params optional map of properties copied onto each parser instance
     *        before parsing (e.g. to configure tag names).
     */
    constructor (xmlParserType, params) {
        super()
        if (typeof xmlParserType !== typeof XMLParser) {
            throw new DecorationError(xmlParserType)
        }
        // Keep the constructor so perform() can instantiate per-call parsers.
        this.Create = xmlParserType.prototype.constructor
        this.wrapped = 'Wrapped ' + xmlParserType.name
        this.params = params
    }

    /**
     * Parses the given XML string with a fresh parser instance.
     * Returns [parser.getAllFromXml()] when the XML holds a list of records,
     * parser.xmlToJson() for a single record, [] when the expected tags are
     * present but empty, and throws DataNotFoundError when neither tagName nor
     * listTagName appears in the XML. NOTE(review): the branch order means the
     * "no data found" check only runs after both hasData() checks fail —
     * order-sensitive, relies on the parser's duck-typed API.
     *
     * @param xml raw XML string to parse
     */
    async perform (xml) {
        const parser = new this.Create(xml)
        if (this.params) {
            Object.keys(this.params).forEach(key => {
                parser[key] = this.params[key]
            })
        }
        if (parser.hasData() && parser.hasListOfData()) {
            return [parser.getAllFromXml()]
        } else if (parser.hasData()) {
            return parser.xmlToJson()
        } else if (!parser.xml.includes(parser.tagName) || !parser.xml.includes(parser.listTagName)) {
            throw new XMLParseError('ERROR: No Data Found')
        } else {
            return []
        }
    }
}
module.exports.ParserWrapperAction = ParserWrapperAction
|
<gh_stars>0
package org.insightcentre.saffron.web;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.insightcentre.nlp.saffron.data.Status;
import org.insightcentre.nlp.saffron.data.Taxonomy;
import org.insightcentre.nlp.saffron.data.Term;
import org.insightcentre.nlp.saffron.exceptions.InvalidOperationException;
import org.insightcentre.nlp.saffron.exceptions.InvalidValueException;
//import org.insightcentre.saffron.web.mongodb.MongoDBHandler;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
/**
* Unitary tests for {@link SaffronService}.
*/
public class SaffronServiceTest {
// private MongoDBHandler mongo;
// private SaffronService service;
//
// @Before
// public void setupMock() {
// mongo = mock(MongoDBHandler.class);
// service = new SaffronService(mongo);
// }
//
// @Test
// public void testMockCreation(){
// assertNotNull(mongo);
// assertNotNull(service);
// }
//
// /**
// * Term has its status changed from "none" to "accepted"
// */
// @Test
// public void testUpdateTermStatus() {
// //Prepare
// Term input = new Term.Builder("term_string").status(Status.accepted).build();
// String runId = "runId";
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.none).build());
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(true);
//
// //Call
// service.updateTermStatus(runId,input);
//
// //Evaluate
// verify(mongo).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(),Mockito.any(Taxonomy.class));
// }
//
// /**
// * Term has its status changed from "accepted" to "none"
// */
// @Test
// public void testUpdateTermStatus2() {
// //Prepare
// Term input = new Term.Builder("term_string").status(Status.none).build();
// String runId = "runId";
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.accepted).build());
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(true);
//
// //Call
// service.updateTermStatus(runId,input);
//
// //Evaluate
// verify(mongo).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(),Mockito.any(Taxonomy.class));
// }
//
// /**
// * Term has its status changed from "accepted" to "rejected"
// */
// @Test
// public void testUpdateTermStatus3() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.rejected).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.accepted).build());
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(true);
// when(mongo.getTaxonomy(runId)).thenReturn(taxonomy);
// doNothing().when(taxonomy).removeDescendent(input.getString());
// when(mongo.updateTaxonomy(runId, taxonomy)).thenReturn(true);
//
// //Call
// service.updateTermStatus(runId,input);
//
// //Evaluate
// verify(mongo).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo).updateTaxonomy(runId, taxonomy);
// }
//
// /**
// * Term has its status changed from "none" to "rejected"
// */
// @Test
// public void testUpdateTermStatus4() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.rejected).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.none).build());
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(true);
// when(mongo.getTaxonomy(runId)).thenReturn(taxonomy);
// doNothing().when(taxonomy).removeDescendent(input.getString());
// when(mongo.updateTaxonomy(runId, taxonomy)).thenReturn(true);
//
// //Call
// service.updateTermStatus(runId,input);
//
// //Evaluate
// verify(mongo).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo).updateTaxonomy(runId, taxonomy);
// }
//
// /**
// * Term has its status changed from "rejected" to "none"
// */
// @Test(expected=InvalidOperationException.class)
// public void testUpdateTermStatus5() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.none).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.rejected).build());
//
// try {
// //Call
// service.updateTermStatus(runId,input);
// } catch (InvalidOperationException e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(runId, taxonomy);
//
// throw e;
// }
// }
//
// /**
// * Term has its status changed from "rejected" to "accepted"
// */
// @Test(expected=InvalidOperationException.class)
// public void testUpdateTermStatus6() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.accepted).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.rejected).build());
//
// try {
// //Call
// service.updateTermStatus(runId,input);
// } catch (InvalidOperationException e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(runId, taxonomy);
//
// throw e;
// }
// }
//
// /**
// * Term has its status changed from "rejected" to "rejected"
// */
// @Test(expected=InvalidOperationException.class)
// public void testUpdateTermStatus7() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.rejected).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.rejected).build());
//
// try {
// //Call
// service.updateTermStatus(runId,input);
// } catch (InvalidOperationException e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(runId, taxonomy);
//
// throw e;
// }
// }
//
// /**
// * Term has an invalid string
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateTermStatus8() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("").status(Status.rejected).build();
//
// //Call
// try {
// service.updateTermStatus(runId,input);
// } catch (InvalidValueException e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(),Mockito.any(Taxonomy.class));
// throw e;
// }
// }
//
// /**
// * Term has an invalid status
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateTermStatus9() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(null).build();
//
// //Call
// try {
// service.updateTermStatus(runId,input);
// } catch (InvalidValueException e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), null);
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(),Mockito.any(Taxonomy.class));
// throw e;
// }
// }
//
// /**
// * Something went wrong in Database when updating the term
// */
// @Test(expected = Exception.class)
// public void testUpdateTermStatus10() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.accepted).build();
//
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(false);
//
// //Call
// try {
// service.updateTermStatus(runId,input);
// } catch (Exception e) {
// //Evaluate
// verify(mongo, never()).updateTerm(runId, input.getString(), null);
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(),Mockito.any(Taxonomy.class));
// throw e;
// }
// }
//
// /**
// * Something went wrong in Database when updating the taxonomy
// */
// @Test(expected = Exception.class)
// public void testUpdateTermStatus11() {
// //Prepare
// String runId = "runId";
// Term input = new Term.Builder("mother").status(Status.rejected).build();
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTerm(runId, input.getString())).thenReturn(new Term.Builder(input.getString()).status(Status.none).build());
// when(mongo.updateTerm(runId, input.getString(), input.getStatus().toString())).thenReturn(true);
// when(mongo.getTaxonomy(runId)).thenReturn(taxonomy);
// doNothing().when(taxonomy).removeDescendent(input.getString());
// when(mongo.updateTaxonomy(runId, taxonomy)).thenReturn(false);
//
// try{
// //Call
// service.updateTermStatus(runId,input);
// } catch (Exception e) {
// //Evaluate
// verify(mongo).updateTerm(runId, input.getString(), input.getStatus().toString());
// verify(mongo).updateTaxonomy(runId, taxonomy);
// //TODO It should verify if the status change for the term was reverted to the original state since the overall operation failed
// throw e;
// }
// }
//
// /**
// * Ensure all terms in a term list are updated
// */
// @Test
// public void testUpdateTermStatusList() {
// //Prepare
// List<Term> input = new ArrayList<Term>();
// Term term1 = new Term.Builder("term1_string").status(Status.accepted).build();;
// Term term2 = new Term.Builder("term2_string").status(Status.rejected).build();
// Term term3 = new Term.Builder("term3_string").status(Status.none).build();
// input.add(term1);
// input.add(term2);
// input.add(term3);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doNothing().when(spyService).updateTermStatus(runId, term1);
// doNothing().when(spyService).updateTermStatus(runId, term2);
// doNothing().when(spyService).updateTermStatus(runId, term3);
//
//
// //Call
// spyService.updateTermStatus(runId,input);
//
// //Evaluate
// verify(spyService, times(1)).updateTermStatus(runId, term1);
// verify(spyService, times(1)).updateTermStatus(runId, term2);
// verify(spyService, times(1)).updateTermStatus(runId, term3);
// }
//
// /**
// * Ensure only correct terms in a term list are updated
// */
// @Test(expected = RuntimeException.class)
// public void testUpdateTermStatusList2() {
// //Prepare
// List<Term> input = new ArrayList<Term>();
// Term term1 = new Term.Builder("term1_string").status(null).build();;
// Term term2 = new Term.Builder("term2_string").status(Status.rejected).build();
// Term term3 = new Term.Builder("term3_string").status(Status.none).build();
// input.add(term1);
// input.add(term2);
// input.add(term3);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doThrow(new InvalidValueException("")).when(spyService).updateTermStatus(runId, term1);
// doNothing().when(spyService).updateTermStatus(runId, term2);
// doNothing().when(spyService).updateTermStatus(runId, term3);
//
// try {
// //Call
// spyService.updateTermStatus(runId,input);
// } catch (Exception e) {
// //Evaluate
// verify(spyService, times(1)).updateTermStatus(runId, term1);
// verify(spyService, times(1)).updateTermStatus(runId, term2);
// verify(spyService, times(1)).updateTermStatus(runId, term3);
// throw e;
// }
// }
//
// /**
// * Relationship status has changed to "accepted"
// */
// @Test
// public void testupdateParentRelationshipStatus() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.accepted.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
//
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo).updateTerm(taxonomyId, termChild, status);
// verify(mongo).updateTerm(taxonomyId, termParentString, status);
// }
//
//
// /**
// * Relationship status has changed to "none"
// */
// @Test
// public void testupdateParentRelationshipStatus2() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.none.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// Taxonomy childTaxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
//
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// }
//
//
// /**
// * Relationship status has changed to "rejected"
// */
// @Test(expected = InvalidOperationException.class)
// public void testupdateParentRelationshipStatus3() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.rejected.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// Taxonomy childTaxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doThrow(new InvalidOperationException("")).when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidOperationException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy, never()).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParentString, status);
// throw e;
// }
// }
//
// /**
// * Term child does not exist
// * (it just ignores and acts as if nothing happened - ??)
// */
// @Test()
// public void testupdateParentRelationshipStatus4() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.accepted.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// Taxonomy childTaxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
//
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo).updateTerm(taxonomyId, termChild, status);
// verify(mongo).updateTerm(taxonomyId, termParentString, status);
// }
//
// /**
// * Taxonomy id does not exist
// */
// @Test(expected=Exception.class)
// public void testupdateParentRelationshipStatus5() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// String termParent = "termParent";
// String status = Status.accepted.toString();
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(null);
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (Exception e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(taxonomyId, Mockito.any(Taxonomy.class));
// verify(Mockito.any(Taxonomy.class), never()).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Empty taxonomyId
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus6() {
// //prepare
// String taxonomyId = "";
// String termChild = "termChild";
// String termParent = "termParent";
// String status = Status.accepted.toString();
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Null taxonomyId
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus7() {
// //prepare
// String taxonomyId = null;
// String termChild = "termChild";
// String termParent = "termParent";
// String status = Status.accepted.toString();
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Empty termChild
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus8() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "";
// String termParent = "termParent";
// String status = Status.accepted.toString();
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Null termChild
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus9() {
// //prepare
// String taxonomyId = "runId";
// String termChild = null;
// String termParent = "termParent";
// String status = Status.accepted.toString();
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Invalid status
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus10() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// String termParent = "termParent";
// String status = "whateverStatus";
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Null status
// */
// @Test(expected=InvalidValueException.class)
// public void testupdateParentRelationshipStatus11() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// String termParent = "termParent";
// String status = null;
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (InvalidValueException e) {
// //evaluate
// verify(mongo, never()).updateTaxonomy(Mockito.anyString(), Mockito.any(Taxonomy.class));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParent, status);
// throw e;
// }
// }
//
// /**
// * Something went wrong in Database when updating child term
// * FIXME: If something goes wrong it should revert all operations
// */
// @Test(expected=Exception.class)
// public void testupdateParentRelationshipStatus12() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.accepted.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// Taxonomy childTaxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(false);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (Exception e) {
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo).updateTerm(taxonomyId, termChild, status);
// throw e;
// }
// }
//
// /**
// * Something went wrong in Database when updating parent term
// * FIXME: If something goes wrong it should revert all operations
// */
// @Test(expected=Exception.class)
// public void testupdateParentRelationshipStatus13() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.accepted.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// Taxonomy childTaxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(false);
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (Exception e) {
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo).updateTerm(taxonomyId, termParentString, status);
// throw e;
// }
// }
//
// /**
// * Something went wrong in Database when updating the taxonomy
// */
// @Test(expected=Exception.class)
// public void testupdateParentRelationshipStatus14() {
// //prepare
// String taxonomyId = "runId";
// String termChild = "termChild";
// Taxonomy termParent = mock(Taxonomy.class);
// String termParentString = "termParent";
// String status = Status.accepted.toString();
//
// Taxonomy taxonomy = mock(Taxonomy.class);
//
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
//
// doNothing().when(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(false);
//
// when(termParent.getRoot()).thenReturn(termParentString);
// when(taxonomy.getParent(termChild)).thenReturn(termParent);
// when(mongo.updateTerm(taxonomyId, termChild, status)).thenReturn(true);
// when(mongo.updateTerm(taxonomyId, termParentString, status)).thenReturn(true);
//
// try {
// //call
// service.updateParentRelationshipStatus(taxonomyId, termChild, status);
// } catch (Exception e) {
// //evaluate
// verify(mongo).updateTaxonomy(taxonomyId, taxonomy);
// verify(taxonomy).setParentChildStatus(termChild, Status.valueOf(status));
// verify(mongo, never()).updateTerm(taxonomyId, termChild, status);
// verify(mongo, never()).updateTerm(taxonomyId, termParentString, status);
// throw e;
// }
// }
//
// /**
// * Ensure all terms in a term list are updated
// */
// @Test
// public void testupdateParentRelationshipStatusList() {
// //Prepare
// List<Pair<String,String>> input = new ArrayList<Pair<String,String>>();
// Pair<String, String> pair1 = new ImmutablePair<String, String>("child1",Status.accepted.toString());
// Pair<String, String> pair2 = new ImmutablePair<String, String>("child2",Status.none.toString());
// Pair<String, String> pair3 = new ImmutablePair<String, String>("child3",Status.accepted.toString());
// input.add(pair1);
// input.add(pair2);
// input.add(pair3);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair1.getLeft(), pair1.getRight());
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair2.getLeft(), pair2.getRight());
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair3.getLeft(), pair3.getRight());
//
//
// //Call
// spyService.updateParentRelationshipStatus(runId, input);
//
// //Evaluate
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair1.getLeft(), pair1.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair2.getLeft(), pair2.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair3.getLeft(), pair3.getRight());
// }
//
// /**
// * Ensure only correct terms in a term list are updated
// */
// @Test(expected = RuntimeException.class)
// public void testupdateParentRelationshipStatusList2() {
// //Prepare
// List<Pair<String,String>> input = new ArrayList<Pair<String,String>>();
// Pair<String, String> pair1 = new ImmutablePair<String, String>("child1",Status.rejected.toString());
// Pair<String, String> pair2 = new ImmutablePair<String, String>("child2",Status.none.toString());
// Pair<String, String> pair3 = new ImmutablePair<String, String>("child3",Status.accepted.toString());
// Pair<String, String> pair4 = new ImmutablePair<String, String>("",Status.accepted.toString());
// Pair<String, String> pair5 = new ImmutablePair<String, String>("child5",Status.accepted.toString());
// Pair<String, String> pair6 = new ImmutablePair<String, String>(null,Status.accepted.toString());
// Pair<String, String> pair7 = new ImmutablePair<String, String>("child7",null);
// input.add(pair1);
// input.add(pair2);
// input.add(pair3);
// input.add(pair4);
// input.add(pair5);
// input.add(pair6);
// input.add(pair7);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doThrow(new InvalidOperationException("")).when(spyService).updateParentRelationshipStatus(runId, pair1.getLeft(), pair1.getRight());
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair2.getLeft(), pair2.getRight());
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair3.getLeft(), pair3.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParentRelationshipStatus(runId, pair4.getLeft(), pair4.getRight());
// doNothing().when(spyService).updateParentRelationshipStatus(runId, pair5.getLeft(), pair5.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParentRelationshipStatus(runId, pair6.getLeft(), pair6.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParentRelationshipStatus(runId, pair7.getLeft(), pair7.getRight());
//
// try {
// //Call
// spyService.updateParentRelationshipStatus(runId, input);
// } catch (Exception e) {
// //Evaluate
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair1.getLeft(), pair1.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair2.getLeft(), pair2.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair3.getLeft(), pair3.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair4.getLeft(), pair4.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair5.getLeft(), pair5.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair6.getLeft(), pair6.getRight());
// verify(spyService, times(1)).updateParentRelationshipStatus(runId, pair7.getLeft(), pair7.getRight());
// throw e;
// }
// }
//
// /**
// * Parent updated successfully
// */
// @Test
// public void testUpdateParent() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
//
// //evaluate
// verify(taxonomy, times(1)).updateParent(termChild, termNewParent);
// verify(mongo, times(1)).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, times(1)).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// }
//
// /**
// * Parent update unsuccessful (exception thrown)
// */
// @Test(expected = RuntimeException.class)
// public void testUpdateParent2() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doThrow(RuntimeException.class).when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (RuntimeException e) {
// //evaluate
// verify(taxonomy, times(1)).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Error updating taxonomy in the database
// */
// @Test(expected = RuntimeException.class)
// public void testUpdateParent3() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(false);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (RuntimeException e) {
// //evaluate
// verify(taxonomy, times(1)).updateParent(termChild, termNewParent);
// verify(mongo, times(1)).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Error updating the status of the terms to "accepted"
// * FIXME: It should retry updating the term status
// */
// @Test(expected = RuntimeException.class)
// public void testUpdateParent4() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doThrow(RuntimeException.class).when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (RuntimeException e){
// //evaluate
// verify(taxonomy, times(1)).updateParent(termChild, termNewParent);
// verify(mongo, times(1)).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, times(1)).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Inexistent taxonomy
// */
// @Test(expected = RuntimeException.class)
// public void testUpdateParent5() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(null);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (RuntimeException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Null taxonomyId
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent6() {
// //prepare
// String taxonomyId = null;
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Empty taxonomyId
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent7() {
// //prepare
// String taxonomyId = "";
// String termChild = "termChild";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Null termChild
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent8() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = null;
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Empty termChild
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent9() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "";
// String termNewParent = "termNewParent";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Null termNewParent
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent10() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = null;
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Empty termNewParent
// */
// @Test(expected = InvalidValueException.class)
// public void testUpdateParent11() {
// //prepare
// String taxonomyId = "taxonomyId";
// String termChild = "termChild";
// String termNewParent = "";
// SaffronService spyService = spy(service);
//
// Taxonomy taxonomy = mock(Taxonomy.class);
// doNothing().when(taxonomy).updateParent(termChild, termNewParent);
// when(mongo.getTaxonomy(taxonomyId)).thenReturn(taxonomy);
// when(mongo.updateTaxonomy(taxonomyId, taxonomy)).thenReturn(true);
// doNothing().when(spyService).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
//
// try {
// //call
// spyService.updateParent(taxonomyId, termChild, termNewParent);
// } catch (InvalidValueException e) {
// //evaluate
// verify(taxonomy, never()).updateParent(termChild, termNewParent);
// verify(mongo, never()).updateTaxonomy(taxonomyId, taxonomy);
// verify(spyService, never()).updateParentRelationshipStatus(taxonomyId, termChild, Status.accepted.toString());
// throw e;
// }
// }
//
// /**
// * Ensure all update parent relations are updated
// */
// @Test
// public void testupdateParentList() {
// //Prepare
// List<Pair<String,String>> input = new ArrayList<Pair<String,String>>();
// Pair<String, String> pair1 = new ImmutablePair<String, String>("child1","newParent1");
// Pair<String, String> pair2 = new ImmutablePair<String, String>("child2","newParent2");
// Pair<String, String> pair3 = new ImmutablePair<String, String>("child3","newParent3");
// input.add(pair1);
// input.add(pair2);
// input.add(pair3);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doNothing().when(spyService).updateParent(runId, pair1.getLeft(), pair1.getRight());
// doNothing().when(spyService).updateParent(runId, pair2.getLeft(), pair2.getRight());
// doNothing().when(spyService).updateParent(runId, pair3.getLeft(), pair3.getRight());
//
//
// //Call
// spyService.updateParent(runId, input);
//
// //Evaluate
// verify(spyService, times(1)).updateParent(runId, pair1.getLeft(), pair1.getRight());
// verify(spyService, times(1)).updateParent(runId, pair2.getLeft(), pair2.getRight());
// verify(spyService, times(1)).updateParent(runId, pair3.getLeft(), pair3.getRight());
// }
//
// /**
// * Ensure only correct update parent relations are updated
// */
// @Test(expected = RuntimeException.class)
// public void testupdateParent2() {
// //Prepare
// List<Pair<String,String>> input = new ArrayList<Pair<String,String>>();
// Pair<String, String> pair1 = new ImmutablePair<String, String>("child1","newParent1");
// Pair<String, String> pair2 = new ImmutablePair<String, String>("child2","newParent2");
// Pair<String, String> pair3 = new ImmutablePair<String, String>("child3","newParent3");
// Pair<String, String> pair4 = new ImmutablePair<String, String>("","newParent4");
// Pair<String, String> pair5 = new ImmutablePair<String, String>("child5","newParent5");
// Pair<String, String> pair6 = new ImmutablePair<String, String>(null,"newParent6");
// Pair<String, String> pair7 = new ImmutablePair<String, String>("child7",null);
// input.add(pair1);
// input.add(pair2);
// input.add(pair3);
// input.add(pair4);
// input.add(pair5);
// input.add(pair6);
// input.add(pair7);
//
// String runId = "runId";
// SaffronService spyService = spy(service);
//
// doThrow(new InvalidOperationException("")).when(spyService).updateParent(runId, pair1.getLeft(), pair1.getRight());
// doNothing().when(spyService).updateParent(runId, pair2.getLeft(), pair2.getRight());
// doNothing().when(spyService).updateParent(runId, pair3.getLeft(), pair3.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParent(runId, pair4.getLeft(), pair4.getRight());
// doNothing().when(spyService).updateParent(runId, pair5.getLeft(), pair5.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParent(runId, pair6.getLeft(), pair6.getRight());
// doThrow(new InvalidValueException("")).when(spyService).updateParent(runId, pair7.getLeft(), pair7.getRight());
//
// try {
// //Call
// spyService.updateParent(runId, input);
// } catch (Exception e) {
// //Evaluate
// verify(spyService, times(1)).updateParent(runId, pair1.getLeft(), pair1.getRight());
// verify(spyService, times(1)).updateParent(runId, pair2.getLeft(), pair2.getRight());
// verify(spyService, times(1)).updateParent(runId, pair3.getLeft(), pair3.getRight());
// verify(spyService, times(1)).updateParent(runId, pair4.getLeft(), pair4.getRight());
// verify(spyService, times(1)).updateParent(runId, pair5.getLeft(), pair5.getRight());
// verify(spyService, times(1)).updateParent(runId, pair6.getLeft(), pair6.getRight());
// verify(spyService, times(1)).updateParent(runId, pair7.getLeft(), pair7.getRight());
// throw e;
// }
// }
}
|
def countElements(arr):
    """Return the number of elements in the given iterable.

    The original implementation recursed on ``arr[1:]``, which copies the
    remainder of the list on every call (O(n^2) total work), consumes O(n)
    stack frames, and raises ``RecursionError`` for inputs longer than the
    interpreter's recursion limit (~1000). A simple linear scan is
    equivalent, stack-safe, and also works for non-sliceable iterables.

    :param arr: any iterable of elements
    :return: the element count as an int (0 for an empty input)
    """
    count = 0
    for _ in arr:
        count += 1
    return count
countElements([1, 2, 3, 4, 5])
import React, { useState } from 'react';
import { Line } from 'react-chartjs-2';
function ExpenseTracker() {
const [expenses, setExpenses] = useState([]);
const [amount, setAmount] = useState(0);
const addExpense = (amount) => {
setExpenses([...expenses, amount]);
};
const removeExpense = (amount) => {
const index = expenses.indexOf(amount);
if (index !== -1) {
const newExpenses = [...expenses];
newExpenses.splice(index, 1);
setExpenses(newExpenses);
}
};
const totalAmount = () => {
let total = 0;
expenses.forEach(amount => {
total += amount;
});
setAmount(total);
};
// chart
const data = {
labels: new Array(expenses.length).fill(''),
datasets: [
{
label: 'Expenses',
fill: false,
lineTension: 0.1,
backgroundColor: 'rgba(75,192,192,0.4)',
borderColor: 'rgba(75,192,192,1)',
borderCapStyle: 'butt',
borderDash: [],
borderDashOffset: 0.0,
borderJoinStyle: 'miter',
pointBorderColor: 'rgba(75,192,192,1)',
pointBackgroundColor: '#fff',
pointBorderWidth: 1,
pointHoverRadius: 5,
pointHoverBackgroundColor: 'rgba(75,192,192,1)',
pointHoverBorderColor: 'rgba(220,220,220,1)',
pointHoverBorderWidth: 2,
pointRadius: 1,
pointHitRadius: 10,
data: expenses
}
]
};
return (
<div>
<h3>Total Expense: {amount}</h3>
<form
onSubmit={e => {
e.preventDefault();
if (!amount) return;
addExpense(amount);
totalAmount();
}}
>
<input
type="number"
value={amount}
onChange={e => setAmount(+e.target.value)}
/>
<input type="submit" value="Add" />
</form>
<div>
{expenses.map((expense, index) => (
<div>
{expense}
<button onClick={() => removeExpense(expense)}>x</button>
</div>
))}
</div>
<Line data={data} />
</div>
);
}
export default ExpenseTracker; |
package com.projects.tradingMachine.utility;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.projects.tradingMachine.utility.marketData.MarketData;
import quickfix.DataDictionaryProvider;
import quickfix.FixVersions;
import quickfix.LogUtil;
import quickfix.Message;
import quickfix.MessageUtils;
import quickfix.Session;
import quickfix.SessionID;
import quickfix.SessionNotFound;
import quickfix.field.ApplVerID;
/**
 * Static helper methods shared across the trading-machine modules:
 * executor shutdown, classpath properties loading, double rounding,
 * QuickFIX/J message sending, and random market-data generation.
 *
 * <p>Thread-safety: all state is a single shared {@link Random}, whose
 * methods are thread-safe; the class itself holds no mutable state.</p>
 */
public final class Utility {
	private static final Logger logger = LoggerFactory.getLogger(Utility.class);
	// Renamed from "Random": a field sharing its own type's simple name
	// shadows the class and makes call sites read like static calls.
	private static final Random RANDOM = new Random();

	public enum DestinationType {Queue, Topic}

	/** Utility class — not instantiable. */
	private Utility() {
		throw new AssertionError("No Utility instances allowed");
	}

	/**
	 * Initiates an orderly shutdown, waiting up to the given timeout before
	 * forcing termination with {@link ExecutorService#shutdownNow()}.
	 *
	 * @param es       the executor to shut down
	 * @param timeout  how long to wait for running tasks to finish
	 * @param timeUnit unit of {@code timeout}
	 * @throws InterruptedException if interrupted while waiting
	 */
	public static void shutdownExecutorService(final ExecutorService es, long timeout, TimeUnit timeUnit) throws InterruptedException {
		es.shutdown();
		if (!es.awaitTermination(timeout, timeUnit))
			es.shutdownNow();
		// Message fixed: this shuts down any ExecutorService, not only scheduled ones.
		logger.info("Terminated ExecutorService");
	}

	/**
	 * Loads a properties file from the classpath.
	 *
	 * @param propertiesFileName classpath resource name
	 * @return the loaded properties
	 * @throws FileNotFoundException if the resource is not on the classpath
	 *         (previously this surfaced as a bare NPE from try-with-resources)
	 * @throws IOException on read failure
	 */
	public static Properties getApplicationProperties(final String propertiesFileName) throws FileNotFoundException, IOException {
		final Properties p = new Properties();
		try(final InputStream inputStream = ClassLoader.getSystemResourceAsStream(propertiesFileName)) {
			if (inputStream == null)
				throw new FileNotFoundException("Classpath resource not found: " + propertiesFileName);
			p.load(inputStream);
			return p;
		}
	}

	/**
	 * Rounds a double to the given number of decimal places, HALF_UP.
	 *
	 * <p>Uses {@link BigDecimal#valueOf(double)} (which goes through the
	 * double's shortest decimal representation) instead of
	 * {@code new BigDecimal(double)}, whose exact binary expansion can
	 * produce surprising results such as 0.145 rounding down.</p>
	 *
	 * @param value the value to round
	 * @param scale number of decimal places
	 * @return the rounded value
	 */
	public static double roundDouble(final double value, final int scale) {
		return BigDecimal.valueOf(value).setScale(scale, RoundingMode.HALF_UP).doubleValue();
	}

	/**
	 * Validates a message against the session's application data dictionary
	 * and sends it on the given session. Validation failures are logged and
	 * the message is dropped; a missing session is logged as an error.
	 *
	 * @param sessionID identifies the QuickFIX/J session to send on
	 * @param message   the FIX message to validate and send
	 */
	public static void sendMessage(final SessionID sessionID, final Message message) {
		try {
			final Session session = Session.lookupSession(sessionID);
			if (session == null) {
				throw new SessionNotFound(sessionID.toString());
			}
			final DataDictionaryProvider dataDictionaryProvider = session.getDataDictionaryProvider();
			try {
				dataDictionaryProvider.getApplicationDataDictionary(getApplVerID(session, message)).validate(message, true);
			} catch (Exception e) {
				LogUtil.logThrowable(sessionID, "Outgoing message failed validation: "+ e.getMessage(), e);
				return;
			}
			session.send(message); //thread safe.
		} catch (final SessionNotFound e) {
			logger.error(e.getMessage(), e);
		}
	}

	/**
	 * Resolves the application version ID for a session: FIXT.1.1 sessions
	 * default to FIX.5.0, otherwise it is derived from the BeginString.
	 */
	private static ApplVerID getApplVerID(final Session session, final Message message) {
		final String beginString = session.getSessionID().getBeginString();
		if (FixVersions.BEGINSTRING_FIXT11.equals(beginString)) {
			return new ApplVerID(ApplVerID.FIX50);
		} else {
			return MessageUtils.toApplVerID(beginString);
		}
	}

	/**
	 * Builds a {@link MarketData} item for the given symbol with random
	 * prices in [0, 100) rounded to 2 decimals and random sizes in [0, 1000).
	 *
	 * @param symbol the instrument symbol
	 * @return a randomly populated market-data item
	 */
	public static MarketData buildRandomMarketDataItem(final String symbol) {
		return new MarketData(symbol, roundDouble(RANDOM.nextDouble() * 100, 2),
				roundDouble(RANDOM.nextDouble() * 100, 2), RANDOM.nextInt(1000), RANDOM.nextInt(1000));
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.