text
stringlengths 1
1.05M
|
|---|
#! /bin/bash -e
# Tear down the NFD deployment by running the nfd_undeploy ansible playbook.
# Resolve the directory containing this script so it can be invoked from anywhere.
THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Pull in shared helpers/environment (expected to define ANSIBLE_OPTS).
# Quoted so the script still works when its path contains spaces.
source "${THIS_DIR}/../_common.sh"
# Replace this shell with ansible-playbook.
# NOTE: ANSIBLE_OPTS is intentionally left unquoted so multiple options word-split.
exec ansible-playbook ${ANSIBLE_OPTS} playbooks/nfd_undeploy.yml
|
<filename>src/tpcc/impl/tx/MVCCTpccTableV2.scala
package ddbt.tpcc.tx
import java.io._
import scala.collection.mutable._
import java.util.Date
import java.sql.Connection
import java.sql.Statement
import java.sql.ResultSet
import ddbt.tpcc.loadtest.Util._
import ddbt.tpcc.loadtest.DatabaseConnector._
import ddbt.lib.concurrent.ConcurrentSHMap
import ddbt.lib.concurrent.ConcurrentSHMap._
import ddbt.lib.concurrent.ConcurrentSHSet
import ddbt.lib.BinaryHeap
import ddbt.tpcc.loadtest.TpccConstants._
import TpccTable._
import MVCCTpccTableV2._
import java.util.concurrent.atomic.AtomicLong
// Companion object: holds the MVCC transaction machinery and the shared table instances.
object MVCCTpccTableV2 {
// This MVCC variant does not use the special (test-only) data structures.
def testSpecialDsUsed = false
/**
 * Handle for a single transaction.
 *
 * @param tm        the owning TransactionManager
 * @param startTS   start timestamp drawn from the manager's timestamp generator
 * @param xactId    transaction id; overwritten with the commit timestamp on commit
 * @param committed commit flag (not updated anywhere in this visible code)
 */
class Transaction(val tm: TransactionManager, val startTS: Long, var xactId: Long, var committed:Boolean=false) {
// After TransactionManager.commit runs, xactId holds the commit timestamp, so this
// accessor is only meaningful post-commit.
def commitTS = xactId
// var undoBuffer = List[DeltaVersion[_,_]]()
// Predicate registration is currently a no-op placeholder.
def addPredicate(p:String) = {}
def commit = tm.commit(this)
def rollback = tm.rollback(this)
}
/**
 * Issues transaction ids and start/commit timestamps, and owns the TPC-C tables.
 * NOTE(review): xact ids start at 1 << 32 while timestamps start at 1, presumably
 * so in-flight ids never collide with commit timestamps — TODO confirm.
 */
class TransactionManager {
var transactionIdGen = new AtomicLong(1L << 32)
var startAndCommitTimestampGen = new AtomicLong(1L)
// val activeXacts = new SHMap[Long,Transaction]
// Committed transactions, most recently committed first.
var recentlyCommittedXacts = List[Transaction]()
// Starts a new transaction with a fresh id and start timestamp.
def begin = {
val xactId = transactionIdGen.getAndIncrement()
val startTS = startAndCommitTimestampGen.getAndIncrement()
new Transaction(this,startTS, xactId)
}
// Commits: replaces the xact id with a fresh commit timestamp and records the xact.
// Serialized via this.synchronized so timestamp draw + list prepend are atomic.
def commit(implicit xact:Transaction) = {
this.synchronized {
// NOTE(review): local xactId below is unused (active-xact tracking is commented out).
val xactId = xact.xactId
// activeXacts -= xactId
xact.xactId = startAndCommitTimestampGen.getAndIncrement()
recentlyCommittedXacts = xact :: recentlyCommittedXacts
}
}
// Rollback is currently a no-op (active-xact tracking is commented out).
def rollback(implicit xact:Transaction) = {
this.synchronized {
// activeXacts -= xact.xactId
}
}
/////// TABLES \\\\\\\
// TPC-C tables. Keys are the TPC-C primary keys; the extra function argument on some
// maps is a partition-key extractor used by slice() lookups.
val newOrderTbl = new ConcurrentSHMap[(Int,Int,Int),Tuple1[Boolean]](0.9f, 262144, (k:(Int,Int,Int),v:Tuple1[Boolean]) => ((k._2, k._3)) )
val historyTbl = new ConcurrentSHMap[(Int,Int,Int,Int,Int,Date,Float,String),Tuple1[Boolean]]/*(0.9f, 4194304)*/
val warehouseTbl = new ConcurrentSHMap[Int,(String,String,String,String,String,String,Float,Double)]
// Partial item table: i_im_id is intentionally not stored (see commented-out field).
val itemPartialTbl = new ConcurrentSHMap[Int,(/*Int,*/String,Float,String)]/*(1f, 262144)*/
// Partitioned by (d_id, w_id, o_c_id) for customer-order lookups.
val orderTbl = new ConcurrentSHMap[(Int,Int,Int),(Int,Date,Option[Int],Int,Boolean)](/*0.9f, 4194304,*/ (k:(Int,Int,Int), v:(Int,Date,Option[Int],Int,Boolean)) => ((k._2, k._3, v._1)) )
val districtTbl = new ConcurrentSHMap[(Int,Int),(String,String,String,String,String,String,Float,Double,Int)]/*(1f, 32)*/
// Partitioned by (o_id, d_id, w_id) so a whole order's lines can be sliced.
val orderLineTbl = new ConcurrentSHMap[(Int,Int,Int,Int),(Int,Int,Option[Date],Int,Float,String)](/*0.9f, 33554432, List((0.9f, 4194304)),*/ (k:(Int,Int,Int,Int), v:(Int,Int,Option[Date],Int,Float,String)) => ((k._1, k._2, k._3)) )
// Partitioned by (d_id, w_id, c_last) for lookup-by-last-name.
val customerTbl = new ConcurrentSHMap[(Int,Int,Int),(String,String,String,String,String,String,String,String,String,Date,String,Float,Float,Float,Float,Int,Int,String)] (/*1f, 65536, List((1f, 16384)),*/ (k:(Int,Int,Int), v:(String,String,String,String,String,String,String,String,String,Date,String,Float,Float,Float,Float,Int,Int,String)) => ((k._2, k._3, v._3)) )
val stockTbl = new ConcurrentSHMap[(Int,Int),(Int,String,String,String,String,String,String,String,String,String,String,Int,Int,Int,String)]/*(1f, 262144)*/
// Denormalized cache of (c_discount, c_last, c_credit, w_tax) per customer.
val customerWarehouseFinancialInfoMap = new ConcurrentSHMap[(Int,Int,Int),(Float,String,String,Float)]/*(1f, 65536)*/
}
}
/**
 * Tables for the TPC-C benchmark, with all operations reflected through this
 * class's API (and not applied directly to the internal tables).
 *
 * @author <NAME>
 */
// MVCC variant of the TPC-C tables: every operation goes through the
// TransactionManager's ConcurrentSHMap tables and carries an implicit Transaction.
class MVCCTpccTableV2 extends TpccTable(7) {
// The parent's table fields are disabled; all state lives in tm's tables.
override val newOrderTbl = null
override val historyTbl = null
override val warehouseTbl = null
override val itemPartialTbl = null
override val orderTbl = null
override val districtTbl = null
override val orderLineTbl = null
override val customerTbl = null
override val stockTbl = null
override val customerWarehouseFinancialInfoMap = null
// Owns id/timestamp generation and the actual tables.
val tm = new TransactionManager
def begin = tm.begin
def commit(implicit xact:Transaction) = tm.commit
def rollback(implicit xact:Transaction) = tm.rollback
override def testSpecialDsUsed = MVCCTpccTableV2.testSpecialDsUsed
// Inserts a NEW-ORDER row keyed by (o_id, d_id, w_id).
def onInsert_NewOrder(no_o_id:Int, no_d_id:Int, no_w_id:Int)(implicit xact:Transaction) = {
tm.newOrderTbl += ((no_o_id, no_d_id, no_w_id), Tuple1(true))
}
def onDelete_NewOrder(no_o_id:Int, no_d_id:Int, no_w_id:Int)(implicit xact:Transaction) = {
tm.newOrderTbl -= ((no_o_id, no_d_id, no_w_id))
}
// Returns the smallest NEW-ORDER o_id in the (district, warehouse) partition, if any.
/*Func*/ def findFirstNewOrder(no_w_id_input:Int, no_d_id_input:Int)(implicit xact:Transaction):Option[Int] = {
xact.addPredicate("P1_findFirstNewOrder")
var first_no_o_id:Option[Int] = None
// Scan the (d_id, w_id) slice, keeping the minimum order id seen.
tm.newOrderTbl.slice(0, (no_d_id_input, no_w_id_input)).foreach { case ((no_o_id,_,_),_) =>
if(no_o_id <= first_no_o_id.getOrElse(Integer.MAX_VALUE)) {
first_no_o_id = Some(no_o_id)
}
}
first_no_o_id
}
// Appends a HISTORY row; the date is normalized via roundDate before keying.
def onInsert_HistoryTbl(h_c_id:Int, h_c_d_id:Int, h_c_w_id:Int, h_d_id:Int, h_w_id:Int, h_date:Date, h_amount:Float, h_data:String)(implicit xact:Transaction) = {
tm.historyTbl += ((h_c_id,h_c_d_id,h_c_w_id,h_d_id,h_w_id,roundDate(h_date),h_amount,h_data), Tuple1(true))
}
// Inserts an ITEM row; i_im_id is deliberately dropped (partial table).
def onInsert_Item(i_id:Int, i_im_id:Int, i_name:String, i_price:Float, i_data:String)(implicit xact:Transaction) = {
tm.itemPartialTbl += (i_id, (/*i_im_id,*/i_name,i_price,i_data))
}
// Looks up an item by id (throws if absent, per map apply semantics).
/*Func*/ def findItem(item_id:Int)(implicit xact:Transaction) = {
xact.addPredicate("P2_findItem")
tm.itemPartialTbl(item_id)
}
def onInsert_Order(o_id:Int, o_d_id:Int, o_w_id:Int, o_c_id:Int, o_entry_d:Date, o_carrier_id:Option[Int], o_ol_cnt:Int, o_all_local:Boolean)(implicit xact:Transaction) = {
tm.orderTbl += ((o_id,o_d_id,o_w_id), (o_c_id,o_entry_d,o_carrier_id,o_ol_cnt,o_all_local))
}
// Returns the largest o_id for the customer's (d_id, w_id, c_id) partition, or -1 if none.
/*Func*/ def findMaxOrder(o_w_id_arg:Int, o_d_id_arg:Int, c_id_arg:Int)(implicit xact:Transaction) = {
xact.addPredicate("P2_findMaxOrder")
var max_o_id = -1
tm.orderTbl.slice(0,(o_d_id_arg,o_w_id_arg, c_id_arg)).foreach { case ((o_id,_,_), (_,_,_,_,_)) =>
if(o_id > max_o_id) {
max_o_id = o_id
}
}
max_o_id
}
/*Func*/ def findOrder(max_o_id:Int, o_w_id_arg:Int, o_d_id_arg:Int)(implicit xact:Transaction) = {
xact.addPredicate("P2_findOrder")
tm.orderTbl((max_o_id,o_d_id_arg,o_w_id_arg))
}
// Delivery txn: rewrites o_c_id and o_carrier_id, preserving entry date/cnt/all_local.
def onUpdate_Order_forDelivery(o_id:Int, o_d_id:Int, o_w_id:Int, o_c_id:Int/*, o_entry_d:Date*/, o_carrier_id:Option[Int]/*, o_ol_cnt:Int, o_all_local:Boolean*/)(implicit xact:Transaction) = {
tm.orderTbl.update((o_id,o_d_id,o_w_id),(currentVal/*:(Int, java.util.Date, Option[Int], Int, Boolean))*/ => ((o_c_id,currentVal._2,o_carrier_id,currentVal._4,currentVal._5))))
}
// Generic functional update of an ORDER row.
def onUpdate_Order_byFunc(o_id:Int, o_d_id:Int, o_w_id:Int, updateFunc:((Int, Date, Option[Int], Int, Boolean)) => (Int, Date, Option[Int], Int, Boolean))(implicit xact:Transaction) = {
tm.orderTbl.update((o_id,o_d_id,o_w_id),updateFunc)
}
def onInsert_Warehouse(w_id:Int, w_name:String, w_street_1:String, w_street_2:String, w_city:String, w_state:String, w_zip:String, w_tax:Float, w_ytd:Double)(implicit xact:Transaction) = {
tm.warehouseTbl += (w_id, (w_name,w_street_1,w_street_2,w_city,w_state,w_zip,w_tax,w_ytd))
}
// Full-row overwrite of a WAREHOUSE entry.
def onUpdate_Warehouse(w_id:Int, w_name:String, w_street_1:String, w_street_2:String, w_city:String, w_state:String, w_zip:String, w_tax:Float, w_ytd:Double)(implicit xact:Transaction) = {
tm.warehouseTbl(w_id) = ((w_name,w_street_1,w_street_2,w_city,w_state,w_zip,w_tax,w_ytd))
}
def onUpdate_Warehouse_byFunc(w_id:Int, updateFunc:((String, String, String, String, String, String, Float, Double)) => (String, String, String, String, String, String, Float, Double))(implicit xact:Transaction) = {
tm.warehouseTbl.update(w_id,updateFunc)
}
def onInsert_District(d_id:Int, d_w_id:Int, d_name:String, d_street1:String, d_street2:String, d_city:String, d_state:String, d_zip:String, d_tax:Float, d_ytd:Double, d_next_o_id:Int)(implicit xact:Transaction) = {
tm.districtTbl += ((d_id,d_w_id), (d_name,d_street1,d_street2,d_city,d_state,d_zip,d_tax,d_ytd,d_next_o_id))
}
// NOTE(review): identical body to onInsert_District (+= acts as upsert here) — confirm intended.
def onUpdate_District(d_id:Int, d_w_id:Int, d_name:String, d_street1:String, d_street2:String, d_city:String, d_state:String, d_zip:String, d_tax:Float, d_ytd:Double, d_next_o_id:Int)(implicit xact:Transaction) = {
tm.districtTbl += ((d_id,d_w_id), (d_name,d_street1,d_street2,d_city,d_state,d_zip,d_tax,d_ytd,d_next_o_id))
}
// New-order txn: read-modify-write that updates only d_tax and d_next_o_id.
// NOTE(review): the read and the write are two separate table operations — atomicity
// presumably relies on the MVCC layer; confirm.
def onUpdate_District_forNewOrder(d_id:Int, d_w_id:Int/*, d_name:String, d_street1:String, d_street2:String, d_city:String, d_state:String, d_zip:String*/, d_tax:Float/*, d_ytd:Float*/, d_next_o_id:Int)(implicit xact:Transaction) = {
val (d_name,d_street1,d_street2,d_city,d_state,d_zip,_,d_ytd,_) = tm.districtTbl(d_id,d_w_id)
tm.districtTbl((d_id,d_w_id)) = ((d_name,d_street1,d_street2,d_city,d_state,d_zip,d_tax,d_ytd,d_next_o_id))
}
def onUpdate_District_byFunc(d_id:Int, d_w_id:Int, updateFunc:((String, String, String, String, String, String, Float, Double, Int)) => (String, String, String, String, String, String, Float, Double, Int))(implicit xact:Transaction) = {
tm.districtTbl.update((d_id,d_w_id), updateFunc)
}
/*Func*/ def findDistrict(w_id:Int, d_id:Int)(implicit xact:Transaction) = {
tm.districtTbl((d_id,w_id))
}
def onInsertOrderLine(ol_o_id:Int, ol_d_id:Int, ol_w_id:Int, ol_number:Int, ol_i_id:Int, ol_supply_w_id:Int, ol_delivery_d:Option[Date], ol_quantity:Int, ol_amount:Float, ol_dist_info:String)(implicit xact:Transaction): Unit = {
tm.orderLineTbl += ((ol_o_id, ol_d_id, ol_w_id, ol_number), (ol_i_id, ol_supply_w_id, ol_delivery_d, ol_quantity, ol_amount, ol_dist_info))
}
// Full-row overwrite of an ORDER-LINE entry.
def onUpdateOrderLine(ol_o_id:Int, ol_d_id:Int, ol_w_id:Int, ol_number:Int, ol_i_id:Int, ol_supply_w_id:Int, ol_delivery_d:Option[Date], ol_quantity:Int, ol_amount:Float, ol_dist_info:String)(implicit xact:Transaction): Unit = {
tm.orderLineTbl((ol_o_id, ol_d_id, ol_w_id, ol_number)) = ((ol_i_id, ol_supply_w_id, ol_delivery_d, ol_quantity, ol_amount, ol_dist_info))
}
// Applies f to every order line in the given partition slice.
// NOTE(review): the `part` argument is ignored; slice is always called with 0.
/*Func*/ def orderLineTblSlice[P](part:Int, partKey:P, f: (((Int,Int,Int,Int),(Int,Int,Option[Date],Int,Float,String))) => Unit)(implicit xact:Transaction) = {
tm.orderLineTbl.slice(0, partKey).foreach(f)
}
// Entry-level variant of orderLineTblSlice (gives access to the map entries).
/*Func*/ def orderLineTblSliceEntry[P](part:Int, partKey:P, f: java.util.Map.Entry[SEntry[(Int,Int,Int,Int),(Int,Int,Option[Date],Int,Float,String)], Boolean] => Unit)(implicit xact:Transaction) = {
tm.orderLineTbl.slice(0, partKey).foreachEntry(f)
}
// Inserts a CUSTOMER row and refreshes the denormalized financial-info cache
// (reads the warehouse's tax rate, position 7 of the warehouse tuple).
def onInsertCustomer(c_id: Int, c_d_id: Int, c_w_id: Int, c_first:String, c_middle:String, c_last:String, c_street_1:String, c_street_2:String, c_city:String, c_state:String, c_zip:String, c_phone:String, c_since:Date, c_credit:String, c_credit_lim:Float, c_discount:Float, c_balance:Float, c_ytd_payment:Float, c_payment_cnt:Int, c_delivery_cnt:Int, c_data:String)(implicit xact:Transaction) = {
tm.customerTbl += ((c_id,c_d_id,c_w_id), (c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data))
var w_tax = 0f
w_tax = tm.warehouseTbl(c_w_id)._7
tm.customerWarehouseFinancialInfoMap += ((c_id,c_d_id,c_w_id), (c_discount, c_last, c_credit, w_tax))
}
// Returns the cached (c_discount, c_last, c_credit, w_tax) tuple for a customer.
/*Func*/ def findCustomerWarehouseFinancialInfo(w_id:Int, d_id:Int, c_id:Int)(implicit xact:Transaction) = {
tm.customerWarehouseFinancialInfoMap(c_id,d_id,w_id)
}
def onUpdateCustomer(c_id: Int, c_d_id: Int, c_w_id: Int, c_first:String, c_middle:String, c_last:String, c_street_1:String, c_street_2:String, c_city:String, c_state:String, c_zip:String, c_phone:String, c_since:Date, c_credit:String, c_credit_lim:Float, c_discount:Float, c_balance:Float, c_ytd_payment:Float, c_payment_cnt:Int, c_delivery_cnt:Int, c_data:String)(implicit xact:Transaction) = {
tm.customerTbl((c_id,c_d_id,c_w_id)) = ((c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data))
}
def onUpdateCustomer_byFunc(c_id: Int, c_d_id: Int, c_w_id: Int, updateFunc:((String, String, String, String, String, String, String, String, String, Date, String, Float, Float, Float, Float, Int, Int, String)) => (String, String, String, String, String, String, String, String, String, Date, String, Float, Float, Float, Float, Int, Int, String))(implicit xact:Transaction) = {
tm.customerTbl.update((c_id,c_d_id,c_w_id),updateFunc)
}
// Updates a customer given an existing entry; currently falls back to a keyed write
// instead of mutating the entry in place (see commented-out setValue call).
def onUpdateCustomer_byEntry(c: SEntry[(Int,Int,Int),(String,String,String,String,String,String,String,String,String,Date,String,Float,Float,Float,Float,Int,Int,String)], c_first:String, c_middle:String, c_last:String, c_street_1:String, c_street_2:String, c_city:String, c_state:String, c_zip:String, c_phone:String, c_since:Date, c_credit:String, c_credit_lim:Float, c_discount:Float, c_balance:Float, c_ytd_payment:Float, c_payment_cnt:Int, c_delivery_cnt:Int, c_data:String)(implicit xact:Transaction) = {
//TODO: FIX IT
// c.setValue((c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment/*+h_amount*/,c_payment_cnt/*+1*/,c_delivery_cnt,c_data))
tm.customerTbl(c.getKey) = (c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment/*+h_amount*/,c_payment_cnt/*+1*/,c_delivery_cnt,c_data)
}
def onInsertStock(s_i_id:Int, s_w_id:Int, s_quantity:Int, s_dist_01:String, s_dist_02:String, s_dist_03:String, s_dist_04:String, s_dist_05:String, s_dist_06:String, s_dist_07:String, s_dist_08:String, s_dist_09:String, s_dist_10:String, s_ytd:Int, s_order_cnt:Int, s_remote_cnt:Int, s_data:String)(implicit xact:Transaction) = {
tm.stockTbl += ((s_i_id,s_w_id), (s_quantity, s_dist_01,s_dist_02,s_dist_03,s_dist_04,s_dist_05,s_dist_06,s_dist_07,s_dist_08,s_dist_09,s_dist_10,s_ytd,s_order_cnt,s_remote_cnt,s_data))
}
def onUpdateStock(s_i_id:Int, s_w_id:Int, s_quantity:Int, s_dist_01:String, s_dist_02:String, s_dist_03:String, s_dist_04:String, s_dist_05:String, s_dist_06:String, s_dist_07:String, s_dist_08:String, s_dist_09:String, s_dist_10:String, s_ytd:Int, s_order_cnt:Int, s_remote_cnt:Int, s_data:String)(implicit xact:Transaction) = {
tm.stockTbl((s_i_id,s_w_id)) = ((s_quantity, s_dist_01,s_dist_02,s_dist_03,s_dist_04,s_dist_05,s_dist_06,s_dist_07,s_dist_08,s_dist_09,s_dist_10,s_ytd,s_order_cnt,s_remote_cnt,s_data))
}
def onUpdateStock_byFunc(s_i_id:Int, s_w_id:Int, updateFunc:((Int, String, String, String, String, String, String, String, String, String, String, Int, Int, Int, String)) => (Int, String, String, String, String, String, String, String, String, String, String, Int, Int, Int, String))(implicit xact:Transaction) = {
tm.stockTbl.update((s_i_id,s_w_id), updateFunc)
}
/*Func*/ def findStock(item_id:Int, w_id:Int)(implicit xact:Transaction) = {
tm.stockTbl(item_id,w_id)
}
// Lightweight projection used to pick the median customer by first name.
private class MiniCustomer(val cust_id:Int, val cust_first:String) extends Ordered[MiniCustomer] {
def compare(that: MiniCustomer) = this.cust_first.compareToIgnoreCase(that.cust_first)
override def toString = "MiniCustomer(%s,%s)".format(cust_id, cust_first)
}
// TPC-C by-last-name lookup: collects all matching customers, sorts by first name,
// and returns the entry of the median customer (lower median on even counts).
def findCustomerEntryByName(input_c_w_id: Int, input_c_d_id: Int, input_c_last: String)(implicit xact:Transaction) = {
var customers = new ArrayBuffer[MiniCustomer]
//we should slice over input_c_last
tm.customerTbl.slice(0, (input_c_d_id, input_c_w_id, input_c_last)).foreach { case ((c_id,_,_) , (c_first,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_)) =>
customers += new MiniCustomer(c_id,c_first)
}
if (customers.size == 0) {
throw new RuntimeException("The customer C_LAST=" + input_c_last + " C_D_ID=" + input_c_d_id + " C_W_ID=" + input_c_w_id + " not found!")
}
// println("**********************************")
// println("Customers before:",customers)
customers = customers.sorted
// println("Customers after:",customers)
// println("**********************************")
var index: Int = customers.size / 2
if (customers.size % 2 == 0) {
index -= 1
}
val c_id = customers(index).cust_id
tm.customerTbl.getEntry((c_id,input_c_d_id,input_c_w_id))
}
def findCustomerEntryById(input_c_w_id: Int, input_c_d_id: Int, c_id: Int)(implicit xact:Transaction) = {
tm.customerTbl.getEntry((c_id,input_c_d_id,input_c_w_id))
}
// Same median-by-first-name selection as findCustomerEntryByName, but returns the
// full customer tuple plus the chosen c_id instead of the map entry.
def findCustomerByName(input_c_w_id: Int, input_c_d_id: Int, input_c_last: String)(implicit xact:Transaction) = {
var customers = new ArrayBuffer[MiniCustomer]
//we should slice over input_c_last
tm.customerTbl.slice(0, (input_c_d_id, input_c_w_id, input_c_last)).foreach { case ((c_id,_,_) , (c_first,_,c_last,_,_,_,_,_,_,_,_,_,_,_,_,_,_,_)) =>
customers += new MiniCustomer(c_id,c_first)
}
if (customers.size == 0) {
throw new RuntimeException("The customer C_LAST=" + input_c_last + " C_D_ID=" + input_c_d_id + " C_W_ID=" + input_c_w_id + " not found!")
}
// println("**********************************")
// println("Customers before:",customers)
customers = customers.sorted
// println("Customers after:",customers)
// println("**********************************")
var index: Int = customers.size / 2
if (customers.size % 2 == 0) {
index -= 1
}
val c_id = customers(index).cust_id
val (c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data) = tm.customerTbl((c_id,input_c_d_id,input_c_w_id))
(c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data,c_id)
}
def findCustomerById(input_c_w_id: Int, input_c_d_id: Int, c_id: Int)(implicit xact:Transaction) = {
val (c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data) = tm.customerTbl((c_id,input_c_d_id,input_c_w_id))
(c_first,c_middle,c_last,c_street_1,c_street_2,c_city,c_state,c_zip,c_phone,c_since,c_credit,c_credit_lim,c_discount,c_balance,c_ytd_payment,c_payment_cnt,c_delivery_cnt,c_data,c_id)
}
override def getAllMapsInfoStr:String = ""
// Snapshots every tm table into a fresh non-MVCC TpccTable inside one transaction.
// Item i_im_id was not stored, so -1 is used as a placeholder on the way out.
override def toTpccTable = {
val res = new TpccTable(7)
implicit val xact = this.begin
val THE_VALUE_DOES_NOT_EXIST = -1 //TODO: should be FIXED
tm.newOrderTbl.foreach { case (k,v) => res.onInsert_NewOrder(k._1,k._2,k._3) }
tm.historyTbl.foreach { case (k,v) => res.onInsert_HistoryTbl(k._1,k._2,k._3,k._4,k._5,k._6,k._7,k._8) }
tm.warehouseTbl.foreach { case (k,v) => res.onInsert_Warehouse(k,v._1,v._2,v._3,v._4,v._5,v._6,v._7,v._8) }
tm.itemPartialTbl.foreach { case (k,v) => res.onInsert_Item(k,THE_VALUE_DOES_NOT_EXIST,v._1,v._2,v._3) }
tm.customerTbl.foreach { case (k,v) => res.onInsertCustomer(k._1,k._2,k._3,v._1,v._2,v._3,v._4,v._5,v._6,v._7,v._8,v._9,v._10,v._11,v._12,v._13,v._14,v._15,v._16,v._17,v._18) }
tm.orderTbl.foreach { case (k,v) => res.onInsert_Order(k._1,k._2,k._3,v._1,v._2,v._3,v._4,v._5) }
tm.districtTbl.foreach { case (k,v) => res.onInsert_District(k._1,k._2,v._1,v._2,v._3,v._4,v._5,v._6,v._7,v._8,v._9) }
tm.orderLineTbl.foreach { case (k,v) => res.onInsertOrderLine(k._1,k._2,k._3,k._4,v._1,v._2,v._3,v._4,v._5,v._6) }
tm.stockTbl.foreach { case (k,v) => res.onInsertStock(k._1,k._2,v._1,v._2,v._3,v._4,v._5,v._6,v._7,v._8,v._9,v._10,v._11,v._12,v._13,v._14,v._15) }
this.commit
res
}
}
|
<filename>spring/Web/EjemploMicroServicio/src/main/java/com/curso/ejemplomicroservicio/modelo/Persona.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.curso.ejemplomicroservicio.modelo;
/**
*
* @author usuario
*/
/**
 * Simple mutable data holder for a person: a name and a phone number.
 */
public class Persona {

    private String nombre;
    private Integer telefono;

    /**
     * Creates a Persona with placeholder defaults ("Nombre por defecto" / 123).
     */
    public Persona() {
        this("Nombre por defecto", 123);
    }

    /**
     * Creates a Persona from explicit values.
     *
     * @param nombre   the person's name
     * @param telefono the person's phone number
     */
    public Persona(String nombre, Integer telefono) {
        this.nombre = nombre;
        this.telefono = telefono;
    }

    /** @return the current value of nombre */
    public String getNombre() {
        return nombre;
    }

    /** @param nombre new value of nombre */
    public void setNombre(String nombre) {
        this.nombre = nombre;
    }

    /** @return the current value of telefono */
    public Integer getTelefono() {
        return telefono;
    }

    /** @param telefono new value of telefono */
    public void setTelefono(Integer telefono) {
        this.telefono = telefono;
    }
}
|
#!/bin/bash
# DM integration test: verifies that binlog/relay readers recover from injected
# read failures without duplicating or losing events. Relies on the test_prepare
# helpers (run_sql_file, check_sync_diff, run_dm_* ...) sourced below.
set -eu
cur=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
source $cur/../_utils/test_prepare
WORK_DIR=$TEST_DIR/$TEST_NAME
function run() {
# 1. test sync fetch binlog met error and reset binlog streamer with remote binlog
# with a 5 rows insert txn: 1 * FormatDesc + 1 * PreviousGTID + 1 * GTID + 1 * BEGIN + 5 * (Table_map + Write_rows) + 1 * XID
# here we fail at the third write rows event, sync should retry and auto recover without any duplicate event
export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/syncer/GetEventErrorInTxn=13*return(3)"
run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
check_contains 'Query OK, 2 rows affected'
# Bring up a master + one worker and register the first upstream source.
run_dm_master $WORK_DIR/master $MASTER_PORT $cur/conf/dm-master.toml
check_rpc_alive $cur/../bin/check_master_online 127.0.0.1:$MASTER_PORT
check_metric $MASTER_PORT 'start_leader_counter' 3 0 2
run_dm_worker $WORK_DIR/worker1 $WORKER1_PORT $cur/conf/dm-worker1.toml
check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER1_PORT
dmctl_operate_source create $cur/conf/source1.yaml $SOURCE_ID1
run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"start-task $cur/conf/dm-task.yaml --remove-meta"
check_metric $WORKER1_PORT "dm_worker_task_state{source_id=\"mysql-replica-01\",task=\"test\",worker=\"worker1\"}" 10 1 3
# wait safe-mode pass
check_log_contain_with_retry "disable safe-mode after task initialization finished" $WORK_DIR/worker1/log/dm-worker.log
# Apply incremental data and assert the syncer retried/recovered without duplicates.
run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST1 $MYSQL_PORT1 $MYSQL_PASSWORD1
check_log_contain_with_retry "reset replication binlog puller" $WORK_DIR/worker1/log/dm-worker.log
check_log_contain_with_retry "discard event already consumed" $WORK_DIR/worker1/log/dm-worker.log
check_sync_diff $WORK_DIR $cur/conf/diff_config.toml
# 2. test relay log retry relay with GTID
# with a 5 rows insert txn: 1 * FormatDesc + 1 * PreviousGTID + 1 * GTID + 1 * BEGIN + 5 * (Table_map + Write_rows) + 1 * XID
# here we fail at the third write rows event, sync should retry and auto recover without any duplicate event
export GO_FAILPOINTS="github.com/pingcap/tiflow/dm/relay/RelayGetEventFailed=15*return(3);github.com/pingcap/tiflow/dm/relay/RelayAllowRetry=return"
run_dm_worker $WORK_DIR/worker2 $WORKER2_PORT $cur/conf/dm-worker2.toml
check_rpc_alive $cur/../bin/check_worker_online 127.0.0.1:$WORKER2_PORT
cp $cur/conf/source2.yaml $WORK_DIR/source2.yaml
dmctl_operate_source create $WORK_DIR/source2.yaml $SOURCE_ID2
run_sql_file $cur/data/db1.prepare.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
check_contains 'Query OK, 2 rows affected'
run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"start-task $cur/conf/dm-task-relay.yaml --remove-meta"
check_metric $WORKER2_PORT "dm_worker_task_state{source_id=\"mysql-replica-02\",task=\"test_relay\",worker=\"worker2\"}" 10 1 3
check_sync_diff $WORK_DIR $cur/conf/diff_relay_config.toml
# Rotate the upstream binlog, then enable relay and wait until it catches up.
run_sql_source2 "flush logs;"
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"start-relay -s $SOURCE_ID2 worker2" \
"\"result\": true" 2
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status -s $SOURCE_ID2" \
"\"relayCatchUpMaster\": true" 1
run_sql_file $cur/data/db1.increment.sql $MYSQL_HOST2 $MYSQL_PORT2 $MYSQL_PASSWORD2
check_sync_diff $WORK_DIR $cur/conf/diff_relay_config.toml
# check relay log binlog file size is the same as master size
run_sql_source2 "show master status;"
binlog_file=$(grep "File" $TEST_DIR/sql_res.$TEST_NAME.txt | awk -F: '{print $2}' | xargs)
binlog_pos=$(grep "Position" $TEST_DIR/sql_res.$TEST_NAME.txt | awk -F: '{print $2}' | xargs)
server_uuid=$(tail -n 1 $WORK_DIR/worker2/relay-dir/server-uuid.index)
relay_log_size=$(ls -al $WORK_DIR/worker2/relay-dir/$server_uuid/$binlog_file | awk '{print $5}')
# The relay file size must exactly match the master's reported binlog position.
[ "$binlog_pos" -eq "$relay_log_size" ]
}
# also cleanup dm processes in case of last run failed
cleanup_process $*
cleanup_data dup_event1 dup_event_relay
run
cleanup_process $*
echo "[$(date)] <<<<<< test case $TEST_NAME success! >>>>>>"
|
# Load the data
# NOTE(review): assumes exam_data.csv is in the working directory and has a
# numeric `mark` column (used below) — confirm against the data source.
exam_data <- read.csv("exam_data.csv")
# Define a function to convert marks to letter grades
get_letter_grade <- function(mark) {
  # Convert a numeric mark to a letter grade using descending cutoffs:
  # >= 80 -> A, >= 70 -> B, >= 60 -> C, >= 50 -> D, otherwise F.
  cutoffs <- c(A = 80, B = 70, C = 60, D = 50)
  for (letter in names(cutoffs)) {
    if (mark >= cutoffs[[letter]]) {
      return(letter)
    }
  }
  # Below every cutoff: failing grade.
  "F"
}
# Derive a letter grade for every mark and attach it as a new `grade` column.
exam_data$grade <- sapply(exam_data$mark, get_letter_grade)
# Output the result
exam_data
|
<reponame>ProjectRailgun/Altair
import {fromEvent as observableFromEvent, Subscription} from 'rxjs';
import {debounceTime, distinctUntilChanged, filter, map, mergeMap, takeWhile, tap} from 'rxjs/operators';
import {AfterViewInit, Component, ElementRef, ViewChild} from '@angular/core';
import {Bangumi} from '../../entity';
import {AdminService} from '../admin.service';
import {UIDialogRef, UIToast, UIToastComponent, UIToastRef} from 'altair-ui';
import {BaseError} from '../../../helpers/error/BaseError';
import {BangumiRaw} from '../../entity/bangumi-raw';
// export const SEARCH_BAR_HEIGHT = 4.8;
// Dialog component: searches bangumi by name via AdminService, pages through
// results, and closes the dialog with the newly added bangumi id (or 'cancelled').
@Component({
selector: 'search-bangumi',
templateUrl: './search-bangumi.html',
styleUrls: ['./search-bangumi.less']
})
export class SearchBangumi implements AfterViewInit {
// Aggregates all subscriptions created by this component.
// NOTE(review): no ngOnDestroy is visible here to unsubscribe — confirm cleanup.
private _subscription = new Subscription();
private _toastRef: UIToastRef<UIToastComponent>;
@ViewChild('searchBox', {static: false}) searchBox: ElementRef;
@ViewChild('typePicker', {static: false}) typePicker: ElementRef;
// Current search term, set from the search box stream below.
name: string;
// User-selected bangumi type; applied when saving (see fromDetail), not when searching.
bangumiType: number = 1001;
// Pagination state for the result list.
currentPage: number = 1;
total: number = 0;
count: number = 10;
bangumiList: Bangumi[];
isLoading: boolean = false;
// Whether the type-picker dropdown is currently open.
typePickerOpen: boolean = false;
selectedBgmId: number;
showDetail: boolean = false;
isSaving: boolean = false;
constructor(private _adminService: AdminService,
private _dialogRef: UIDialogRef<SearchBangumi>,
toastService: UIToast) {
this._toastRef = toastService.makeText();
}
ngAfterViewInit(): void {
let searchBox = <HTMLElement>this.searchBox.nativeElement;
let typePicker = <HTMLElement>this.typePicker.nativeElement;
// Stream 1: clicking the picker opens it; the next click anywhere on the body
// closes it (the inner stream stops via takeWhile once typePickerOpen is false).
this._subscription.add(
observableFromEvent(typePicker, 'click').pipe(
filter(() => !this.typePickerOpen),
tap((event: MouseEvent) => {
event.preventDefault();
event.stopPropagation();
this.typePickerOpen = true;
}),
mergeMap(() => {
return observableFromEvent(document.body, 'click').pipe(
tap((event: MouseEvent) => {
event.preventDefault();
event.stopPropagation();
}),
takeWhile(() => this.typePickerOpen),)
}),)
.subscribe(
() => {
this.typePickerOpen = false;
}
)
);
// Stream 2: debounced search-box input; distinct non-empty terms reset to page 1
// and trigger a fetch.
this._subscription.add(
observableFromEvent(searchBox, 'keyup').pipe(
debounceTime(500),
map(() => (searchBox as HTMLInputElement).value),
distinctUntilChanged(),
filter(name => !!name),)
.subscribe(
(name: string) => {
this.currentPage = 1;
this.name = name;
this.fetchData();
}
)
);
// setTimeout(() => {
// let cardHeight = getRemPixel(CARD_HEIGHT_REM);
// let viewportHeight = Math.max(document.documentElement.clientHeight, window.innerHeight || 0);
// let scaleFactor = viewportHeight < 600 ? 1 : 0.8;
// let uiPaginationHeight = getRemPixel(1/* font-size */ + 0.92857143/* padding */ * 2 + 2 /* margin-top */);
// this.bangumiListHeight = Math.floor(viewportHeight * scaleFactor) - getRemPixel(SEARCH_BAR_HEIGHT) - uiPaginationHeight;
// this.count = Math.max(1, Math.floor((this.bangumiListHeight - uiPaginationHeight) / cardHeight));
// console.log(this.count);
// });
}
// Pagination callback from the template.
onPageChanged(page: number) {
this.currentPage = page;
this.fetchData();
}
// Type-picker callback from the template.
onTypeChanged(type: number) {
this.bangumiType = type;
this.fetchData();
}
// Executes the search RPC for the current name/page and updates the list.
fetchData() {
if (!this.name) {
return;
}
let offset = (this.currentPage - 1) * this.count;
this.isLoading = true;
this._adminService.searchBangumi({
name: this.name,
type: 2, // type: this.bangumiType, // Force to search subject type=2 (anime).
offset: offset,
count: this.count
})
.subscribe(
(result: { data: Bangumi[], total: number }) => {
this.bangumiList = result.data;
this.total = result.total;
this.isLoading = false;
},
(error: BaseError) => {
// On failure: clear the list, surface the message, stop the spinner.
this.bangumiList = [];
this._toastRef.show(error.message);
this.isLoading = false;
}
);
}
// Closes the dialog without adding anything.
cancelSearch() {
this._dialogRef.close('cancelled');
}
// Opens the detail view for a search result that is not yet in our database
// (results that already have an id are ignored).
viewDetail(bangumi: Bangumi): void {
if (bangumi.id) {
return;
}
this.selectedBgmId = bangumi.bgm_id;
this.showDetail = true;
}
// Called when the detail view returns: saves the bangumi (with the user-chosen
// type) and closes the dialog with the new id, or just hides the detail view.
fromDetail(bangumi: BangumiRaw): void {
if (bangumi) {
this.isSaving = true;
bangumi.type = this.bangumiType; // Use user-defined bangumiType to override that from API.
this._subscription.add(
this._adminService.addBangumi(bangumi)
.subscribe(
(bangumi_id: string) => {
this._dialogRef.close(bangumi_id);
},
(error: BaseError) => {
this.isSaving = false;
this._toastRef.show(error.message);
}
)
);
} else {
this.showDetail = false;
}
}
}
|
package is.tagomor.woothee;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import is.tagomor.woothee.DataSet;
public final class Classifier {
// Library version string. NOTE(review): mutable public static — consider making it final.
public static String VERSION = "1.10.1";
/**
 * Parses a User-Agent string into classification attributes, filling any
 * attribute left unset by the classifiers with the UNKNOWN default
 * (see fillResult).
 */
public static Map<String,String> parse(final String useragent) {
return fillResult(execParse(useragent));
}
/**
 * Returns {@code true} when the given User-Agent matches a known crawler
 * pattern. Null, empty and "-" user agents are never crawlers.
 */
public static boolean isCrawler(final String useragent) {
    if (useragent == null || useragent.length() < 1 || useragent.equals("-")) {
        return false;
    }
    // Throwaway result map: only the boolean outcome matters here.
    final Map<String,String> scratch = new HashMap<String,String>(6, (float)1.0);
    return tryCrawler(useragent, scratch);
}
/**
 * Runs the full classification chain over the User-Agent and returns the
 * (possibly empty) attribute map. Checks crawlers first, then browsers
 * (enriched with OS), then mobile phones, appliances, misc tools, and
 * finally OS-only / rare fallback patterns.
 */
public static Map<String,String> execParse(final String useragent) {
    // Initial capacity 6, load factor 1.0: the result holds only a handful of attributes.
    HashMap<String,String> result = new HashMap<String,String>(6, (float)1.0);
    // Null, empty and "-" user agents are unclassifiable.
    if (useragent == null || useragent.length() < 1 || useragent.equals("-"))
        return result;
    if (tryCrawler(useragent, result)) {
        return result;
    }
    if (tryBrowser(useragent, result)) {
        // Browser matched: enrich with OS info when possible.
        // (Original code returned result from both branches of an if/else; collapsed.)
        tryOS(useragent, result);
        return result;
    }
    if (tryMobilePhone(useragent, result)) {
        return result;
    }
    if (tryAppliance(useragent, result)) {
        return result;
    }
    if (tryMisc(useragent, result)) {
        return result;
    }
    // Browser unknown: try to at least detect the OS, then rare fallback patterns.
    if (tryOS(useragent, result))
        return result;
    tryRareCases(useragent, result);
    return result;
}
/**
 * Tries the crawler classifiers (Google first, then generic crawlers),
 * populating {@code result} on a match.
 */
public static boolean tryCrawler(final String useragent, final Map<String,String> result) {
    // Short-circuit: stop at the first classifier that claims the UA.
    return is.tagomor.woothee.crawler.Google.challenge(useragent, result)
        || is.tagomor.woothee.crawler.Crawlers.challenge(useragent, result);
}
/**
 * Tries the browser classifiers, populating {@code result} on a match.
 * NOTE(review): the order appears significant (specific browsers before the
 * broad SafariChrome match) — preserve it.
 */
public static boolean tryBrowser(final String useragent, final Map<String,String> result) {
    // Short-circuit: stop at the first classifier that claims the UA.
    return is.tagomor.woothee.browser.MSIE.challenge(useragent, result)
        || is.tagomor.woothee.browser.Vivaldi.challenge(useragent, result)
        || is.tagomor.woothee.browser.YandexBrowser.challenge(useragent, result)
        || is.tagomor.woothee.browser.SafariChrome.challenge(useragent, result)
        || is.tagomor.woothee.browser.Firefox.challenge(useragent, result)
        || is.tagomor.woothee.browser.Opera.challenge(useragent, result)
        || is.tagomor.woothee.browser.Webview.challenge(useragent, result);
}
/**
 * Tries the operating-system classifiers, populating {@code result} on a
 * match. Checks run in order and stop at the first success.
 */
public static boolean tryOS(final String useragent, final Map<String,String> result) {
    return is.tagomor.woothee.os.Windows.challenge(useragent, result)      // Windows PC, and Windows Phone OS
        || is.tagomor.woothee.os.OSX.challenge(useragent, result)          // Mac OS X PC, and iOS devices (strict check)
        || is.tagomor.woothee.os.Linux.challenge(useragent, result)        // Linux PC, and Android
        || is.tagomor.woothee.os.SmartPhone.challenge(useragent, result)   // UAs matching /(iPhone|iPad|iPod|Android|BlackBerry)/
        || is.tagomor.woothee.os.MobilePhone.challenge(useragent, result)  // mobile phones like KDDI-* ...
        || is.tagomor.woothee.os.Appliance.challenge(useragent, result)    // Nintendo DSi/Wii with Opera
        || is.tagomor.woothee.os.MiscOS.challenge(useragent, result);      // Win98, BSD
}
/**
 * Tries the Japanese feature-phone carrier detectors against the
 * useragent; evaluation stops at the first hit.
 *
 * @param useragent the raw User-Agent header value
 * @param result    map that a matching detector fills with attributes
 * @return true when a mobile-phone detector matched
 */
public static boolean tryMobilePhone(final String useragent, final Map<String,String> result) {
    return is.tagomor.woothee.mobilephone.Docomo.challenge(useragent, result)
        || is.tagomor.woothee.mobilephone.Au.challenge(useragent, result)
        || is.tagomor.woothee.mobilephone.Softbank.challenge(useragent, result)
        || is.tagomor.woothee.mobilephone.Willcom.challenge(useragent, result)
        || is.tagomor.woothee.mobilephone.MiscPhones.challenge(useragent, result);
}
/**
 * Tries the appliance (game console / TV) detectors against the
 * useragent; evaluation stops at the first hit.
 *
 * @param useragent the raw User-Agent header value
 * @param result    map that a matching detector fills with attributes
 * @return true when an appliance detector matched
 */
public static boolean tryAppliance(final String useragent, final Map<String,String> result) {
    return is.tagomor.woothee.appliance.Playstation.challenge(useragent, result)
        || is.tagomor.woothee.appliance.Nintendo.challenge(useragent, result)
        || is.tagomor.woothee.appliance.DigitalTV.challenge(useragent, result);
}
/**
 * Tries the miscellaneous desktop-tool detector against the useragent.
 *
 * @param useragent the raw User-Agent header value
 * @param result    map that a matching detector fills with attributes
 * @return true when the desktop-tools detector matched
 */
public static boolean tryMisc(final String useragent, final Map<String,String> result) {
    return is.tagomor.woothee.misc.DesktopTools.challenge(useragent, result);
}
/**
 * Last-resort detectors for uncommon useragents (odd smartphone
 * patterns, HTTP libraries, RSS readers, probable crawlers);
 * evaluation stops at the first hit.
 *
 * @param useragent the raw User-Agent header value
 * @param result    map that a matching detector fills with attributes
 * @return true when a rare-case detector matched
 */
public static boolean tryRareCases(final String useragent, final Map<String,String> result) {
    return is.tagomor.woothee.misc.SmartPhonePatterns.challenge(useragent, result)
        || is.tagomor.woothee.browser.Sleipnir.challenge(useragent, result)
        || is.tagomor.woothee.misc.HTTPLibrary.challenge(useragent, result)
        || is.tagomor.woothee.misc.MayBeRSSReader.challenge(useragent, result)
        || is.tagomor.woothee.crawler.MayBeCrawler.challenge(useragent, result);
}
/**
 * Ensures every standard attribute key is present in the result map,
 * defaulting missing entries to {@code DataSet.VALUE_UNKNOWN}.
 * Existing (non-null) values are left untouched.
 *
 * @param result partially filled attribute map
 * @return the same map instance, with all attributes populated
 */
public static Map<String,String> fillResult(final Map<String,String> result) {
    final String[] attributes = {
        DataSet.ATTRIBUTE_NAME,
        DataSet.ATTRIBUTE_CATEGORY,
        DataSet.ATTRIBUTE_OS,
        DataSet.ATTRIBUTE_VERSION,
        DataSet.ATTRIBUTE_VENDOR
    };
    for (final String attribute : attributes) {
        if (result.get(attribute) == null) {
            result.put(attribute, DataSet.VALUE_UNKNOWN);
        }
    }
    return result;
}
}
|
/*
* BDSup2Sub++ (C) 2012 <NAME>.
* Based on code from BDSup2Sub by Copyright 2009 <NAME> (0xdeadbeef)
* and Copyright 2012 <NAME> (mjuhasz)
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "palette.h"
#include <QColor>
// Default constructor: creates an empty palette.
// NOTE(review): members are left default-initialized here; confirm that
// callers always overwrite such an object with a fully-built Palette.
Palette::Palette()
{
}
// Copy constructor: copies the size, the BT.601/BT.709 flag, the ARGB
// color table and the cached Y/Cb/Cr component tables.
Palette::Palette(const Palette &other) :
    paletteSize(other.paletteSize),
    useBT601(other.useBT601),
    colors(other.colors),
    y(other.y),
    cb(other.cb),
    cr(other.cr)
{
}
// Pointer-based copy constructor, mirroring the reference version.
// NOTE(review): `other` is dereferenced unconditionally — confirm no
// caller can pass a null pointer.
Palette::Palette(const Palette *other) :
    paletteSize(other->paletteSize),
    useBT601(other->useBT601),
    colors(other->colors),
    y(other->y),
    cb(other->cb),
    cr(other->cr)
{
}
// Sized constructor: creates `paletteSize` entries, all initialized to
// 0x00000000 (fully transparent black), and seeds the cached Y/Cb/Cr
// tables with the YCbCr value of black.
Palette::Palette(int paletteSize, bool use601) :
    paletteSize(paletteSize),
    useBT601(use601),
    colors(paletteSize, 0)
{
    // Every entry starts as black, so convert qRgb(0, 0, 0) once and
    // reuse the result; the previous version recomputed this
    // loop-invariant conversion on every iteration.
    const QVector<int> blackYCbCr = Palette::RGB2YCbCr(qRgb(0, 0, 0), useBT601);
    for (int i = 0; i < paletteSize; ++i)
    {
        y.push_back(blackYCbCr[0]);
        cb.push_back(blackYCbCr[1]);
        cr.push_back(blackYCbCr[2]);
    }
}
// Builds a palette from parallel per-channel vectors (R, G, B, A).
// The ARGB table and the cached Y/Cb/Cr component tables are filled in
// a single pass over the inputs; paletteSize is taken from the result.
Palette::Palette(QVector<uchar> inRed, QVector<uchar> inGreen, QVector<uchar> inBlue, QVector<uchar> inAlpha, bool use601) :
    useBT601(use601)
{
    for (int i = 0; i < inRed.size(); ++i)
    {
        const QRgb color = qRgba(inRed.at(i), inGreen.at(i), inBlue.at(i), inAlpha.at(i));
        colors.push_back(color);

        const QVector<int> yCbCr = RGB2YCbCr(color, useBT601);
        y.push_back(yCbCr[0]);
        cb.push_back(yCbCr[1]);
        cr.push_back(yCbCr[2]);
    }
    paletteSize = colors.size();
}
// Destructor. The explicit clear() calls are redundant belt-and-braces:
// the QVector members would be destroyed automatically anyway.
Palette::~Palette()
{
    colors.clear();
    y.clear();
    cb.clear();
    cr.clear();
}
void Palette::setAlpha(int index, int alpha)
{
colors.replace(index, qRgba(qRed(colors.at(index)), qGreen(colors.at(index)), qBlue(colors.at(index)), alpha));
}
void Palette::setRGB(int index, QRgb rgb)
{
colors.replace(index, qRgba(qRed(rgb), qGreen(rgb), qBlue(rgb), qAlpha(colors.at(index))));
QVector<int> yCbCr = RGB2YCbCr(rgb, useBT601);
y.replace(index, yCbCr[0]);
cb.replace(index, yCbCr[1]);
cr.replace(index, yCbCr[2]);
}
// Converts a PC-range RGB value (0..255 per channel) to YCbCr and
// clamps the result: Y into [16, 235], Cb/Cr into [16, 240].
// `use601` selects BT.601 weights; otherwise BT.709 weights are used.
// Returns {Y, Cb, Cr}.
QVector<int> Palette::RGB2YCbCr(QRgb rgb, bool use601)
{
    const int red = qRed(rgb);
    const int green = qGreen(rgb);
    const int blue = qBlue(rgb);
    double lumaF, cbF, crF;

    if (use601)
    {
        /* BT.601 for RGB 0..255 (PC) -> YCbCr 16..235 */
        lumaF = (((red * 0.299) * 219) / 255) + (((green * 0.587) * 219) / 255) + (((blue * 0.114) * 219) / 255);
        cbF = (((-red * 0.168736) * 224) / 255) - (((green * 0.331264) * 224) / 255) + (((blue * 0.5) * 224) / 255);
        crF = (((red * 0.5) * 224) / 255) - (((green * 0.418688) * 224) / 255) - (((blue * 0.081312) * 224) / 255);
    }
    else
    {
        /* BT.709 for RGB 0..255 (PC) -> YCbCr 16..235 */
        lumaF = (((red * 0.2126) * 219) / 255) + (((green * 0.7152) * 219) / 255) + (((blue * 0.0722) * 219) / 255);
        cbF = ((((-red * 0.2126) / 1.8556) * 224) / 255) - ((((green * 0.7152) / 1.8556) * 224) / 255) + (((blue * 0.5) * 224) / 255);
        crF = (((red * 0.5) * 224) / 255) - ((((green * 0.7152) / 1.5748) * 224) / 255) - ((((blue * 0.0722) / 1.5748) * 224) / 255);
    }

    // Offset (+16 luma, +128 chroma) and round to nearest integer.
    QVector<int> yCbCr;
    yCbCr.push_back(16 + (int)(lumaF + .5));
    yCbCr.push_back(128 + (int)(cbF + .5));
    yCbCr.push_back(128 + (int)(crF + .5));

    // Clamp each component; index 0 is luma with the lower ceiling.
    for (int i = 0; i < yCbCr.size(); ++i)
    {
        const int upper = (i == 0) ? 235 : 240;
        if (yCbCr[i] < 16)
        {
            yCbCr.replace(i, 16);
        }
        else if (yCbCr[i] > upper)
        {
            yCbCr.replace(i, upper);
        }
    }
    return yCbCr;
}
// Converts a clamped-range YCbCr triple back to a PC-range RGB value
// (0..255 per channel, rounded and clamped). `useBT601` selects the
// BT.601 inverse matrix; otherwise BT.709 is used.
QRgb Palette::YCbCr2RGB(int y, int cb, int cr, bool useBT601)
{
    // Remove the encoding offsets before applying the inverse matrix.
    const double luma = (y - 16) * 1.164383562;
    const double cbOff = cb - 128;
    const double crOff = cr - 128;
    double r, g, b;

    if (useBT601)
    {
        /* BT.601 for YCbCr 16..235 -> RGB 0..255 (PC) */
        r = luma + (crOff * 1.596026317);
        g = luma - (crOff * 0.8129674985) - (cbOff * 0.3917615979);
        b = luma + (cbOff * 2.017232218);
    }
    else
    {
        /* BT.709 for YCbCr 16..235 -> RGB 0..255 (PC) */
        r = luma + (crOff * 1.792741071);
        g = luma - (crOff * 0.5329093286) - (cbOff * 0.2132486143);
        b = luma + (cbOff * 2.112401786);
    }

    // Round to nearest and clamp each channel into [0, 255].
    const int ri = std::min(std::max((int)(r + 0.5), 0), 255);
    const int gi = std::min(std::max((int)(g + 0.5), 0), 255);
    const int bi = std::min(std::max((int)(b + 0.5), 0), 255);
    return qRgb(ri, gi, bi);
}
// Sets both the RGB components and the alpha of entry `index` from a
// single ARGB value. setRGB() preserves the entry's old alpha, so
// setAlpha() must follow to install the new one.
void Palette::setARGB(int index, QRgb inColor)
{
    setRGB(index, inColor);
    setAlpha(index, qAlpha(inColor));
}
int Palette::transparentIndex()
{
int transparentIndex = 0;
int minimumAlpha = 0x100;
for (int i = 0; i < paletteSize; ++i)
{
if (qAlpha(colors.at(i)) < minimumAlpha)
{
minimumAlpha = qAlpha(colors.at(i));
transparentIndex = i;
if (minimumAlpha == 0)
{
break;
}
}
}
return transparentIndex;
}
void Palette::setYCbCr(int index, int yn, int cbn, int crn)
{
y.replace(index, (uchar)yn);
cb.replace(index, (uchar)cbn);
cr.replace(index, (uchar)crn);
QRgb rgb = YCbCr2RGB(yn, cbn, crn, useBT601);
colors.replace(index, qRgba(qRed(rgb), qGreen(rgb), qBlue(rgb), qAlpha(colors.at(index))));
}
// Returns {Y, Cb, Cr} for entry `index`, each masked to an unsigned
// 8-bit value.
QVector<int> Palette::YCbCr(int index)
{
    QVector<int> components;
    components.push_back(y[index] & 0xff);
    components.push_back(cb[index] & 0xff);
    components.push_back(cr[index] & 0xff);
    return components;
}
|
// Dog-type enemy: a 64x32 rectangle-collision enemy that chases the
// player horizontally and attacks when close.
var EnemyDog = Class.create(Enemy, {
    initialize: function(manager, data) {
        Enemy.call(this, manager, data, 64, 32, "rect");
        this.sprite = new Sprite(this.width, this.height);
        this.sprite.image = game.assets["res/enemy02.png"];
        this.x = data.x;
        this.y = data.y;
        this.addChild(this.sprite);
    },
    // Kill this enemy when hit by a bullet fired by the local player.
    oncollide: function(collider) {
        var enemy = this.parentNode;
        var lethalHit = collider.name == "Bullet" &&
            collider.parentNode.gun.player.isMine &&
            enemy.status != EnemyStatus.DEAD;
        if (lethalHit) {
            enemy.status = EnemyStatus.DEAD;
            collider.willdelete = true;
            collider.parentNode.remove();
        }
    },
    // Choose next status: stay dead once dead; attack while within one
    // body-width of the player or while a recent attack (< 1s) lingers.
    decidestatus: function() {
        if (this.status == EnemyStatus.DEAD)
            return EnemyStatus.DEAD;
        var inRange = Math.abs(this.centerX - this.player.centerX) < this.width;
        var attackLingers = this.status == EnemyStatus.ATTACKING && this.statusElapsed < 1000;
        return (inRange || attackLingers) ? EnemyStatus.ATTACKING : EnemyStatus.APPROACHING;
    },
    // Run toward the player at 150 px/s with a small sinusoidal bob,
    // flipping the sprite to face the travel direction.
    onapproaching: function() {
        var direction = (this.centerX > this.player.centerX) ? -1 : 1;
        this.sprite.scaleX = direction;
        var step = 150 * Time.elapsedsec + Math.sin(this.statusElapsed / 50);
        return { x: direction * step, y: 0 };
    },
    // Flip the sprite upside down and stop moving.
    ondead: function() {
        this.sprite.scaleY = -1;
        return { x: 0, y: 0 };
    }
});
|
#!/bin/bash
#
# Copyright (C) 2016 The CyanogenMod Project
# Copyright (C) 2017 The LineageOS Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Generates vendor makefiles for the msm8937-common blobs, then (when a
# per-device proprietary-files.txt exists) for the device blobs too.
set -e

DEVICE_COMMON=msm8937-common
VENDOR=xiaomi

INITIAL_COPYRIGHT_YEAR=2018

# Load extract_utils and do some sanity checks
MY_DIR="${BASH_SOURCE%/*}"
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; fi

BOOTLEGGERS_ROOT="$MY_DIR"/../../..

# FIX: this previously referenced the undefined variable $ARROW_ROOT,
# so the helper path never resolved; the tree root computed above is
# $BOOTLEGGERS_ROOT.
HELPER="$BOOTLEGGERS_ROOT"/vendor/bootleggers/build/tools/extract_utils.sh
if [ ! -f "$HELPER" ]; then
    echo "Unable to find helper script at $HELPER"
    exit 1
fi
. "$HELPER"

# Initialize the helper for common
setup_vendor "$DEVICE_COMMON" "$VENDOR" "$BOOTLEGGERS_ROOT" true

# Copyright headers and guards
write_headers "land santoni"

# The standard common blobs
write_makefiles "$MY_DIR"/proprietary-files.txt true
echo "" >> "$PRODUCTMK"
write_makefiles "$MY_DIR"/proprietary-files-qc.txt true

# We are done!
write_footers

# NOTE(review): $DEVICE and $DEVICE_BRINGUP_YEAR are expected to be set
# by the per-device wrapper that sources/invokes this script — confirm.
if [ -s "$MY_DIR"/../$DEVICE/proprietary-files.txt ]; then
    # Reinitialize the helper for device
    INITIAL_COPYRIGHT_YEAR="$DEVICE_BRINGUP_YEAR"
    setup_vendor "$DEVICE" "$VENDOR" "$BOOTLEGGERS_ROOT" false

    # Copyright headers and guards
    write_headers

    # The standard device blobs
    write_makefiles "$MY_DIR"/../$DEVICE/proprietary-files.txt true

    # We are done!
    write_footers
fi
|
#!/bin/bash
# Development build of the leaflet annotation tool: clean previous
# output, transpile the TypeScript dependencies, then bundle with the
# dev webpack config.
rm -rf dist demos/leaflet.annotation.*
echo -e "Build Dev: \n...(re)packing leaflet annotator tool..."
echo -e "\n ...transpiling typed audio depends... "
# --downlevelIteration emits spec-compliant iteration helpers when
# targeting older JS versions.
tsc src/audio_model.ts --downlevelIteration
tsc src/audio_loading_utils.ts
echo -e "\n ...transpiling default style depends... "
tsc src/defaults.ts
echo -e "\n ...packing tool..."
webpack --config webpack/webpack.leaflet.annotator_dev.ts
echo -e "packing done :)\n "
|
#!/bin/bash
# IRFinder reference preparation (Phase 3).
# Builds the intron / exclusion-zone / region-of-interest BED references
# from a GTF annotation, chromosome lengths, and a mapability-exclusion
# track, using Perl helpers from $LIBEXEC plus bedtools/awk/sort.
export LANG=C
export LC_ALL=C
set -e
# Positional arguments (examples of typical values are commented below).
GTF=$1
CHRLEN=$2
MAPABILITYEXCL=$3
LIBEXEC=$4
OPTNONPOLYA=$5
OPTBLACKLISTEXCL=$6
#GTF=../../REF/Homo_sapiens.GRCh37.75.150/Homo_sapiens.GRCh37.75.limit_chr.gtf
#CHRLEN=../../REF/Homo_sapiens.GRCh37.75.150/STAR/chrNameLength.txt
#MAPABILITYEXCL=MapabilityExclusion.bed.gz
#OPTBLACKLISTEXCL=wgEncodeDacMapabilityConsensusExcludable.bed.gz
NEARGENE="-l 5000 -r 1000 -s"
# Gene has been reversed before being sloped -- effectively adding an exclusion zone 5000 bp downstream of a reverse sense gene (more likely to over-run this direction than to have an early TSS).
#echo ""
echo "<Phase 3: IRFinder Reference Preparation>"
date +"%b %d %T ... building Ref 1..."
# One pass over the custom BED stream, fanned out via tee + process
# substitution into: strand-flipped multi-exon genes, all multi-exon
# annotations, an exon-exclusion set, rRNA ROIs, and candidate introns.
"$LIBEXEC/gtf2bed-custom.pl" "$GTF" \
| \
tee \
>(awk 'BEGIN {FS="\t"; OFS="\t"} ($10>1 && $6=="+") {print $1,$2,$3,$4,$5,"-" } ($10>1 && $6=="-") {print $1,$2,$3,$4,$5,"+" }' > tmp.reversed.genes ) \
>(awk 'BEGIN {FS="\t"; OFS="\t"} ($10>1) {print $1,$2,$3,$4,$5,$6 }' > tmp.all.annotations ) \
>(grep -v -e 'intron' | "$LIBEXEC/bed-to-intron+exon.pl" tmp.exons.exclude /dev/null ) \
>(grep -e '/rRNA/' -e '/Mt_rRNA/' | cut -f 1-4,6 | awk 'BEGIN {FS="\t";OFS="\t"} {$4 = "rRNA/" $1 "/" $2 "/" $3 "/" $5 "/" $4; print }' > tmp.ROI.rRNA.bed ) \
| grep -e '/processed_transcript/' -e '/protein_coding/' | "$LIBEXEC/bed-to-intron+exon.pl" /dev/null tmp.candidate.introns
#	grep -e '/rRNA/' -e '/Mt_rRNA/' | cut -f 1-4,6 | sort -S 500M -k1,1 -k2,2n -k3,3n | awk 'BEGIN {FS="\t";OFS="\t"} {$4 = $1 "/" $2 "/" $3 "/" $4; print } '> rRNA-ROI.bed
#>(grep -v -e 'intron' | grep -e '/processed_transcript/' -e '/protein_coding/' | $LIBEXEC/bed-to-intron+exon.pl bed.exons.exclude.anno-coding /dev/null ) \
#>(grep -v -e 'intron' | grep -v -e '/processed_transcript/' -e '/protein_coding/' | $LIBEXEC/bed-to-intron+exon.pl bed.exons.exclude.anno-noncoding /dev/null ) \
#>(awk 'BEGIN {FS="\t"; OFS="\t"} ($10>1 && $4 ~ /protein_coding/) {print $1,$2,$3,$4,$5,$6 }' > tmp.coding.genes ) \
## Stable sort -- lets get the same named introns each time.
sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -k4,4 < tmp.candidate.introns | sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -u > introns.unique.bed
#sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -k4,4 -u < tmp.candidate.introns > introns.unique.bed
# Exon exclusion zones: dedupe, pad by 5 bp each side, resort.
sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -k4,4 < tmp.exons.exclude | sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -u | bedtools slop -b 5 -i stdin -g "$CHRLEN" | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k6,6 > exclude.directional.bed
date +"%b %d %T ... building Ref 2..."
# Low-mapability regions (merged within 9 bp, kept if >= 10 bp wide).
gzip -dc "$MAPABILITYEXCL" | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n -u | bedtools merge -i stdin -d 9 | awk 'BEGIN {FS="\t"; OFS="\t"} (($3-$2)>=10) { print $1, $2, $3, "M" }' > exclude.omnidirectional.bed
#if [ ! -z "$OPTBLACKLISTEXCL" ]
# if not empty (or null) string.
if [ -n "$OPTBLACKLISTEXCL" ]
then
    # Optional ENCODE-style blacklist; normalize "chr" prefixes and "M" -> "MT".
    if [[ "$OPTBLACKLISTEXCL" == *.gz ]]
    then
        gzip -dc "$OPTBLACKLISTEXCL" | awk 'BEGIN {FS="\t"; OFS="\t"} { if ($1 ~ /^chr/) {sub(/^chr/,"",$1)}; if ($1 == "M") {$1 = "MT"}; print $1,$2,$3,$4 }' >> exclude.omnidirectional.bed
    else
        cat "$OPTBLACKLISTEXCL" | awk 'BEGIN {FS="\t"; OFS="\t"} { if ($1 ~ /^chr/) {sub(/^chr/,"",$1)}; if ($1 == "M") {$1 = "MT"}; print $1,$2,$3,$4 }' >> exclude.omnidirectional.bed
    fi
fi
### BUG in bedtools merge in latest bedtools. Doesn't honour strand (or at least, drops it from the output which is pretty much the same thing)!
date +"%b %d %T ... building Ref 3..."
# Exclusion stream for directional (stranded) libraries: omnidirectional
# zones emitted on both strands plus strand-specific exon zones.
function excludeFileDir {
cat \
<( cat exclude.omnidirectional.bed | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n | bedtools merge -i stdin | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "X", "0", "+"; print $1, $2, $3, "X", "0", "-" }' ) \
<( cat exclude.directional.bed | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -u | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "E", "0", $6}' ) \
| \
sort -t $'\t' -S 1G -k1,1 -k2,2n -k3,3n -k6,6
}
# Exclusion stream for non-directional libraries: adds antisense genes
# (and their sloped flanks) and emits exon zones on both strands.
function excludeFileNondir {
cat \
<( cat tmp.reversed.genes | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -u | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "A", "0", $6}' ) \
<( cat tmp.reversed.genes | bedtools slop $NEARGENE -i stdin -g "$CHRLEN" | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k6,6 -u | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "AE", "0", $6}' ) \
<( cat exclude.omnidirectional.bed | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n -u | bedtools merge -i stdin | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "X", "0", "+"; print $1, $2, $3, "X", "0", "-" }' ) \
<( cat exclude.directional.bed | sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -u | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "E", "0", "+"; print $1, $2, $3, "E", "0", "-" }' ) \
| \
sort -t $'\t' -S 1G -k1,1 -k2,2n -k3,3n -k6,6
}
date +"%b %d %T ... building Ref 4..."
# Intersect introns with each exclusion stream to compute coverage regions.
bedtools intersect -s -sorted -wao -a introns.unique.bed -b <(excludeFileDir) | "$LIBEXEC/IntronExclusion.pl" >(cat > tmp.50) >(cat > tmp.read-continues) | sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -k4,4 -u > tmp-dir.IntronCover.bed
date +"%b %d %T ... building Ref 5..."
bedtools intersect -s -sorted -wao -a introns.unique.bed -b <(excludeFileNondir) | "$LIBEXEC/IntronExclusion.pl" >(cat >> tmp.50) >(cat >> tmp.read-continues) | sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 -k4,4 -u > tmp-nd.IntronCover.bed
#echo "Build Ref 6"
date +"%b %d %T ... building Ref 6..."
sort -t $'\t' -S 500M -k1,1 -k2,2n < tmp.read-continues | uniq > ref-read-continues.ref
#awk 'BEGIN {FS = "[\t/]"; OFS = "\t"} {print $1, $8, $9, $6}' < tmp-dir.IntronCover.bed | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k4,4 -u > ref-sj.ref
cut -f 1,2,3,6 < introns.unique.bed | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k4,4 -u > ref-sj.ref
#echo "Build Ref 8"
# Workaround bug in bedtools merge when dealing with directional data (introduced in latest version of bedtools)
#cat \
#<(awk 'BEGIN {FS="\t"; OFS="\t"} ($6 == "+")' < tmp.coding.genes | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k6,6 -u | bedtools merge -i stdin | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "G+", 0, "+"}' ) \
#<(awk 'BEGIN {FS="\t"; OFS="\t"} ($6 == "-")' < tmp.coding.genes | sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k6,6 -u | bedtools merge -i stdin | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "G-", 0, "-"}' ) \
#| sort -t $'\t' -S 2G -k1,1 -k2,2n -k3,3n -k6,6 -u > coding.genes.merged.bed
#echo "Build Ref 9"
#bedtools intersect -S -v -wa -a coding.genes.merged.bed -b <(bedtools slop -b 5000 -i exclude.directional.bed -g "$CHRLEN") | awk 'BEGIN {FS="\t"; OFS="\t"} {print $1, $2, $3, "G"$6, $5, "+"; print $1, $2, $3, "G"$6, $5, "-"}' > ref-genes-dir-clean.bed
# awk protein coding -- create description for G+ or G-, but output both directions. The result of these counts indicates whether the sequencing was directional or not.
#cat bed.exons.exclude.anno-coding | awk 'BEGIN {FS="\t"; OFS="\t"} (($3-$2) < 120) {print}' > ref-short-exons.bed
date +"%b %d %T ... building Ref 7..."
# Merge the directional / non-directional covers plus skip regions into
# the final coverage reference.
cat <(awk 'BEGIN {FS="\t"; OFS="\t"} {$4 = "dir/" $4; print}' < tmp-dir.IntronCover.bed) \
<(awk 'BEGIN {FS="\t"; OFS="\t"} {$4 = "nd/" $4; print}' < tmp-nd.IntronCover.bed) \
<(awk 'BEGIN {FS="\t"; OFS="\t"} {$4 = "skip"; print}' < tmp.50) \
| sort -t $'\t' -s -S 500M -k1,1 -k2,2n -k3,3n -k6,6 | uniq > ref-cover.bed
date +"%b %d %T ... building Ref 8..."
#in Bedtools v2.26, chromosome-length file need to be sorted (sort -k1,1) according to chromosome names before being passed to complement function
bedtools slop -b 10000 -g "$CHRLEN" -i tmp.all.annotations \
| sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n | bedtools merge -d 1000 -i stdin \
| bedtools complement -i stdin -g <(sort -k1,1 "$CHRLEN") \
| awk 'BEGIN {FS="\t"; OFS="\t"} (length($1)<=2) {$4 = "Intergenic/" $1; print $1, $2, $3, $4}' > intergenic.ROI.bed
date +"%b %d %T ... building Ref 9..."
# Combine intergenic, rRNA, and (optionally) non-polyA ROIs; the
# non-polyA track may be gzipped or plain text.
if [ -n "$OPTNONPOLYA" ]
then
    if [[ "$OPTNONPOLYA" == *.gz ]]
    then
        bedtools intersect -v -sorted -a <(sort -t $'\t' -S 500M -k1,1 -k2,2n < intergenic.ROI.bed) -b <(cat tmp.ROI.rRNA.bed <(gzip -cd "$OPTNONPOLYA") | cut -f 1-3 | sort -t $'\t' -S 500M -k1,1 -k2,2n) > tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 10a..."
        bedtools intersect -v -sorted -a <(sort -t $'\t' -S 500M -k1,1 -k2,2n < tmp.ROI.rRNA.bed) -b <(gzip -cd "$OPTNONPOLYA" | cut -f 1-3 | sort -t $'\t' -S 500M -k1,1 -k2,2n) >> tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 11a..."
        gzip -cd < "$OPTNONPOLYA" | awk 'BEGIN {FS="\t"; OFS="\t"} {$4 = "NonPolyA/" $1 "/" $2 "/" $3; print $1, $2, $3, $4}' >> tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 12a..."
    else
        bedtools intersect -v -sorted -a <(sort -t $'\t' -S 500M -k1,1 -k2,2n < intergenic.ROI.bed) -b <(cat tmp.ROI.rRNA.bed "$OPTNONPOLYA" | cut -f 1-3 | sort -t $'\t' -S 500M -k1,1 -k2,2n) > tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 10b..."
        bedtools intersect -v -sorted -a <(sort -t $'\t' -S 500M -k1,1 -k2,2n < tmp.ROI.rRNA.bed) -b <(cat "$OPTNONPOLYA" | cut -f 1-3 | sort -t $'\t' -S 500M -k1,1 -k2,2n) >> tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 11b..."
        awk 'BEGIN {FS="\t"; OFS="\t"} {$4 = "NonPolyA/" $1 "/" $2 "/" $3; print $1, $2, $3, $4}' < "$OPTNONPOLYA" >> tmp.ROI.combined.bed
        date +"%b %d %T ... building Ref 12b..."
    fi
else
    bedtools intersect -v -sorted -a <(sort -t $'\t' -S 500M -k1,1 -k2,2n < intergenic.ROI.bed) -b <(cat tmp.ROI.rRNA.bed | cut -f 1-3 | sort -t $'\t' -S 500M -k1,1 -k2,2n) > tmp.ROI.combined.bed
    date +"%b %d %T ... building Ref 10c..."
    cat tmp.ROI.rRNA.bed >> tmp.ROI.combined.bed
    date +"%b %d %T ... building Ref 11c..."
fi
sort -t $'\t' -S 500M -k1,1 -k2,2n -k3,3n < tmp.ROI.combined.bed > ref-ROI.bed
# Sanity-check every required output; report all failures before exiting.
ENDSTAT=0
if [ ! -s exclude.directional.bed ]
then
    ENDSTAT=1
    echo "Error: exclude.directional.bed is empty."
fi
if [ ! -s exclude.omnidirectional.bed ]
then
    ENDSTAT=1
    echo "Error: exclude.omnidirectional.bed is empty."
fi
if [ ! -s introns.unique.bed ]
then
    ENDSTAT=1
    echo "Error: introns.unique.bed is empty."
fi
if [ ! -s ref-cover.bed ]
then
    ENDSTAT=1
    echo "Error: ref-cover.bed is empty."
fi
if [ ! -s ref-read-continues.ref ]
then
    ENDSTAT=1
    echo "Error: ref-read-continues.ref is empty."
fi
if [ ! -s ref-sj.ref ]
then
    ENDSTAT=1
    echo "Error: ref-sj.ref is empty."
fi
if [ $ENDSTAT -eq 1 ]
then
    echo "Error: IRFinder reference building FAILED."
else
    #date +"%b %d %T ... cleaning temporary files..."
    # Temporary files are only removed on success, to aid debugging.
    rm tmp.50 tmp-dir.IntronCover.bed tmp-nd.IntronCover.bed tmp.read-continues tmp.candidate.introns tmp.exons.exclude tmp.all.annotations tmp.reversed.genes tmp.ROI.rRNA.bed tmp.ROI.combined.bed
    #echo "IRFinder reference building: COMPLETE."
fi
|
#!/usr/bin/env bash
# First cluster node (translated from Japanese: "クラスタノード 1台目").
# Launches SampleApp via sbt, passing host/port settings as JVM system
# properties (-D flags) read by the application at startup.
sbt \
-Dsbt.server.forcestart=true \
-DHOST=127.0.0.1 \
-DPORT=25521 \
-DAPP_HOST=127.0.0.1 \
-DAPP_PORT=9001 \
"SampleApp/run"
|
#!/usr/bin/env sh
# Container first-start hook: runs once, before configuration is applied.
echo "This happens only on the first start of the container, before the configuration is applied to the instance"
|
<reponame>danielmlc/blogServer<filename>src/auth/dto/UserListDto.ts
import {IsNotEmpty } from 'class-validator';
import { UserDto } from './UserDto';
/**
 * DTO wrapping a list of users for the auth module.
 */
export class UserListDto {
    // NOTE(review): @IsNotEmpty rejects null/undefined/'' but does not
    // reject an empty array [] — confirm that is the intended check.
    @IsNotEmpty()
    Items: Array<UserDto>;
}
|
# https://programmers.co.kr/learn/courses/30/lessons/64061
# Crane-game puzzle: pick dolls off a board column by column, stack
# them, and count how many vanish when two equal dolls touch.


def get_top(board, move):
    """Remove and return the topmost doll in 1-indexed column ``move``.

    Scans rows top to bottom; the first non-zero cell is cleared and its
    value returned. Returns None when the column is empty.
    """
    col = move - 1
    for row in board:
        if row[col] != 0:
            doll = row[col]
            row[col] = 0
            return doll
    return None


def remove_desc(stack: list):
    """Cancel every adjacent equal pair in ``stack``, in place.

    FIX: the previous index-walking version popped two items and kept
    iterating over stale indices, raising IndexError on inputs such as
    [2, 2, 1, 1]. The output-stack rewrite below fully cancels nested
    pairs in one pass, so callers looping "until stable" still converge.
    """
    out = []
    for item in stack:
        if out and out[-1] == item:
            out.pop()  # adjacent equal pair — both vanish
        else:
            out.append(item)
    stack[:] = out
    return stack


def solve(board, moves):
    """Return the number of dolls removed by adjacent-pair cancellation."""
    picked = [get_top(board, move) for move in moves]
    # Empty columns yield None; those picks never reach the stack.
    stack = [doll for doll in picked if doll]
    before = len(stack)
    remove_desc(stack)
    return before - len(stack)


if __name__ == "__main__":
    # Sample case from the problem statement (expected answer: 4).
    # FIX: previously `board`/`moves` were commented out, so running the
    # script raised NameError before any output.
    board = [[0,0,0,0,0],[0,0,1,0,3],[0,2,5,0,1],[4,2,4,4,2],[3,5,1,3,1]]
    moves = [1,5,3,5,1,2,1,4]
    print(solve(board, moves))
|
import { ExtractorModelData } from "../types/types";

class ExtractorModel {
    name: string;
    private _raw: any; // eslint-disable-line @typescript-eslint/no-explicit-any

    /**
     * Wraps a raw Discord Player extractor object behind a stable API.
     * @param {string} extractorName Name of the extractor
     * @param {object} data Extractor object
     */
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    constructor(extractorName: string, data: any) {
        /**
         * The extractor name
         * @type {string}
         */
        this.name = extractorName;

        /**
         * The raw model
         * @name ExtractorModel#_raw
         * @type {any}
         * @private
         */
        Object.defineProperty(this, "_raw", { value: data, configurable: false, writable: false, enumerable: false });
    }

    /**
     * Resolves a query via the raw extractor's `getInfo` and normalizes
     * the result; used by `Player.play()`.
     * @param {string} query Query to handle
     * @returns {Promise<ExtractorModelData>} null when the extractor returned nothing
     */
    async handle(query: string): Promise<ExtractorModelData> {
        const info = await this._raw.getInfo(query);
        if (!info) return null;

        const rawTracks = info.info as Omit<ExtractorModelData, "playlist">["data"];
        const tracks =
            rawTracks?.map((track) => ({
                title: track.title as string,
                duration: track.duration as number,
                thumbnail: track.thumbnail as string,
                engine: track.engine,
                views: track.views as number,
                author: track.author as string,
                description: track.description as string,
                url: track.url as string,
                // Fall back to "arbitrary" when the extractor omits a source.
                source: track.source || "arbitrary"
            })) ?? [];

        return {
            playlist: info.playlist ?? null,
            data: tracks
        };
    }

    /**
     * Asks the raw extractor whether it can handle the given query.
     * @param {string} query The query to validate
     * @returns {boolean}
     */
    validate(query: string): boolean {
        return !!this._raw.validate(query);
    }

    /**
     * The extractor version, defaulting to "0.0.0" when unset.
     * @type {string}
     */
    get version(): string {
        return this._raw.version ?? "0.0.0";
    }
}

export { ExtractorModel };
|
import { useEffect } from 'react';
import { useRouter } from 'next/dist/client/router';
import styles from '@styles/index.module.scss';
// Catch-all 404 page: immediately redirects any unknown route to "/".
export default function NotFound() {
    const router = useRouter();
    useEffect(() => {
        router.push('/');
        // NOTE(review): empty dependency array runs the redirect once on
        // mount; `router` is deliberately omitted — confirm this passes
        // the project's exhaustive-deps lint config.
    }, []);
    // Empty styled container shown briefly while the redirect happens.
    return <div className={styles.container}></div>;
}
|
#!/bin/bash
# CI deploy step: build and push the k8s-rds image, but only for builds
# of the master branch (Travis sets TRAVIS_BRANCH / TRAVIS_BUILD_NUMBER).
if [ "$TRAVIS_BRANCH" == "master" ]; then
    # Tag with the build number and also advance the `latest` tag.
    docker build -t sorenmat/k8s-rds:$TRAVIS_BUILD_NUMBER .
    docker tag sorenmat/k8s-rds:$TRAVIS_BUILD_NUMBER sorenmat/k8s-rds:latest
    docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD";
    docker push sorenmat/k8s-rds:$TRAVIS_BUILD_NUMBER
    docker push sorenmat/k8s-rds:latest
else
    echo "Skipping docker push since we are not running on master"
fi
|
#!/bin/sh
# Install the Python dependencies; the uvicorn launch line below is kept
# as a reference for running the API locally.
pip install -r requirements.txt
# uvicorn api:fhir --reload --host 0.0.0.0 --port 9000
|
"use strict";
module.exports = {
rules: {
"accessor-pairs": "error",
"arrow-spacing": "error",
"block-spacing": "error",
"brace-style": "error",
"camelcase": "error",
"comma-dangle": "error",
"comma-spacing": "error",
"comma-style": "error",
"curly": "error",
"dot-location": "error",
"eol-last": "error",
"eqeqeq": "error",
"func-call-spacing": "error",
"generator-star-spacing": "error",
"key-spacing": "error",
"keyword-spacing": "error",
"new-cap": "error",
"new-parens": "error",
"no-array-constructor": "error",
"no-caller": "error",
"no-eval": "error",
"no-extend-native": "error",
"no-extra-bind": "error",
"no-extra-parens": "error",
"no-floating-decimal": "error",
"no-implied-eval": "error",
"no-iterator": "error",
"no-labels": "error",
"no-lone-blocks": "error",
"no-mixed-operators": "error",
"no-multi-spaces": "error",
"no-multi-str": "error",
"no-multiple-empty-lines": "error",
"no-new": "error",
"no-new-func": "error",
"no-new-object": "error",
"no-new-wrappers": "error",
"no-octal-escape": "error",
"no-proto": "error",
"no-return-assign": "error",
"no-self-compare": "error",
"no-sequences": "error",
"no-tabs": "error",
"no-template-curly-in-string": "error",
"no-throw-literal": "error",
"no-trailing-spaces": "error",
"no-undef-init": "error",
"no-unmodified-loop-condition": "error",
"no-unneeded-ternary": "error",
"no-unused-expressions": "error",
"no-use-before-define": "error",
"no-useless-call": "error",
"no-useless-computed-key": "error",
"no-useless-constructor": "error",
"no-useless-rename": "error",
"no-useless-return": "error",
"no-whitespace-before-property": "error",
"operator-linebreak": "error",
"padded-blocks": "error",
"prefer-promise-reject-errors": "error",
"rest-spread-spacing": "error",
"semi-spacing": "error",
"space-before-blocks": "error",
"space-before-function-paren": "error",
"space-in-parens": "error",
"space-infix-ops": "error",
"space-unary-ops": "error",
"spaced-comment": "error",
"symbol-description": "error",
"template-curly-spacing": "error",
"template-tag-spacing": "error",
"unicode-bom": "error",
"wrap-iife": "error",
"yield-star-spacing": "error",
"yoda": "error"
}
};
|
<reponame>b-liw/Chip8
package pl.bliw.gui;
import javafx.application.Platform;
import javafx.concurrent.Service;
import javafx.concurrent.Task;
import javafx.fxml.FXML;
import javafx.scene.canvas.Canvas;
import javafx.scene.canvas.GraphicsContext;
import javafx.scene.input.KeyEvent;
import javafx.scene.paint.Color;
import javafx.stage.FileChooser;
import org.apache.log4j.Logger;
import pl.bliw.emulator.Chip8;
import pl.bliw.emulator.io.Screen;
import pl.bliw.util.Constants;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
/**
 * The MainWindowController handles user input and interactions with main window.
 */
public class MainWindowController {
    /**
     * The logger to this class.
     */
    private static Logger log = Logger.getLogger(MainWindowController.class.getName());
    /**
     * The main Chip8 instance.
     */
    private Chip8 chip;
    /**
     * The graphics context used for drawing on canvas.
     */
    private GraphicsContext gc;
    /**
     * The thread executor used for starting new threads.
     */
    private ScheduledExecutorService threadExecutor;
    /**
     * The reference to the canvas from main window.
     */
    @FXML
    private Canvas canvas;

    /**
     * Constructs new controller for main window.
     *
     * @param chip8 instance of chip8
     */
    public MainWindowController(Chip8 chip8) {
        this.chip = chip8;
    }

    /**
     * The method redraws canvas using chip8 internal screen buffer.
     * Each linear buffer index is mapped to an (x, y) cell, scaled by
     * Constants.SCALE.
     */
    public void drawCanvas() {
        Screen screen = chip.getScreen();
        Boolean[] screenState = screen.getScreenState();
        for (int i = 0; i < screenState.length; i++) {
            boolean pixelOn = screenState[i];
            gc.setFill(pixelOn ? Color.WHITE : Color.BLACK);
            // FIX: x previously used a hard-coded 64 while y used
            // Screen.getWidth(); derive both from Screen.getWidth() so
            // the index-to-coordinate mapping stays consistent.
            int x = i % Screen.getWidth();
            int y = i / Screen.getWidth();
            gc.fillRect(x * Constants.SCALE, y * Constants.SCALE, Constants.SCALE, Constants.SCALE);
        }
    }

    /**
     * The method initializes main controller after creation: asks the
     * user for a ROM, boots the chip, and starts the emulation thread.
     * Any initialization failure is reported and the application exits.
     */
    @FXML
    public void initialize() throws InterruptedException {
        gc = canvas.getGraphicsContext2D();
        canvas.requestFocus();
        String romPath = getFilePathFromFileChooser();
        if (!romPath.isEmpty()) {
            try {
                chip.initialize(romPath);
                Thread.sleep(Constants.ONE_SECOND);
                runChipInAnotherThread();
            } catch (IOException e) {
                String message = String.format("Error occurred during opening rom file with specified path: %s%n%s", romPath, e.getMessage());
                log.error(message, e);
                ErrorDialog.show(message);
                Platform.exit();
            } catch (IllegalArgumentException e) {
                log.error(e.getMessage(), e);
                ErrorDialog.show(e.getMessage());
                Platform.exit();
            }
        } else {
            ErrorDialog.show("You have to select correct rom file");
            Platform.exit();
        }
    }

    /**
     * Starts chip8 in new thread. The emulation step runs at a fixed
     * rate and the canvas is redrawn only when the screen buffer
     * reports a change.
     */
    private void runChipInAnotherThread() {
        Service<Void> service = new Service<Void>() {
            @Override
            protected Task<Void> createTask() {
                return new Task<Void>() {
                    @Override
                    protected Void call() {
                        try {
                            threadExecutor = Executors.newScheduledThreadPool(1);
                            threadExecutor.scheduleAtFixedRate(() -> {
                                chip.run();
                                if (chip.getScreen().isCanvasUpdated()) {
                                    drawCanvas();
                                    chip.getScreen().setCanvasUpdated(false);
                                }
                            }, 0, Constants.EXPECTED_DELAY_IN_NANO_SECONDS, TimeUnit.NANOSECONDS).get();
                        } catch (Exception e) { // TOP EXCEPTION HANDLER, WHICH WILL SHUTDOWN EMULATOR AND SHOW CRASH LOG
                            String message = String.format("Critical error, shutting down the emulator%n%s", e.getMessage());
                            ErrorDialog.show(message);
                            log.fatal(message, e);
                            chip.shutDown();
                            Platform.exit();
                        }
                        return null;
                    }
                };
            }
        };
        service.start();
    }

    /**
     * Listener for pressed buttons; forwards the key name to the chip's
     * keyboard.
     *
     * @param event key event
     */
    @FXML
    private void keyPressedListener(KeyEvent event) {
        chip.getKeyboard().keyPressed(event.getCode().getName());
    }

    /**
     * Listener for released buttons; forwards the key name to the
     * chip's keyboard.
     *
     * @param event key event
     */
    @FXML
    private void keyReleasedListener(KeyEvent event) {
        chip.getKeyboard().keyReleased(event.getCode().getName());
    }

    /**
     * Returns the file path as string of file that can be selected using file chooser.
     *
     * @return file path, or the empty string when the user cancelled
     */
    private String getFilePathFromFileChooser() {
        FileChooser fileChooser = new FileChooser();
        fileChooser.setTitle("SELECT ROM FILE");
        File file = fileChooser.showOpenDialog(null);
        if (file != null) {
            return file.getAbsolutePath();
        } else {
            return "";
        }
    }
}
|
/*=============================================================================
Copyright (c) 2003 <NAME>
Copyright (c) 2004 <NAME>
Use, modification and distribution is subject to the Boost Software
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_SEQUENCE_DETAIL_SEQUENCE_BASE_HPP)
#define FUSION_SEQUENCE_DETAIL_SEQUENCE_BASE_HPP
namespace boost { namespace fusion
{
    // Tag type used to recognise fusion sequences.
    struct sequence_root {};

    // CRTP base class: any Sequence deriving from sequence_base<Sequence>
    // gains cast() members that recover the concrete sequence type.
    template <typename Sequence>
    struct sequence_base : sequence_root
    {
        // Downcast to the concrete sequence (const overload).
        Sequence const&
        cast() const
        {
            return static_cast<Sequence const&>(*this);
        }

        // Downcast to the concrete sequence (mutable overload).
        Sequence&
        cast()
        {
            return static_cast<Sequence&>(*this);
        }
    };
}}
#endif
|
<gh_stars>1-10
/* eslint-disable @typescript-eslint/camelcase */
import {HttpService, Injectable} from '@nestjs/common';
import {Logger} from '../../logger';
import {LcdChannelDto, LcdDenomDto} from '../../dto/http.dto'
import {LcdChannelType, DenomType,LcdChannelClientState} from '../../types/lcd.interface'
import {cfg} from "../../config/config";
@Injectable()
export class ChainHttp {
    /**
     * Fetch the paginated IBC channel list from an LCD node.
     * @param lcdAddr base URL of the LCD endpoint
     * @param channelsPath path template containing \OFFSET and \LIMIT placeholders
     * @returns bundled channel DTOs, or undefined when the request fails or returns nothing
     */
    static async getIbcChannels(lcdAddr, channelsPath: string) {
        // Substitute the pagination placeholders from configuration.
        // (Fixed typo: rgexOffset -> regexOffset.)
        const regexOffset = "\\OFFSET", regexLimit = "\\LIMIT";
        channelsPath = channelsPath.replace(new RegExp(regexOffset, "g"), <string>cfg.channels.offset);
        channelsPath = channelsPath.replace(new RegExp(regexLimit, "g"), <string>cfg.channels.limit);
        const ibcChannelsUrl = `${lcdAddr}${channelsPath}`;
        try {
            const ibcChannels: LcdChannelType[] = await new HttpService()
                .get(ibcChannelsUrl)
                .toPromise()
                .then(result => result.data.channels);
            if (ibcChannels) {
                return LcdChannelDto.bundleData(ibcChannels);
            } else {
                // Bug fix: message previously said "total_supply" (copy-paste from
                // another service); this endpoint returns channels.
                Logger.warn(
                    'api-error:',
                    'there is no result of channels from lcd',
                );
            }
        } catch (e) {
            Logger.warn(`api-error from ${ibcChannelsUrl} error`);
        }
    }

    /**
     * Resolve the destination chain id for a source-chain channel by querying
     * the channel's client state.
     * @param lcdAddr base URL of the LCD endpoint
     * @param clientStatePath path template containing \PORT and \CHANNEL placeholders
     * @param port IBC port id
     * @param channel IBC channel id
     * @returns the counterparty chain_id, or undefined on failure
     */
    static async getDcChainIdByScChannel(lcdAddr, clientStatePath: string, port: string, channel: string) {
        // Substitute the port/channel placeholders. (Fixed typo: rgexPort -> regexPort.)
        const regexPort = "\\PORT", regexChannel = "\\CHANNEL";
        clientStatePath = clientStatePath.replace(new RegExp(regexPort, "g"), port);
        clientStatePath = clientStatePath.replace(new RegExp(regexChannel, "g"), channel);
        const ibcClientStateUrl = `${lcdAddr}${clientStatePath}`;
        try {
            const ibcClientState: LcdChannelClientState = await new HttpService()
                .get(ibcClientStateUrl)
                .toPromise()
                .then(result => result.data);
            if (ibcClientState) {
                return ibcClientState.identified_client_state.client_state.chain_id;
            } else {
                // Bug fix: message previously said "total_supply" (copy-paste).
                Logger.warn(
                    'api-error:',
                    'there is no result of client_state from lcd',
                );
            }
        } catch (e) {
            Logger.warn(`api-error from ${ibcClientStateUrl} error`);
        }
    }

    /**
     * Look up the denom trace behind an IBC denom hash.
     * @param lcdAddr base URL of the LCD endpoint
     * @param hash IBC denom hash
     * @returns an LcdDenomDto, or undefined on failure
     */
    static async getDenomByLcdAndHash(lcdAddr, hash) {
        const ibcDenomTraceUrl = `${lcdAddr}/ibc/applications/transfer/v1beta1/denom_traces/${hash}`;
        try {
            const denom_trace: DenomType = await new HttpService()
                .get(ibcDenomTraceUrl)
                .toPromise()
                .then(result => result.data.denom_trace);
            if (denom_trace) {
                return new LcdDenomDto(denom_trace);
            } else {
                // Bug fix: message previously said "total_supply" (copy-paste).
                Logger.warn(
                    'api-error:',
                    'there is no result of denom_trace from lcd',
                );
            }
        } catch (e) {
            Logger.warn(`api-error from ${ibcDenomTraceUrl} error`);
        }
    }
}
|
def collaborate(a1, p1, a2, p2, cls, fcls, k):
    """Merge the two agents' partial results into one complete solution set.

    NOTE(review): the original implementation is an unfinished stub; it
    ignores every argument and returns an empty list. That behavior is
    preserved here until the real merge logic is written.
    """
    complete_solution_set = []
    return complete_solution_set
|
def first_letter_string(input_str):
    """Return a string built from the first character of each
    whitespace-separated word of ``input_str``."""
    return "".join(word[0] for word in input_str.split())


input_str = "This is a sample string"
print(first_letter_string(input_str))
|
#!/bin/sh
# Build and publish the orbit server image.
# Fail fast if any step (login, build, push) errors.
set -e
# Security fix: feed the registry password via stdin instead of `-p`, so it
# does not appear in `ps` output or shell history (docker itself warns
# against `-p`).
echo "$DOCKER_PASSWORD" | docker login "$DOCKER_BASE_URL" -u "$DOCKER_USERNAME" --password-stdin
docker build -t "${DOCKER_REPO}/orbit:${ORBIT_VERSION}" -f docker/server/Dockerfile .
docker push "${DOCKER_REPO}/orbit:${ORBIT_VERSION}"
|
#!/bin/sh -e
# Temporary script to regenerate the data and build the app.
# TODO(jontayler): retire it once gradle can do this or
# or we get rid of the data generation step.
# Strip the Google Play Services variant bits so the build is F-Droid friendly.
sed -i -e 's/gms/fdroid/' tools/build.gradle
sed -i -e '/gmsCompile/d' app/build.gradle
sed -i -e '/com.google.gms.google-services/d' app/build.gradle
# First build also installs the datagen tool distribution used below.
./gradlew clean assembleFdroid installDist
(cd tools
# Gah. Gradle gets the classpath for the tools wrong. Fix:
sed -i -e 's#CLASSPATH=#CLASSPATH=$APP_HOME/lib/:#g' build/install/datagen/bin/datagen
# Regenerate the app data, then pack it into binary form.
./generate.sh
./binary.sh)
# Final build picks up the freshly generated data.
./gradlew assembleFdroid
|
<!DOCTYPE html>
<html>
<head>
<title>Table of Contents</title>
<link rel="stylesheet" type="text/css" href="styles.css" />
</head>
<body>
<h1>Table of Contents</h1>
<ul>
<li><a href="#introduction">Introduction</a></li>
<li><a href="#methodology">Methodology</a></li>
<li><a href="#results">Results</a></li>
<li><a href="#discussion">Discussion</a></li>
</ul>
<!-- Anchor targets: the obsolete <a name="..."> element is replaced by id
     attributes on the headings themselves, the HTML5 way to define
     fragment targets. The #fragment links above keep working unchanged. -->
<h2 id="introduction">Introduction</h2>
<h2 id="methodology">Methodology</h2>
<h2 id="results">Results</h2>
<h2 id="discussion">Discussion</h2>
</body>
</html>
|
class Calculator:
    """Simple four-function arithmetic calculator."""

    def __init__(self):
        # Bug fix: the original __init__ body contained only a comment,
        # which is a SyntaxError in Python -- a suite needs at least one
        # statement. There is no state to initialize, so `pass` suffices.
        pass

    def add(self, x, y):
        """Return x + y."""
        return x + y

    def subtract(self, x, y):
        """Return x - y."""
        return x - y

    def multiply(self, x, y):
        """Return x * y."""
        return x * y

    def divide(self, x, y):
        """Return x / y. Raises ZeroDivisionError when y == 0."""
        return x / y
|
<filename>chest/net/common/src/main/java/net/community/chest/net/proto/ProtocolNetConnection.java
package net.community.chest.net.proto;
import java.io.IOException;
import net.community.chest.net.NetConnection;
/**
* Copyright 2007 as per GPLv2
*
* Represents a "well-known" protocol network connection
*
* @author <NAME>.
* @since Jun 28, 2007 2:02:47 PM
*/
public interface ProtocolNetConnection extends NetConnection {
/**
 * @return the default port for this protocol - used by {@link #connect(String)}
 * when no explicit port is supplied
 */
int getDefaultPort ();
/**
 * Connects to specified host on default protocol port
 * @param host name/IP address to which to connect to
 * @see #getDefaultPort()
 * @throws IOException if connection handling error
 */
void connect (String host) throws IOException;
}
|
<gh_stars>0
package com.alipay.api.response;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.open.app.community.userpromo.sync response.
*
* @author auto create
* @since 1.0, 2021-12-08 23:34:47
*/
public class AlipayOpenAppCommunityUserpromoSyncResponse extends AlipayResponse {
// Marker response type: this auto-generated API response declares no payload
// fields of its own, only the serialization version id.
private static final long serialVersionUID = 8418327671358724565L;
}
|
<filename>lib/endeavour/railtie.rb
# NOTE(review): `class Endeavour` (not `module`) - this reopens/defines
# Endeavour as a class; confirm that matches the main library definition.
class Endeavour
class Railtie < Rails::Railtie
# Run Endeavour's hook during Rails initialization so the gem is wired up
# automatically when the host app boots. (`hook!` is defined elsewhere in
# the gem - presumably it installs the instrumentation; verify there.)
initializer 'endeavour.hook' do
Endeavour.hook!
end
end
end
|
<gh_stars>0
package com.company;
public class Exercise_6_14 {
    public static void main(String[] args) {
        // Table heading.
        System.out.printf("%-6s%8s\n", "i", "m(i)");
        System.out.println("---------------");
        // Table body: i = 1, 101, 201, ..., 901.
        for (int i = 1; i <= 901; i += 100) {
            System.out.printf("%-6d%8.4f\n", i, m(i));
        }
    }

    /**
     * Leibniz series approximation of pi using the first {@code i} terms:
     * 4 * (1 - 1/3 + 1/5 - 1/7 + ...).
     */
    public static double m(int i) {
        double sum = 0;
        int n = 1;
        while (n <= i) {
            sum += Math.pow(-1, n + 1) / (2 * n - 1);
            n++;
        }
        return 4 * sum;
    }
}
|
<reponame>jeffreiffers/datafabrikken-portal<filename>src/components/theme-box/styled.ts<gh_stars>0
import styled, { css } from 'styled-components';
import { theme, Colour } from '../../entrypoints/main/app/theme';
// Media query breakpoint below which the mobile layout applies.
const onMobileView = '@media (max-width: 900px)';
// Props driving the checked/unchecked appearance of ThemeBox.
type Props = {
checked: boolean;
};
// Icon container; the nested svg is tinted with the dark blue theme colour.
const ThemeIcon = styled.div`
align-items: center;
display: flex;
flex: 0 0 20%;
margin-right: ${theme.spacing('S6')};
& > svg {
fill: ${theme.colour(Colour.BLUE, 'B20')};
}
`;
// Selectable box: colours invert when `checked`, with a hover shade only in
// the unchecked state, and the contained ThemeIcon's svg recoloured to match.
// NOTE(review): `clippath` inside Label below looks like a typo for
// `clip-path` - confirm before changing, since the literal is the runtime CSS.
const ThemeBox = styled.div<Props>`
background-color: ${({ checked }) =>
checked
? theme.colour(Colour.BLUE, 'B20')
: theme.colour(Colour.BLUE, 'B48')};
color: ${({ checked }) =>
checked
? theme.colour(Colour.NEUTRAL, 'N70')
: theme.colour(Colour.NEUTRAL, 'N0')};
flex: 0 0 calc(20% - 5px);
height: 60px;
transition: background-color 150ms ease;
&:hover {
${({ checked }) =>
!checked &&
css`
background-color: ${theme.colour(Colour.BLUE, 'B44')};
`}
}
${({ checked }) =>
checked &&
css`
& > label > ${ThemeIcon} {
& > svg {
fill: ${theme.colour(Colour.NEUTRAL, 'N70')};
& > path {
fill: ${theme.colour(Colour.NEUTRAL, 'N70')};
}
}
}
`}
${onMobileView} {
flex-direction: column;
}
`;
// Clickable label wrapping the box contents; the nested input is visually
// hidden (screen-reader accessible) using the standard off-screen technique.
const Label = styled.label`
align-items: center;
display: flex;
padding: 0.5em;
& > span {
margin-left: ${theme.spacing('S4')};
}
& > input {
border: 0;
clip: rect(0 0 0 0);
clippath: inset(50%);
height: 1px;
margin: -1px;
overflow: hidden;
padding: 0;
position: absolute;
white-space: nowrap;
width: 1px;
}
`;
export default { ThemeBox, Label, ThemeIcon };
|
(function(Auth) {
"use strict";
// Constructor
// Base class providing shared byte-level helpers for SMB hash-response
// authentication (DES key derivation, DES-ECB encryption, nonce creation).
var HashResponseBase = function() {
};
// Public functions
// This returns the result as array of Uint8Array.
// Splits `array` into consecutive 7-byte chunks (the final chunk may be
// shorter); subarray() means the chunks share the original buffer.
HashResponseBase.prototype.divide7BytesArray = function(array) {
var result = [];
var offset = 0;
while (offset < array.length) {
result.push(array.subarray(offset, (offset + 7)));
offset += 7;
}
return result;
};
/*jslint bitwise: true */
// Expands a 7-byte block into an 8-byte DES key: every 7 input bits become
// one output byte whose lowest bit is set to make the byte's parity odd.
// NOTE(review): appears to be the classic LM/NTLM 7->8-byte DES key
// expansion - verify against the protocol spec before relying on that name.
HashResponseBase.prototype.createDesKey = function(array) {
var result = [];
var temp = 0;
var tempPos = 2;
var parity = 0;
// Walk the input backwards, bit by bit, accumulating 7 bits per key byte.
for (var i = array.length - 1; i >= 0; i--) {
var target = array[i];
for (var j = 0; j < 8; j++) {
var bit = target & 1;
if (bit === 1) {
parity++;
}
temp = temp | (bit * tempPos);
tempPos *= 2;
if (tempPos === 256) {
// 7 data bits collected: set the parity bit if needed, emit the byte.
if (parity % 2 === 0) {
temp |= 1;
}
result.push(temp);
temp = 0;
tempPos = 2;
parity = 0;
}
target = target >> 1;
}
}
// Bytes were produced in reverse order; restore the original ordering.
return new Uint8Array(result.reverse());
};
// Encrypts `source` (a CryptoJS WordArray) with single-DES in ECB mode and
// no padding, using the raw `key` bytes.
HashResponseBase.prototype.encryptByDes = function(source, key) {
var keyWordArray = CryptoJS.lib.WordArray.create(key);
var resultWordArray = CryptoJS.DES.encrypt(source, keyWordArray, {
mode: CryptoJS.mode.ECB,
padding: CryptoJS.pad.NoPadding
});
// This returns the result as WordArray.
return resultWordArray.ciphertext;
};
// Produces a client nonce of `byteLength` random bytes.
HashResponseBase.prototype.createClientNonce = function(byteLength) {
return this.createRandomBytes(byteLength);
};
// Fills an ArrayBuffer of `byteLength` with pseudo-random 32-bit words.
// NOTE(review): Math.random() is not cryptographically secure; if these
// nonces matter for security, crypto.getRandomValues() should be preferred.
// NOTE(review): assumes byteLength is a multiple of 4 - setUint32 would
// throw past the buffer end otherwise; confirm callers guarantee this.
HashResponseBase.prototype.createRandomBytes = function(byteLength) {
var buffer = new ArrayBuffer(byteLength);
var view = new DataView(buffer);
var pos = 0;
while (pos < byteLength) {
var rand = Math.floor((Math.random() * 100000000000) + 1) % 0xffffffff;
view.setUint32(pos, rand, true);
pos += 4;
}
// This returns the result as ArrayBuffer.
return buffer;
//var rand = new Uint8Array([0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33, 0x44]);
//return rand.buffer;
};
// Export
Auth.HashResponseBase = HashResponseBase;
})(SmbClient.Auth);
|
<gh_stars>1-10
package org.softuni.exodia.repository;
import org.softuni.exodia.domain.entities.Document;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import org.springframework.validation.annotation.Validated;
import javax.persistence.Tuple;
import java.util.List;
import java.util.UUID;
@Validated
@Repository
public interface DocumentRepository extends JpaRepository<Document, UUID> {
// Projection query: fetches only id and title (as Tuples) instead of
// loading full Document entities - lightweight data for list views.
@Query("SELECT d.id as id, d.title as title FROM Document AS d")
List<Tuple> findAllShortView();
}
|
// Deducts `amount` from /users/{userId}/balance and returns the new balance.
// On any failure (insufficient funds, database error) the error MESSAGE
// STRING is returned instead of a number, so callers must type-check the
// result to distinguish success from failure.
async function deductFromBalance(userId, amount) {
try {
const userBalanceRef = projectDatabase.ref(`/users/${userId}/balance`);
// NOTE(review): this read-then-set sequence is not atomic - two concurrent
// deductions can both read the same balance and one update will be lost.
// Firebase's Reference.transaction() would close the race; confirm and fix.
const snapshot = await userBalanceRef.once('value');
const currentBalance = snapshot.val();
// NOTE(review): a missing balance node yields null here; `null < amount`
// is true for positive amounts, so unknown users are rejected - confirm
// that is the intended behavior.
if (currentBalance < amount) {
throw new Error("Insufficient balance");
}
const updatedBalance = currentBalance - amount;
await userBalanceRef.set(updatedBalance);
return updatedBalance;
} catch (error) {
// NOTE(review): returning the message string hides the error type from
// callers; consider rethrowing or returning a structured result.
return error.message;
}
}
|
import VDataTable from './components/v_data_table';
// Vue plugin that registers the VDataTable component globally.
const VDataTablePlugin = {
  install(Vue, options) {
    // `|| {}` (rather than a default parameter) deliberately also covers a
    // null options argument.
    const opts = options || {};
    // Register under the caller-supplied name, falling back to the
    // component's own name.
    const componentName = opts.name || VDataTable.name;
    Vue.component(componentName, VDataTable);
  }
};
// When loaded directly via a <script> tag, expose the plugin on window.
if (typeof window !== "undefined") {
  window.VDataTable = VDataTablePlugin;
}
export default VDataTablePlugin;
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -exu
# Two positional arguments are required: the Arrow VERSION and the TYPE of
# repository to verify (rc / release, optionally "staging-", or "local").
if [ $# -lt 2 ]; then
echo "Usage: $0 VERSION rc"
echo " $0 VERSION staging-rc"
echo " $0 VERSION release"
echo " $0 VERSION staging-release"
echo " $0 VERSION local"
echo " e.g.: $0 0.13.0 rc # Verify 0.13.0 RC"
echo " e.g.: $0 0.13.0 staging-rc # Verify 0.13.0 RC on staging"
echo " e.g.: $0 0.13.0 release # Verify 0.13.0"
echo " e.g.: $0 0.13.0 staging-release # Verify 0.13.0 on staging"
echo " e.g.: $0 0.13.0-dev20210203 local # Verify 0.13.0-dev20210203 on local"
exit 1
fi
VERSION="$1"
TYPE="$2"
# Location of locally built packages (inside the container) and the remote
# repository base URL.
local_prefix="/arrow/dev/tasks/linux-packages"
artifactory_base_url="https://apache.jfrog.io/artifactory/arrow"
# Detect the distribution from /etc/os-release (sourced in a subshell so the
# variables do not leak into this script).
distribution=$(. /etc/os-release && echo "${ID}")
distribution_version=$(. /etc/os-release && echo "${VERSION_ID}" | grep -o "^[0-9]*")
distribution_prefix="centos"
# Defaults assume a modern EL distribution; the case below narrows them.
cmake_package=cmake
cmake_command=cmake
have_flight=yes
have_gandiva=yes
have_glib=yes
have_parquet=yes
have_python=yes
install_command="dnf install -y --enablerepo=powertools"
# Per-distribution adjustments: older distros need cmake3, yum instead of
# dnf, and lack some Arrow components.
case "${distribution}-${distribution_version}" in
almalinux-*)
distribution_prefix="almalinux"
;;
amzn-2)
cmake_package=cmake3
cmake_command=cmake3
have_flight=no
have_gandiva=no
have_python=no
install_command="yum install -y"
distribution_prefix="amazon-linux"
amazon-linux-extras install epel -y
;;
centos-7)
cmake_package=cmake3
cmake_command=cmake3
have_flight=no
have_gandiva=no
install_command="yum install -y"
;;
esac
# Gandiva packages are not built for aarch64.
if [ "$(arch)" = "aarch64" ]; then
have_gandiva=no
fi
# "local" type: install the apache-arrow-release RPM from the locally built
# repository tree; otherwise fetch the latest release RPM from Artifactory.
if [ "${TYPE}" = "local" ]; then
# Derive the RPM package version string from the human version.
case "${VERSION}" in
*-dev*)
package_version="$(echo "${VERSION}" | sed -e 's/-dev\(.*\)$/-0.dev\1/g')"
;;
*-rc*)
package_version="$(echo "${VERSION}" | sed -e 's/-rc.*$//g')"
package_version+="-1"
;;
*)
package_version="${VERSION}-1"
;;
esac
release_path="${local_prefix}/yum/repositories"
# Append the distribution-specific release suffix and repository subpath.
case "${distribution}" in
almalinux)
package_version+=".el${distribution_version}"
release_path+="/almalinux"
;;
amzn)
package_version+=".${distribution}${distribution_version}"
release_path+="/amazon-linux"
amazon-linux-extras install -y epel
;;
*)
package_version+=".el${distribution_version}"
release_path+="/centos"
;;
esac
release_path+="/${distribution_version}/$(arch)/Packages"
release_path+="/apache-arrow-release-${package_version}.noarch.rpm"
${install_command} "${release_path}"
else
package_version="${VERSION}"
# rc / staging types live under a suffixed repository prefix.
case "${TYPE}" in
rc|staging-rc|staging-release)
suffix=${TYPE%-release}
distribution_prefix+="-${suffix}"
;;
esac
${install_command} \
${artifactory_base_url}/${distribution_prefix}/${distribution_version}/apache-arrow-release-latest.rpm
fi
# Point the installed Apache-Arrow.repo at the repository under test: the
# local file tree for "local", or the rc/staging URL suffix otherwise.
if [ "${TYPE}" = "local" ]; then
sed \
-i"" \
-e "s,baseurl=https://apache\.jfrog\.io/artifactory/arrow/,baseurl=file://${local_prefix}/yum/repositories/,g" \
/etc/yum.repos.d/Apache-Arrow.repo
# Use the local signing keys when provided.
keys="${local_prefix}/KEYS"
if [ -f "${keys}" ]; then
cp "${keys}" /etc/pki/rpm-gpg/RPM-GPG-KEY-Apache-Arrow
fi
else
case "${TYPE}" in
rc|staging-rc|staging-release)
suffix=${TYPE%-release}
sed \
-i"" \
-e "s,/almalinux/,/almalinux-${suffix}/,g" \
-e "s,/centos/,/centos-${suffix}/,g" \
-e "s,/amazon-linux/,/amazon-linux-${suffix}/,g" \
/etc/yum.repos.d/Apache-Arrow.repo
;;
esac
fi
# Core C++ package plus a toolchain, then smoke-test by building and running
# the minimal example against the installed packages.
${install_command} --enablerepo=epel arrow-devel-${package_version}
${install_command} \
${cmake_package} \
gcc-c++ \
git \
libarchive \
make
mkdir -p build
cp -a /arrow/cpp/examples/minimal_build build
pushd build/minimal_build
${cmake_command} .
make -j$(nproc)
./arrow_example
popd
# Optional components: install whichever variants this distribution builds.
if [ "${have_glib}" = "yes" ]; then
${install_command} --enablerepo=epel arrow-glib-devel-${package_version}
${install_command} --enablerepo=epel arrow-glib-doc-${package_version}
fi
if [ "${have_python}" = "yes" ]; then
${install_command} --enablerepo=epel arrow-python-devel-${package_version}
fi
if [ "${have_glib}" = "yes" ]; then
${install_command} --enablerepo=epel plasma-glib-devel-${package_version}
${install_command} --enablerepo=epel plasma-glib-doc-${package_version}
else
${install_command} --enablerepo=epel plasma-devel-${package_version}
fi
if [ "${have_flight}" = "yes" ]; then
${install_command} --enablerepo=epel arrow-flight-glib-devel-${package_version}
${install_command} --enablerepo=epel arrow-flight-glib-doc-${package_version}
fi
if [ "${have_gandiva}" = "yes" ]; then
if [ "${have_glib}" = "yes" ]; then
${install_command} --enablerepo=epel gandiva-glib-devel-${package_version}
${install_command} --enablerepo=epel gandiva-glib-doc-${package_version}
else
${install_command} --enablerepo=epel gandiva-devel-${package_version}
fi
fi
if [ "${have_parquet}" = "yes" ]; then
if [ "${have_glib}" = "yes" ]; then
${install_command} --enablerepo=epel parquet-glib-devel-${package_version}
${install_command} --enablerepo=epel parquet-glib-doc-${package_version}
else
${install_command} --enablerepo=epel parquet-devel-${package_version}
fi
fi
|
def is_palindrome(word):
    """Return True if ``word`` reads the same forwards and backwards."""
    # Comparing against the reversed copy covers every case the original
    # two-pointer scan did, including single characters and the empty string.
    return word == word[::-1]
|
<reponame>hmrc/amls<filename>test/models/enrolment/KnownFactsSpec.scala
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.enrolment
import generators.AmlsReferenceNumberGenerator
import models.{KnownFactsForService, KnownFact => GGKNownFact}
import org.scalatestplus.play.PlaySpec
import play.api.libs.json.Json
// Unit tests for the KnownFacts enrolment model: JSON serialization and
// conversion from the legacy Government Gateway KnownFactsForService shape.
class KnownFactsSpec extends PlaySpec with AmlsReferenceNumberGenerator {
"The model" must {
"serialize to the correct Json" in {
// Each KnownFact should become a {key, value} object under "verifiers".
val model = KnownFacts(Set(
KnownFact("Postcode", "TF2 6NU"),
KnownFact("NINO", "AB123456X")
))
val expectedJson = Json.obj(
"verifiers" -> Json.arr(
Json.obj("key" -> "Postcode", "value" -> "TF2 6NU"),
Json.obj("key" -> "NINO", "value" -> "AB123456X")
)
)
Json.toJson(model) mustBe expectedJson
}
"convert from legacy KnownFacts model" which {
"filters identifier MLRRefNumber" in {
// The legacy model carries the MLR reference as a fact; the converted
// model must drop it and keep only the remaining verifiers.
val legacyModel = KnownFactsForService(Seq(
GGKNownFact("MLRRefNumber", amlsRegistrationNumber),
GGKNownFact("Postcode", "TF2 6NU"),
GGKNownFact("NINO", "AB123456X")
))
val currentModel = KnownFacts(Set(
KnownFact("Postcode", "TF2 6NU"),
KnownFact("NINO", "AB123456X")
))
KnownFacts.conv(legacyModel) must be(currentModel)
}
}
}
}
|
#!/usr/bin/env bash
. "test/testlib.sh"
begin_test "fetch with good ref"
(
set -e
reponame="fetch-master-branch-required"
setup_remote_repo "$reponame"
clone_repo "$reponame" "$reponame"
git lfs track "*.dat"
echo "a" > a.dat
git add .gitattributes a.dat
git commit -m "add a.dat"
git push origin master
# $ echo "a" | shasum -a 256
oid="87428fc522803d31065e7bce3cf03fe475096631e5e07bbd7a0fde60c4cf25c7"
assert_local_object "$oid" 2
assert_server_object "$reponame" "$oid" "refs/heads/master"
rm -rf .git/lfs/objects
git lfs fetch --all
assert_local_object "$oid" 2
)
end_test
begin_test "fetch with tracked ref"
(
set -e
reponame="fetch-tracked-branch-required"
setup_remote_repo "$reponame"
clone_repo "$reponame" "$reponame"
git lfs track "*.dat"
echo "a" > a.dat
git add .gitattributes a.dat
git commit -m "add a.dat"
git push origin master:tracked
# $ echo "a" | shasum -a 256
oid="87428fc522803d31065e7bce3cf03fe475096631e5e07bbd7a0fde60c4cf25c7"
assert_local_object "$oid" 2
assert_server_object "$reponame" "$oid" "refs/heads/tracked"
rm -rf .git/lfs/objects
git config push.default upstream
git config branch.master.merge refs/heads/tracked
git lfs fetch --all
assert_local_object "$oid" 2
)
end_test
begin_test "fetch with bad ref"
(
set -e
reponame="fetch-other-branch-required"
setup_remote_repo "$reponame"
clone_repo "$reponame" "$reponame"
git lfs track "*.dat"
echo "a" > a.dat
git add .gitattributes a.dat
git commit -m "add a.dat"
git push origin master:other
# $ echo "a" | shasum -a 256
oid="87428fc522803d31065e7bce3cf03fe475096631e5e07bbd7a0fde60c4cf25c7"
assert_local_object "$oid" 2
assert_server_object "$reponame" "$oid" "refs/heads/other"
rm -rf .git/lfs/objects
GIT_CURL_VERBOSE=1 git lfs fetch --all 2>&1 | tee fetch.log
if [ "0" -eq "${PIPESTATUS[0]}" ]; then
echo >&2 "fatal: expected 'git lfs fetch' to fail"
exit 1
fi
grep 'Expected ref "refs/heads/other", got "refs/heads/master"' fetch.log
)
end_test
|
<reponame>rainrambler/PoemStar<gh_stars>1-10
package poemstar;
import java.util.List;
import poemstar.beans.ChineseDynasty;
import poemstar.beans.Poem;
import poemstar.fileio.PoemsDBManager;
/**
*
* @author xinway
*/
public class ModifyPoemJDialog extends javax.swing.JDialog {
/**
* Creates new form ModifyPoemJDialog
* @param parent
* @param modal
*/
public ModifyPoemJDialog(java.awt.Frame parent, boolean modal) {
super(parent, modal);
initComponents();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabelDynasty = new javax.swing.JLabel();
jTextFieldAuthor = new javax.swing.JTextField();
jLabelAuthor = new javax.swing.JLabel();
jTextFieldTitle = new javax.swing.JTextField();
jLabelTitle = new javax.swing.JLabel();
jButtonQuery = new javax.swing.JButton();
jButtonModify = new javax.swing.JButton();
jScrollPane1 = new javax.swing.JScrollPane();
jTextAreaContent = new javax.swing.JTextArea();
jLabelLog = new javax.swing.JLabel();
jButtonAddPoem = new javax.swing.JButton();
jButtonDelPoem = new javax.swing.JButton();
jComboBoxDynasty = new javax.swing.JComboBox();
setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
addWindowListener(new java.awt.event.WindowAdapter() {
public void windowOpened(java.awt.event.WindowEvent evt) {
formWindowOpened(evt);
}
});
jLabelDynasty.setText("年代:");
jLabelAuthor.setText("作者:");
jLabelTitle.setText("标题:");
jButtonQuery.setText("查询");
jButtonQuery.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonQueryActionPerformed(evt);
}
});
jButtonModify.setText("修改");
jButtonModify.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonModifyActionPerformed(evt);
}
});
jTextAreaContent.setColumns(20);
jTextAreaContent.setRows(5);
jScrollPane1.setViewportView(jTextAreaContent);
jButtonAddPoem.setText("添加");
jButtonAddPoem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonAddPoemActionPerformed(evt);
}
});
jButtonDelPoem.setText("删除");
jButtonDelPoem.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButtonDelPoemActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(jLabelLog, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(jScrollPane1)
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
.addComponent(jLabelDynasty)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jComboBoxDynasty, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(73, 73, 73)
.addComponent(jLabelAuthor)
.addGap(18, 18, 18)
.addComponent(jTextFieldAuthor, javax.swing.GroupLayout.PREFERRED_SIZE, 117, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
.addComponent(jLabelTitle)
.addGap(18, 18, 18)
.addComponent(jTextFieldTitle)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jButtonQuery, javax.swing.GroupLayout.PREFERRED_SIZE, 79, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonModify, javax.swing.GroupLayout.PREFERRED_SIZE, 79, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonAddPoem, javax.swing.GroupLayout.PREFERRED_SIZE, 79, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonDelPoem, javax.swing.GroupLayout.PREFERRED_SIZE, 79, javax.swing.GroupLayout.PREFERRED_SIZE))))
.addContainerGap(48, Short.MAX_VALUE))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabelDynasty)
.addComponent(jLabelAuthor)
.addComponent(jTextFieldAuthor, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jComboBoxDynasty, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabelTitle)
.addComponent(jTextFieldTitle, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jButtonQuery))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(jButtonModify)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jButtonAddPoem)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jButtonDelPoem))
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 346, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabelLog)
.addContainerGap(26, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private String getSelectedDynasty() {
return jComboBoxDynasty.getSelectedItem().toString();
}
/**
* Query a poem (All keywords should be matched exactly)
* @param evt
*/
private void jButtonQueryActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonQueryActionPerformed
String dynasty = getSelectedDynasty();
String author = jTextFieldAuthor.getText();
String title = jTextFieldTitle.getText();
Poem p = PoemsDBManager.INSTANCE.findPoem(dynasty, author, title);
if (p == null) {
jLabelLog.setText("Can not find poem.");
return;
}
jTextAreaContent.setText(p.getContent());
}//GEN-LAST:event_jButtonQueryActionPerformed
private void jButtonModifyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonModifyActionPerformed
String dynasty = getSelectedDynasty();
String author = jTextFieldAuthor.getText();
String title = jTextFieldTitle.getText();
String content = jTextAreaContent.getText();
boolean result = PoemsDBManager.INSTANCE.modifyPoemContent(dynasty, author, title, content);
if (result) {
jLabelLog.setText("Modify poem complete.");
}
else {
jLabelLog.setText("Modify poem failure.");
}
}//GEN-LAST:event_jButtonModifyActionPerformed
private void jButtonAddPoemActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonAddPoemActionPerformed
String dynasty = getSelectedDynasty();
String author = jTextFieldAuthor.getText();
String title = jTextFieldTitle.getText();
String content = jTextAreaContent.getText();
String res = PoemsDBManager.INSTANCE.addPoemContent(dynasty, author, title, content);
jLabelLog.setText(res);
}//GEN-LAST:event_jButtonAddPoemActionPerformed
private void jButtonDelPoemActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButtonDelPoemActionPerformed
String dynasty = getSelectedDynasty();
String author = jTextFieldAuthor.getText();
String title = jTextFieldTitle.getText();
boolean res = PoemsDBManager.INSTANCE.deletePoem(dynasty, author, title);
if (res) {
jLabelLog.setText("Delete complete.");
}
else {
jLabelLog.setText("Cannot find. Nothing to delete.");
}
}//GEN-LAST:event_jButtonDelPoemActionPerformed
private void formWindowOpened(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowOpened
List<String> alldynasty = ChineseDynasty.INSTANCE.getAll();
jComboBoxDynasty.removeAllItems();
for (String s : alldynasty) {
jComboBoxDynasty.addItem(s);
}
}//GEN-LAST:event_formWindowOpened
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton jButtonAddPoem;
private javax.swing.JButton jButtonDelPoem;
private javax.swing.JButton jButtonModify;
private javax.swing.JButton jButtonQuery;
private javax.swing.JComboBox jComboBoxDynasty;
private javax.swing.JLabel jLabelAuthor;
private javax.swing.JLabel jLabelDynasty;
private javax.swing.JLabel jLabelLog;
private javax.swing.JLabel jLabelTitle;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTextArea jTextAreaContent;
private javax.swing.JTextField jTextFieldAuthor;
private javax.swing.JTextField jTextFieldTitle;
// End of variables declaration//GEN-END:variables
}
|
<table>
<tr>
<th>Column 1</th>
<th>Column 2</th>
</tr>
<tr>
<td>row 1, cell 1</td>
<td>row 1, cell 2</td>
</tr>
<tr>
<td>row 2, cell 1</td>
<td>row 2, cell 2</td>
</tr>
<tr>
<td>row 3, cell 1</td>
<td>row 3, cell 2</td>
</tr>
</table>
|
#!/bin/bash
# Ground-up-loss (GUL) pipeline runner over 20 event partitions.
#
# Improvement over the original: the 20x copy-pasted mkfifo / eltcalc / tee /
# summarycalc / eve lines are replaced with loops and pid arrays. The wiring
# is unchanged: for each partition P,
#   eve P | getmodel | gulcalc  -> fifo/gul_P
#   summarycalc reads fifo/gul_P -> fifo/gul_S1_summary_P
#   tee splits the summary into fifo/gul_S1_eltcalc_P
#   eltcalc reads that fifo      -> work/kat/gul_S1_eltcalc_P
SCRIPT=$(readlink -f "$0") && cd "$(dirname "$SCRIPT")"

# --- Script Init ---

set -e
set -o pipefail
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---

find output/* ! -name '*summary-info*' -exec rm -R -f {} +

rm -R -f fifo/*
rm -R -f work/*
mkdir work/kat/

# Create the per-partition FIFOs.
for p in $(seq 1 20); do
    mkfifo "fifo/gul_P${p}"
    mkfifo "fifo/gul_S1_summary_P${p}"
    mkfifo "fifo/gul_S1_eltcalc_P${p}"
done

# --- Do ground up loss computes ---

pids=()
# Partition 1 runs eltcalc without -s; partitions 2..20 use -s, exactly as
# in the original generated script (so only one output carries the header).
eltcalc < fifo/gul_S1_eltcalc_P1 > work/kat/gul_S1_eltcalc_P1 & pids+=($!)
for p in $(seq 2 20); do
    eltcalc -s < "fifo/gul_S1_eltcalc_P${p}" > "work/kat/gul_S1_eltcalc_P${p}" & pids+=($!)
done

# Fan each summary stream out to its eltcalc FIFO.
for p in $(seq 1 20); do
    tee < "fifo/gul_S1_summary_P${p}" "fifo/gul_S1_eltcalc_P${p}" > /dev/null & pids+=($!)
done

# Summary calculations; these terminate when their input FIFOs close, so
# (as in the original) only the eltcalc/tee pids are waited on explicitly.
for p in $(seq 1 20); do
    summarycalc -g -1 "fifo/gul_S1_summary_P${p}" < "fifo/gul_P${p}" &
done

# Event generation -> model -> gulcalc producers.
for p in $(seq 1 20); do
    eve "$p" 20 | getmodel | gulcalc -S100 -L100 -r -c - > "fifo/gul_P${p}" &
done

wait "${pids[@]}"

# --- Do ground up loss kats ---

kat_files=()
for p in $(seq 1 20); do
    kat_files+=("work/kat/gul_S1_eltcalc_P${p}")
done
kat "${kat_files[@]}" > output/gul_S1_eltcalc.csv & kpid1=$!
wait $kpid1

rm -R -f work/*
rm -R -f fifo/*
|
#!/usr/bin/env bash
# Vagrant provisioning script: installs and configures PHP 7.1, NGINX,
# MySQL 5.7 and memcached, then bootstraps composer, for a Yii2 app.
source /app/vagrant/provision/common.sh

#== Import script args ==

# Fix: the original used `timezone=$(echo "$1")` — a useless echo; a plain
# (quoted) assignment is equivalent and does not mangle the value.
timezone="$1"

#== Provision script ==

info "Provision-script user: $(whoami)"

export DEBIAN_FRONTEND=noninteractive

info "Configure timezone"
timedatectl set-timezone "${timezone}" --no-ask-password

info "Prepare root password for MySQL"
debconf-set-selections <<< "mysql-community-server mysql-community-server/root-pass password \"''\""
debconf-set-selections <<< "mysql-community-server mysql-community-server/re-root-pass password \"''\""
echo "Done!"

info "Add PHP 7.1 repository"
add-apt-repository ppa:ondrej/php -y

info "Update OS software"
apt-get update
apt-get upgrade -y

info "Install additional software"
apt-get install -y php7.1-curl php7.1-cli php7.1-intl php7.1-mysqlnd php7.1-gd php7.1-fpm php7.1-mbstring php7.1-xml unzip nginx mysql-server-5.7 php.xdebug php7.1-memcached memcached

info "Configure MySQL"
sed -i "s/.*bind-address.*/bind-address = 0.0.0.0/" /etc/mysql/mysql.conf.d/mysqld.cnf
mysql -uroot <<< "CREATE USER 'root'@'%' IDENTIFIED BY ''"
mysql -uroot <<< "GRANT ALL PRIVILEGES ON *.* TO 'root'@'%'"
mysql -uroot <<< "DROP USER 'root'@'localhost'"
mysql -uroot <<< "FLUSH PRIVILEGES"
echo "Done!"

info "Configure PHP-FPM"
# Run the FPM pool as the vagrant user so it can read the synced folder.
sed -i 's/user = www-data/user = vagrant/g' /etc/php/7.1/fpm/pool.d/www.conf
sed -i 's/group = www-data/group = vagrant/g' /etc/php/7.1/fpm/pool.d/www.conf
sed -i 's/owner = www-data/owner = vagrant/g' /etc/php/7.1/fpm/pool.d/www.conf
cat << EOF > /etc/php/7.1/mods-available/xdebug.ini
zend_extension=xdebug.so
xdebug.remote_enable=1
xdebug.remote_connect_back=1
xdebug.remote_port=9000
xdebug.remote_autostart=1
EOF
echo "Done!"

info "Configure NGINX"
sed -i 's/user www-data/user vagrant/g' /etc/nginx/nginx.conf
echo "Done!"

info "Enabling site configuration"
# NOTE(review): `ln -s` fails if the link already exists, so re-provisioning
# logs an error here; consider `ln -sf` if idempotency is desired.
ln -s /app/vagrant/nginx/app.conf /etc/nginx/sites-enabled/app.conf
echo "Done!"

info "Initialize databases for MySQL"
mysql -uroot <<< "CREATE DATABASE yii2advanced"
mysql -uroot <<< "CREATE DATABASE yii2advanced_test"
echo "Done!"

info "Install composer"
curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer
|
#!/bin/bash
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lab bootstrap: creates an App Engine/Datastore instance, a Python
# virtualenv, installs requirements and seeds Datastore entities.

echo "Creating Datastore/App Engine instance"
gcloud app create --region "us-central"

echo "Exporting GCLOUD_PROJECT"
export GCLOUD_PROJECT=$DEVSHELL_PROJECT_ID

echo "Creating virtual environment"
# Fix: plain `mkdir` errors out if the directory already exists (e.g. when
# the script is re-run); -p makes this step idempotent.
mkdir -p ~/venvs
virtualenv ~/venvs/developingapps
source ~/venvs/developingapps/bin/activate

echo "Installing Python libraries"
pip install --upgrade pip
pip install -r requirements.txt

echo "Creating Datastore entities"
python add_entities.py

echo "Project ID: $DEVSHELL_PROJECT_ID"
|
<filename>buyer/buyer_test.go
package buyer
import (
"encoding/json"
"github.com/sigtot/sanntid/mac"
"github.com/sigtot/sanntid/pubsub"
"github.com/sigtot/sanntid/types"
"testing"
"time"
)
// MockPriceCalculator is a stub price calculator for TestBuyer: it returns
// the same fixed price for every call, keeping the test deterministic.
type MockPriceCalculator struct{}

// GetPrice implements the price-calculator dependency passed to StartBuying.
func (pc *MockPriceCalculator) GetPrice(call types.Call) int {
	return 2
}
// TestBuyer publishes a for-sale call, then a matching sold-to message
// addressed to this elevator's MAC, and verifies that the buyer emits the
// corresponding order on newOrders within the timeout.
func TestBuyer(t *testing.T) {
	forSalePubChan := pubsub.StartPublisher(pubsub.SalesDiscoveryPort)
	soldToPubChan := pubsub.StartPublisher(pubsub.SoldToDiscoveryPort)
	elevatorID, err := mac.GetMacAddr()
	if err != nil {
		t.Fatalf("Could not get mac addr %s\n", err.Error())
	}
	priceCalc := MockPriceCalculator{}
	newOrders := make(chan types.Order)
	StartBuying(&priceCalc, newOrders)

	// Sell call
	call := types.Call{Type: types.Hall, Floor: 3, Dir: types.Down, ElevatorID: ""}
	callJson, err := json.Marshal(call)
	if err != nil {
		t.Fatalf("Could not marshal call %s\n", err.Error())
	}
	forSalePubChan <- callJson
	time.Sleep(20 * time.Millisecond)

	// Send sold to
	soldTo := types.SoldTo{Bid: types.Bid{
		Call:       call,
		Price:      priceCalc.GetPrice(call),
		ElevatorID: elevatorID,
	}}
	soldToJson, err := json.Marshal(soldTo)
	if err != nil {
		t.Fatalf("Could not marshal soldTo %s\n", err.Error())
	}
	soldToPubChan <- soldToJson
	time.Sleep(20 * time.Millisecond)

	select {
	case newOrder := <-newOrders:
		want := types.Order{Call: call}
		// Bug fix: the original failure message printed the *expected*
		// order instead of the one actually received; report both.
		if newOrder != want {
			t.Fatalf("Bad order received: got %+v, want %+v\n", newOrder, want)
		}
	case <-time.After(20 * time.Millisecond):
		t.Fatal("Timed out waiting for bought order")
	}
}
|
# Demonstrates substring membership checks with Python's `in` operator.
print('Pythonの覚書:文字存在チェック | 指定文字が含まれるか')

# (needle, haystack) pairs; the loop prints True/False for each, in the
# same order as the original three standalone checks.
_CASES = [
    ('p', 'python'),
    ('大猫', '臆病な大猫に目を合わせてはいけません'),
    ('犬', '臆病な大猫に目を合わせてはいけません'),
]
for needle, haystack in _CASES:
    res = needle in haystack
    print(res)
|
// ChartRateFactory class
// Builds ChartRate values for the presenter; currently a stub that returns
// an empty ChartRate.
class ChartRateFactory {
    func createChartRate() -> ChartRate {
        // Implement chart rate creation logic here
        return ChartRate() // Replace with actual chart rate creation
    }
}
// ChartInteractor class
// Fetches chart data via RateManager and reports it to its delegate.
class ChartInteractor {
    // Weak to avoid a retain cycle with the presenter that owns this interactor.
    weak var delegate: ChartInteractorDelegate?
    private let rateManager: RateManager
    // Stored but not used in the visible code — presumably for selecting the
    // chart period/type later; confirm before removing.
    private let chartTypeStorage: ChartTypeStorage

    init(rateManager: RateManager, chartTypeStorage: ChartTypeStorage) {
        self.rateManager = rateManager
        self.chartTypeStorage = chartTypeStorage
    }

    // Retrieves chart data for the coin/currency pair (synchronously, per the
    // visible RateManager API) and forwards it to the delegate.
    func retrieveChartData(for coin: Coin, in currency: Currency) {
        // Implement data retrieval logic using rateManager and chartTypeStorage
        let chartData = rateManager.getChartData(for: coin, in: currency)
        delegate?.didRetrieveChartData(chartData)
    }
}

// Receives chart data once the interactor has retrieved it.
protocol ChartInteractorDelegate: AnyObject {
    func didRetrieveChartData(_ data: ChartData)
}
// ChartPresenter class
// Mediates between the interactor (data) and the view (display): triggers a
// data fetch on viewDidLoad and converts results for the view.
class ChartPresenter: ChartInteractorDelegate {
    // Weak back-reference to the view to avoid a retain cycle.
    weak var view: ChartView?
    private let interactor: ChartInteractor
    private let factory: ChartRateFactory
    private let coin: Coin
    private let currency: Currency

    init(interactor: ChartInteractor, factory: ChartRateFactory, coin: Coin, currency: Currency) {
        self.interactor = interactor
        self.factory = factory
        self.coin = coin
        self.currency = currency
    }

    // Called by the view when it loads; kicks off the data retrieval.
    func viewDidLoad() {
        interactor.retrieveChartData(for: coin, in: currency)
    }

    // ChartInteractorDelegate: builds a ChartRate via the factory and hands
    // both the rate and the raw data to the view for rendering.
    func didRetrieveChartData(_ data: ChartData) {
        let chartRate = factory.createChartRate()
        view?.displayChart(with: chartRate, data: data)
    }
}
// ChartViewController class
// UIViewController that renders chart data supplied by its presenter.
class ChartViewController: UIViewController, ChartView {
    // NOTE(review): this reference is weak while the presenter also holds the
    // view weakly — confirm some other object owns the presenter, otherwise
    // it is deallocated immediately after construction.
    weak var delegate: ChartPresenter?

    init(delegate: ChartPresenter) {
        self.delegate = delegate
        super.init(nibName: nil, bundle: nil)
    }

    // Bug fix: a UIViewController subclass that declares a designated
    // initializer must also provide the required coder initializer, or the
    // class fails to compile. Storyboard instantiation is unsupported here.
    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // ChartView: renders the chart. Stub in the visible code.
    func displayChart(with chartRate: ChartRate, data: ChartData) {
        // Implement chart display logic using chartRate and data
    }
}
// View abstraction implemented by ChartViewController.
protocol ChartView: AnyObject {
    func displayChart(with chartRate: ChartRate, data: ChartData)
}

// Other necessary classes and protocols
// Placeholder domain types — empty stubs in the visible code.
class Coin {}
class Currency {}

class RateManager {
    // Returns chart data for the given coin/currency pair; stub implementation.
    func getChartData(for coin: Coin, in currency: Currency) -> ChartData {
        // Implement data retrieval logic from rate manager
        return ChartData() // Replace with actual data retrieval
    }
}

class ChartTypeStorage {}
class ChartRate {}
class ChartData {}
|
#!/bin/sh
# CocoaPods-generated "embed frameworks" build phase script: copies each pod
# framework into the app bundle, strips architectures the target does not
# support, and re-signs the result when code signing is enabled.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Copies a framework into the app's Frameworks folder, resolving symlinks,
# stripping invalid architectures and re-signing as needed.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync --delete -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies the dSYM of a vendored framework
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    echo "rsync --delete -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
    rsync --delete -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
  fi
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Sign in the background when parallel signing is enabled.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}

# Embed the pods used by each build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Kingfisher/Kingfisher.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
  install_framework "$BUILT_PRODUCTS_DIR/Kingfisher/Kingfisher.framework"
fi

# Wait for any backgrounded codesign jobs started above.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
package com.went.core.utils;
import java.io.Serializable;
import java.util.Map;
import static com.went.core.constants.BaseConstants.STATUS_SUCCESS;
/**
 * Result wrapper returned by the service layer: carries a status code, a
 * human-readable message, the payload and optional extra attributes.
 * (Javadoc translated from the original Chinese.)
 *
 * Create By HCL at 2017/7/31
 */
public class ServerResult<T> implements Serializable {
    private static final long serialVersionUID = 812376774103405857L;
    // Status code; defaults to success.
    private int state = STATUS_SUCCESS;
    // Human-readable message accompanying the state.
    private String message = "";
    // Payload of the result.
    private T data;
    // Optional extra attributes; raw Map in the original API.
    private Map extra;

    /**
     * No-arg constructor, required for JSON (de)serialization.
     *
     * Create By HCL at 2017/8/7
     */
    public ServerResult() {
    }

    /**
     * Full constructor.
     *
     * @param data    payload
     * @param state   status code
     * @param message message text
     */
    public ServerResult(T data, int state, String message) {
        this.state = state;
        this.message = message;
        this.data = data;
    }

    public int getState() {
        return state;
    }

    public void setState(int state) {
        this.state = state;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public T getData() {
        return data;
    }

    public void setData(T data) {
        this.data = data;
    }

    public Map getExtra() {
        return extra;
    }

    public void setExtra(Map extra) {
        this.extra = extra;
    }

    /**
     * Builds a data-less result carrying only a state and message.
     * NOTE(review): method name breaks the lowerCamelCase convention and the
     * raw type drops generics — kept as-is because callers depend on it.
     */
    public static ServerResult Msg(int state, String message){
        return new ServerResult(null, state,message);
    }

//    public PageResult<Map<String, Object>> selectPage(int pageNum,int pageSize){
//        Page<Map<String,Object>> pageTask = PageHelper.startPage(pageNum, pageSize);
//        List<Map<String, Object>> result = data;
//        PageInfo pageInfo = new PageInfo(result);
//        PageResult<Map<String, Object>> pageResult = new PageResult<>(result);
//        pageResult.setTotal(pageInfo.getTotal());
//        pageResult.setPageNum(pageInfo.getPageNum());
//        pageResult.setPageSize(pageInfo.getPageSize());
//        return pageResult;
//    }
}
|
#!/usr/local/bin/ksh93 -p
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# Copyright 2007 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
# ident "@(#)xattr_006_pos.ksh 1.1 07/02/06 SMI"
#
# $FreeBSD$
. $STF_SUITE/include/libtest.kshlib
. $STF_SUITE/tests/xattr/xattr_common.kshlib
################################################################################
#
# __stc_assertion_start
#
# ID: xattr_006_pos
#
# DESCRIPTION:
# Xattrs present on a file in a snapshot should be visible.
#
# STRATEGY:
# 1. Create a file and give it an xattr
# 2. Take a snapshot of the filesystem
# 3. Verify that we can take a snapshot of it.
#
# TESTABILITY: explicit
#
# TEST_AUTOMATION_LEVEL: automated
#
# CODING_STATUS: COMPLETED (2006-12-05)
#
# __stc_assertion_end
#
################################################################################
# Remove the snapshot and the test file created by this test case.
function cleanup {
	log_must $ZFS destroy $TESTPOOL/$TESTFS@snap
	log_must $RM $TESTDIR/myfile.${TESTCASE_ID}
}

log_assert "read xattr on a snapshot"
log_onexit cleanup

# create a file, and an xattr on it
log_must $TOUCH $TESTDIR/myfile.${TESTCASE_ID}
create_xattr $TESTDIR/myfile.${TESTCASE_ID} passwd /etc/passwd

# snapshot the filesystem
log_must $ZFS snapshot $TESTPOOL/$TESTFS@snap

# check for the xattr on the snapshot (via the .zfs snapshot directory)
verify_xattr $TESTDIR/$(get_snapdir_name)/snap/myfile.${TESTCASE_ID} passwd /etc/passwd

log_pass "read xattr on a snapshot"
|
<gh_stars>1-10
# frozen_string_literal: true

# FactoryBot factory for Region records; each build gets a unique,
# sequence-numbered name.
FactoryBot.define do
  factory :region do
    sequence(:name) { |n| "Test Region #{n}" }
  end
end
|
#!/bin/sh
# Package-staging script: installs the housekeeper tool into $PACKAGE_LIB
# together with a daily cron entry that runs it.
# Robustness fix: all paths built from $PACKAGE_LIB are now quoted so the
# script also works when the staging directory contains spaces.
mkdir -p "$PACKAGE_LIB/usr/bin"
mkdir -p "$PACKAGE_LIB/etc/cron.daily"
mkdir -p "$PACKAGE_LIB/etc/housekeeper"

cp housekeeper.py "$PACKAGE_LIB/usr/bin/housekeeper"
# Normalize line endings so the shebang works on the target system.
dos2unix "$PACKAGE_LIB/usr/bin/housekeeper"
chmod a+x "$PACKAGE_LIB/usr/bin/housekeeper"

# Daily cron job invoking housekeeper with its -s -r flags (semantics are
# defined by housekeeper.py itself — confirm there if changing).
cat > "$PACKAGE_LIB/etc/cron.daily/housekeeper" <<EOF
#!/bin/bash
housekeeper -s -r
EOF
chmod a+x "$PACKAGE_LIB/etc/cron.daily/housekeeper"
|
<gh_stars>10-100
'use strict';

// Jest configuration: run in a Node environment, with global setup/teardown
// scripts and lcov + text-summary coverage collected from lib/.
const coverageSettings = {
  coverageReporters: ['lcov', 'text-summary'],
  collectCoverageFrom: ['lib/**/*.js'],
};

const config = Object.assign(
  {
    testEnvironment: 'node',
    globalSetup: './test/setup.js',
    globalTeardown: './test/teardown.js',
  },
  coverageSettings
);

module.exports = config;
|
def forward(self, inputs, hidden_state):
    """Flatten the leading two axes of `inputs` and apply fc1 + ReLU.

    Args:
        inputs: 3-D tensor; the ``b, a, e = inputs.size()`` unpacking below
            fixes its rank to (b, a, e) — presumably batch, agents, features
            (TODO confirm axis meanings against the caller).
        hidden_state: recurrent state; unused in the visible code.

    Returns:
        Tensor of shape (b*a, h) per the inline comment, where h is fc1's
        output width — activated in place by ReLU.
    """
    # Extracting dimensions from the input data
    b, a, e = inputs.size()
    # Applying transformation to the input data
    # inplace=True mutates fc1's output buffer; safe only because it is not
    # reused elsewhere.
    transformed_input = F.relu(self.fc1(inputs.view(-1, e)), inplace=True)  # (b*a, e) --> (b*a, h)
    # Additional processing using the hidden state (if required)
    # ...
    return transformed_input
|
<reponame>nortal/spring-mvc-component-web
package com.nortal.spring.cw.core.web.helper;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Collection;
import java.util.Date;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.Range;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import com.nortal.spring.cw.core.model.FileHolderModel;
import com.nortal.spring.cw.core.model.LangModel;
import com.nortal.spring.cw.core.web.annotation.component.BooleanField;
import com.nortal.spring.cw.core.web.annotation.component.DateTimeField;
import com.nortal.spring.cw.core.web.annotation.component.DoubleField;
import com.nortal.spring.cw.core.web.annotation.component.IntegerCollectionField;
import com.nortal.spring.cw.core.web.annotation.component.IntegerField;
import com.nortal.spring.cw.core.web.annotation.component.LongCollectionField;
import com.nortal.spring.cw.core.web.annotation.component.LongField;
import com.nortal.spring.cw.core.web.annotation.component.StringCollectionField;
import com.nortal.spring.cw.core.web.annotation.component.StringField;
import com.nortal.spring.cw.core.web.component.element.AbstractBaseElement;
import com.nortal.spring.cw.core.web.component.element.FormElement;
import com.nortal.spring.cw.core.web.component.multiple.FileCollectionElement;
import com.nortal.spring.cw.core.web.component.multiple.IntegerCollectionElement;
import com.nortal.spring.cw.core.web.component.multiple.LanguageElement;
import com.nortal.spring.cw.core.web.component.multiple.LongCollectionElement;
import com.nortal.spring.cw.core.web.component.multiple.StringCollectionElement;
import com.nortal.spring.cw.core.web.component.single.BooleanElement;
import com.nortal.spring.cw.core.web.component.single.DateTimeElement;
import com.nortal.spring.cw.core.web.component.single.DoubleElement;
import com.nortal.spring.cw.core.web.component.single.FileElement;
import com.nortal.spring.cw.core.web.component.single.IntegerElement;
import com.nortal.spring.cw.core.web.component.single.LongElement;
import com.nortal.spring.cw.core.web.component.single.StringElement;
import com.nortal.spring.cw.core.web.exception.FieldNotFoundException;
import com.nortal.spring.cw.core.web.util.BeanUtil;
/**
* Klass koondab endas erinevaid vahendeid vormi väljadega manipuleerimiseks
*
* @author <NAME>
*/
public final class FieldHelper {
/** Utility class — private constructor prevents instantiation. */
private FieldHelper() {
    super();
}
/**
 * Creates a form element for the field identified by {@code elementFieldPath}
 * on the given class. If the element cannot be created — which generally
 * means the field's type is not supported — a runtime exception is thrown.
 * (Translated from the original Estonian javadoc.)
 *
 * @param elementFieldPath path of the field to build an element for
 * @param objClass class declaring the field
 * @return the created form element
 */
public static <T extends FormElement> T createElement(Class<?> objClass, String elementFieldPath) {
    return createElement(BeanUtil.getMethodByFieldPath(objClass, elementFieldPath), elementFieldPath);
}

/**
 * Creates a form element from the given getter method. Annotation-driven
 * creation is tried first, then a fallback based on the return type; if both
 * fail, a {@link FieldNotFoundException} is thrown.
 * (Translated from the original Estonian javadoc.)
 *
 * @param method getter method backing the field
 * @param elementFieldPath path of the field
 * @return the created form element
 */
public static <T extends FormElement> T createElement(Method method, String elementFieldPath) {
    T element = createElementByAnnotation(method, elementFieldPath);
    if (element == null) {
        element = createElementByType(method, elementFieldPath);
    }
    if (element == null) {
        throw new FieldNotFoundException(elementFieldPath);
    }
    return element;
}
/**
 * Fallback element creation keyed on the getter's return type. Returns null
 * for unsupported types so the caller can raise its own error. Elements
 * created this way are never mandatory and use the field path as label.
 */
@SuppressWarnings("unchecked")
private static <T extends FormElement> T createElementByType(Method method, String elementFieldPath) {
    T element = null;
    Class<?> returnType = method.getReturnType();
    String label = elementFieldPath;
    boolean mandatory = false;
    if (returnType.equals(String.class)) {
        element = (T) new StringElement();
    } else if (returnType.equals(Long.class)) {
        element = (T) new LongElement();
    } else if (returnType.equals(Integer.class)) {
        element = (T) new IntegerElement();
    } else if (returnType.equals(BigDecimal.class)) {
        element = (T) new DoubleElement();
    } else if (returnType.equals(Boolean.class)) {
        element = (T) new BooleanElement();
    } else if (returnType.equals(Date.class) || returnType.equals(Time.class) || returnType.equals(Timestamp.class)) {
        element = (T) new DateTimeElement();
    } else if (FileHolderModel.class.isAssignableFrom(returnType)) {
        element = (T) new FileElement().setModelClass((Class<? extends FileHolderModel>) returnType);
    } else if (Collection.class.isAssignableFrom(returnType)) {
        // Collections dispatch further on their generic element type.
        element = createCollectionElementByType(method);
    }
    if (element != null) {
        elementBaseData((AbstractBaseElement<?>) element, elementFieldPath, mandatory, label);
    }
    return element;
}
/**
 * Creates a collection element based on the collection's generic element
 * type. Unlike {@code createElementByType}, an unsupported element type here
 * throws {@link NotImplementedException} rather than returning null.
 * Note: assumes the getter's return type is parameterized — a raw
 * Collection return type would fail the ParameterizedType cast.
 */
@SuppressWarnings("unchecked")
private static <T extends FormElement> T createCollectionElementByType(Method method) {
    T element = null;
    Type type = ((ParameterizedType) method.getGenericReturnType()).getActualTypeArguments()[0];
    if (type.equals(String.class)) {
        element = (T) new StringCollectionElement();
    } else if (type.equals(Long.class)) {
        element = (T) new LongCollectionElement();
    } else if (type.equals(Integer.class)) {
        element = (T) new IntegerCollectionElement();
    } else if (LangModel.class.isAssignableFrom((Class<?>) type)) {
        element = (T) new LanguageElement().setModelClass((Class<? extends LangModel>) type);
    } else if (FileHolderModel.class.isAssignableFrom((Class<?>) type)) {
        element = (T) new FileCollectionElement().setModelClass((Class<FileHolderModel>) type);
    } else {
        throw new NotImplementedException("Not implemented type: " + type);
    }
    return element;
}
/**
 * Annotation-driven element creation: dispatches to the matching
 * createXxxElement factory based on which component annotation is present
 * on the getter. Returns null when no recognized annotation is found, so
 * the caller can fall back to type-based creation.
 */
@SuppressWarnings("unchecked")
private static <T extends FormElement> T createElementByAnnotation(Method method, String elementFieldPath) {
    T element = null;
    if (method.isAnnotationPresent(StringField.class)) {
        element = (T) FieldHelper.createStringElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(LongField.class)) {
        element = (T) FieldHelper.createLongElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(DoubleField.class)) {
        element = (T) FieldHelper.createDoubleElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(IntegerField.class)) {
        element = (T) FieldHelper.createIntegerElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(BooleanField.class)) {
        element = (T) FieldHelper.createBooleanElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(DateTimeField.class)) {
        element = (T) FieldHelper.createDateTimeElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(StringCollectionField.class)) {
        element = (T) FieldHelper.createStringCollectionElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(LongCollectionField.class)) {
        element = (T) FieldHelper.createLongCollectionElement(method, elementFieldPath);
    } else if (method.isAnnotationPresent(IntegerCollectionField.class)) {
        element = (T) FieldHelper.createIntegerCollectionElement(method, elementFieldPath);
    }
    return element;
}
/**
 * Builds a {@link BooleanElement} from the {@link BooleanField} annotation on the getter.
 * The mandatory flag is always {@code false} here (hard-coded by the original contract).
 */
private static BooleanElement createBooleanElement(Method method, String elementFieldPath) {
    BooleanField annotation = method.getAnnotation(BooleanField.class);
    BooleanElement result = new BooleanElement();
    elementBaseData(result, elementFieldPath, false, annotation.label());
    return result;
}
/**
 * Builds a {@link DoubleElement} from the {@link DoubleField} annotation on the getter.
 * The allowed value range is taken from {@code between()[0]}/{@code between()[1]}.
 * Unlike the sibling factories, an empty label falls back to the field path.
 */
public static DoubleElement createDoubleElement(Method method, String elementFieldPath) {
    DoubleField annotation = method.getAnnotation(DoubleField.class);
    // Fall back to the field path when no explicit label is configured.
    String effectiveLabel = StringUtils.isEmpty(annotation.label()) ? elementFieldPath : annotation.label();
    DoubleElement result = new DoubleElement();
    result.setRange(Range.between(
            BigDecimal.valueOf(annotation.between()[0]),
            BigDecimal.valueOf(annotation.between()[1])));
    elementBaseData(result, elementFieldPath, annotation.required(), effectiveLabel);
    return result;
}
/**
 * Builds a {@link LongElement} from the {@link LongField} annotation on the getter.
 * The allowed value range is taken from {@code between()[0]}/{@code between()[1]}.
 */
private static LongElement createLongElement(Method method, String elementFieldPath) {
    LongField annotation = method.getAnnotation(LongField.class);
    LongElement result = new LongElement();
    result.setRange(Range.between(annotation.between()[0], annotation.between()[1]));
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Builds an {@link IntegerElement} from the {@link IntegerField} annotation on the getter.
 * The allowed value range is taken from {@code between()[0]}/{@code between()[1]}.
 */
private static IntegerElement createIntegerElement(Method method, String elementFieldPath) {
    IntegerField annotation = method.getAnnotation(IntegerField.class);
    IntegerElement result = new IntegerElement();
    result.setRange(Range.between(annotation.between()[0], annotation.between()[1]));
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Builds a {@link StringElement} from the {@link StringField} annotation on the getter,
 * carrying over the configured maximum length and the rows/cols display hints.
 */
private static StringElement createStringElement(Method method, String elementFieldPath) {
    StringField annotation = method.getAnnotation(StringField.class);
    StringElement result = new StringElement();
    result.setLength(annotation.length());
    result.setRows(annotation.rows());
    result.setCols(annotation.cols());
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Builds a {@link DateTimeElement} from the {@link DateTimeField} annotation on the getter.
 * The two entries of {@code between()} are parsed as dates (empty string = no bound) and
 * converted to an epoch-millisecond {@code Range}; no range is set when both are empty.
 */
private static DateTimeElement createDateTimeElement(Method method, String elementFieldPath) {
DateTimeField field = method.getAnnotation(DateTimeField.class);
boolean mandatory = field.required();
DateTimeElement elementComp = new DateTimeElement();
// Empty annotation values mean "no bound" on that side.
Date min = StringUtils.isEmpty(field.between()[0]) ? null : DateTime.parse(field.between()[0]).toDate();
Date max = StringUtils.isEmpty(field.between()[1]) ? null : DateTime.parse(field.between()[1]).toDate();
Range<Long> longRange = null;
if (min != null && max != null) {
longRange = Range.between(min.getTime(), max.getTime());
} else if (min != null || max != null) {
// NOTE(review): with only one bound configured, Range.is() creates a single-point
// range (min == max == that instant) rather than an open-ended range on the other
// side — confirm this is the intended validation behavior.
longRange = Range.is((min == null ? max : min).getTime());
}
elementComp.setRange(longRange);
elementBaseData(elementComp, elementFieldPath, mandatory, field.label());
return elementComp;
}
/**
 * Builds a {@link StringCollectionElement} from the {@link StringCollectionField}
 * annotation on the getter; only the base attributes (id, mandatory, label) apply.
 */
private static StringCollectionElement createStringCollectionElement(Method method, String elementFieldPath) {
    StringCollectionField annotation = method.getAnnotation(StringCollectionField.class);
    StringCollectionElement result = new StringCollectionElement();
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Builds a {@link LongCollectionElement} from the {@link LongCollectionField}
 * annotation on the getter; only the base attributes (id, mandatory, label) apply.
 */
private static LongCollectionElement createLongCollectionElement(Method method, String elementFieldPath) {
    LongCollectionField annotation = method.getAnnotation(LongCollectionField.class);
    LongCollectionElement result = new LongCollectionElement();
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Builds an {@link IntegerCollectionElement} from the {@link IntegerCollectionField}
 * annotation on the getter; only the base attributes (id, mandatory, label) apply.
 */
private static IntegerCollectionElement createIntegerCollectionElement(Method method, String elementFieldPath) {
    IntegerCollectionField annotation = method.getAnnotation(IntegerCollectionField.class);
    IntegerCollectionElement result = new IntegerCollectionElement();
    elementBaseData(result, elementFieldPath, annotation.required(), annotation.label());
    return result;
}
/**
 * Applies the attributes shared by every element type: the id (which is the
 * element's field path), the mandatory flag, and the display label.
 */
private static void elementBaseData(AbstractBaseElement<?> element, String elementFieldPath, boolean mandatory, String label) {
    element.setId(elementFieldPath);
    element.setMandatory(mandatory);
    element.setLabel(label);
}
}
|
#!/bin/bash
#
# A shell script to load some pre-generated data files to a DB using the ldb tool.
# ./ldb needs to be available to be executed.
#
# Usage: <SCRIPT> [checkout]
# `checkout` can be a tag, commit or branch name. Will build using it and check DBs generated by all previous branches (or tags for very old versions without branch) can be opened by it.
# Return value 0 means all regression tests pass. 1 if not pass.
scriptpath=`dirname $BASH_SOURCE`
test_dir=${TEST_TMPDIR:-"/tmp"}"/format_compatible_check"
script_copy_dir=$test_dir"/script_copy"
input_data_path=$test_dir"/test_data_input/"
# Working directories may already exist from a previous run; that is fine.
mkdir $test_dir || true
mkdir $input_data_path || true
# Copy the scripts aside so checking out other revisions doesn't change them.
rm -rf $script_copy_dir
cp $scriptpath $script_copy_dir -rf
# Generate six random input files (data1..data6), one per deterministic seed.
for i in {1..6}
do
input_data[$i]=$input_data_path/data$i
echo == Generating random input file ${input_data[$i]}
python - <<EOF
import random
random.seed($i)
symbols=['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
with open('${input_data[$i]}', 'w') as f:
for i in range(1,1024):
k = ""
for j in range(1, random.randint(1,32)):
k=k + symbols[random.randint(0, len(symbols) - 1)]
vb = ""
for j in range(1, random.randint(0,128)):
vb = vb + symbols[random.randint(0, len(symbols) - 1)]
v = ""
for j in range(1, random.randint(1, 5)):
v = v + vb
print >> f, k + " ==> " + v
EOF
done
# Old releases to check backward compatibility against, oldest first.
declare -a checkout_objs=("2.2.fb.branch" "2.3.fb.branch" "2.4.fb.branch" "2.5.fb.branch" "2.6.fb.branch" "2.7.fb.branch" "2.8.1.fb" "3.0.fb.branch" "3.1.fb" "3.2.fb" "3.3.fb" "3.4.fb" "3.5.fb" "3.6.fb" "3.7.fb" "3.8.fb" "3.9.fb" "3.10.fb" "3.11.fb" "3.12.fb" "3.13.fb" "4.0.fb" "4.1.fb" "4.2.fb" "4.3.fb" "4.4.fb" "4.5.fb" "4.6.fb" "4.7.fb" "4.8.fb" "4.9.fb" "4.10.fb" "4.11.fb" "4.12.fb")
# Subset of releases expected to also open DBs created by newer code.
declare -a forward_compatible_checkout_objs=("3.10.fb" "3.11.fb" "3.12.fb" "3.13.fb" "4.0.fb" "4.1.fb" "4.2.fb" "4.3.fb" "4.4.fb" "4.5.fb" "4.6.fb" "4.7.fb" "4.8.fb" "4.9.fb" "4.10.fb" "4.11.fb" "4.12.fb")
# generate_db <input_dir> <db_dir>
# Loads the pre-generated input files into a DB via generate_random_db.sh.
# Aborts the entire run with status 1 if loading fails.
generate_db()
{
	# Suspend errexit so we can print our own message before exiting.
	set +e
	if ! $script_copy_dir/generate_random_db.sh $1 $2; then
		echo ==== Error loading data from $2 to $1 ====
		exit 1
	fi
	set -e
}
# compare_db <db_dir_a> <db_dir_b> <dump_file>
# Verifies the two DBs hold identical content via verify_random_db.sh.
# Aborts the entire run with status 1 on mismatch or error.
compare_db()
{
	# Suspend errexit so we can print our own message before exiting.
	set +e
	if ! $script_copy_dir/verify_random_db.sh $1 $2 $3; then
		echo ==== Read different content from $1 and $2 or error happened. ====
		exit 1
	fi
	set -e
}
# Sandcastle sets us up with a remote that is just another directory on the same
# machine and doesn't have our branches. Need to fetch them so checkout works.
# Remote add may fail if added previously (we don't cleanup).
git remote add github_origin "https://github.com/facebook/rocksdb.git"
set -e
https_proxy="fwdproxy:8080" git fetch github_origin
# Phase 1: build ldb at every old release and create a DB with each of them.
for checkout_obj in "${checkout_objs[@]}"
do
echo == Generating DB from "$checkout_obj" ...
git checkout $checkout_obj
make clean
make ldb -j32
generate_db $input_data_path $test_dir/$checkout_obj
done
# Phase 2: build the revision under test (defaults to master) and create the
# baseline DB used for content comparison.
checkout_flag=${1:-"master"}
echo == Building $checkout_flag debug
git checkout $checkout_flag
make clean
make ldb -j32
compare_base_db_dir=$test_dir"/base_db_dir"
echo == Generate compare base DB to $compare_base_db_dir
generate_db $input_data_path $compare_base_db_dir
# Phase 3 (backward compatibility): the new build must read every old DB.
for checkout_obj in "${checkout_objs[@]}"
do
echo == Opening DB from "$checkout_obj" using debug build of $checkout_flag ...
compare_db $test_dir/$checkout_obj $compare_base_db_dir db_dump.txt
done
# Phase 4 (forward compatibility): selected old builds must read the new DB.
for checkout_obj in "${forward_compatible_checkout_objs[@]}"
do
echo == Build "$checkout_obj" and try to open DB generated using $checkout_flag...
git checkout $checkout_obj
make clean
make ldb -j32
compare_db $test_dir/$checkout_obj $compare_base_db_dir forward_${checkout_obj}_dump.txt
done
echo ==== Compatibility Test PASSED ====
|
-- Fetch the single most recently published article
-- (rows ordered by the `published` column, newest first).
SELECT *
FROM Articles
ORDER BY published DESC
LIMIT 1;
|
#!/bin/bash
#
# https://github.com/Nyr/wireguard-install
#
# Copyright (c) 2020 Nyr. Released under the MIT License.
#
# Environment sanity checks: shell, kernel, OS/version, PATH, container
# detection and privileges. Each failed check prints a message and exits.
# Detect Debian users running the script with "sh" instead of bash
if readlink /proc/$$/exe | grep -q "dash"; then
echo 'This installer needs to be run with "bash", not "sh".'
exit
fi
# Discard stdin. Needed when running from an one-liner which includes a newline
read -N 999999 -t 0.001
# Detect OpenVZ 6 (a 2.x kernel is too old for this installer)
if [[ $(uname -r | cut -d "." -f 1) -eq 2 ]]; then
echo "The system is running an old kernel, which is incompatible with this installer."
exit
fi
# Detect OS
# $os_version variables aren't always in use, but are kept here for convenience
if grep -qs "ubuntu" /etc/os-release; then
os="ubuntu"
# e.g. "20.04" becomes 2004 so numeric comparisons work below
os_version=$(grep 'VERSION_ID' /etc/os-release | cut -d '"' -f 2 | tr -d '.')
elif [[ -e /etc/debian_version ]]; then
os="debian"
os_version=$(grep -oE '[0-9]+' /etc/debian_version | head -1)
elif [[ -e /etc/almalinux-release || -e /etc/rocky-release || -e /etc/centos-release || -e /etc/oracle-release ]]; then
# AlmaLinux, Rocky and Oracle are handled as "centos" for package management
os="centos"
os_version=$(grep -shoE '[0-9]+' /etc/almalinux-release /etc/rocky-release /etc/centos-release /etc/oracle-release | head -1)
elif [[ -e /etc/fedora-release ]]; then
os="fedora"
os_version=$(grep -oE '[0-9]+' /etc/fedora-release | head -1)
else
echo "This installer seems to be running on an unsupported distribution.
Supported distros are Ubuntu, Debian, AlmaLinux, Rocky Linux, CentOS and Fedora."
exit
fi
# Enforce the minimum supported release for each distribution
if [[ "$os" == "ubuntu" && "$os_version" -lt 1804 ]]; then
echo "Ubuntu 18.04 or higher is required to use this installer.
This version of Ubuntu is too old and unsupported."
exit
fi
if [[ "$os" == "debian" && "$os_version" -lt 10 ]]; then
echo "Debian 10 or higher is required to use this installer.
This version of Debian is too old and unsupported."
exit
fi
if [[ "$os" == "centos" && "$os_version" -lt 7 ]]; then
echo "CentOS 7 or higher is required to use this installer.
This version of CentOS is too old and unsupported."
exit
fi
# Detect environments where $PATH does not include the sbin directories
if ! grep -q sbin <<< "$PATH"; then
echo '$PATH does not include sbin. Try using "su -" instead of "su".'
exit
fi
# Exit status 0 from `systemd-detect-virt -cq` means we run inside a container
systemd-detect-virt -cq
is_container="$?"
if [[ "$os" == "fedora" && "$os_version" -eq 31 && $(uname -r | cut -d "." -f 2) -lt 6 && ! "$is_container" -eq 0 ]]; then
echo 'Fedora 31 is supported, but the kernel is outdated.
Upgrade the kernel using "dnf upgrade kernel" and restart.'
exit
fi
if [[ "$EUID" -ne 0 ]]; then
echo "This installer needs to be run with superuser privileges."
exit
fi
if [[ "$is_container" -eq 0 ]]; then
# The container path uses BoringTun, which is only provided for x86_64
if [ "$(uname -m)" != "x86_64" ]; then
echo "In containerized systems, this installer supports only the x86_64 architecture.
The system runs on $(uname -m) and is unsupported."
exit
fi
# TUN device is required to use BoringTun if running inside a container
if [[ ! -e /dev/net/tun ]] || ! ( exec 7<>/dev/net/tun ) 2>/dev/null; then
echo "The system does not have the TUN device available.
TUN needs to be enabled before running this installer."
exit
fi
fi
# new_client_dns
# Interactively asks which DNS provider the new client should use and leaves
# the result (a comma-separated list of resolver IPv4 addresses) in the
# global $dns, later embedded into the client's .conf by new_client_setup.
new_client_dns () {
echo "Select a DNS server for the client:"
echo "   1) Current system resolvers"
echo "   2) Google"
echo "   3) 1.1.1.1"
echo "   4) OpenDNS"
echo "   5) Quad9"
echo "   6) AdGuard"
read -p "DNS server [1]: " dns
# Re-prompt until the answer is empty (default = option 1) or a digit 1-6
until [[ -z "$dns" || "$dns" =~ ^[1-6]$ ]]; do
echo "$dns: invalid selection."
read -p "DNS server [1]: " dns
done
# DNS
case "$dns" in
1|"")
# Locate the proper resolv.conf
# Needed for systems running systemd-resolved
if grep -q '^nameserver 127.0.0.53' "/etc/resolv.conf"; then
resolv_conf="/run/systemd/resolve/resolv.conf"
else
resolv_conf="/etc/resolv.conf"
fi
# Extract nameservers and provide them in the required format
dns=$(grep -v '^#\|^;' "$resolv_conf" | grep '^nameserver' | grep -oE '[0-9]{1,3}(\.[0-9]{1,3}){3}' | xargs | sed -e 's/ /, /g')
;;
2)
dns="8.8.8.8, 8.8.4.4"
;;
3)
dns="1.1.1.1, 1.0.0.1"
;;
4)
dns="208.67.222.222, 208.67.220.220"
;;
5)
dns="9.9.9.9, 149.112.112.112"
;;
6)
dns="94.140.14.14, 94.140.15.15"
;;
esac
}
# new_client_setup
# Appends a [Peer] section for $client to /etc/wireguard/wg0.conf and writes
# the matching client configuration file to ~/$client.conf. Expects the
# globals $client and $dns to be set beforehand.
new_client_setup () {
# Get the first 3 octets of wireguard's `Address`
# NOTE(review): the trailing `echo $first3octets` prints the prefix to stdout;
# it looks like leftover debugging output — confirm it is intentional.
first3octets=$(grep Address /etc/wireguard/wg0.conf | grep -Pom 1 '[0-9.]{7,15}' | rev | cut -d "." -f2- | rev) && echo $first3octets
# Given a list of the assigned internal IPv4 addresses, obtain the lowest still
# available octet. Important to start looking at 2, because 1 is our gateway.
# NOTE(review): `grep -q "$octet"` is a substring match, so e.g. octet 2 also
# matches a line "25" and the free octet 2 is skipped — confirm whether an
# anchored match was intended.
octet=2
while grep AllowedIPs /etc/wireguard/wg0.conf | cut -d "." -f 4 | cut -d "/" -f 1 | grep -q "$octet"; do
    (( octet++ ))
done
# Don't break the WireGuard configuration in case the address space is full
if [[ "$octet" -eq 255 ]]; then
echo "253 clients are already configured. The WireGuard internal subnet is full!"
exit
fi
key=$(wg genkey)
psk=$(wg genpsk)
# Configure client in the server
cat << EOF >> /etc/wireguard/wg0.conf
# BEGIN_PEER $client
[Peer]
PublicKey = $(wg pubkey <<< $key)
PresharedKey = $psk
AllowedIPs = $first3octets.$octet/32$(grep -q 'fddd:2c4:2c4:2c4::1' /etc/wireguard/wg0.conf && echo ", fddd:2c4:2c4:2c4::$octet/128")
# END_PEER $client
EOF
# Create client configuration
cat << EOF > ~/"$client".conf
[Interface]
Address = $first3octets.$octet/24$(grep -q 'fddd:2c4:2c4:2c4::1' /etc/wireguard/wg0.conf && echo ", fddd:2c4:2c4:2c4::$octet/64")
DNS = $dns
PrivateKey = $key
[Peer]
PublicKey = $(grep PrivateKey /etc/wireguard/wg0.conf | cut -d " " -f 3 | wg pubkey)
PresharedKey = $psk
AllowedIPs = 0.0.0.0/0, ::/0
Endpoint = $(grep '^# ENDPOINT' /etc/wireguard/wg0.conf | cut -d " " -f 3):$(grep ListenPort /etc/wireguard/wg0.conf | cut -d " " -f 3)
PersistentKeepalive = 25
EOF
}
if [[ ! -e /etc/wireguard/wg0.conf ]]; then
# Detect some Debian minimal setups where neither wget nor curl are installed
if ! hash wget 2>/dev/null && ! hash curl 2>/dev/null; then
echo "Wget is required to use this installer."
read -n1 -r -p "Press any key to install Wget and continue..."
apt-get update
apt-get install -y wget
fi
clear
echo 'Welcome to this WireGuard road warrior installer!'
# If system has a single IPv4, it is selected automatically. Else, ask the user
if [[ $(ip -4 addr | grep inet | grep -vEc '127(\.[0-9]{1,3}){3}') -eq 1 ]]; then
ip=$(ip -4 addr | grep inet | grep -vE '127(\.[0-9]{1,3}){3}' | cut -d '/' -f 1 | grep -oE '[0-9]{1,3}(\.[0-9]{1,3}){3}')
else
number_of_ip=$(ip -4 addr | grep inet | grep -vEc '127(\.[0-9]{1,3}){3}')
echo
echo "Which IPv4 address should be used?"
ip -4 addr | grep inet | grep -vE '127(\.[0-9]{1,3}){3}' | cut -d '/' -f 1 | grep -oE '[0-9]{1,3}(\.[0-9]{1,3}){3}' | nl -s ') '
read -p "IPv4 address [1]: " ip_number
until [[ -z "$ip_number" || "$ip_number" =~ ^[0-9]+$ && "$ip_number" -le "$number_of_ip" ]]; do
echo "$ip_number: invalid selection."
read -p "IPv4 address [1]: " ip_number
done
[[ -z "$ip_number" ]] && ip_number="1"
ip=$(ip -4 addr | grep inet | grep -vE '127(\.[0-9]{1,3}){3}' | cut -d '/' -f 1 | grep -oE '[0-9]{1,3}(\.[0-9]{1,3}){3}' | sed -n "$ip_number"p)
fi
# If $ip is a private IP address, the server must be behind NAT
if echo "$ip" | grep -qE '^(10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|192\.168)'; then
echo
echo "This server is behind NAT. What is the public IPv4 address or hostname?"
# Get public IP and sanitize with grep
get_public_ip=$(grep -m 1 -oE '^[0-9]{1,3}(\.[0-9]{1,3}){3}$' <<< "$(wget -T 10 -t 1 -4qO- "http://ip1.dynupdate.no-ip.com/" || curl -m 10 -4Ls "http://ip1.dynupdate.no-ip.com/")")
read -p "Public IPv4 address / hostname [$get_public_ip]: " public_ip
# If the checkip service is unavailable and user didn't provide input, ask again
until [[ -n "$get_public_ip" || -n "$public_ip" ]]; do
echo "Invalid input."
read -p "Public IPv4 address / hostname: " public_ip
done
[[ -z "$public_ip" ]] && public_ip="$get_public_ip"
fi
# If system has a single IPv6, it is selected automatically
if [[ $(ip -6 addr | grep -c 'inet6 [23]') -eq 1 ]]; then
ip6=$(ip -6 addr | grep 'inet6 [23]' | cut -d '/' -f 1 | grep -oE '([0-9a-fA-F]{0,4}:){1,7}[0-9a-fA-F]{0,4}')
fi
# If system has multiple IPv6, ask the user to select one
if [[ $(ip -6 addr | grep -c 'inet6 [23]') -gt 1 ]]; then
number_of_ip6=$(ip -6 addr | grep -c 'inet6 [23]')
echo
echo "Which IPv6 address should be used?"
ip -6 addr | grep 'inet6 [23]' | cut -d '/' -f 1 | grep -oE '([0-9a-fA-F]{0,4}:){1,7}[0-9a-fA-F]{0,4}' | nl -s ') '
read -p "IPv6 address [1]: " ip6_number
until [[ -z "$ip6_number" || "$ip6_number" =~ ^[0-9]+$ && "$ip6_number" -le "$number_of_ip6" ]]; do
echo "$ip6_number: invalid selection."
read -p "IPv6 address [1]: " ip6_number
done
[[ -z "$ip6_number" ]] && ip6_number="1"
ip6=$(ip -6 addr | grep 'inet6 [23]' | cut -d '/' -f 1 | grep -oE '([0-9a-fA-F]{0,4}:){1,7}[0-9a-fA-F]{0,4}' | sed -n "$ip6_number"p)
fi
echo
echo "Which private network should WireGuard use?"
read -p "WG Network [10.7.0.0/24]: " wg_private_net
# Check for valid IPv4 address and private address
until [[ -z "$wg_private_net" || "$wg_private_net" =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+ && "$wg_private_net" =~ ^(10\.|172\.1[6789]\.|172\.2[0-9]\.|172\.3[01]\.|192\.168) ]]; do
echo "$wg_private_net: invalid private network."
read -p "WG Network [10.7.0.0/24]: " wg_private_net
done
[[ -z "$wg_private_net" ]] && wg_private_net="10.7.0.0/24"
echo $wg_private_net
echo
echo "What port should WireGuard listen to?"
read -p "Port [51820]: " port
until [[ -z "$port" || "$port" =~ ^[0-9]+$ && "$port" -le 65535 ]]; do
echo "$port: invalid port."
read -p "Port [51820]: " port
done
[[ -z "$port" ]] && port="51820"
echo
echo "Enter a name for the first client:"
read -p "Name [client]: " unsanitized_client
# Allow a limited set of characters to avoid conflicts
client=$(sed 's/[^0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-]/_/g' <<< "$unsanitized_client")
[[ -z "$client" ]] && client="client"
echo
new_client_dns
# Set up automatic updates for BoringTun if the user is fine with that.
# Only relevant inside containers, where BoringTun is used instead of the
# kernel module. Leaves the answer in $boringtun_updates and, when enabled,
# records the distro's cron package name in $cron for later installation.
if [[ "$is_container" -eq 0 ]]; then
	echo
	echo "BoringTun will be installed to set up WireGuard in the system."
	read -p "Should automatic updates be enabled for it? [Y/n]: " boringtun_updates
	# Re-prompt until the answer is empty (defaults to yes) or only y/n letters.
	until [[ "$boringtun_updates" =~ ^[yYnN]*$ ]]; do
		# Fix: report the variable actually being validated ($boringtun_updates);
		# the previous code echoed $remove, which is unset at this point.
		echo "$boringtun_updates: invalid selection."
		read -p "Should automatic updates be enabled for it? [Y/n]: " boringtun_updates
	done
	[[ -z "$boringtun_updates" ]] && boringtun_updates="y"
	if [[ "$boringtun_updates" =~ ^[yY]$ ]]; then
		# cron provides the daily job that later runs boringtun-upgrade.
		if [[ "$os" == "centos" || "$os" == "fedora" ]]; then
			cron="cronie"
		elif [[ "$os" == "debian" || "$os" == "ubuntu" ]]; then
			cron="cron"
		fi
	fi
fi
echo
echo "WireGuard installation is ready to begin."
# Install a firewall if firewalld or iptables are not already available
if ! systemctl is-active --quiet firewalld.service && ! hash iptables 2>/dev/null; then
if [[ "$os" == "centos" || "$os" == "fedora" ]]; then
firewall="firewalld"
# We don't want to silently enable firewalld, so we give a subtle warning
# If the user continues, firewalld will be installed and enabled during setup
echo "firewalld, which is required to manage routing tables, will also be installed."
elif [[ "$os" == "debian" || "$os" == "ubuntu" ]]; then
# iptables is way less invasive than firewalld so no warning is given
firewall="iptables"
fi
fi
read -n1 -r -p "Press any key to continue..."
# Install WireGuard
# If not running inside a container, set up the WireGuard kernel module
if [[ ! "$is_container" -eq 0 ]]; then
if [[ "$os" == "ubuntu" ]]; then
# Ubuntu
apt-get update
apt-get install -y wireguard qrencode $firewall
elif [[ "$os" == "debian" && "$os_version" -ge 11 ]]; then
# Debian 11 or higher
apt-get update
apt-get install -y wireguard qrencode $firewall
elif [[ "$os" == "debian" && "$os_version" -eq 10 ]]; then
# Debian 10
if ! grep -qs '^deb .* buster-backports main' /etc/apt/sources.list /etc/apt/sources.list.d/*.list; then
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
fi
apt-get update
# Try to install kernel headers for the running kernel and avoid a reboot. This
# can fail, so it's important to run separately from the other apt-get command.
apt-get install -y linux-headers-"$(uname -r)"
# There are cleaner ways to find out the $architecture, but we require an
# specific format for the package name and this approach provides what we need.
architecture=$(dpkg --get-selections 'linux-image-*-*' | cut -f 1 | grep -oE '[^-]*$' -m 1)
# linux-headers-$architecture points to the latest headers. We install it
# because if the system has an outdated kernel, there is no guarantee that old
# headers were still downloadable and to provide suitable headers for future
# kernel updates.
apt-get install -y linux-headers-"$architecture"
apt-get install -y wireguard qrencode $firewall
elif [[ "$os" == "centos" && "$os_version" -eq 8 ]]; then
# CentOS 8
dnf install -y epel-release elrepo-release
dnf install -y kmod-wireguard wireguard-tools qrencode $firewall
mkdir -p /etc/wireguard/
elif [[ "$os" == "centos" && "$os_version" -eq 7 ]]; then
# CentOS 7
yum install -y epel-release https://www.elrepo.org/elrepo-release-7.el7.elrepo.noarch.rpm
yum install -y yum-plugin-elrepo
yum install -y kmod-wireguard wireguard-tools qrencode $firewall
mkdir -p /etc/wireguard/
elif [[ "$os" == "fedora" ]]; then
# Fedora
dnf install -y wireguard-tools qrencode $firewall
mkdir -p /etc/wireguard/
fi
# Else, we are inside a container and BoringTun needs to be used
else
# Install required packages
if [[ "$os" == "ubuntu" ]]; then
# Ubuntu
apt-get update
apt-get install -y qrencode ca-certificates $cron $firewall
apt-get install -y wireguard-tools --no-install-recommends
elif [[ "$os" == "debian" && "$os_version" -ge 11 ]]; then
# Debian 11 or higher
apt-get update
apt-get install -y qrencode ca-certificates $cron $firewall
apt-get install -y wireguard-tools --no-install-recommends
elif [[ "$os" == "debian" && "$os_version" -eq 10 ]]; then
# Debian 10
if ! grep -qs '^deb .* buster-backports main' /etc/apt/sources.list /etc/apt/sources.list.d/*.list; then
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
fi
apt-get update
apt-get install -y qrencode ca-certificates $cron $firewall
apt-get install -y wireguard-tools --no-install-recommends
elif [[ "$os" == "centos" && "$os_version" -eq 8 ]]; then
# CentOS 8
dnf install -y epel-release
dnf install -y wireguard-tools qrencode ca-certificates tar $cron $firewall
mkdir -p /etc/wireguard/
elif [[ "$os" == "centos" && "$os_version" -eq 7 ]]; then
# CentOS 7
yum install -y epel-release
yum install -y wireguard-tools qrencode ca-certificates tar $cron $firewall
mkdir -p /etc/wireguard/
elif [[ "$os" == "fedora" ]]; then
# Fedora
dnf install -y wireguard-tools qrencode ca-certificates tar $cron $firewall
mkdir -p /etc/wireguard/
fi
# Grab the BoringTun binary using wget or curl and extract into the right place.
# Don't use this service elsewhere without permission! Contact me before you do!
{ wget -qO- https://wg.nyr.be/1/latest/download 2>/dev/null || curl -sL https://wg.nyr.be/1/latest/download ; } | tar xz -C /usr/local/sbin/ --wildcards 'boringtun-*/boringtun' --strip-components 1
# Configure wg-quick to use BoringTun
mkdir /etc/systemd/system/wg-quick@wg0.service.d/ 2>/dev/null
echo "[Service]
Environment=WG_QUICK_USERSPACE_IMPLEMENTATION=boringtun
Environment=WG_SUDO=1" > /etc/systemd/system/wg-quick@wg0.service.d/boringtun.conf
if [[ -n "$cron" ]] && [[ "$os" == "centos" || "$os" == "fedora" ]]; then
systemctl enable --now crond.service
fi
fi
# If firewalld was just installed, enable it
if [[ "$firewall" == "firewalld" ]]; then
systemctl enable --now firewalld.service
fi
# Generate wg0.conf
cat << EOF > /etc/wireguard/wg0.conf
# Do not alter the commented lines
# They are used by wireguard-install
# ENDPOINT $([[ -n "$public_ip" ]] && echo "$public_ip" || echo "$ip")
[Interface]
Address = $wg_private_net$([[ -n "$ip6" ]] && echo ", fddd:2c4:2c4:2c4::1/64")
PrivateKey = $(wg genkey)
ListenPort = $port
EOF
chmod 600 /etc/wireguard/wg0.conf
# Enable net.ipv4.ip_forward for the system
echo 'net.ipv4.ip_forward=1' > /etc/sysctl.d/99-wireguard-forward.conf
# Enable without waiting for a reboot or service restart
echo 1 > /proc/sys/net/ipv4/ip_forward
if [[ -n "$ip6" ]]; then
# Enable net.ipv6.conf.all.forwarding for the system
echo "net.ipv6.conf.all.forwarding=1" >> /etc/sysctl.d/99-wireguard-forward.conf
# Enable without waiting for a reboot or service restart
echo 1 > /proc/sys/net/ipv6/conf/all/forwarding
fi
if systemctl is-active --quiet firewalld.service; then
# Using both permanent and not permanent rules to avoid a firewalld
# reload.
firewall-cmd --add-port="$port"/udp
firewall-cmd --zone=trusted --add-source="$wg_private_net"
firewall-cmd --permanent --add-port="$port"/udp
firewall-cmd --permanent --zone=trusted --add-source="$wg_private_net"
# Set NAT for the VPN subnet
firewall-cmd --direct --add-rule ipv4 nat POSTROUTING 0 -s "$wg_private_net" ! -d "$wg_private_net" -j SNAT --to "$ip"
firewall-cmd --permanent --direct --add-rule ipv4 nat POSTROUTING 0 -s "$wg_private_net" ! -d "$wg_private_net" -j SNAT --to "$ip"
if [[ -n "$ip6" ]]; then
firewall-cmd --zone=trusted --add-source=fddd:2c4:2c4:2c4::/64
firewall-cmd --permanent --zone=trusted --add-source=fddd:2c4:2c4:2c4::/64
firewall-cmd --direct --add-rule ipv6 nat POSTROUTING 0 -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to "$ip6"
firewall-cmd --permanent --direct --add-rule ipv6 nat POSTROUTING 0 -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to "$ip6"
fi
else
# Create a service to set up persistent iptables rules
iptables_path=$(command -v iptables)
ip6tables_path=$(command -v ip6tables)
# nf_tables is not available as standard in OVZ kernels. So use iptables-legacy
# if we are in OVZ, with a nf_tables backend and iptables-legacy is available.
if [[ $(systemd-detect-virt) == "openvz" ]] && readlink -f "$(command -v iptables)" | grep -q "nft" && hash iptables-legacy 2>/dev/null; then
iptables_path=$(command -v iptables-legacy)
ip6tables_path=$(command -v ip6tables-legacy)
fi
echo "[Unit]
Before=network.target
[Service]
Type=oneshot
Restart=on-failure
RestartSec=5s
ExecStart=$iptables_path -t nat -A POSTROUTING -s $wg_private_net ! -d $wg_private_net -j SNAT --to $ip
ExecStart=$iptables_path -I INPUT -p udp --dport $port -j ACCEPT
ExecStart=$iptables_path -I FORWARD -s $wg_private_net -j ACCEPT
ExecStart=$iptables_path -I FORWARD -m state --state RELATED,ESTABLISHED -j ACCEPT
ExecStop=$iptables_path -t nat -D POSTROUTING -s $wg_private_net ! -d $wg_private_net -j SNAT --to $ip
ExecStop=$iptables_path -D INPUT -p udp --dport $port -j ACCEPT
ExecStop=$iptables_path -D FORWARD -s $wg_private_net -j ACCEPT
ExecStop=$iptables_path -D FORWARD -m state --state RELATED,ESTABLISHED -j ACCEPT" > /etc/systemd/system/wg-iptables.service
if [[ -n "$ip6" ]]; then
echo "ExecStart=$ip6tables_path -t nat -A POSTROUTING -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to $ip6
ExecStart=$ip6tables_path -I FORWARD -s fddd:2c4:2c4:2c4::/64 -j ACCEPT
ExecStart=$ip6tables_path -I FORWARD -m state --state RELATED,ESTABLISHED -j ACCEPT
ExecStop=$ip6tables_path -t nat -D POSTROUTING -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to $ip6
ExecStop=$ip6tables_path -D FORWARD -s fddd:2c4:2c4:2c4::/64 -j ACCEPT
ExecStop=$ip6tables_path -D FORWARD -m state --state RELATED,ESTABLISHED -j ACCEPT" >> /etc/systemd/system/wg-iptables.service
fi
echo "RemainAfterExit=yes
[Install]
WantedBy=multi-user.target" >> /etc/systemd/system/wg-iptables.service
systemctl enable --now wg-iptables.service
fi
# Generates the custom client.conf
new_client_setup
# Enable and start the wg-quick service
systemctl enable --now wg-quick@wg0.service
# Set up automatic updates for BoringTun if the user wanted to
if [[ "$boringtun_updates" =~ ^[yY]$ ]]; then
# Deploy upgrade script
cat << 'EOF' > /usr/local/sbin/boringtun-upgrade
#!/bin/bash
latest=$(wget -qO- https://wg.nyr.be/1/latest 2>/dev/null || curl -sL https://wg.nyr.be/1/latest 2>/dev/null)
# If server did not provide an appropriate response, exit
if ! head -1 <<< "$latest" | grep -qiE "^boringtun.+[0-9]+\.[0-9]+.*$"; then
echo "Update server unavailable"
exit
fi
current=$(/usr/local/sbin/boringtun -V)
if [[ "$current" != "$latest" ]]; then
download="https://wg.nyr.be/1/latest/download"
xdir=$(mktemp -d)
# If download and extraction are successful, upgrade the boringtun binary
if { wget -qO- "$download" 2>/dev/null || curl -sL "$download" ; } | tar xz -C "$xdir" --wildcards "boringtun-*/boringtun" --strip-components 1; then
systemctl stop wg-quick@wg0.service
rm -f /usr/local/sbin/boringtun
mv "$xdir"/boringtun /usr/local/sbin/boringtun
systemctl start wg-quick@wg0.service
echo "Succesfully updated to $(/usr/local/sbin/boringtun -V)"
else
echo "boringtun update failed"
fi
rm -rf "$xdir"
else
echo "$current is up to date"
fi
EOF
chmod +x /usr/local/sbin/boringtun-upgrade
# Add cron job to run the updater daily at a random time between 3:00 and 5:59
{ crontab -l 2>/dev/null; echo "$(( $RANDOM % 60 )) $(( $RANDOM % 3 + 3 )) * * * /usr/local/sbin/boringtun-upgrade &>/dev/null" ; } | crontab -
fi
echo
qrencode -t UTF8 < ~/"$client.conf"
echo -e '\xE2\x86\x91 That is a QR code containing the client configuration.'
echo
# If the kernel module didn't load, system probably had an outdated kernel
# We'll try to help, but will not will not force a kernel upgrade upon the user
if [[ ! "$is_container" -eq 0 ]] && ! modprobe -nq wireguard; then
echo "Warning!"
echo "Installation was finished, but the WireGuard kernel module could not load."
if [[ "$os" == "ubuntu" && "$os_version" -eq 1804 ]]; then
echo 'Upgrade the kernel and headers with "apt-get install linux-generic" and restart.'
elif [[ "$os" == "debian" && "$os_version" -eq 10 ]]; then
echo "Upgrade the kernel with \"apt-get install linux-image-$architecture\" and restart."
elif [[ "$os" == "centos" && "$os_version" -le 8 ]]; then
echo "Reboot the system to load the most recent kernel."
fi
else
echo "Finished!"
fi
echo
echo "The client configuration is available in:" ~/"$client.conf"
echo "New clients can be added by running this script again."
else
clear
echo "WireGuard is already installed."
echo
echo "Select an option:"
echo " 1) Add a new client"
echo " 2) Remove an existing client"
echo " 3) Remove WireGuard"
echo " 4) Exit"
read -p "Option: " option
until [[ "$option" =~ ^[1-4]$ ]]; do
echo "$option: invalid selection."
read -p "Option: " option
done
case "$option" in
	1)
		# --- Add a new client ---
		echo
		echo "Provide a name for the client:"
		read -p "Name: " unsanitized_client
		# Allow a limited set of characters to avoid conflicts
		client=$(sed 's/[^0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-]/_/g' <<< "$unsanitized_client")
		# Reject empty names and names already present as a peer in wg0.conf
		while [[ -z "$client" ]] || grep -q "^# BEGIN_PEER $client$" /etc/wireguard/wg0.conf; do
			echo "$client: invalid name."
			read -p "Name: " unsanitized_client
			client=$(sed 's/[^0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_-]/_/g' <<< "$unsanitized_client")
		done
		echo
		new_client_dns
		new_client_setup
		# Append new client configuration to the WireGuard interface
		# (only the BEGIN_PEER..END_PEER section, so other peers stay untouched)
		wg addconf wg0 <(sed -n "/^# BEGIN_PEER $client/,/^# END_PEER $client/p" /etc/wireguard/wg0.conf)
		echo
		qrencode -t UTF8 < ~/"$client.conf"
		echo -e '\xE2\x86\x91 That is a QR code containing your client configuration.'
		echo
		echo "$client added. Configuration available in:" ~/"$client.conf"
		exit
	;;
	2)
		# --- Remove an existing client ---
		# This option could be documented a bit better and maybe even be simplified
		# ...but what can I say, I want some sleep too
		number_of_clients=$(grep -c '^# BEGIN_PEER' /etc/wireguard/wg0.conf)
		if [[ "$number_of_clients" = 0 ]]; then
			echo
			echo "There are no existing clients!"
			exit
		fi
		echo
		echo "Select the client to remove:"
		# List client names (3rd field of the BEGIN_PEER marker), numbered
		grep '^# BEGIN_PEER' /etc/wireguard/wg0.conf | cut -d ' ' -f 3 | nl -s ') '
		read -p "Client: " client_number
		until [[ "$client_number" =~ ^[0-9]+$ && "$client_number" -le "$number_of_clients" ]]; do
			echo "$client_number: invalid selection."
			read -p "Client: " client_number
		done
		client=$(grep '^# BEGIN_PEER' /etc/wireguard/wg0.conf | cut -d ' ' -f 3 | sed -n "$client_number"p)
		echo
		read -p "Confirm $client removal? [y/N]: " remove
		until [[ "$remove" =~ ^[yYnN]*$ ]]; do
			echo "$remove: invalid selection."
			read -p "Confirm $client removal? [y/N]: " remove
		done
		if [[ "$remove" =~ ^[yY]$ ]]; then
			# The following is the right way to avoid disrupting other active connections:
			# Remove from the live interface
			wg set wg0 peer "$(sed -n "/^# BEGIN_PEER $client$/,\$p" /etc/wireguard/wg0.conf | grep -m 1 PublicKey | cut -d " " -f 3)" remove
			# Remove from the configuration file
			sed -i "/^# BEGIN_PEER $client$/,/^# END_PEER $client$/d" /etc/wireguard/wg0.conf
			echo
			echo "$client removed!"
		else
			echo
			echo "$client removal aborted!"
		fi
		exit
	;;
	3)
		# --- Remove WireGuard entirely ---
		echo
		read -p "Confirm WireGuard removal? [y/N]: " remove
		until [[ "$remove" =~ ^[yYnN]*$ ]]; do
			echo "$remove: invalid selection."
			read -p "Confirm WireGuard removal? [y/N]: " remove
		done
		if [[ "$remove" =~ ^[yY]$ ]]; then
			# ListenPort is read back from the live config for firewall cleanup
			port=$(grep '^ListenPort' /etc/wireguard/wg0.conf | cut -d " " -f 3)
if systemctl is-active --quiet firewalld.service; then
ip=$(firewall-cmd --direct --get-rules ipv4 nat POSTROUTING | grep '\-s $wg_private_net '"'"'!'"'"' -d $wg_private_net' | grep -oE '[^ ]+$')
# Using both permanent and not permanent rules to avoid a firewalld reload.
firewall-cmd --remove-port="$port"/udp
firewall-cmd --zone=trusted --remove-source="$wg_private_net"
firewall-cmd --permanent --remove-port="$port"/udp
firewall-cmd --permanent --zone=trusted --remove-source="$wg_private_net"
firewall-cmd --direct --remove-rule ipv4 nat POSTROUTING 0 -s "$wg_private_net" ! -d "$wg_private_net" -j SNAT --to "$ip"
firewall-cmd --permanent --direct --remove-rule ipv4 nat POSTROUTING 0 -s "$wg_private_net" ! -d "$wg_private_net" -j SNAT --to "$ip"
if grep -qs 'fddd:2c4:2c4:2c4::1/64' /etc/wireguard/wg0.conf; then
ip6=$(firewall-cmd --direct --get-rules ipv6 nat POSTROUTING | grep '\-s fddd:2c4:2c4:2c4::/64 '"'"'!'"'"' -d fddd:2c4:2c4:2c4::/64' | grep -oE '[^ ]+$')
firewall-cmd --zone=trusted --remove-source=fddd:2c4:2c4:2c4::/64
firewall-cmd --permanent --zone=trusted --remove-source=fddd:2c4:2c4:2c4::/64
firewall-cmd --direct --remove-rule ipv6 nat POSTROUTING 0 -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to "$ip6"
firewall-cmd --permanent --direct --remove-rule ipv6 nat POSTROUTING 0 -s fddd:2c4:2c4:2c4::/64 ! -d fddd:2c4:2c4:2c4::/64 -j SNAT --to "$ip6"
fi
else
systemctl disable --now wg-iptables.service
rm -f /etc/systemd/system/wg-iptables.service
fi
			systemctl disable --now wg-quick@wg0.service
			rm -f /etc/systemd/system/wg-quick@wg0.service.d/boringtun.conf
			rm -f /etc/sysctl.d/99-wireguard-forward.conf
			# Different packages were installed if the system was containerized or not
			if [[ ! "$is_container" -eq 0 ]]; then
				# Bare metal / VM: the kernel module packages were installed
				if [[ "$os" == "ubuntu" ]]; then
					# Ubuntu
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard wireguard-tools
				elif [[ "$os" == "debian" && "$os_version" -ge 11 ]]; then
					# Debian 11 or higher
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard wireguard-tools
				elif [[ "$os" == "debian" && "$os_version" -eq 10 ]]; then
					# Debian 10
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard wireguard-dkms wireguard-tools
				elif [[ "$os" == "centos" && "$os_version" -eq 8 ]]; then
					# CentOS 8
					dnf remove -y kmod-wireguard wireguard-tools
					rm -rf /etc/wireguard/
				elif [[ "$os" == "centos" && "$os_version" -eq 7 ]]; then
					# CentOS 7
					yum remove -y kmod-wireguard wireguard-tools
					rm -rf /etc/wireguard/
				elif [[ "$os" == "fedora" ]]; then
					# Fedora
					dnf remove -y wireguard-tools
					rm -rf /etc/wireguard/
				fi
			else
				# Container: boringtun (userspace) was used; drop its upgrade cron job
				{ crontab -l 2>/dev/null | grep -v '/usr/local/sbin/boringtun-upgrade' ; } | crontab -
				if [[ "$os" == "ubuntu" ]]; then
					# Ubuntu
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard-tools
				elif [[ "$os" == "debian" && "$os_version" -ge 11 ]]; then
					# Debian 11 or higher
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard-tools
				elif [[ "$os" == "debian" && "$os_version" -eq 10 ]]; then
					# Debian 10
					rm -rf /etc/wireguard/
					apt-get remove --purge -y wireguard-tools
				elif [[ "$os" == "centos" && "$os_version" -eq 8 ]]; then
					# CentOS 8
					dnf remove -y wireguard-tools
					rm -rf /etc/wireguard/
				elif [[ "$os" == "centos" && "$os_version" -eq 7 ]]; then
					# CentOS 7
					yum remove -y wireguard-tools
					rm -rf /etc/wireguard/
				elif [[ "$os" == "fedora" ]]; then
					# Fedora
					dnf remove -y wireguard-tools
					rm -rf /etc/wireguard/
				fi
				rm -f /usr/local/sbin/boringtun /usr/local/sbin/boringtun-upgrade
			fi
			echo
			echo "WireGuard removed!"
		else
			echo
			echo "WireGuard removal aborted!"
		fi
		exit
	;;
	4)
		exit
	;;
esac
fi
|
<gh_stars>0
// Rewrites a webpack module resource path into a source-map URL:
//  - webpack-internal modules keep a webpack:/// scheme,
//  - scoped packages ("@scope/pkg/...") become "scope:///pkg/...",
//  - everything else is made relative with a "../" prefix.
var splitSourceMaps = (info) => {
  const resource = info.resourcePath;
  if (resource.startsWith('webpack')) {
    return `webpack:///${resource}`;
  }
  const atIndex = resource.indexOf('@');
  if (atIndex > 0) {
    const scoped = resource.substring(atIndex);   // "@scope/pkg/file"
    const slashIndex = scoped.indexOf('/');
    const scope = scoped.substring(1, slashIndex); // drop the leading "@"
    const rest = scoped.substring(slashIndex);     // starts with "/"
    return `${scope}://${rest}`;
  }
  return '../'.concat(resource);
}
// Webpack build configuration for the "02-cube" demo: compiles the TS entry
// point with ts-loader and emits an external source map.
module.exports = {
  entry: './02-cube/main.ts',
  output: {
    filename: '02-cube/dist/main.js'
  },
  devtool: "source-map",
  resolve: {
    // Allow imports without extensions for both TS and plain JS modules.
    extensions: ['.ts', '.js']
  },
  module: {
    // NOTE(review): `loaders` is webpack 1.x syntax; webpack 2+ expects
    // `rules` (with `use`) — confirm which webpack version this targets.
    loaders: [
      {test: /\.ts$/, exclude: /node_modules/, loader: "ts-loader"}
    ]
  }
};
|
import Foundation

// Redux-style aliases used by the store below.
typealias AppState = [String: Any]    // whole application state, keyed by slice name
typealias State = Any                 // an individual (untyped) state slice
typealias EmptyFunction = () -> Void  // zero-argument side-effecting callback
/**
 * Composes single-argument functions from right to left.
 *
 * - Parameter funks: functions to compose.
 *
 * - Returns: A function obtained by composing the given functions from
 *   right to left; with no arguments it behaves as the identity function.
 */
func compose<T>(_ funks: ((T) -> T)...) -> (T) -> T {
    return { input in
        // Fold over the reversed list so the right-most function runs first.
        funks.reversed().reduce(input) { value, funk in funk(value) }
    }
}
// Example usage
func addOne(_ x: Int) -> Int {
return x + 1
}
func multiplyByTwo(_ x: Int) -> Int {
return x * 2
}
let composed = compose(multiplyByTwo, addOne)
let result = composed(3) // result is 7
|
package strategies
import (
"context"
"errors"
"reflect"
"testing"
"github.com/1pkg/gopium/collections"
"github.com/1pkg/gopium/gopium"
"github.com/1pkg/gopium/tests/mocks"
)
// TestPad is a table-driven test for the pad strategies (padsys / padtnat):
// each case runs a gopium.Struct through pad.Apply under a mocked curator
// alignment (mocks.Maven{SAlign: ...}) and compares the padded result and
// the returned error against expectations.
func TestPad(t *testing.T) {
	// prepare: a pre-canceled context for the cancellation case below.
	cctx, cancel := context.WithCancel(context.Background())
	cancel()
	table := map[string]struct {
		pad pad             // strategy under test
		c   gopium.Curator  // supplies the system alignment
		ctx context.Context // execution context (possibly canceled)
		o   gopium.Struct   // input struct
		r   gopium.Struct   // expected padded struct
		err error           // expected error
	}{
		"empty struct should be applied to empty struct": {
			pad: padsys,
			c:   mocks.Maven{SAlign: 16},
			ctx: context.Background(),
		},
		"non empty struct should be applied to expected aligned struct": {
			pad: padsys,
			c:   mocks.Maven{SAlign: 6},
			ctx: context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Size: 8,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test",
						Size: 8,
					},
					collections.PadField(4),
				},
			},
		},
		// NOTE(review): this case expects BOTH a padded result and
		// context.Canceled — presumably padding happens before the ctx
		// check in Apply; confirm against the strategy implementation.
		"non empty struct should be applied to expected aligned struct on canceled context": {
			pad: padtnat,
			c:   mocks.Maven{SAlign: 12},
			ctx: cctx,
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:  "test",
						Size:  8,
						Align: 5,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:  "test",
						Size:  8,
						Align: 5,
					},
					collections.PadField(2),
				},
			},
			err: context.Canceled,
		},
		"mixed struct should be applied to expected aligned struct on type natural pad": {
			pad: padtnat,
			c:   mocks.Maven{SAlign: 24},
			ctx: context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:  "test4",
						Size:  3,
						Align: 1,
					},
					{
						Name:  "test1",
						Size:  32,
						Align: 4,
					},
					{
						Name:  "test2",
						Size:  6,
						Align: 6,
					},
					{
						Name:  "test3",
						Size:  8,
						Align: 8,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name:  "test4",
						Size:  3,
						Align: 1,
					},
					collections.PadField(1),
					{
						Name:  "test1",
						Size:  32,
						Align: 4,
					},
					{
						Name:  "test2",
						Size:  6,
						Align: 6,
					},
					collections.PadField(6),
					{
						Name:  "test3",
						Size:  8,
						Align: 8,
					},
				},
			},
		},
		"mixed struct should be applied to expected aligned on field sys pad": {
			pad: padsys,
			c:   mocks.Maven{SAlign: 9},
			ctx: context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 9,
					},
					{
						Name: "test2",
						Size: 7,
					},
					{
						Name: "test3",
						Size: 5,
					},
					{
						Name: "test4",
						Size: 3,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 9,
					},
					{
						Name: "test2",
						Size: 7,
					},
					collections.PadField(2),
					{
						Name: "test3",
						Size: 5,
					},
					collections.PadField(4),
					{
						Name: "test4",
						Size: 3,
					},
					collections.PadField(6),
				},
			},
		},
		"mixed struct should be applied to expected aligned on big sys pad": {
			pad: padsys,
			c:   mocks.Maven{SAlign: 12},
			ctx: context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 9,
					},
					{
						Name: "test2",
						Size: 7,
					},
					{
						Name: "test3",
						Size: 5,
					},
					{
						Name: "test4",
						Size: 3,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 9,
					},
					collections.PadField(3),
					{
						Name: "test2",
						Size: 7,
					},
					collections.PadField(5),
					{
						Name: "test3",
						Size: 5,
					},
					collections.PadField(7),
					{
						Name: "test4",
						Size: 3,
					},
					collections.PadField(9),
				},
			},
		},
		"mixed struct should be applied to expected aligned no additional aligment": {
			pad: padsys,
			c:   mocks.Maven{SAlign: 4},
			ctx: context.Background(),
			o: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 24,
					},
					{
						Name: "test2",
						Size: 12,
					},
					{
						Name: "test3",
						Size: 36,
					},
				},
			},
			r: gopium.Struct{
				Name: "test",
				Fields: []gopium.Field{
					{
						Name: "test1",
						Size: 24,
					},
					{
						Name: "test2",
						Size: 12,
					},
					{
						Name: "test3",
						Size: 36,
					},
				},
			},
		},
	}
	for name, tcase := range table {
		t.Run(name, func(t *testing.T) {
			// prepare: bind the mocked curator to the strategy
			pad := tcase.pad.Curator(tcase.c)
			// exec
			r, err := pad.Apply(tcase.ctx, tcase.o)
			// check: deep-compare the result and unwrap-compare the error
			if !reflect.DeepEqual(r, tcase.r) {
				t.Errorf("actual %v doesn't equal to expected %v", r, tcase.r)
			}
			if !errors.Is(err, tcase.err) {
				t.Errorf("actual %v doesn't equal to expected %v", err, tcase.err)
			}
		})
	}
}
|
import gql from 'graphql-tag';
// GraphQL mutation document: registers an App Store (in-app purchase)
// subscription and selects the success flag plus the subscription's
// active state from the response payload.
export default gql`
  mutation RegisterAppStoreSubscription(
    $input: RegisterAppStoreSubscriptionInput!
  ) {
    registerAppStoreSubscription(input: $input) {
      success
      subscription {
        isActive
      }
    }
  }
`;
|
#!/usr/bin/bash
# Run the wasm-pack browser test-suite in every combination of
# browser (firefox, chrome) and build profile (debug, release),
# treating all Rust warnings as errors.
#
# -e: fail fast on the first failing command
# -x: print each command before it is executed
set -ex

export RUSTFLAGS="-D warnings"

# Debug profile first, then release — same order as before.
for profile in "" "--release"; do
    wasm-pack test --firefox --headless -- --all-features $profile
    wasm-pack test --chrome --headless -- --all-features $profile
done
|
#!/bin/bash
#=================================================
# Description: DIY script
# Lisence: MIT
# Author: P3TERX
# Blog: https://p3terx.com
#=================================================
# Modify default IP
sed -i 's/192.168.1.1/192.168.0.1/g' package/base-files/files/bin/config_generate
# Register the Lienol package feed, then refresh and install all feeds
echo "src-git lienol https://github.com/Lienol/openwrt-package" >> feeds.conf.default
./scripts/feeds update -a
./scripts/feeds install -a
# Remove unused packages
#rm -rf k3screenctrl package/lean
# Add extra packages
#git clone https://github.com/jefferymvp/luci-app-koolproxyR package/luci-app-koolproxyR
#git clone https://github.com/yangsongli/luci-theme-atmaterial.git package/luci-theme-atmaterial
git clone https://github.com/pymumu/luci-app-smartdns.git package/luci-app-smartdns
git clone https://github.com/littletao08/luci-app-eqos.git package/luci-app-eqos
#git clone https://github.com/hong0980/luci-app-passwall.git package/luci-app-passwall
#git clone https://github.com/pexcn/openwrt-chinadns-ng.git package/chinadns-ng
#git clone https://github.com/kenzok8/openwrt-packages.git package/ken
|
<filename>grpc-kit/server.go
package main
import (
"context"
"flag"
"fmt"
"net"
"time"
"github.com/yunfeiyang1916/micro-go-course/grpc-kit/pb"
"google.golang.org/grpc"
"golang.org/x/time/rate"
"github.com/yunfeiyang1916/micro-go-course/grpc-kit/user"
)
// main wires the user service into a rate-limited go-kit endpoint, exposes
// it over gRPC on 127.0.0.1:1234 and blocks serving requests.
func main() {
	flag.Parse()

	var (
		//logger = log.NewLogfmtLogger(os.Stderr)
		//logger = log.With(logger, "ts", log.DefaultTimestampUTC)
		//logger = log.With(logger, "caller", log.DefaultCaller)
		ctx = context.Background()
		// The service implementation.
		svc = user.UserServiceImpl{}
		// Build the endpoint around the service.
		endpoint = user.MakeUserEndpoint(svc)
		// Token-bucket rate limiter: bucket of 100, refilled every second.
		ratebucket = rate.NewLimiter(rate.Every(time.Second*1), 100)
	)
	endpoint = user.NewTokenBucketLimitterWithBuildIn(ratebucket)(endpoint)
	endpts := user.Endpoints{
		UserEndpoint: endpoint,
	}
	// Wrap the endpoints in the gRPC transport (UserServiceServer).
	handler := user.NewUserServer(ctx, endpts)
	// Listen, build the gRPC server and register the RPC service.
	ls, err := net.Listen("tcp", "127.0.0.1:1234")
	if err != nil {
		fmt.Println("Listen error:", err)
		return
	}
	grpcServer := grpc.NewServer()
	pb.RegisterUserServiceServer(grpcServer, handler)
	// BUGFIX: Serve's error was silently discarded; report it so startup
	// failures (e.g. listener closed) are visible.
	if err := grpcServer.Serve(ls); err != nil {
		fmt.Println("Serve error:", err)
	}
}
|
#!/bin/sh
# Container entrypoint: make sure the backend binary is executable,
# then run it in the foreground as PID of this shell's child.
chmod +x /opt/scoping-tool-backend
/opt/scoping-tool-backend
|
module Things
  # Things::Project — record wrapper for a Things.app "project" scripting object.
  class Project < Reference::Record
    properties :name
    # identifier is required for creation
    identifier :project
    # collection is used for findings
    collection :projects

    class << self
      # All projects known to the running Things.app instance.
      def all
        convert(Things::App.instance.projects.get)
      end
    end

    # The project's to-dos, converted into Things::Todo records.
    def todos
      Things::Todo.convert(reference.todos.get)
    end
  end
end
|
from random import randrange
import Others.City_Manager as cm
class Greedy:
    """Nearest-neighbour (greedy) TSP tour builder over City_Manager's cities."""

    def __init__(self):
        # Start from a random city; it is immediately part of the route and
        # removed from the pool of cities still to visit.
        self.startCity: int = randrange(cm.getLength())
        self.possibleCities = list(range(cm.getLength()))
        self.route = [self.startCity]
        self.distanceTravelled = 0.0
        self.currentCity = self.startCity
        self.possibleCities.remove(self.currentCity)

    def selectCity(self):
        # Pick the unvisited city closest to the current one
        # (first minimum wins on ties, like the original scan).
        best = self.possibleCities[0]
        bestDistance = cm.getDistance(self.currentCity, best)
        for candidate in self.possibleCities:
            candidateDistance = cm.getDistance(self.currentCity, candidate)
            if candidateDistance < bestDistance:
                best, bestDistance = candidate, candidateDistance
        return best

    def findSolution(self):
        # Greedily extend the route until every city has been visited.
        while self.possibleCities:
            chosen = self.selectCity()
            self.route.append(chosen)
            self.possibleCities.remove(chosen)
            self.distanceTravelled += cm.getDistance(self.currentCity, chosen)
            self.currentCity = chosen
        # Close the tour by returning to the starting city.
        self.distanceTravelled += cm.getDistance(self.currentCity, self.startCity)

    def getRoute(self):
        """Visit order, starting with the randomly chosen start city."""
        return self.route

    def getDistance(self):
        """Total tour length, including the leg back to the start."""
        return self.distanceTravelled
|
import React, { FunctionComponent } from "react"
// Props for the French-flag icon.
interface FlagFranceProps {
  selected?: boolean    // when false the flag is rendered greyscale
  onClick?: () => void  // optional click handler (e.g. locale switch)
}
// 16x16 circular French-flag SVG; greyscaled via a CSS filter when not selected.
export const FlagFrance: FunctionComponent<FlagFranceProps> = ({ selected = true, onClick }) => {
  return (
    <svg
      onClick={onClick}
      x="0px"
      y="0px"
      viewBox="0 0 512 512"
      width="16px"
      height="16px"
      style={{ filter: selected ? "grayscale(0)" : "grayscale(1)" }}
    >
      {/* white disc background (center band of the flag) */}
      <circle style={{ fill: "#F0F0F0" }} cx="256" cy="256" r="256" />
      {/* red right third */}
      <path
        style={{ fill: "#D80027" }}
        d="M512,256c0-110.071-69.472-203.906-166.957-240.077v480.155C442.528,459.906,512,366.071,512,256z"
      />
      {/* blue left third */}
      <path
        style={{ fill: "#0052B4" }}
        d="M0,256c0,110.071,69.473,203.906,166.957,240.077V15.923C69.473,52.094,0,145.929,0,256z"
      />
    </svg>
  )
}
|
# frozen_string_literal: true
module OpenApi
  # Parses `rails routes` output into a lookup of controller => actions/paths,
  # used when generating OpenAPI path items.
  module Router
    module_function

    # Raw routes table: read from the configured dump file when present,
    # otherwise produced in-process via Rails' routes inspector.
    def routes
      @routes ||=
        if (file = Config.rails_routes_file)
          File.read(file)
        else
          # :nocov:
          # ref https://github.com/rails/rails/blob/master/railties/lib/rails/tasks/routes.rake
          require './config/routes'
          all_routes = Rails.application.routes.routes
          require 'action_dispatch/routing/inspector'
          inspector = ActionDispatch::Routing::RoutesInspector.new(all_routes)
          # Rails 6 renamed the console formatter class.
          if Rails::VERSION::MAJOR < 6
            inspector.format(ActionDispatch::Routing::ConsoleFormatter.new, nil)
          else
            inspector.format(ActionDispatch::Routing::ConsoleFormatter::Sheet.new)
          end
          # :nocov:
        end
    end

    # Routes grouped by controller path; each entry carries the HTTP verb,
    # the OpenAPI-style path ("{id}" instead of ":id") and "controller#action".
    # drop(1) skips the header row; malformed lines are skipped via `rescue next`.
    def routes_list
      @routes_list ||= routes.split("\n").drop(1).map do |line|
        next unless line['#']
        infos = line.match(/[A-Z|].*/).to_s.split(' ') # => [GET, /api/v1/examples/:id, api/v1/examples#index]
        {
          http_verb: infos[0].downcase, # => "get" / "get|post"
          # [0..-11] strips the trailing "(.:format)" suffix (10 characters).
          path: infos[1][0..-11].split('/').map do |item|
            item[':'] ? "{#{item[1..-1]}}" : item
          end.join('/'), # => "/api/v1/examples/{id}"
          action_path: infos[2] # => "api/v1/examples#index"
        } rescue next
      end.compact.group_by { |api| api[:action_path].split('#').first } # => { "api/v1/examples" => [..] }, group by paths
    end

    # Action names (e.g. ["index", "show"]) routed to the given controller path.
    def get_actions_by_route_base(route_base)
      routes_list[route_base]&.map { |action_info| action_info[:action_path].split('#').last }
    end

    # First [path, http_verb] pair for the controller/action, or nil when absent.
    def find_path_httpverb_by(route_base, action)
      routes_list[route_base]&.map do |action_info|
        if action_info[:action_path].split('#').last == action.to_s
          return [ action_info[:path], action_info[:http_verb].split('|').first ]
        end
      end ; nil
    end
  end
end
|
#!/usr/bin/env bash
# Wrapper around px_uploader.py: picks platform-appropriate candidate serial
# ports and uploads the firmware file passed as $1.
EXEDIR=`pwd`
BASEDIR=$(dirname $0)
SYSTYPE=`uname -s`
#
# Serial port defaults.
#
# XXX The uploader should be smarter than this.
#
if [ $SYSTYPE = "Darwin" ];
then
	SERIAL_PORTS="/dev/tty.usbmodemPX*,/dev/tty.usbmodem*"
fi
if [ $SYSTYPE = "Linux" ];
then
	SERIAL_PORTS="/dev/serial/by-id/usb-3D_Robotics*,/dev/serial/by-id/usb-The_Autopilot*"
fi
# NOTE(review): `uname -s` never produces an empty string in practice, so this
# Windows COM-port fallback branch looks unreachable from this check — confirm.
if [ $SYSTYPE = "" ];
then
	SERIAL_PORTS="COM32,COM31,COM30,COM29,COM28,COM27,COM26,COM25,COM24,COM23,COM22,COM21,COM20,COM19,COM18,COM17,COM16,COM15,COM14,COM13,COM12,COM11,COM10,COM9,COM8,COM7,COM6,COM5,COM4,COM3,COM2,COM1,COM0"
fi
python $BASEDIR/px_uploader.py --port $SERIAL_PORTS $1
|
#!/bin/bash
# inputs: [$1:BROKER_IP] [$2:BROKER_PORT] [$3:BROKER_CHANNEL] #
# [$4:BROKER_USER] [$5:BROKER_USER_PASSWORD] #
# [$6:KEY_PASSWORD] #
# 1# recieve script's parameters and initialize magic variables
# 2# create file strings
# 3# create user
# 4# make app user sudoer only for /bin/java
# 5# create app structure
# 6# create and copy binary files
# 7# move service files to system.d
# 8# copy ca, client crt and key
# 9# echo user credentials into file
#10# enable services
# cmds — absolute paths so the script does not depend on the caller's PATH
OPENSSL="/usr/bin/openssl"
SUDO="/usr/bin/sudo"
REMOVE="/bin/rm"
ECHO="/bin/echo"
COPY="/bin/cp"
MKDIR="/bin/mkdir"
TOUCH="/bin/touch"
SHC="/usr/bin/shc"
MOVE="/bin/mv"
CHMOD="/bin/chmod"
CHOWN="/bin/chown"
CHGRP="/bin/chgrp"
GIT="/usr/bin/git"
JAVA="/usr/bin/java"
MOTION="/usr/bin/motion"
USERMOD="/usr/sbin/usermod"
USERADD="/usr/sbin/useradd"
### recieve script's parameters and initialize magic variables
# progress notifications
PROGRESS_START="Installing dirPic!"
PROGRESS_END="dirPic installed - check output above for errors!"
PROGRESS_LIMITER=" "
PROGRESS_NOTIFICATION_CREATE_FILE_STRINGS="Binding services' starting file contents! Errors:"
PROGRESS_NOTIFICATION_CREATE_USER="Creating user! Errors:"
PROGRESS_NOTIFICATION_CREATE_SUDOERS_ENTRY="Creating entry in sudoers file! Errors:"
PROGRESS_NOTIFICATION_CREATE_DIRECTORIES="Creating structural directories! Errors:"
PROGRESS_NOTIFICATION_CLONE_GIT_REPOSITORIES="Cloning subscriber's and publisher's git repositories:"
PROGRESS_NOTIFICATION_CREATE_BINARIES="Creating services' binary files! Errors:"
PROGRESS_NOTIFICATION_MOVE_FILES="Moving all relevant key, certificate, config and binary files! Errors:"
PROGRESS_NOTIFICATION_GIVE_PRIVS_TO_APP_USER="Giving application's folder to application's user! Errors:"
PROGRESS_NOTIFICATION_ECHO_USER_CREDENTIALS="Creating file with application's user credentials (in /home/dirpic/encrypt)! Errors:"
# app user information
APP_USER="dirpic"
# 16 random alphanumeric characters, hashed with SHA-crypt for useradd -p
APP_USER_PASSWORD_PLAIN_TEXT=$($OPENSSL rand 1000 | strings | grep -io [[:alnum:]] | head -n 16 | tr -d '\n')
APP_USER_PASSWORD_SHA256_HASH=$($OPENSSL passwd -5 "$APP_USER_PASSWORD_PLAIN_TEXT")
APP_USER_PRIV_SUDOERS_STRING="dirpic ALL=(ALL) NOPASSWD:/usr/bin/java, $MOTION"
# create new als keys
# NOTE(review): relative path — assumes the script is run from the repo root
# so that bin/generateAlsKeys.jar resolves; confirm.
alsKeys=($(sudo java -jar bin/generateAlsKeys.jar))
# directories
APP_USER_HOME_DIRECTORY="/home/$APP_USER/"
APP_ENV_ROOT_DIRECTORY=$APP_USER_HOME_DIRECTORY"root/"
APP_ENV_DIRECTORY=$APP_USER_HOME_DIRECTORY"env/"
APP_TMP_DIRECTORY=$APP_USER_HOME_DIRECTORY"tmp/"
APP_RUNTIME_DIRECTORY=$APP_USER_HOME_DIRECTORY"runtime/"
APP_KEYSTORE_DIRECTORY=$APP_USER_HOME_DIRECTORY"keystores/"
APP_BINARY_DIRECTORY=$APP_USER_HOME_DIRECTORY"binaries/"
APP_CAMERA_DIRECTORY=$APP_USER_HOME_DIRECTORY"camera/"
APP_STORAGE_DIRECTORY=$APP_USER_HOME_DIRECTORY"storage/"
# files and hyper links
MOTION_CONFIG="bin/config/motion.conf"
GIT_BINARY_SUBSCRIBER_LINK="https://github.com/shooty215/dirPicSubscriber.git"
GIT_BINARY_PUBLISHER_LINK="https://github.com/shooty215/dirPicPublisher.git"
GIT_BINARY_SUBSCRIBER=$APP_USER_HOME_DIRECTORY"dirPicSubscriber/jars/dirPicSubscriber.jar"
GIT_BINARY_PUBLISHER=$APP_USER_HOME_DIRECTORY"dirPicPublisher/jars/dirPicPublisher.jar"
GIT_BINARY_SUBSCRIBER_SERVICE=$APP_USER_HOME_DIRECTORY"dirPicSubscriber/service/dirpicsubscriber.service"
GIT_BINARY_PUBLISHER_SERVICE=$APP_USER_HOME_DIRECTORY"dirPicPublisher/service/dirpicpublisher.service"
APP_BINARY_SUBSCRIBER=$APP_BINARY_DIRECTORY"dirPicSubscriber.jar"
APP_BINARY_PUBLISHER=$APP_BINARY_DIRECTORY"dirPicPublisher.jar"
APP_BINARY_SUBSCRIBER_START=$APP_BINARY_DIRECTORY"dirPicSubscriber.sh"
APP_BINARY_PUBLISHER_START=$APP_BINARY_DIRECTORY"dirPicPublisher.sh"
APP_BINARY_SUBSCRIBER_START_ACTUAL=$APP_USER_HOME_DIRECTORY"dirPicSubscriber.sh"
APP_BINARY_PUBLISHER_START_ACTUAL=$APP_USER_HOME_DIRECTORY"dirPicPublisher.sh"
APP_BINARY_SUBSCRIBER_START_FILENAME="dirpicsubscriber"
APP_BINARY_PUBLISHER_START_FILENAME="dirpicpublisher"
APP_BINARY_SUBSCRIBER_SERVICE=$APP_USER_HOME_DIRECTORY"dirPicSubscriber/service/dirpicsubscriber.service"
APP_BINARY_PUBLISHER_SERVICE=$APP_USER_HOME_DIRECTORY"dirPicPublisher/service/dirpicpublisher.service"
APP_JSON_PROPERTIES_NAME="properties.json"
APP_JSON_PROPERTIES_PATH=$APP_USER_HOME_DIRECTORY$APP_JSON_PROPERTIES_NAME
SERVICE_FILES_DIRECTORY="/etc/systemd/system/"
# broker information
BROKER_IP=$1
BROKER_PORT=$2
BROKER_CHANNEL=$3
BROKER_USER=$4
BROKER_USER_PASSWORD=$5
# ca password
#CA_PASSWORD=$6
# aes key password
#AES_KEY_PWD=$7
# rsa keys password
#RSA_KEY_PWD=$8
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_START
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CREATE_FILE_STRINGS
### create file strings
# Start-script templates for the two services (later compiled with shc).
APP_BINARY_SUBSCRIBER_START_SCRIPT="
#!/bin/bash\n
$SUDO $JAVA -jar $APP_BINARY_SUBSCRIBER $APP_JSON_PROPERTIES_PATH\n
"
APP_BINARY_PUBLISHER_START_SCRIPT="
#!/bin/bash\n
$SUDO $MOTION -c /home/dirpic/motion.conf\n
$SUDO $JAVA -jar $APP_BINARY_PUBLISHER $APP_JSON_PROPERTIES_PATH\n
"
# BUGFIX: the generated properties.json had a trailing comma after the
# "aesKey" entry, which is invalid JSON and breaks strict parsers.
# NOTE(review): "storagePath" is populated from $BROKER_IP while
# $APP_STORAGE_DIRECTORY exists and looks like the intended value — confirm.
APP_JSON_PROPERTIES_SCRIPT='{
"brokerIp": "'$BROKER_IP'",
"brokerPort": "'$BROKER_PORT'",
"channelName": "'$BROKER_CHANNEL'",
"cameraPath": "'$APP_CAMERA_DIRECTORY'",
"storagePath": "'$BROKER_IP'",
"keyStorePath": "'$APP_KEYSTORE_DIRECTORY'",
"brokerAuthUser": "'$BROKER_USER'",
"rsaPublicKey": "'${alsKeys[0]}'",
"rsaPrivateKey": "'${alsKeys[1]}'",
"aesKey": "'${alsKeys[2]}'"
}
'
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CREATE_USER
### create user — system account with the generated password hash and a fixed home
$SUDO $USERADD -p $APP_USER_PASSWORD_SHA256_HASH $APP_USER -r -d $APP_USER_HOME_DIRECTORY
#$SUDO /usr/sbin/useradd -r -m
### make app user sudoer only for /bin/java
# add app user to sudo group in /etc/group
$SUDO $USERMOD -a -G sudo $APP_USER
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CREATE_SUDOERS_ENTRY
# modify app user's sudo privs, restricting it to only use /bin/java in sudo context
# BUGFIX: `sudo echo ... >> /etc/sudoers` applies sudo to echo only — the
# redirection runs as the invoking user and fails when not root. Pipe into
# `tee -a` under sudo so the append itself is privileged; quote the string
# so its whitespace survives intact.
$ECHO "$APP_USER_PRIV_SUDOERS_STRING" | $SUDO /usr/bin/tee -a /etc/sudoers > /dev/null
### create app structure
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CREATE_DIRECTORIES
# create directories
$SUDO $MKDIR $APP_USER_HOME_DIRECTORY
#$SUDO $MKDIR $APP_RUNTIME_DIRECTORY
$SUDO $MKDIR $APP_BINARY_DIRECTORY
$SUDO $MKDIR $APP_KEYSTORE_DIRECTORY
$SUDO $MKDIR $APP_CAMERA_DIRECTORY
$SUDO $MKDIR $APP_STORAGE_DIRECTORY
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CLONE_GIT_REPOSITORIES
$ECHO $PROGRESS_LIMITER
## clone git repositories (subscriber + publisher) into the app user's home
$SUDO $GIT -C $APP_USER_HOME_DIRECTORY clone $GIT_BINARY_PUBLISHER_LINK
$SUDO $GIT -C $APP_USER_HOME_DIRECTORY clone $GIT_BINARY_SUBSCRIBER_LINK
## copy binary files from git repository to app directory
$SUDO $COPY $GIT_BINARY_SUBSCRIBER $APP_BINARY_DIRECTORY
$SUDO $COPY $GIT_BINARY_PUBLISHER $APP_BINARY_DIRECTORY
$SUDO $COPY $GIT_BINARY_SUBSCRIBER_SERVICE $APP_BINARY_DIRECTORY
$SUDO $COPY $GIT_BINARY_PUBLISHER_SERVICE $APP_BINARY_DIRECTORY
### create and copy binary files
# progress notification
$ECHO $PROGRESS_LIMITER
$ECHO $PROGRESS_NOTIFICATION_CREATE_BINARIES
$ECHO $PROGRESS_LIMITER
# create file not needed due to > operators functionality (creates the file)
$SUDO $TOUCH $APP_BINARY_SUBSCRIBER_START
$SUDO $TOUCH $APP_BINARY_PUBLISHER_START
$SUDO $TOUCH $APP_JSON_PROPERTIES_PATH
# load file — write the generated start scripts and properties.json
# NOTE(review): like the sudoers append, these `sudo echo ... > file`
# redirections run unprivileged; they only work when the whole script runs
# as root — confirm the intended invocation.
$SUDO $ECHO -e $APP_BINARY_PUBLISHER_START_SCRIPT > $APP_BINARY_PUBLISHER_START
$SUDO $ECHO -e $APP_BINARY_SUBSCRIBER_START_SCRIPT > $APP_BINARY_SUBSCRIBER_START
$SUDO $ECHO -e $APP_JSON_PROPERTIES_SCRIPT > $APP_JSON_PROPERTIES_PATH
# turn shell files into binaries (shc compiles the scripts)
$SUDO $SHC -f $APP_BINARY_PUBLISHER_START -o $APP_BINARY_PUBLISHER_START_ACTUAL
$SUDO $SHC -f $APP_BINARY_SUBSCRIBER_START -o $APP_BINARY_SUBSCRIBER_START_ACTUAL
# progress notification
$ECHO $PROGRESS_NOTIFICATION_MOVE_FILES
$ECHO $PROGRESS_LIMITER
# move binary shell files to /usr/bin/
$SUDO $MOVE $APP_BINARY_PUBLISHER_START_ACTUAL /usr/bin/${APP_BINARY_PUBLISHER_START_FILENAME}
$SUDO $MOVE $APP_BINARY_SUBSCRIBER_START_ACTUAL /usr/bin/${APP_BINARY_SUBSCRIBER_START_FILENAME}
### move service files to system.d
$SUDO $COPY $APP_BINARY_SUBSCRIBER_SERVICE $SERVICE_FILES_DIRECTORY
$SUDO $COPY $APP_BINARY_PUBLISHER_SERVICE $SERVICE_FILES_DIRECTORY
# copy pem files to their destination (TLS server/client certs + client key)
$SUDO $COPY bin/deployables/serverCrt.pem $APP_KEYSTORE_DIRECTORY"tls_server_crt.pem"
$SUDO $COPY bin/deployables/clientCrt.pem $APP_KEYSTORE_DIRECTORY"tls_client_crt.pem"
$SUDO $COPY bin/deployables/clientKey.pem $APP_KEYSTORE_DIRECTORY"tls_client_private_key.pem"
# copy motion config
$SUDO $COPY $MOTION_CONFIG /home/dirpic/motion.conf
### set privs, ownership and group of app user's home directory, the service and the binary files
# maybe not the service and the binary files
# home directory
# progress notification
$ECHO $PROGRESS_NOTIFICATION_GIVE_PRIVS_TO_APP_USER
$ECHO $PROGRESS_LIMITER
$SUDO $CHMOD -R 750 $APP_USER_HOME_DIRECTORY
# BUGFIX: "$CHOWN-R" expanded to the nonexistent command "/bin/chown-R", so
# the application user never received ownership of its home directory; a
# space is required between the command and its -R flag.
$SUDO $CHOWN -R dirpic $APP_USER_HOME_DIRECTORY
$SUDO $CHGRP -R dirpic $APP_USER_HOME_DIRECTORY
### echo user password into file
# progress notification
$ECHO $PROGRESS_NOTIFICATION_ECHO_USER_CREDENTIALS
$ECHO $PROGRESS_LIMITER
# NOTE(review): plaintext password on disk; also this redirection runs as the
# invoking user (see sudoers note above) — confirm the script runs as root.
$SUDO $ECHO $APP_USER_PASSWORD_PLAIN_TEXT'|:::::|'$APP_USER_PASSWORD_SHA256_HASH > $APP_USER_HOME_DIRECTORY'encrypt'
# progress notification
$ECHO $PROGRESS_END
$ECHO $PROGRESS_LIMITER
|
<filename>node_modules/react-icons-kit/iconic/stepBackward.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.stepBackward = void 0;
var stepBackward = {
"viewBox": "0 0 8 8",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0v6h2v-6h-2zm2 3l5 3v-6l-5 3z",
"transform": "translate(0 1)"
}
}]
};
exports.stepBackward = stepBackward;
|
// Employee/person record as exchanged with the backend API.
// Field names mirror the server payload and must not be renamed here.
export interface Person {
  _id: number;              // database identifier
  firstName: string;
  lastName: string;
  fullName: string;         // precomposed display name
  email: string;
  department: string;
  dui: string;              // Salvadoran identity document number
  nit: string;              // Salvadoran tax identification number
  cellphone: string;
  telephone: string;
  birthDate: string;        // date serialized as string — format not visible here
  gender: string;
  lenguages: string;        // NOTE(review): likely a typo for "languages"; renaming would break API consumers
  address: string;
  emergencyContact: string;
  licenceNumber: string;
  hireOn: string;           // hire date serialized as string
}
|
import datetime

# Capture the current local timestamp once, then echo it to stdout.
now = datetime.datetime.now()
print('Current date and time: ', now)
|
<gh_stars>0
numbers = [1, 2, 3, 4, 5, 6]  # kept as-is: defined but unused below

# Print every ordered pair (i, j) with i, j in 1..3, in row-major order.
pairs = ((first, second) for first in range(1, 4) for second in range(1, 4))
for number1, number2 in pairs:
    print(f'Pervoe 4islo= {number1}, Vtoroe 4islo = {number2}')
|
<reponame>thepanlab/Endoscopic_OCT_Epidural<gh_stars>0
import sys
import sklearn
from sklearn.model_selection import train_test_split
from scipy.stats import sem
import tensorflow as tf
from tensorflow import keras
from keras.models import load_model
import numpy as np
import os
import pandas as pd
import time
import pickle
'''
Callbacks
- TimeHistory,
- PrintValTrainRatioCallback,
- LossAndErrorPrintingCallback,
- EarlyStopping (from keras)
'''
class TimeHistory(keras.callbacks.Callback):
    """Keras callback that records each epoch's wall-clock duration (seconds) in self.times."""

    # NOTE(review): mutable default `logs={}` is harmless here (never mutated)
    # but non-idiomatic; Keras passes logs explicitly anyway.
    def on_train_begin(self, logs={}):
        self.times = []

    def on_epoch_begin(self, epoch, logs={}):
        # Timestamp taken at epoch start; consumed in on_epoch_end.
        self.epoch_time_start = time.time()

    def on_epoch_end(self, epoch, logs={}):
        self.times.append(time.time() - self.epoch_time_start)
class PrintValTrainRatioCallback(keras.callbacks.Callback):
    """Prints the validation/training loss ratio after each epoch (overfitting indicator)."""

    def on_epoch_end(self, epoch, logs):
        print("\nval/train: {:.2f}".format(logs["val_loss"] / logs["loss"]))
class LossAndErrorPrintingCallback(keras.callbacks.Callback):
    """Logs per-batch loss during training/evaluation plus an epoch summary line."""

    def on_train_batch_end(self, batch, logs=None):
        print("For batch {}, loss is {:7.2f}.".format(batch, logs["loss"]))

    def on_test_batch_end(self, batch, logs=None):
        print("For batch {}, loss is {:7.2f}.".format(batch, logs["loss"]))

    def on_epoch_end(self, epoch, logs=None):
        # Assumes the model was compiled with the 'mae' metric (see my_metrics
        # in __main__), which Keras reports as "mean_absolute_error".
        print(
            "The average loss for epoch {} is {:7.2f} "
            "and mean absolute error is {:7.2f}.".format(
                epoch, logs["loss"], logs["mean_absolute_error"])
        )
'''
Architectures
- ResNet50,
- InceptionResNet50
- Xception
'''
# ResNet50 model
def create_ResNet50_model(input_shape):
    """Build an untrained ResNet50 regression model.

    A ResNet50 backbone (random weights, no classification top) is followed
    by global average pooling and a single linear unit, so the network
    outputs one continuous value (distance regression).

    Args:
        input_shape: (height, width, channels) of the input images.

    Returns:
        An uncompiled keras Model.
    """
    #ResNet50
    base_model_empty = keras.applications.resnet50.ResNet50(
        include_top=False,
        weights=None,
        input_tensor=None,
        input_shape=input_shape,
        pooling=None
    )
    avg = keras.layers.GlobalAveragePooling2D()(base_model_empty.output)
    output = keras.layers.Dense(1, activation="linear")(avg)
    model_resnet50 = keras.models.Model(inputs=base_model_empty.input, outputs=output)
    return model_resnet50
def create_InceptionV3_model(input_shape, top='max'):
    """Build an untrained InceptionV3 regression model.

    The backbone (random weights, no classification top) is capped by the
    requested top layer and a single linear unit producing one continuous
    output value.

    Args:
        input_shape: (height, width, channels) of the input images.
        top: one of 'flatten', 'avg', 'max' — the layer applied to the
            backbone output before the final Dense(1).

    Returns:
        An uncompiled keras Model.

    Raises:
        ValueError: if `top` is not one of the supported options.
    """
    if top not in ('flatten', 'avg', 'max'):
        raise ValueError('unexpected top layer type: %s' % top)
    # Map to the layer CLASS and instantiate only the one requested; the
    # original dict eagerly constructed all three layer objects per call.
    layer_cls = {
        'flatten': keras.layers.Flatten,
        'avg': keras.layers.GlobalAveragePooling2D,
        'max': keras.layers.GlobalMaxPooling2D
    }[top]
    base = keras.applications.InceptionV3(input_shape=input_shape, include_top=False, weights=None)
    x = layer_cls()(base.output)
    x = keras.layers.Dense(1, activation="linear")(x)
    model = keras.models.Model(inputs=base.inputs, outputs=x)
    return model
def create_Xception_model(input_shape, top='max'):
    """Build an untrained Xception regression model.

    Mirrors create_InceptionV3_model: the Xception backbone (random weights,
    no classification top) is capped by the requested top layer and a single
    linear unit producing one continuous output value.

    Args:
        input_shape: (height, width, channels) of the input images.
        top: one of 'flatten', 'avg', 'max' — the layer applied to the
            backbone output before the final Dense(1).

    Returns:
        An uncompiled keras Model.

    Raises:
        ValueError: if `top` is not one of the supported options.
    """
    if top not in ('flatten', 'avg', 'max'):
        raise ValueError('unexpected top layer type: %s' % top)
    # Instantiate only the requested top layer (the original dict built all
    # three layer objects eagerly on every call).
    layer_cls = {
        'flatten': keras.layers.Flatten,
        'avg': keras.layers.GlobalAveragePooling2D,
        'max': keras.layers.GlobalMaxPooling2D
    }[top]
    base = keras.applications.Xception(input_shape=input_shape, include_top=False, weights=None)
    x = layer_cls()(base.output)
    x = keras.layers.Dense(1, activation="linear")(x)
    model = keras.models.Model(inputs=base.inputs, outputs=x)
    return model
''' MAIN FUNCTION '''
if __name__ == '__main__':
    # Index into the physical-GPU list below; selects which GPU TF may use.
    my_gpu = 0
    ########## Edit paths ##########
    # edit paths as needed
    path2procdata = "/home/jreynolds/21summer/epidural/processed_data/"
    export_path = "/home/jreynolds/21summer/epidural/ct_cv_models/"
    this_model = "A/"
    this_model_version = "2/" # EDIT ME !!!!
    # All pickles/models for this run are written under this directory.
    this_export_path = export_path+this_model+this_model_version
    # for EXCLUSION of zero-distance data == 24000 total images
    imp_images = "export_nz_images_1D_20210607.npy"
    imp_imagefilenames = "export_nz_names_20210607.npy"
    imp_distances = "export_nz_distances_20210607.npy"
    imp_eid = "export_nz_eid_20210607.npy"
    # for INCLUSION of zero-distance data == 28800 total images
    #imp_images = "export_wz_images_1D.npy"
    #imp_imagefilenames = "export_wz_names.npy"
    #imp_distances = "export_wz_distances.npy"
    #imp_eid = "export_wz_eid.npy"
    select_test_eids = [1, 2, 3, 4, 5, 6, 7, 8] # the subjects we choose for testing
    select_val_eids = [1, 2, 3, 4, 5, 6, 7, 8] # all subjects for cross-validation
    n_epochs = 20
    batch_size = 32
    # Metrics reported by model.evaluate(); index 1 is MAPE, index 2 is MAE.
    my_metrics = ['mape', 'mae', 'mse']
    # Candidate CNN backbones compared by cross-validation in every fold.
    architecture_dict = {0: 'ResNet50', 1: 'InceptionV3', 2: 'Xception'}
    ##########################################
    # Expected per-image shape after adding the channel axis below.
    image_shape = (681, 241, 1)
    # input images
    with open(path2procdata+imp_images, 'rb') as f:
        images_1D_list = np.load(f)
    # input image names
    with open(path2procdata+imp_imagefilenames, 'rb') as f:
        image_names_list = np.load(f)
    # input image distances (ground truth)
    with open(path2procdata+imp_distances, 'rb') as f:
        image_dist_list = np.load(f)
    # input EID
    with open(path2procdata+imp_eid, 'rb') as f:
        image_eid_list = np.load(f)
    # get list of unique EIDs
    eid_unique = np.unique(image_eid_list)
    # satisfying tf needs
    # Append a trailing channel axis so each image is (H, W, 1).
    images_1D = images_1D_list[..., np.newaxis]
    # zip imported arrays
    # Each record: (filename, subject EID, ground-truth distance, image array).
    data_mat = list(zip(image_names_list, image_eid_list, image_dist_list, images_1D))
    # Count how many images match the expected shape (sanity check only).
    my_shape_count = 0
    for i in images_1D:
        if i.shape == image_shape:
            my_shape_count+=1
    print("----- About -----")
    print("n_epochs: ", n_epochs)
    print("batch_size: ", batch_size)
    print("Num images: ", my_shape_count)
    print("Image shape: ", image_shape)
    print("GPUs Available: ", len(tf.config.experimental.list_physical_devices('GPU')))
    gpus = tf.config.list_physical_devices('GPU')
    if gpus:
        # Restrict TensorFlow to only use the first GPU
        try:
            tf.config.experimental.set_visible_devices(gpus[my_gpu], 'GPU')
            logical_gpus = tf.config.experimental.list_logical_devices('GPU')
            print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPU")
        except RuntimeError as e:
            # Visible devices must be set before GPUs have been initialized
            print(e)
    print("-----------------")
    # Accumulated wall-clock time across test folds / global model counter.
    total_fold_time=0
    global_count = 1
# Start Cross-Testing
for testeid in select_test_eids:
start_time = time.time()
# Get X_test and y_test
test_img_list = [data_mat[i][3] for i in range(len(data_mat)) if data_mat[i][1] == testeid]
test_dist_list = [data_mat[i][2] for i in range(len(data_mat)) if data_mat[i][1] == testeid]
test_check = [data_mat[i][1] for i in range(len(data_mat)) if data_mat[i][1] == testeid]
# Get training images without validation set for best model in current fold
all_train_imgs = [data_mat[i][3] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] in select_val_eids]
all_train_dists = [data_mat[i][2] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] in select_val_eids]
all_train_check = [data_mat[i][1] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] in select_val_eids]
all_X_train = np.array(all_train_imgs)
all_y_train = np.array(all_train_dists)
unique_all_train_eid = np.unique(all_train_check)
arch_mean_mape_dict = {}
arch_sem_dict = {}
arch_mean_mae_and_sem_dict = {}
arch_mean_mape_and_sem_dict = {}
# Loop through all the architectures for the current test-fold and perform CV with that arch.
print("\n--- start S%d test-fold ---" %testeid)
for arch in architecture_dict:
print("--- enter "+str(architecture_dict[arch])+"...")
cv_results_list = []
cv_mape_list = []
cv_mae_list=[]
cv_sem_list=[]
cv_time_list = []
# Cross-validation with current configuration in he current testing fold.
for valeid in select_val_eids:
if valeid == testeid:
print("##### skipping using valeid(%d)==testeid(%d) #####" %(valeid, testeid))
continue
# validation data
val_img_list = [data_mat[i][3] for i in range(len(data_mat)) if data_mat[i][1] == valeid]
val_dist_list = [data_mat[i][2] for i in range(len(data_mat)) if data_mat[i][1] == valeid]
val_check = [data_mat[i][1] for i in range(len(data_mat)) if data_mat[i][1] == valeid]
# training data
train_img_list = [data_mat[i][3] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] != valeid and data_mat[i][1] in select_val_eids]
train_dist_list = [data_mat[i][2] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] != valeid and data_mat[i][1] in select_val_eids]
train_check = [data_mat[i][1] for i in range(len(data_mat)) if data_mat[i][1] != testeid and data_mat[i][1] != valeid and data_mat[i][1] in select_val_eids]
keras.backend.clear_session()
np.random.seed(0)
tf.random.set_seed(0)
X_train = np.array(train_img_list)
y_train = np.array(train_dist_list)
X_val = np.array(val_img_list)
y_val = np.array(val_dist_list)
X_test = np.array(test_img_list)
y_test = np.array(test_dist_list)
# Confirming integrity of splits
if len(X_train) != len(y_train):
print("ERROR - length mismatch len(X_train)=", len(X_train), ", len(y_train)=", len(y_train))
exit()
if len(X_val) != len(y_val):
print("ERROR - length mismatch len(X_val)=", len(X_val), ", len(y_val)=", len(y_val))
if len(X_test) != len(y_test):
print("ERROR - length mismatch len(X_test)=", len(X_test), ", len(y_test)=", len(y_test))
exit()
unique_train_eid = np.unique(train_check)
unique_val_eid = np.unique(val_check)
unique_test_eid = np.unique(test_check)
for i in range(len(train_check)):
if train_check[i] == testeid or train_check[i] == valeid:
print("ERROR - train set contamination, train_check[", i, "]=", train_check[i], " belongs elsewhere.")
exit()
for i in range(len(val_check)):
if val_check[i] != valeid:
print("ERROR - validation set contamination, val_check[", i, "]=", val_check[i], " belongs elsewhere.")
exit()
for i in range(len(test_check)):
if test_check[i] != testeid:
print("ERROR - validation set contamination, test_check[", i, "]=", test_check[i], " belongs elsewhere.")
exit()
### Get the appropriate model architecture
if arch == 0:
# ResNet50
print("* ResNet50 - S%d - V%d - #%d *" %(testeid, valeid, global_count))
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_ResNet50_model(image_shape)
elif arch == 1:
# InceptionV3
print("* InceptionV3 - S%d - V%d - #%d *" %(testeid, valeid, global_count))
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_InceptionV3_model(image_shape)
elif arch == 2:
# Xception
print("* Xception - S%d - V%d - #%d *" %(testeid, valeid, global_count))
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_Xception_model(image_shape)
print("\t", len(X_train), "train images from subjects", unique_train_eid)
print("\t", len(X_val), "val images from subject", unique_val_eid)
print("\t", len(X_test), "test images from subject", unique_test_eid)
# Compile the model
model.compile(loss=keras.losses.MeanAbsolutePercentageError(), optimizer=opt, metrics=my_metrics)
# Fit the model
history = model.fit(X_train, y_train, validation_data=(X_val, y_val), epochs=n_epochs, batch_size=batch_size, callbacks=[time_cb, early_stopping_cb])
########### Evaluate ###########
times = time_cb.times
print("--- Validate - S%d - V%d - %s ---:" %(testeid, valeid, architecture_dict[arch]))
# store the validation error (mape)
my_model_val = model.evaluate(X_val, y_val)
# append validation score
cv_mape_list.append(my_model_val[1]) # mapes
cv_mae_list.append(my_model_val[2]) # maes
cv_time_list.append(times)
# delete the model for the next round
del model
global_count += 1
#--------- END FOR valeid in select_val_eid_list ---------
my_mean_mape = np.mean(cv_mape_list)
my_mean_mae = np.mean(cv_mae_list)
my_sem = sem(cv_mape_list)
my_sem2 = sem(cv_mae_list)
my_duo = (my_mean_mape, my_sem)
my_duo2 = (my_mean_mae, my_sem2)
arch_mean_mape_dict[arch] = my_mean_mape
arch_sem_dict[arch] = my_sem
arch_mean_mape_and_sem_dict[arch] = my_duo
arch_mean_mae_and_sem_dict[arch] = my_duo2
print("\n--- done with S%d %s ---" %(testeid, str(architecture_dict[arch])))
print("arch_mean_MAPE_and_sem_dict:")
print(arch_mean_mape_and_sem_dict)
print("arch_mean_MAE_and_sem_dict:")
print(arch_mean_mae_and_sem_dict)
#--------- END FOR arch in architecture_dict ---------
print("\n--- done with cross-validation in S%d ---" %testeid)
outfile0 = open(this_export_path+"S"+str(testeid)+"_CV_MeanMAPEandSem_allArchs", "wb")
pickle.dump(arch_mean_mape_and_sem_dict, outfile0)
outfile0.close()
outfile025 = open(this_export_path+"S"+str(testeid)+"_CV_MeanMAEandSem_allArchs", "wb")
pickle.dump(arch_mean_mae_and_sem_dict, outfile025)
outfile025.close()
####### NEW MODEL ######
# Using the config (i.e. resnet50, inceptionV3, xception) with the
# lowest average CV score in this testing fold.
best_arch_mean = 999999
best_arch=''
for i in arch_mean_mape_dict:
if arch_mean_mape_dict[i] < best_arch_mean:
best_arch_mean = arch_mean_mape_dict[i]
best_arch = architecture_dict[i]
print("--- finding best S%d configuration ---" %(testeid))
print("arch_mean_dict:")
print(arch_mean_mape_dict)
print("WINNER -- best_arch=", best_arch, ", best_arch_mean=", best_arch_mean)
print("--- training new %s model with all %d train images for S%d fold ---" %(str(best_arch), int(len(all_X_train)), testeid))
# Train a new model based off the best of the models in the current testing fold.
if best_arch == architecture_dict[0]:
print("ResNet50")
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_ResNet50_model(image_shape)
elif best_arch == architecture_dict[1]:
print("InceptionV3")
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_InceptionV3_model(image_shape)
elif best_arch == architecture_dict[2]:
print("Xception")
early_stopping_cb = keras.callbacks.EarlyStopping(monitor='loss', patience=10, restore_best_weights=True)
time_cb = TimeHistory()
opt = keras.optimizers.SGD(learning_rate=0.01, momentum=0.9, nesterov=True, decay=0.01)
model = create_Xception_model(image_shape)
print("* testeid=", testeid, " *")
print("\t", len(all_X_train), "train images from subjects", unique_all_train_eid)
print("\t", len(X_test), "test images from subject", unique_test_eid)
# Compile the new model
model.compile(loss=keras.losses.MeanAbsolutePercentageError(), optimizer=opt, metrics=my_metrics)
# Fit the new model without validation data
history = model.fit(all_X_train, all_y_train, epochs=n_epochs, batch_size=batch_size, callbacks=[time_cb, early_stopping_cb])
times = time_cb.times
print(f'total time for %d epochs is %.3f secs or %.3f mins' % (n_epochs, sum(times), sum(times)/60.0))
print(f'average time per epoch is %.3f secs or %.3f mins' % (np.mean(times), np.mean(times)/60.0))
print("--- test new model on unseen S%s ---" %str(testeid))
test_eval = model.evaluate(X_test, y_test)
print("\n")
y_preds = model.predict(X_test)
model.save(this_export_path+'model_S%s_%s.h5' %(str(testeid), str(architecture_dict[arch]))) # creates a HDF5 file 'my_model.h5'
my_history = history.history
hist_df = pd.DataFrame(history.history)
print("\nTraining history of new model")
print(hist_df)
print("\nExporting results for fold", testeid)
print("\ttraining history")
outfile1 = open(this_export_path+"S"+str(testeid)+"_trainhist_"+str(best_arch), "wb")
pickle.dump(my_history, outfile1)
outfile1.close()
print("\ty_preds")
outfile2 = open(this_export_path+"S"+str(testeid)+"_y_preds_"+str(best_arch), "wb")
pickle.dump(y_preds, outfile2)
outfile2.close()
print("\ty_test")
outfile3 = open(this_export_path+"S"+str(testeid)+"_y_test_"+str(best_arch), "wb")
pickle.dump(y_test, outfile3)
outfile3.close()
print("\ttimes")
outfile4 = open(this_export_path+"S"+str(testeid)+"_times_"+str(best_arch), "wb")
pickle.dump(times, outfile4)
outfile4.close()
print("\tX_test")
outfile5 = open(this_export_path+"S"+str(testeid)+"_X_test_"+str(best_arch), "wb")
pickle.dump(X_test, outfile5)
outfile5.close()
print("\ttest evaluation")
outfile6 = open(this_export_path+"S"+str(testeid)+"_testEval_"+str(best_arch), "wb")
pickle.dump(test_eval, outfile6)
outfile6.close()
total_fold_time = total_fold_time + (time.time() - start_time)
print("#-#-#-#-#-#-# Done with S%s" %str(testeid))
print(f'#-#-#-#-#-#-# time: %.2f seconds, %.2f mins, %.2f hrs' %(total_fold_time, total_fold_time/60.0, total_fold_time/3600.0))
print("#-#-#-#-#-#-# best_arch: ", best_arch)
print("#-#-#-#-#-#-# test_score: ", test_eval[1], ", ", test_eval[2])
print("#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#")
# END FOR testeid in select_test_eids ------------
|
# Inject this host's public IP as the ICE server address into the apprtc
# ICE config before starting any services.
sed -i "s/ICE_SERVER_ADDR/$PUBLIC_IP/g" /apprtc_configs/ice.js
# sed -i 's/wss:\/\//ws:\/\//g' apprtc/out/app_engine/apprtc.py
# sed -i 's/https:\/\//http:\/\//g' apprtc/out/app_engine/apprtc.py
# Overwrite apprtc constants with the local deployment's configuration.
cp /apprtc_configs/constants.py /apprtc/out/app_engine/constants.py
# Background services: ICE config server, collider signalling server, and
# the App Engine dev server hosting the apprtc room server. Each appends
# stderr to its own log file.
nodejs /apprtc_configs/ice.js 2>> /iceconfig.log &
$GOPATH/bin/collidermain -port=8089 -tls=false --room-server=0.0.0.0 2>> /collider.log &
dev_appserver.py /apprtc/out/app_engine --skip_sdk_update_check --enable_host_checking=False --host=0.0.0.0 2>> /room_server.log &
# Foreground process keeping the container alive: the TURN relay server.
turnserver -v -L 0.0.0.0 -a -f -r apprtc -c /apprtc_configs/turnserver.conf --no-tls --no-dtls
|
#!/bin/bash
# Archived program command-line for experiment
# Copyright 2021 ServiceNow All Rights Reserved
#
# Usage: bash {this_file} [additional_options]
# -x: echo commands; -e: abort on the first failing command.
set -x;
set -e;
# Train/evaluate the bytesteady gene model. The -driver_location path encodes
# the hyperparameters (fnv hashing, n-grams {1,2,4,8,16}, 16M hash buckets,
# dim 16, lr a=0.01, weight decay lambda=0.001); the flags below must match it.
# "$@" forwards any extra caller-supplied options unchanged.
../bytesteady/bytesteady -driver_location models/gene/doublefnvnllgram1a2a4a8a16size16777216dimension16a0.01b0alpha0lambda0.001n0rho0 -driver_epoch_size 100 -data_file data/gene/train.bytesteady -data_format kBytes -model_input_size 16777216,1024 -model_output_size 7 -model_gram '{1,2,4,8,16}' -model_dimension 16 -train_a 0.01 -train_b 0 -train_alpha 0 -train_lambda 0.001 -train_n 0 -train_rho 0 "$@";
|
@bot.message_handler(func=lambda message: message.text == 'Introduction')
def send_intro(message):
    """Handle the 'Introduction' menu choice.

    Sends three messages to the chat: a short description of the bot,
    the numbered list of available introductions, and a prompt asking
    the user to pick one.

    Args:
        message: the incoming telebot Message that triggered the handler.
    """
    # Reply to the user
    bot.send_message(message.chat.id,
                     "I'm a telegram bot to provide simple introduction. I can provide a list of introductions, in different areas, such as programming, finance, medicine, etc. I can also suggest the right introduction for you based on your preference.")
    # Reply with a list of introductions.
    # FIX: this was a bare multi-line "..." literal, which is a SyntaxError in
    # Python; a triple-quoted string preserves the same text and newlines.
    bot.send_message(message.chat.id, """Here is the list of introductions:
1. Introduction to programming
2. Introduction to finance
3. Introduction to medicine
4. Introduction to art
5. Introduction to mathematics
6. Introduction to physics
7. Introduction to chemistry""")
    # Prompt the user for action
    bot.send_message(message.chat.id, "Which one do you want to take a look?")
|
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.raigad.monitoring;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.netflix.raigad.configuration.IConfiguration;
import com.netflix.raigad.scheduler.SimpleTimer;
import com.netflix.raigad.scheduler.Task;
import com.netflix.raigad.scheduler.TaskTimer;
import com.netflix.raigad.utils.ESTransportClient;
import com.netflix.raigad.utils.ElasticsearchProcessMonitor;
import com.netflix.servo.annotations.DataSourceType;
import com.netflix.servo.annotations.Monitor;
import com.netflix.servo.monitor.Monitors;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.monitor.os.OsStats;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Scheduled task that polls OS-level statistics (memory, CPU, swap, uptime)
 * from the local Elasticsearch node via the transport client and exposes the
 * most recent snapshot to Servo through {@link Elasticsearch_OsStatsReporter}.
 */
@Singleton
public class OsStatsMonitor extends Task
{
    private static final Logger logger = LoggerFactory.getLogger(OsStatsMonitor.class);
    public static final String METRIC_NAME = "Elasticsearch_OsStatsMonitor";
    // Holds the latest stats snapshot; Servo polls its @Monitor getters.
    private final Elasticsearch_OsStatsReporter osStatsReporter;

    /**
     * @param config injected Raigad configuration, passed to the base Task.
     */
    @Inject
    public OsStatsMonitor(IConfiguration config)
    {
        super(config);
        osStatsReporter = new Elasticsearch_OsStatsReporter();
        // Register the reporter so Servo discovers the annotated getters.
        Monitors.registerObject(osStatsReporter);
    }

    /**
     * Fetches OS stats from the first local node and publishes them.
     * Silently returns (with an info log) when Elasticsearch is not running
     * or the stats response is incomplete; fetch failures are logged and
     * swallowed so the scheduler keeps running. Note: on any early return or
     * failure a freshly zeroed bean is still published at the end, resetting
     * the reported gauges.
     */
    @Override
    public void execute() throws Exception {
        // If Elasticsearch is started then only start the monitoring
        if (!ElasticsearchProcessMonitor.isElasticsearchRunning()) {
            String exceptionMsg = "Elasticsearch is not yet started, check back again later";
            logger.info(exceptionMsg);
            return;
        }

        OsStatsBean osStatsBean = new OsStatsBean();
        try
        {
            NodesStatsResponse ndsStatsResponse = ESTransportClient.getNodesStatsResponse(config);
            OsStats osStats = null;
            NodeStats ndStat = null;
            // Only the first node of the response is inspected (local node).
            if (ndsStatsResponse.getNodes().length > 0) {
                ndStat = ndsStatsResponse.getAt(0);
            }
            if (ndStat == null) {
                logger.info("NodeStats is null,hence returning (No OsStats).");
                return;
            }
            osStats = ndStat.getOs();
            if (osStats == null) {
                logger.info("OsStats is null,hence returning (No OsStats).");
                return;
            }
            //Mem
            osStatsBean.freeInBytes = osStats.getMem().getFree().getBytes();
            osStatsBean.usedInBytes = osStats.getMem().getUsed().getBytes();
            osStatsBean.actualFreeInBytes = osStats.getMem().getActualFree().getBytes();
            osStatsBean.actualUsedInBytes = osStats.getMem().getActualUsed().getBytes();
            osStatsBean.freePercent = osStats.getMem().getFreePercent();
            osStatsBean.usedPercent = osStats.getMem().getUsedPercent();
            //CPU
            osStatsBean.cpuSys = osStats.getCpu().getSys();
            osStatsBean.cpuUser = osStats.getCpu().getUser();
            osStatsBean.cpuIdle = osStats.getCpu().getIdle();
            osStatsBean.cpuStolen = osStats.getCpu().getStolen();
            //Swap
            osStatsBean.swapFreeInBytes = osStats.getSwap().getFree().getBytes();
            osStatsBean.swapUsedInBytes = osStats.getSwap().getUsed().getBytes();
            //Uptime
            osStatsBean.uptimeInMillis = osStats.getUptime().getMillis();
            //Load Average ??
            //Timestamp
            osStatsBean.osTimestamp = osStats.getTimestamp();
        }
        catch(Exception e)
        {
            logger.warn("failed to load Os stats data", e);
        }

        // Atomically publish the new snapshot for the Servo getters below.
        osStatsReporter.osStatsBean.set(osStatsBean);
    }

    /**
     * Servo-facing view of the latest {@link OsStatsBean}. Each getter is
     * exported as a gauge under the name given in its @Monitor annotation.
     */
    public class Elasticsearch_OsStatsReporter
    {
        private final AtomicReference<OsStatsBean> osStatsBean;

        public Elasticsearch_OsStatsReporter()
        {
            osStatsBean = new AtomicReference<OsStatsBean>(new OsStatsBean());
        }

        @Monitor(name ="free_in_bytes", type=DataSourceType.GAUGE)
        public long getFreeInBytes()
        {
            return osStatsBean.get().freeInBytes;
        }

        @Monitor(name ="used_in_bytes", type=DataSourceType.GAUGE)
        public long getUsedInBytes()
        {
            return osStatsBean.get().usedInBytes;
        }

        @Monitor(name ="actual_free_in_bytes", type=DataSourceType.GAUGE)
        public long getActualFreeInBytes()
        {
            return osStatsBean.get().actualFreeInBytes;
        }

        // NOTE(review): method name has a typo ("geActual..."); Servo keys the
        // metric off the @Monitor name, so behavior is unaffected, but the
        // method could be renamed getActualUsedInBytes for clarity.
        @Monitor(name ="actual_used_in_bytes", type=DataSourceType.GAUGE)
        public long geActualUsedInBytes()
        {
            return osStatsBean.get().actualUsedInBytes;
        }

        @Monitor(name ="free_percent", type=DataSourceType.GAUGE)
        public short getFreePercent()
        {
            return osStatsBean.get().freePercent;
        }

        @Monitor(name ="used_percent", type=DataSourceType.GAUGE)
        public short getUsedPercent()
        {
            return osStatsBean.get().usedPercent;
        }

        @Monitor(name ="cpu_sys", type=DataSourceType.GAUGE)
        public short getCpuSys()
        {
            return osStatsBean.get().cpuSys;
        }

        @Monitor(name ="cpu_user", type=DataSourceType.GAUGE)
        public short getCpuUser()
        {
            return osStatsBean.get().cpuUser;
        }

        @Monitor(name ="cpu_idle", type=DataSourceType.GAUGE)
        public short getCpuIdle()
        {
            return osStatsBean.get().cpuIdle;
        }

        @Monitor(name ="cpu_stolen", type=DataSourceType.GAUGE)
        public short getCpuStolen()
        {
            return osStatsBean.get().cpuStolen;
        }

        @Monitor(name ="swap_used_in_bytes", type=DataSourceType.GAUGE)
        public long getSwapUsedInBytes()
        {
            return osStatsBean.get().swapUsedInBytes;
        }

        @Monitor(name ="swap_free_in_bytes", type=DataSourceType.GAUGE)
        public long getSwapFreeInBytes()
        {
            return osStatsBean.get().swapFreeInBytes;
        }

        // NOTE(review): returns double although the backing field is long
        // (implicit widening); harmless but inconsistent with the other getters.
        @Monitor(name ="uptime_in_millis", type=DataSourceType.GAUGE)
        public double getUptimeInMillis()
        {
            return osStatsBean.get().uptimeInMillis;
        }

        @Monitor(name ="os_timestamp", type=DataSourceType.GAUGE)
        public long getOsTimestamp()
        {
            return osStatsBean.get().osTimestamp;
        }
    }

    // Plain value holder for one OS-stats snapshot; written by execute(),
    // read via the reporter's AtomicReference.
    private static class OsStatsBean
    {
        private long freeInBytes;
        private long usedInBytes;
        private long actualFreeInBytes;
        private long actualUsedInBytes;
        private short freePercent;
        private short usedPercent;
        private short cpuSys;
        private short cpuUser;
        private short cpuIdle;
        private short cpuStolen;
        private long swapUsedInBytes;
        private long swapFreeInBytes;
        private long uptimeInMillis;
        private long osTimestamp;
    }

    /**
     * @return a timer that fires this task every 60 seconds.
     */
    public static TaskTimer getTimer(String name)
    {
        return new SimpleTimer(name, 60 * 1000);
    }

    @Override
    public String getName()
    {
        return METRIC_NAME;
    }
}
|
import matplotlib.pyplot as plt

# Sample data series to visualise.
values = [10, 15, 30, 50, 70, 90, 20, 40, 60]

# Create a new figure, draw the series as a line chart, and display it.
plt.figure()
plt.plot(values)
plt.show()
|
<gh_stars>1-10
import axios from 'axios'
// Action type constants for the reviews slice.
export const GET_REVIEWS = 'GET_REVIEWS'
export const ADD_REVIEW = 'ADD_REVIEW'

// The reviews slice starts out as an empty list.
const initialState = []

// Action creator: replace the whole review list in the store.
export const getReviews = reviews => ({ type: GET_REVIEWS, reviews })

// Action creator: append a single review to the store.
export const addReview = review => ({ type: ADD_REVIEW, review })
// Thunk: POST a new review to the API, then dispatch ADD_REVIEW with the
// review object returned by the server. Failures are logged, not rethrown.
export const postReview = review => async dispatch => {
  try {
    const res = await axios.post('/api/reviews', review)
    dispatch(addReview(res.data))
  } catch (err) {
    console.error(err)
  }
}
// Reducer for the reviews slice: GET_REVIEWS replaces the list,
// ADD_REVIEW appends one review; anything else returns state unchanged.
export const reviewsReducer = (state = initialState, action) => {
  if (action.type === GET_REVIEWS) {
    return [...action.reviews]
  }
  if (action.type === ADD_REVIEW) {
    return [...state, action.review]
  }
  return state
}
|
<filename>runtime/browser/runtime_geolocation_permission_context.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "xwalk/runtime/browser/runtime_geolocation_permission_context.h"
#include "base/bind.h"
#include "base/callback.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/browser/web_contents.h"
#if defined(OS_ANDROID)
#include "xwalk/runtime/browser/android/xwalk_content.h"
#endif
#if defined(OS_TIZEN)
#include "xwalk/application/browser/application_system.h"
#include "xwalk/application/browser/application_service.h"
#include "xwalk/application/browser/application.h"
#include "xwalk/application/common/application_manifest_constants.h"
#include "xwalk/application/common/manifest_handlers/permissions_handler.h"
#include "xwalk/runtime/browser/xwalk_runner.h"
#endif
namespace xwalk {

// Hides the geolocation prompt for |requesting_frame|. Must run on the UI
// thread; currently only implemented for Android, a no-op elsewhere.
void RuntimeGeolocationPermissionContext::
CancelGeolocationPermissionRequestOnUIThread(
    content::WebContents* web_contents,
    const GURL& requesting_frame) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI));

#if defined(OS_ANDROID)
  XWalkContent* xwalk_content =
      XWalkContent::FromWebContents(web_contents);
  if (xwalk_content) {
    xwalk_content->HideGeolocationPrompt(requesting_frame);
  }
#endif
  // TODO(yongsheng): Handle this for other platforms.
}

// Public entry point: may be called from any thread; bounces the cancel
// to the UI thread (Retains |this| via base::Bind on the refcounted object).
void RuntimeGeolocationPermissionContext::CancelGeolocationPermissionRequest(
    content::WebContents* web_contents,
    const GURL& requesting_frame) {
  content::BrowserThread::PostTask(
      content::BrowserThread::UI, FROM_HERE,
      base::Bind(
          &RuntimeGeolocationPermissionContext::
              CancelGeolocationPermissionRequestOnUIThread,
          this,
          web_contents,
          requesting_frame));
}

RuntimeGeolocationPermissionContext::~RuntimeGeolocationPermissionContext() {
}

// Resolves a geolocation permission request on the UI thread.
// Android: shows a prompt and lets the user decide (callback runs later).
// Tizen: grants iff the owning application's manifest declares the
// "geolocation" permission. Other platforms: callback is never invoked
// (see TODO below).
void
RuntimeGeolocationPermissionContext::RequestGeolocationPermissionOnUIThread(
    content::WebContents* web_contents,
    const GURL& requesting_frame,
    base::Callback<void(bool)> result_callback) {
  DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI));

#if defined(OS_ANDROID)
  XWalkContent* xwalk_content =
      XWalkContent::FromWebContents(web_contents);
  if (!xwalk_content) {
    // No XWalkContent for this WebContents: deny immediately.
    result_callback.Run(false);
    return;
  }

  xwalk_content->ShowGeolocationPrompt(requesting_frame, result_callback);
#elif defined(OS_TIZEN)
  int render_view_id = web_contents->GetRenderViewHost()->GetRoutingID();
  bool has_geolocation_permission = false;
  // Look up the application owning this render view and check its
  // manifest-declared API permissions for "geolocation".
  XWalkRunner* runner = XWalkRunner::GetInstance();
  application::ApplicationSystem* app_system = runner->app_system();
  application::ApplicationService* app_service =
      app_system->application_service();
  application::Application* application =
      app_service->GetApplicationByRenderHostID(render_view_id);

  if (application) {
    DCHECK(application->data());
    application::PermissionsInfo* info =
        static_cast<application::PermissionsInfo*>(
            application->data()->GetManifestData(
                application_manifest_keys::kPermissionsKey));
    if (info) {
      const application::PermissionSet& permissions = info->GetAPIPermissions();
      application::PermissionSet::const_iterator it =
          std::find(permissions.begin(), permissions.end(), "geolocation");
      has_geolocation_permission = it != permissions.end();
    }
  }

  result_callback.Run(has_geolocation_permission);
#endif
  // TODO(yongsheng): Handle this for other platforms.
}

// Public entry point: may be called from any thread; bounces the request
// to the UI thread where the platform-specific handling above runs.
void
RuntimeGeolocationPermissionContext::RequestGeolocationPermission(
    content::WebContents* web_contents,
    const GURL& requesting_frame,
    base::Callback<void(bool)> result_callback) {
  content::BrowserThread::PostTask(
      content::BrowserThread::UI, FROM_HERE,
      base::Bind(
          &RuntimeGeolocationPermissionContext::
              RequestGeolocationPermissionOnUIThread,
          this,
          web_contents,
          requesting_frame,
          result_callback));
}

}  // namespace xwalk
|
#!/usr/bin/env bash
# Launch the 01-04-a1 demo: activate its virtualenv, remove any stale
# database from a previous run, then start app_03 in the background and
# server_03 in the foreground. deactivate runs after server_03 exits.
cd /home/pi/toku1/01-04-a1
# Activate the shared project virtualenv (path relative to the project dir).
. ../python3-toku1/bin/activate
# Start from a fresh SQLite database if one was left behind.
if [ -e /home/pi/toku1/01-04-a1/app_03.db ];then
rm /home/pi/toku1/01-04-a1/app_03.db
fi
python app_03.py &
python server_03.py
deactivate
|
/**
 * Returns true when `str` reads the same forwards and backwards.
 * The empty string is considered a palindrome.
 */
function isPalindrome(str) {
  const reversed = str.split('').reverse().join('');
  return str === reversed;
}

console.log(isPalindrome("racecar"));
|
#!/bin/sh
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# -----------------------------------------------------------------------------------------------
# Script to start Cassandra daemon (used by cassandra-bootstrap.sh)
# -----------------------------------------------------------------------------------------------
#profile=/home/cassandra/.bash_profile
# Load environment (AWS credentials, S3 discovery URLs, cluster settings)
# from the root profile, then pull in the shared bootstrap helper functions
# (setupClusterSeeds, terminate, registerNode, ...) for the "cassandra" role.
profile=/root/.bash_profile
. $profile
. /opt/ignite-cassandra-tests/bootstrap/aws/common.sh "cassandra"
# Setups Cassandra seeds for this EC2 node. Looks for the information in S3 about
# already up and running Cassandra cluster nodes
setupCassandraSeeds()
{
    # The first node seeds from itself and wipes the S3 discovery folder;
    # all later nodes collect seeds from the nodes already registered in S3.
    if [ "$FIRST_NODE_LOCK" == "true" ]; then
        echo "[INFO] Setting up Cassandra seeds"

        CLUSTER_SEEDS=$(hostname -f | tr '[:upper:]' '[:lower:]')

        echo "[INFO] Using host address as a seed for the first Cassandra node: $CLUSTER_SEEDS"

        # NOTE(review): ${VAR::-1} (strip trailing char, here the URL's
        # trailing '/') is a bashism; the file's shebang is #!/bin/sh —
        # works only where sh is bash. Confirm target environment.
        aws s3 rm --recursive ${S3_CASSANDRA_NODES_DISCOVERY::-1}
        if [ $? -ne 0 ]; then
            terminate "Failed to clean Cassandra node discovery URL: $S3_CASSANDRA_NODES_DISCOVERY"
        fi
    else
        setupClusterSeeds "cassandra" "true"
        # Convert the space-separated seed list into Cassandra's comma form.
        CLUSTER_SEEDS=$(echo $CLUSTER_SEEDS | sed -r "s/ /,/g")
    fi

    # Render cassandra.yaml from the template with the resolved seed list.
    cat /opt/cassandra/conf/cassandra-template.yaml | sed -r "s/\\\$\{CASSANDRA_SEEDS\}/$CLUSTER_SEEDS/g" > /opt/cassandra/conf/cassandra.yaml
}
# Gracefully starts Cassandra daemon and waits until it joins Cassandra cluster.
# Kills any previously running daemon, wipes logs/data, launches a fresh one in
# the background and bumps START_ATTEMPT for the caller's retry loop.
startCassandra()
{
    echo "[INFO]-------------------------------------------------------------"
    echo "[INFO] Trying attempt $START_ATTEMPT to start Cassandra daemon"
    echo "[INFO]-------------------------------------------------------------"
    echo ""

    # Re-resolve cluster seeds and wait for our turn to join the ring.
    setupCassandraSeeds
    waitToJoinCluster

    # The first node owns the S3 discovery folder: clear stale entries
    # before (re)starting so other nodes don't pick up dead seeds.
    if [ "$FIRST_NODE_LOCK" == "true" ]; then
        aws s3 rm --recursive ${S3_CASSANDRA_NODES_DISCOVERY::-1}
        if [ $? -ne 0 ]; then
            # FIX: the message referenced $S3_IGNITE_NODES_DISCOVERY (copy/paste
            # from the Ignite bootstrap script); report the URL actually used.
            terminate "Failed to clean Cassandra node discovery URL: $S3_CASSANDRA_NODES_DISCOVERY"
        fi
    fi

    # Kill any Cassandra daemon left over from a previous attempt.
    proc=$(ps -ef | grep java | grep "org.apache.cassandra.service.CassandraDaemon")
    proc=($proc)

    if [ -n "${proc[1]}" ]; then
        echo "[INFO] Terminating existing Cassandra process ${proc[1]}"
        kill -9 ${proc[1]}
    fi

    echo "[INFO] Starting Cassandra"

    # Fresh logs and data directories for this attempt.
    rm -Rf /opt/cassandra/logs/* /storage/cassandra/*
    /opt/cassandra/bin/cassandra -R &

    echo "[INFO] Cassandra job id: $!"

    # Give the daemon time to come up before the caller polls nodetool.
    sleep 1m

    START_ATTEMPT=$(( $START_ATTEMPT+1 ))
}
#######################################################################################################
# Main bootstrap flow: register this node, start the Cassandra daemon and
# keep retrying until it joins the cluster or the retry budget is exhausted.
#######################################################################################################

START_ATTEMPT=0

# Cleans all the previous metadata about this EC2 node
unregisterNode

# Tries to get first-node lock
tryToGetFirstNodeLock

echo "[INFO]-----------------------------------------------------------------"

if [ "$FIRST_NODE_LOCK" == "true" ]; then
    echo "[INFO] Starting first Cassandra node"
else
    echo "[INFO] Starting Cassandra node"
fi

echo "[INFO]-----------------------------------------------------------------"
printInstanceInfo
echo "[INFO]-----------------------------------------------------------------"

# Non-first nodes wait until the first node has registered itself; the
# first node instead wipes any stale cluster metadata.
if [ "$FIRST_NODE_LOCK" != "true" ]; then
    waitFirstClusterNodeRegistered "true"
else
    cleanupMetadata
fi

# Start Cassandra daemon
startCassandra

startTime=$(date +%s)

# Trying multiple attempts to start Cassandra daemon
while true; do
    proc=$(ps -ef | grep java | grep "org.apache.cassandra.service.CassandraDaemon")

    # nodetool succeeding means the daemon is up and answering.
    /opt/cassandra/bin/nodetool status &> /dev/null

    if [ $? -eq 0 ]; then
        echo "[INFO]-----------------------------------------------------"
        echo "[INFO] Cassandra daemon successfully started"
        echo "[INFO]-----------------------------------------------------"
        echo $proc
        echo "[INFO]-----------------------------------------------------"

        # Once node joined the cluster we need to remove cluster-join lock
        # to allow other EC2 nodes to acquire it and join cluster sequentially
        removeClusterJoinLock

        break
    fi

    currentTime=$(date +%s)
    duration=$(( $currentTime-$startTime ))
    duration=$(( $duration/60 ))

    # Startup-timeout handling (duration is in minutes).
    if [ $duration -gt $SERVICE_STARTUP_TIME ]; then
        if [ "$FIRST_NODE_LOCK" == "true" ]; then
            # If the first node of Cassandra cluster failed to start Cassandra daemon in SERVICE_STARTUP_TIME min,
            # we will not try any other attempts and just terminate with error. Terminate function itself, will
            # take care about removing all the locks holding by this node.
            terminate "${SERVICE_STARTUP_TIME}min timeout expired, but first Cassandra daemon is still not up and running"
        else
            # If node isn't the first node of Cassandra cluster and it failed to start we need to
            # remove cluster-join lock to allow other EC2 nodes to acquire it
            removeClusterJoinLock

            # If node failed all SERVICE_START_ATTEMPTS attempts to start Cassandra daemon we will not
            # try anymore and terminate with error
            if [ $START_ATTEMPT -gt $SERVICE_START_ATTEMPTS ]; then
                terminate "${SERVICE_START_ATTEMPTS} attempts exceed, but Cassandra daemon is still not up and running"
            fi

            # New attempt to start Cassandra daemon
            startCassandra
        fi

        continue
    fi

    # Checking for the situation when two nodes trying to simultaneously join Cassandra cluster.
    # This actually can happen only in not standard situation, when you are trying to start
    # Cassandra daemon on some EC2 nodes manually and not using bootstrap script.
    concurrencyError=$(cat /opt/cassandra/logs/system.log | grep "java.lang.UnsupportedOperationException: Other bootstrapping/leaving/moving nodes detected, cannot bootstrap while cassandra.consistent.rangemovement is true")

    if [ -n "$concurrencyError" ] && [ "$FIRST_NODE_LOCK" != "true" ]; then
        # Remove cluster-join lock to allow other EC2 nodes to acquire it
        removeClusterJoinLock
        echo "[WARN] Failed to concurrently start Cassandra daemon. Sleeping for extra 30sec"
        sleep 30s

        # New attempt to start Cassandra daemon
        startCassandra

        continue
    fi

    # Handling situation when Cassandra daemon process abnormally terminated
    if [ -z "$proc" ]; then
        # If this is the first node of Cassandra cluster just terminating with error
        if [ "$FIRST_NODE_LOCK" == "true" ]; then
            terminate "Failed to start Cassandra daemon"
        fi

        # Remove cluster-join lock to allow other EC2 nodes to acquire it
        removeClusterJoinLock
        echo "[WARN] Failed to start Cassandra daemon. Sleeping for extra 30sec"
        sleep 30s

        # New attempt to start Cassandra daemon
        startCassandra

        continue
    fi

    echo "[INFO] Waiting for Cassandra daemon to start, time passed ${duration}min"
    sleep 30s
done

# Once Cassandra daemon successfully started we registering new Cassandra node in S3
registerNode

# Terminating script with zero exit code
terminate
|
// Merge Sort in Golang
package main
import (
"fmt"
"math/rand"
"time"
)
// main demonstrates merge sort: it builds a slice of 20 random integers,
// prints the unsorted slice, then prints the result of mergeSort on it.
func main() {
slice := generateSlice(20)
fmt.Println("\n--- Unsorted --- \n\n", slice)
fmt.Println("\n--- Sorted ---\n\n", mergeSort(slice), "\n")
}
// Generates a slice of size, size filled with random numbers
// generateSlice builds a slice of length size filled with pseudo-random
// integers; each value is the difference of two draws from [0, 999), so
// results lie in the open range (-999, 999). The generator is reseeded
// from the wall clock on every call.
func generateSlice(size int) []int {
	rand.Seed(time.Now().UnixNano())
	values := make([]int, size)
	for idx := range values {
		values[idx] = rand.Intn(999) - rand.Intn(999)
	}
	return values
}
// mergeSort returns the elements of items in ascending order using a
// top-down merge sort. The input slice itself is not reordered.
//
// BUG FIX: the original base case tested `num == 1`, so an empty slice
// fell through, split into two empty halves, and recursed forever
// (stack overflow). The base case now covers every slice of length <= 1.
func mergeSort(items []int) []int {
	num := len(items)
	if num <= 1 {
		return items
	}

	// Split into two halves around the midpoint.
	middle := num / 2
	left := make([]int, middle)
	right := make([]int, num-middle)
	copy(left, items[:middle])
	copy(right, items[middle:])

	// Sort each half recursively, then merge the sorted halves.
	return merge(mergeSort(left), mergeSort(right))
}

// merge combines two already-sorted slices into a single sorted slice.
func merge(left, right []int) (result []int) {
	result = make([]int, len(left)+len(right))

	i := 0
	// Repeatedly take the smaller head element until one side runs out.
	for len(left) > 0 && len(right) > 0 {
		if left[0] < right[0] {
			result[i] = left[0]
			left = left[1:]
		} else {
			result[i] = right[0]
			right = right[1:]
		}
		i++
	}
	// Append whatever remains (at most one of these loops does work).
	for j := 0; j < len(left); j++ {
		result[i] = left[j]
		i++
	}
	for j := 0; j < len(right); j++ {
		result[i] = right[j]
		i++
	}
	return
}
|
# Generate audio samples from a trained SampleRNN checkpoint and write
# them out as WAV files under <modeldir>/gen_samples.
from model import SampleRNN, Predictor, Generator
import os, sys, time
import torch
from librosa.output import write_wav

# --- Configuration: edit the model directory and the knobs below ---
modeldir = "colab_results/"   # directory holding saved checkpoints
modelname = "ep1-it625"       # checkpoint file name (epoch/iteration tag)
audio_pref = "r001_{}"        # output file name pattern; {} -> 1-based sample index
save_raw = False              # when True, also dump each sequence as a CSV of floats
n_samples = 1                 # number of independent sequences to generate
sample_length = 800           # length (in audio samples) of each sequence
sample_rate = 16000           # sample rate (Hz) used when writing the WAV files

samples_path = os.path.join(modeldir, "gen_samples")
os.makedirs(samples_path, exist_ok=True)
# sys.stderr.write("available models are: {}".format(listdir(modeldir)))
modelpath = os.path.join(modeldir, modelname)

# Build the SampleRNN architecture. NOTE(review): these hyper-parameters
# must match the ones the checkpoint was trained with — confirm against
# the training run before pointing this at a different checkpoint.
srnn_model1 = SampleRNN(frame_sizes=[4, 16], n_rnn=2, dim=1024, learn_h0=True,
                        q_levels=256, weight_norm=True)
if torch.cuda.is_available():
    srnn_model1 = srnn_model1.cuda()
predictor1 = Predictor(srnn_model1)
if torch.cuda.is_available():
    predictor1 = predictor1.cuda()

# Load the checkpoint weights; map tensors to CPU when no GPU is present.
if torch.cuda.is_available():
    predictor1.load_state_dict(torch.load(modelpath)['model'])
else:
    predictor1.load_state_dict(torch.load(modelpath, map_location='cpu')['model'])
print("model loaded successfully!")

generate = Generator(srnn_model1, True)

# BUG FIX: a redundant second `import time` used to sit here; `time` is
# already imported at the top of the script.
s_time = time.time()
sys.stderr.write("Generating {} sequences, each of length {}."\
        .format(n_samples, sample_length))
samples = generate(n_samples, sample_length).cpu().float().numpy()
sys.stderr.write("Total time taken = {}".format(time.time() - s_time))

# Write each generated sequence out as a normalized WAV file.
for i in range(n_samples):
    if save_raw:
        # BUG FIX: the original wrote the ENTIRE batch (`samples.tofile`)
        # into every per-index debug file; only row i belongs in file i.
        samples[i].tofile('debug_seq_{}.csv'.format(i),
                          sep=',', format='%10.5f')
    write_wav(
        os.path.join(
            samples_path, audio_pref.format(i + 1)
        ),
        samples[i, :], sr=sample_rate, norm=True
    )
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.medkit = void 0;
var medkit = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M472.2,144H352v-30.7C351,85.1,330.3,64,300.8,64H256h-44.8c-29.4,0-50.2,21.1-51.2,49.3V144H39.8c-4.4,0-7.8,3.6-7.8,8\r\n\t\tv288c0,4.4,3.3,8,7.8,8h432.5c4.4,0,7.8-3.6,7.8-8V152C480,147.6,476.7,144,472.2,144z M192,117.2c0-0.4,0-0.7,0-1s0-0.6,0-1\r\n\t\tc0-9.7,8.6-19.3,18.8-19.3H256h45.2c10.1,0,18.8,9.5,18.8,19.3c0,0.3,0,0.6,0,1s0,0.6,0,1V144H192V117.2z M352,320h-64v64h-64v-64\r\n\t\th-64v-64h64v-64h64v64h64V320z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M472.2,144H352v-30.7C351,85.1,330.3,64,300.8,64H256h-44.8c-29.4,0-50.2,21.1-51.2,49.3V144H39.8c-4.4,0-7.8,3.6-7.8,8\r\n\t\tv288c0,4.4,3.3,8,7.8,8h432.5c4.4,0,7.8-3.6,7.8-8V152C480,147.6,476.7,144,472.2,144z M192,117.2c0-0.4,0-0.7,0-1s0-0.6,0-1\r\n\t\tc0-9.7,8.6-19.3,18.8-19.3H256h45.2c10.1,0,18.8,9.5,18.8,19.3c0,0.3,0,0.6,0,1s0,0.6,0,1V144H192V117.2z M352,320h-64v64h-64v-64\r\n\t\th-64v-64h64v-64h64v64h64V320z"
},
"children": []
}]
}]
}]
};
exports.medkit = medkit;
|
#!/bin/bash
# Entrypoint for the Cadence server container: render the concrete YAML
# config from its template, then replace this shell with cadence-server.
set -ex

# dockerize substitutes environment variables into the template, writing
# the config file that cadence-server reads.
dockerize -template /etc/cadence/config/config_template.yaml:/etc/cadence/config/docker.yaml

# exec so cadence-server becomes PID 1 and receives container signals.
# FIX: quote the expansions so paths/values containing whitespace do not
# undergo word splitting or globbing.
exec cadence-server --root "$CADENCE_HOME" --env docker start --services="$SERVICES"
|
import {Component, Input, AfterViewInit, EventEmitter, Output} from '@angular/core';
import {AccessGroup} from '../../index';
import {BsModalService} from 'ngx-bootstrap';
import {BsModalRef } from 'ngx-bootstrap/modal/bs-modal-ref.service';
import {ApiService} from '../../services/api/api.service';
@Component({
  selector: 'app-group-edit',
  templateUrl: './group-edit.component.html',
  providers: [BsModalService]
})
export class GroupEditComponent {
  /** Group being edited; starts out as an empty draft. */
  group: AccessGroup = new AccessGroup({});
  /** True until an existing group id is supplied via the input. */
  isNew = true;

  /** Emits the saved group's id once the server accepts the save. */
  @Output() onSubmit: EventEmitter<string> = new EventEmitter<string>();

  /** Receiving an id switches to edit mode and loads that group. */
  @Input() set groupId(groupId: string) {
    this.isNew = false;
    this.apiService
      .getGroup(groupId)
      .subscribe((loaded: AccessGroup) => (this.group = loaded));
  }

  constructor(public bsModalRef: BsModalRef, private apiService: ApiService) { }

  /** Persist the group (create or update), notify listeners, close the modal. */
  ok() {
    this.apiService.saveGroup(this.group, this.isNew).subscribe((saved: AccessGroup) => {
      this.onSubmit.emit(saved.id);
      this.bsModalRef.hide();
    });
  }
}
|
#!/usr/bin/env bash
# @(#) Install UNetbootin
# Created: 2019/11/28 16:01:00.
# Last Change: 2019/11/28 16:06:31.
set -ueo pipefail
export LC_ALL=C

# Load shared shell helpers (has, gm_echo, ym_echo, result_echo, ...).
# FIX: quote "${f}" so paths containing whitespace still source correctly.
for f in ~/dotfiles/function/*.sh
do
    source "${f}"
done

readonly PROCESS="install UNetbootin"

gm_echo ">> Check ${PROCESS} or not"
if ! has "unetbootin"; then
    ym_echo ">> Update software information and ${PROCESS}"
    if has "apt"; then
        sudo add-apt-repository ppa:gezakovacs/ppa
        sudo apt update
        # BUG FIX: the package name was misspelled "unebootin", so this
        # install always failed; the correct package is "unetbootin".
        sudo apt install unetbootin
    elif has "yum"; then
        bash ~/dotfiles/etc/init/linux/settings/update_yum.sh
    fi
    result_echo $? "${PROCESS}"
else
    gm_echo ">> Already ${PROCESS}"
fi
|
#include <stdio.h>
/*
 * Print the first N lowercase letters of the alphabet, each followed
 * by a space (for N == 3 this prints "a b c ").
 */
int main()
{
    int count = 3;
    char letter = 'a';

    int printed = 0;
    while (printed < count) {
        printf("%c ", letter);
        ++letter;
        ++printed;
    }
    return 0;
}
/* Output: a b c */
|
import openpyxl
def createSpreadsheet(name):
    """Create ``<name>.xlsx`` with an 'Expenses' sheet and a bar chart of it.

    The 'Expenses' sheet gets an Item/Amount header row; the 'Chart'
    sheet gets a bar chart plotting the Amount column (rows 2-4)
    against the Item column.

    :param name: base file name (without extension) for the workbook.
    """
    # Create an Excel workbook
    workbook = openpyxl.Workbook()

    # Set up the first sheet with the list of expenses
    expenseSheet = workbook.active
    expenseSheet.title = 'Expenses'
    expenseSheet.cell(row=1, column=1).value = 'Item'
    expenseSheet.cell(row=1, column=2).value = 'Amount'

    # Set up the second sheet with the chart of expenses.
    # BUG FIX: worksheets have no add_data() method, so the original code
    # raised AttributeError, and the chart was anchored with no data. In
    # openpyxl, data is attached to the chart object via Reference, and
    # the configured chart is then anchored on the sheet with add_chart.
    chartSheet = workbook.create_sheet('Chart')
    chart = openpyxl.chart.BarChart()
    data = openpyxl.chart.Reference(expenseSheet, min_col=2, min_row=1,
                                    max_col=2, max_row=4)
    categories = openpyxl.chart.Reference(expenseSheet, min_col=1,
                                          min_row=2, max_row=4)
    chart.add_data(data, titles_from_data=True)
    chart.set_categories(categories)
    chartSheet.add_chart(chart, 'C1')

    # Save the workbook
    workbook.save(name + '.xlsx')
|
#include "AssetLoader.hpp"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.