text stringlengths 1 1.05M |
|---|
#!/bin/bash
set -e
./configure --prefix=/usr --host=$LFS_TGT
make && make DESTDIR=$LFS install
|
<filename>2-resources/3-misc/Beginner/mergeArrays/test.js
const mergeArrays = require('./index-START');
test('mergeArrays is a function', () => {
expect(typeof mergeArrays).toEqual('function');
});
test('Combines 5 arrays of numbers without dubplicates', () => {
expect(mergeArrays([1,2],[2,3],[3,4],[4,5])).toEqual([1,2,3,4,5]);
});
test('Combines 3 arrays of strings without dubplicates', () => {
expect(mergeArrays(['a','b','z'],['m','n','a'],['z','y'])).toEqual(['a','b', 'z', 'm', 'n', 'y']);
});
|
$ v run directories.v
Listing subdir/parent
child true
file2 false
file3 false
Listing subdir/parent/child
file4 false
Visiting subdir
subdir true
subdir/file1 false
subdir/parent true
subdir/parent/child true
subdir/parent/child/file4 false
subdir/parent/file2 false
subdir/parent/file3 false
|
export default function () {
return `
<svg class="icon-contract svg-icon" fill="none" height="13" viewBox="0 0 17 13" width="17">
<path d="M17 3.33337V12C17 12.5523 16.5523 13 16 13H1C0.447715 13 0 12.5523 0 12V2.33337V1C0 0.447715 0.447715 0 1 0H6.54006C6.83199 0 7.10934 0.127565 7.29932 0.349215L9 2.33337H16C16.5523 2.33337 17 2.78109 17 3.33337Z"
fill="#58595B"/>
</svg>
`;
}; |
<filename>WebContent/resources/js/script.js
Event.waitUntilLoaded(function() {
$('time').innerHTML = new Date().toTimeString();
}); |
<gh_stars>0
require 'test_helper'
class DLocalTest < Test::Unit::TestCase
include CommStub
def setup
@gateway = DLocalGateway.new(login: 'login', trans_key: 'password', secret_key: '<KEY>')
@credit_card = credit_card
@wallet_token = wallet_token
@psp_tokenized_card = psp_tokenized_card('CV-993903e4-0b33-48fd-8d9b-99fd6c3f0d1a')
@amount = 100
@options = {
order_id: '1',
billing_address: address
}
end
def test_successful_purchase
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
assert_equal 'D-15104-05b0ec0c-5a1e-470a-b342-eb5f20758ef7', response.authorization
assert response.test?
end
def test_failed_purchase
@gateway.expects(:ssl_post).returns(failed_purchase_response)
response = @gateway.purchase(@amount, @credit_card, @options)
assert_failure response
assert_equal '300', response.error_code
end
def test_successful_offsite_payment_initiation
@gateway.expects(:ssl_post).returns(successful_offsite_payment_response)
response = @gateway.initiate(@amount, @wallet_token, @options)
assert_success response
assert_equal 'D-15104-c3027e67-21f8-4308-8c94-06c44ffcea67', response.authorization
assert_match 'The payment is pending', response.message
assert response.test?
end
def test_failed_offsite_payment_initiation
@gateway.expects(:ssl_post).returns(failed_offsite_payment_response)
response = @gateway.initiate(@amount, @wallet_token, @options)
assert_failure response
assert_match 'Invalid request', response.message
assert response.test?
end
def test_successful_card_save
@gateway.expects(:ssl_post).times(2).returns(successful_authorize_response, successful_void_response)
response = @gateway.verify(@credit_card, @options)
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
assert_equal 'CV-ecd897ac-5361-45a1-a407-aaab044ce87e', response.primary_response.params['card']['card_id']
assert response.test?
end
def test_failed_verify_during_card_save
@gateway.expects(:ssl_post).returns(failed_authorize_response)
response = @gateway.verify(@credit_card, @options)
assert_failure response
assert_equal '309', response.error_code
end
def test_failed_void_during_card_save_and_verification
@gateway.expects(:ssl_request).times(2).returns(successful_authorize_response, failed_void_response)
response = @gateway.verify(@credit_card, @options)
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
assert_equal 'CV-ecd897ac-5361-45a1-a407-aaab044ce87e', response.primary_response.params['card']['card_id']
assert response.test?
end
def test_successful_purchase_using_token
@gateway.expects(:ssl_post).returns(successful_purchase_response)
response = @gateway.purchase(@amount, @psp_tokenized_card, @options)
assert_success response
assert_equal 'D-15104-05b0ec0c-5a1e-470a-b342-eb5f20758ef7', response.authorization
assert response.test?
end
def test_failed_purchase_using_token
@gateway.expects(:ssl_post).returns(failed_purchase_response)
response = @gateway.purchase(@amount, @psp_tokenized_card, @options)
assert_failure response
assert_equal '300', response.error_code
end
def test_purchase_with_installments
installments = '6'
installments_id = 'INS54434'
stub_comms do
@gateway.purchase(@amount, @credit_card, @options.merge(installments: installments, installments_id: installments_id))
end.check_request do |_endpoint, data, _headers|
assert_equal installments, JSON.parse(data)['card']['installments']
assert_equal installments_id, JSON.parse(data)['card']['installments_id']
end.respond_with(successful_purchase_response)
end
def test_successful_authorize
@gateway.expects(:ssl_post).returns(successful_authorize_response)
response = @gateway.authorize(@amount, @credit_card, @options)
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
end
def test_successful_authorize_without_address
@gateway.expects(:ssl_post).returns(successful_authorize_response)
response = @gateway.authorize(@amount, @credit_card, @options.delete(:billing_address))
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
end
def test_passing_billing_address
stub_comms(@gateway, :ssl_request) do
@gateway.authorize(@amount, @credit_card, @options)
end.check_request do |_method, _endpoint, data, _headers|
assert_match(/"state\":\"ON\"/, data)
assert_match(/"city\":\"Ottawa\"/, data)
assert_match(/"zip_code\":\"K1C2N6\"/, data)
assert_match(/"street\":\"My Street\"/, data)
assert_match(/"number\":\"456\"/, data)
end.respond_with(successful_authorize_response)
end
def test_passing_incomplete_billing_address
stub_comms(@gateway, :ssl_request) do
@gateway.authorize(@amount, @credit_card, @options.merge(billing_address: address(address1: 'Just a Street')))
end.check_request do |_method, _endpoint, data, _headers|
assert_match(/"state\":\"ON\"/, data)
assert_match(/"city\":\"Ottawa\"/, data)
assert_match(/"zip_code\":\"K1C2N6\"/, data)
assert_match(/"street\":\"Just a Street\"/, data)
end.respond_with(successful_authorize_response)
end
def test_passing_nil_address_1
stub_comms(@gateway, :ssl_request) do
@gateway.authorize(@amount, @credit_card, @options.merge(billing_address: address(address1: nil)))
end.check_request do |_method, _endpoint, data, _headers|
refute_match(/"street\"/, data)
end.respond_with(successful_authorize_response)
end
def test_passing_country_as_string
stub_comms(@gateway, :ssl_request) do
@gateway.authorize(@amount, @credit_card, @options)
end.check_request do |_method, _endpoint, data, _headers|
assert_match(/"country\":\"CA\"/, data)
end.respond_with(successful_authorize_response)
end
def test_invalid_country
stub_comms(@gateway, :ssl_request) do
@gateway.authorize(@amount, @credit_card, @options.merge(billing_address: address(country: 'INVALID')))
end.check_request do |_method, _endpoint, data, _headers|
assert_match(/\"country\":null/, data)
end.respond_with(successful_authorize_response)
end
def test_failed_authorize
@gateway.expects(:ssl_post).returns(failed_authorize_response)
response = @gateway.authorize(@amount, @credit_card, @options)
assert_failure response
assert_equal '309', response.error_code
end
def test_successful_capture
@gateway.expects(:ssl_post).returns(successful_capture_response)
response = @gateway.capture(@amount, 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_success response
assert_equal 'D-15104-5a914b68-afb8-44f8-a849-8cf09ab6c246', response.authorization
end
def test_failed_capture
@gateway.expects(:ssl_post).returns(failed_capture_response)
response = @gateway.capture(@amount, 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_failure response
assert_equal '4000', response.error_code
end
def test_successful_refund
@gateway.expects(:ssl_post).returns(successful_refund_response)
response = @gateway.refund(@amount, 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_success response
assert_equal 'REF-15104-a9cc29e5-1895-4cec-94bd-aa16c3b92570', response.authorization
end
def test_pending_refund
@gateway.expects(:ssl_post).returns(pending_refund_response)
response = @gateway.refund(@amount, 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_success response
assert_equal 'REF-15104-a9cc29e5-1895-4cec-94bd-aa16c3b92570', response.authorization
end
def test_failed_refund
@gateway.expects(:ssl_post).returns(failed_refund_response)
response = @gateway.refund(@amount, 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_failure response
assert_equal '5007', response.error_code
end
def test_successful_void
@gateway.expects(:ssl_post).returns(successful_void_response)
response = @gateway.void('D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_success response
assert_equal 'D-15104-c147279d-14ab-4537-8ba6-e3e1cde0f8d2', response.authorization
end
def test_failed_void
@gateway.expects(:ssl_post).returns(failed_void_response)
response = @gateway.void('D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', @options)
assert_failure response
assert_equal '5002', response.error_code
end
def test_successful_verify_credentials
@gateway.expects(:ssl_get).returns(successful_verify_credentials_response)
response = @gateway.verify_credentials()
assert_success response
end
def test_failed_verify_credentials
@gateway.expects(:ssl_get).returns(failed_verify_credentials_response)
response = @gateway.verify_credentials()
assert_failure response
assert_equal '3001', response.error_code
end
def test_successful_verify
@gateway.expects(:ssl_request).times(2).returns(successful_authorize_response, successful_void_response)
response = @gateway.verify(@credit_card, @options)
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
end
def test_successful_verify_with_failed_void
@gateway.expects(:ssl_request).times(2).returns(successful_authorize_response, failed_void_response)
response = @gateway.verify(@credit_card, @options)
assert_success response
assert_equal 'D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3', response.authorization
end
def test_failed_verify
@gateway.expects(:ssl_post).returns(failed_authorize_response)
response = @gateway.verify(@credit_card, @options)
assert_failure response
assert_equal '309', response.error_code
end
def test_scrub
assert @gateway.supports_scrubbing?
assert_equal @gateway.scrub(pre_scrubbed), post_scrubbed
end
private
def pre_scrubbed
%q(
<- "POST /secure_payments/ HTTP/1.1\r\nContent-Type: application/json\r\nX-Date: 2018-12-04T18:24:21Z\r\nX-Login: aeaf9bbfa1\r\nX-Trans-Key: 9de3769b7e\r\nAuthorization: V2-HMAC-SHA256, Signature: d58d0e87a59af50ff974dfeea176c067354682aa74a8ac115912576d4214a776\r\nConnection: close\r\nAccept-Encoding: gzip;q=1.0,deflate;q=0.6,identity;q=0.3\r\nAccept: */*\r\nUser-Agent: Ruby\r\nHost: sandbox.dlocal.com\r\nContent-Length: 441\r\n\r\n"
<- "{\"amount\":\"1.00\",\"currency\":\"BRL\",\"payment_method_id\":\"CARD\",\"payment_method_type\":\"CARD\",\"payment_method_flow\":\"DIRECT\",\"country\":\"BR\",\"payer\":{\"name\":\"<NAME>\",\"phone\":\"(555)555-5555\",\"document\":\"42243309114\",\"address\":null},\"card\":{\"holder_name\":\"<NAME>\",\"expiration_month\":9,\"expiration_year\":2019,\"number\":\"4111111111111111\",\"cvv\":\"123\",\"capture\":true},\"order_id\":\"62595c5db10fdf7b5d5bb3a16d130992\",\"description\":\"200\"}"
-> "HTTP/1.1 200 OK\r\n"
-> "Server: Reblaze Secure Web Gateway\r\n"
-> "Date: Tue, 04 Dec 2018 18:24:22 GMT\r\n"
-> "Content-Type: application/json;charset=utf-8\r\n"
-> "Content-Length: 565\r\n"
-> "Strict-Transport-Security: max-age=31536000; includeSubDomains\r\n"
-> "Via: 1.1 google\r\n"
-> "Alt-Svc: clear\r\n"
-> "Connection: close\r\n"
-> "\r\n"
reading 565 bytes...
-> "{\"id\":\"D-15104-9f5246d5-34e2-4f63-9d29-380ab1567ec9\",\"amount\":1.00,\"currency\":\"BRL\",\"payment_method_id\":\"CARD\",\"payment_method_type\":\"CARD\",\"payment_method_flow\":\"DIRECT\",\"country\":\"BR\",\"card\":{\"holder_name\":\"<NAME>\",\"expiration_month\":9,\"expiration_year\":2019,\"brand\":\"VI\",\"last4\":\"1111\",\"card_id\":\"CV-434cb5d1-aece-4878-8ce2-24f887fc7ff5\"},\"created_date\":\"2018-12-04T18:24:21.000+0000\",\"approved_date\":\"2018-12-04T18:24:22.000+0000\",\"status\":\"PAID\",\"status_detail\":\"The payment was paid\",\"status_code\":\"200\",\"order_id\":\"62595c5db10fdf7b5d5bb3a16d130992\"}"
)
end
def post_scrubbed
%q(
<- "POST /secure_payments/ HTTP/1.1\r\nContent-Type: application/json\r\nX-Date: 2018-12-04T18:24:21Z\r\nX-Login: aeaf9bbfa1\r\nX-Trans-Key: [FILTERED]\r\nAuthorization: V2-HMAC-SHA256, Signature: d58d0e87a59af50ff974dfeea176c067354682aa74a8ac115912576d4214a776\r\nConnection: close\r\nAccept-Encoding: gzip;q=1.0,deflate;q=0.6,identity;q=0.3\r\nAccept: */*\r\nUser-Agent: Ruby\r\nHost: sandbox.dlocal.com\r\nContent-Length: 441\r\n\r\n"
<- "{\"amount\":\"1.00\",\"currency\":\"BRL\",\"payment_method_id\":\"CARD\",\"payment_method_type\":\"CARD\",\"payment_method_flow\":\"DIRECT\",\"country\":\"BR\",\"payer\":{\"name\":\"<NAME>\",\"phone\":\"(555)555-5555\",\"document\":\"42243309114\",\"address\":null},\"card\":{\"holder_name\":\"<NAME>\",\"expiration_month\":9,\"expiration_year\":2019,\"number\":\"[FILTERED]\",\"cvv\":\"[FILTERED]\",\"capture\":true},\"order_id\":\"62595c5db10fdf7b5d5bb3a16d130992\",\"description\":\"200\"}"
-> "HTTP/1.1 200 OK\r\n"
-> "Server: Reblaze Secure Web Gateway\r\n"
-> "Date: Tue, 04 Dec 2018 18:24:22 GMT\r\n"
-> "Content-Type: application/json;charset=utf-8\r\n"
-> "Content-Length: 565\r\n"
-> "Strict-Transport-Security: max-age=31536000; includeSubDomains\r\n"
-> "Via: 1.1 google\r\n"
-> "Alt-Svc: clear\r\n"
-> "Connection: close\r\n"
-> "\r\n"
reading 565 bytes...
-> "{\"id\":\"D-15104-9f5246d5-34e2-4f63-9d29-380ab1567ec9\",\"amount\":1.00,\"currency\":\"BRL\",\"payment_method_id\":\"CARD\",\"payment_method_type\":\"CARD\",\"payment_method_flow\":\"DIRECT\",\"country\":\"BR\",\"card\":{\"holder_name\":\"<NAME>\",\"expiration_month\":9,\"expiration_year\":2019,\"brand\":\"VI\",\"last4\":\"1111\",\"card_id\":\"CV-434cb5d1-aece-4878-8ce2-24f887fc7ff5\"},\"created_date\":\"2018-12-04T18:24:21.000+0000\",\"approved_date\":\"2018-12-04T18:24:22.000+0000\",\"status\":\"PAID\",\"status_detail\":\"The payment was paid\",\"status_code\":\"200\",\"order_id\":\"62595c5db10fdf7b5d5bb3a16d130992\"}"
)
end
def successful_purchase_response
'{"id":"D-15104-05b0ec0c-5a1e-470a-b342-eb5f20758ef7","amount":1.00,"currency":"BRL","payment_method_id":"CARD","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","card":{"holder_name":"<NAME>","expiration_month":9,"expiration_year":2019,"brand":"VI","last4":"1111","card_id":"CV-993903e4-0b33-48fd-8d9b-99fd6c3f0d1a"},"created_date":"2018-12-06T20:20:41.000+0000","approved_date":"2018-12-06T20:20:42.000+0000","status":"PAID","status_detail":"The payment was paid","status_code":"200","order_id":"15940ef43d39331bc64f31341f8ccd93"}'
end
def successful_offsite_payment_response
'{"id":"D-15104-c3027e67-21f8-4308-8c94-06c44ffcea67","amount":10.0,"currency":"INR","payment_method_id":"PW","payment_method_type":"BANK_TRANSFER","payment_method_flow":"REDIRECT","country":"IN","created_date":"2021-08-19T06:42:57.000+0000","status":"PENDING","status_detail":"The payment is pending.","status_code":"100","order_id":"758c4ddf04ab6db119ec93aee2b7f64c","description":"","notification_url":"https://harish.local.inai-dev.com/notify","redirect_url":"https://sandbox.dlocal.com/collect/pay/pay/M-898eae4f-4e04-496e-ac4e-0dfc298cfae5?xtid=CATH-ST-1629355377-1016569328"}'
end
def failed_offsite_payment_response
'{"code":5001,"message":"Invalid request"}'
end
def successful_purchase_with_installments_response
'{"id":"D-4-e2227981-8ec8-48fd-8e9a-19fedb08d73a","amount":1000,"currency":"BRL","payment_method_id":"CARD","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","card":{"holder_name":"<NAME>","expiration_month":10,"expiration_year":2040,"brand":"VI","last4":"1111"},"created_date":"2019-02-06T21:04:43.000+0000","approved_date":"2019-02-06T21:04:44.000+0000","status":"PAID","status_detail":"The payment was paid.","status_code":"200","order_id":"657434343","notification_url":"http://merchant.com/notifications"}'
end
def failed_purchase_response
'{"id":"D-15104-c3027e67-21f8-4308-8c94-06c44ffcea67","amount":1.00,"currency":"BRL","payment_method_id":"CARD","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","card":{"holder_name":"<NAME>","expiration_month":9,"expiration_year":2019,"brand":"VI","last4":"1111","card_id":"CV-529b0bb1-8b8a-42f4-b5e4-d358ffb2c978"},"created_date":"2018-12-06T20:22:40.000+0000","status":"REJECTED","status_detail":"The payment was rejected.","status_code":"300","order_id":"7aa5cd3200f287fbac51dcee32184260"}'
end
def successful_authorize_response
'{"id":"D-15104-be03e883-3e6b-497d-840e-54c8b6209bc3","amount":1.00,"currency":"BRL","payment_method_id":"CARD","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","card":{"holder_name":"<NAME>","expiration_month":9,"expiration_year":2019,"brand":"VI","last4":"1111","card_id":"CV-ecd897ac-5361-45a1-a407-aaab044ce87e"},"created_date":"2018-12-06T20:24:46.000+0000","approved_date":"2018-12-06T20:24:46.000+0000","status":"AUTHORIZED","status_detail":"The payment was authorized","status_code":"600","order_id":"5694b51b79df484578158d7790b4aacf"}'
end
def failed_authorize_response
'{"id":"D-15104-e6ed3df3-1380-46c6-92d4-29f0f567f799","amount":1.00,"currency":"BRL","payment_method_id":"CARD","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","card":{"holder_name":"<NAME>","expiration_month":9,"expiration_year":2019,"brand":"VI","last4":"1111","card_id":"CV-a6326a1d-b706-4e89-9dff-091d73d85b26"},"created_date":"2018-12-06T20:26:57.000+0000","status":"REJECTED","status_detail":"Card expired.","status_code":"309","order_id":"8ecd3101ba7a9a2d6ccb6465d33ff10d"}'
end
def successful_capture_response
'{"id":"D-15104-5a914b68-afb8-44f8-a849-8cf09ab6c246","amount":1.00,"currency":"BRL","payment_method_id":"VI","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","created_date":"2018-12-06T20:26:17.000+0000","approved_date":"2018-12-06T20:26:18.000+0000","status":"PAID","status_detail":"The payment was paid","status_code":"200","order_id":"f8276e468120faf3e7252e33ac5f9a73"}'
end
def failed_capture_response
'{"code":4000,"message":"Payment not found"}'
end
def successful_refund_response
'{"id":"REF-15104-a9cc29e5-1895-4cec-94bd-aa16c3b92570","payment_id":"D-15104-f9e16b85-5fc8-40f0-a4d8-4e73a892594f","status":"SUCCESS","currency":"BRL","created_date":"2018-12-06T20:28:37.000+0000","amount":1.00,"status_code":200,"status_detail":"The refund was paid","notification_url":"http://example.com","amount_refunded":1.00,"id_payment":"D-15104-f9e16b85-5fc8-40f0-a4d8-4e73a892594f"}'
end
# I can't invoke a pending response and there is no example in docs, so this response is speculative
def pending_refund_response
'{"id":"REF-15104-a9cc29e5-1895-4cec-94bd-aa16c3b92570","payment_id":"D-15104-f9e16b85-5fc8-40f0-a4d8-4e73a892594f","status":"PENDING","currency":"BRL","created_date":"2018-12-06T20:28:37.000+0000","amount":1.00,"status_code":100,"status_detail":"The refund is pending","notification_url":"http://example.com","amount_refunded":1.00,"id_payment":"D-15104-f9e16b85-5fc8-40f0-a4d8-4e73a892594f"}'
end
def failed_refund_response
'{"code":5007,"message":"Amount exceeded"}'
end
def successful_void_response
'{"id":"D-15104-c147279d-14ab-4537-8ba6-e3e1cde0f8d2","amount":1.00,"currency":"BRL","payment_method_id":"VI","payment_method_type":"CARD","payment_method_flow":"DIRECT","country":"BR","created_date":"2018-12-06T20:38:01.000+0000","approved_date":"2018-12-06T20:38:01.000+0000","status":"CANCELLED","status_detail":"The payment was cancelled","status_code":"400","order_id":"46d8978863be935d892cfa3e992f65f3"}'
end
def failed_void_response
'{"code":5002,"message":"Invalid transaction status"}'
end
def successful_verify_credentials_response
'[{"id": "OX", "type": "TICKET", "name": "Oxxo", "logo": "https://pay.dlocal.com/views/2.0/images/payments/OX.png", "allowed_flows": ["REDIRECT"]}, {"id": "VI", "type": "CARD", "name": "Visa", "logo": "https://pay.dlocal.com/views/2.0/images/payments/VI.png", "allowed_flows": ["DIRECT", "REDIRECT"]}]'
end
def failed_verify_credentials_response
'{"code": "3001", "message": "Invalid credentials"}'
end
end
|
<reponame>mikitamironenka/job4j<filename>chapter_006_IO/src/test/java/ru/job4j/io/testtask/MainSearchTest.java<gh_stars>0
package ru.job4j.io.testtask;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.util.List;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
public class MainSearchTest {
private static String source;
private static String out;
private static File sourceFile;
private static File outPut;
private static String sourcePath;
private static String outPutPath;
@BeforeClass
public static void init() {
source = "src/test/java/ru/job4j/io/tmpdir";
out = "src/test/java/ru/job4j/io/tmpdir/log.txt";
sourceFile = new File(source);
outPut = new File(out);
sourcePath = sourceFile.getAbsolutePath();
outPutPath = outPut.getAbsolutePath();
}
@Test
public void whenSearchWithFileName() {
String[] arguments = new String[]{"-d", sourcePath, "-n", "file.txt", "-f", "-o", outPutPath};
List<String> result = new MainSearch(new Args(arguments)).init();
assertThat(result.get(0), is("file.txt"));
}
@Test
public void whenSearchWithFileMask() {
String[] arguments = new String[]{"-d", sourcePath, "-n", "*.txt", "-m", "-o", outPutPath};
List<String> result = new MainSearch(new Args(arguments)).init();
assertThat(result.get(1), is("log.txt"));
}
} |
#!/bin/sh
# Run the script as "source ./filename.sh" as the shell script needs to run with reference to the terminal's current environment
# You can use any API. In this case, I have used ipgeolocation.io's API
## Note - While using an API of choice, please make sure to adhere to the API's terms of service and privacy policy
export API_URL='https://api.ipgeolocation.io/ipgeo?apiKey='
# API Key only needed if API being used requires an API key
export API_KEY='API_KEY'
# IP Address of MongoDB
export IP_ADDRESS='localhost' # default
# PORT MongoDB is hosted on
export PORT='27017' # default
# Database Name
export DB_NAME='locationDetails'
# Device's Name stored as the collection's name
export COLLECTION_NAME='DEVICE_NAME' |
#!/usr/bin/env bash
curPath=$(pwd)
for dir in $(ls); do
if [ -d ${curPath}/${dir} ]; then
eval "unzip dummy_data_copy.zip"
eval "mv dummy_data/cfq/splits/data.json dummy_data/cfq/splits/${dir}.json"
eval "zip -r dummy_data.zip dummy_data"
eval "cp dummy_data.zip ${curPath}/${dir}/1.0.1/dummy_data.zip"
eval "rm dummy_data.zip"
eval "rm -r dummy_data"
fi
done
|
<filename>.solcover.js
module.exports = {
skipFiles: ["Migrations.sol"],
// need for dependencies
copyNodeModules: true,
copyPackages: ["zeppelin-solidity"],
dir: ".",
norpc: false,
};
|
<reponame>atomist/yaml-updater
/*
* Copyright © 2019 Atomist, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as deepEqual from "fast-deep-equal";
import * as yaml from "js-yaml";
export interface Options {
keepArrayIndent?: boolean;
updateAll?: boolean;
}
/**
* Parse the provides update string as JSON and use the keys in the
* provided object to update, insert, or delete keys in the provided
* YAML.
*
* @param updates string of updates
* @param currentYaml YAML document to update
* @param options format settings
* @return updated YAML document as a string
*/
export function updateYamlDocumentWithString(
updatesString: string,
currentYaml: string,
options: Options = { keepArrayIndent: false },
): string {
let updates = {};
try {
updates = JSON.parse(updatesString);
} catch (e) {
throw new Error(`failed to parse update JSON '${updatesString}': ${(e as Error).message}`);
}
return updateYamlDocument(updates, currentYaml, options);
}
/**
* Use the keys in the provided object to update, insert, or delete
* keys in the provided YAML. The YAML can be multiple documents.
*
* The updating follows two possible strategies depending on the ̀updateAll`
* option. When `false`, the default, if the keys are found in any of the
* documents, they are updated in the first document the key exists in. If a key
* is not found in any document, it is added to the first document. When
* `̀updateAll` is `true`, the updates append on all documents.
*
* @param updates object of updates
* @param currentYaml YAML document to update
* @param options format settings
* @return updated YAML document as a string
*/
export function updateYamlDocuments(
updates: { [key: string]: any },
yamlDocuments: string,
options: Options = { keepArrayIndent: false, updateAll: false },
): string {
const yamlSepRegExp = /^(---(?:[ \t]+.*)?\n)/m;
const yamlDocs = yamlDocuments.split(yamlSepRegExp);
const insertIndex = (/\S/.test(yamlDocs[0]) || yamlDocs.length === 1) ? 0 : 2;
const updateAll = options.updateAll || false;
for (const k of Object.keys(updates)) {
const v = updates[k];
let found = -1;
for (let i = 0; i < yamlDocs.length; i += 2) {
let current: any;
try {
current = yaml.safeLoad(yamlDocs[i]);
} catch (e) {
throw new Error(`failed to parse YAML document '${yamlDocs[i]}': ${(e as Error).message}`);
}
if (!current) {
continue;
}
if (updateAll) {
yamlDocs[i] = updateYamlKey(k, v, yamlDocs[i], options);
} else {
if (k in current) {
found = i;
break;
}
}
}
if (!updateAll) {
const index = (found < 0) ? insertIndex : found;
yamlDocs[index] = updateYamlKey(k, v, yamlDocs[index], options);
}
}
return yamlDocs.join("");
}
/**
* Use the keys in the provided object to update, insert, or delete
* keys in the provided YAML document.
*
* @param updates object of updates
* @param currentYaml YAML document to update
* @param options format settings
* @return updated YAML document as a string
*/
export function updateYamlDocument(
updates: { [key: string]: any },
currentYaml: string,
options: Options = { keepArrayIndent: false },
): string {
let updatedYaml = currentYaml;
for (const k of Object.keys(updates)) {
const v = updates[k];
updatedYaml = updateYamlKey(k, v, updatedYaml, options);
}
return updatedYaml;
}
/**
* Update, insert, or delete the value of a key in `currentYml`, a
* string containing valid YAML. It does its best to retain the same
* formatting, but empty lines and trailing whitespace may disappear
* if adjacent lines are edited and comments that are parsed as part
* of a value that is deleted will be deleted.
*
* @param key the key whose value should be replaced
* @param value the value to set the key to, set to `null` or `undefined` to remove the key
* @param options settings for the formatting
* @return updated YAML document as a string
*/
export function updateYamlKey(
key: string,
value: any,
currentYaml: string,
options: Options = { keepArrayIndent: false },
): string {
// match index 01 2
const keyValRegExp = new RegExp(`(^|\\n)${key}[^\\S\\n]*:(?:[^\\S\\n]*?\\n)?([\\s\\S]*?(?:\\n(?![\\n\\- #])|$))`);
let updatedYaml = (/\n$/.test(currentYaml)) ? currentYaml : currentYaml + "\n";
let current: any;
try {
current = yaml.safeLoad(updatedYaml);
} catch (e) {
throw new Error(`failed to parse current YAML '${updatedYaml}': ${(e as Error).message}`);
}
if (!current) {
updatedYaml = (updatedYaml === "\n") ? "" : updatedYaml;
updatedYaml += formatYamlKey(key, value, options);
return updatedYaml;
}
if (value === null || value === undefined) {
if (key in current) {
updatedYaml = updatedYaml.replace(keyValRegExp, "$1");
}
} else if (knownType(value)) {
if (key in current) {
if (deepEqual(current[key], value)) {
return currentYaml;
} else if (simpleType(value) || simpleType(current[key])) {
const newKeyValue = formatYamlKey(key, value, options);
updatedYaml = updatedYaml.replace(keyValRegExp, `\$1${newKeyValue}`);
} else if (typeof current[key] === "object") {
const keyMatches = keyValRegExp.exec(updatedYaml);
if (!keyMatches) {
throw new Error(`failed to match key ${key} in current YAML: ${updatedYaml}`);
}
const keyObject = keyMatches[2];
// find first properly indented line
const indentationRegExp = /^( +)[^\-# ]/m;
const indentMatches = indentationRegExp.exec(keyObject);
if (!indentMatches) {
throw new Error(`failed to match indentation for elements of key ${key}: ${keyObject}`);
}
const indentLevel = indentMatches[1];
const indentRegex = new RegExp(`^${indentLevel}`);
const lines = keyObject.split("\n");
const indentation: YamlLine[] = [];
const undentedLines = lines.map(l => {
indentation.push(new YamlLine(l, indentRegex.test(l)));
return l.replace(indentRegex, "");
});
let currentValueYaml = undentedLines.join("\n");
for (const k of Object.keys(value)) {
const v = value[k];
currentValueYaml = updateYamlKey(k, v, currentValueYaml, options);
}
const currentLines = currentValueYaml.split("\n");
let nextToMatch = 0;
const indentedLines = currentLines.map(l => {
for (let j = nextToMatch; j < indentation.length; j++) {
if (l.trim() === indentation[j].content.trim()) {
nextToMatch = j + 1;
if (indentation[j].indented) {
return indentLevel + l;
} else {
return l;
}
}
}
return (/\S/.test(l)) ? indentLevel + l : l;
});
const indentedYaml = indentedLines.join("\n");
// last line must be empty but indentation matching may erroneously indent the last
// empty line if there were indented empty lines after a deleted key
const trailerYaml = (/\n$/.test(indentedYaml)) ? indentedYaml : indentedYaml + "\n";
updatedYaml = updatedYaml.replace(keyValRegExp, `\$1${key}:\n${trailerYaml}`);
} else {
throw new Error(`cannot update current YAML key ${key} of type ${typeof current[key]}`);
}
} else {
const tailMatches = /\n(\n*)$/.exec(updatedYaml);
const tail = (tailMatches && tailMatches[1]) ? tailMatches[1] : "";
updatedYaml = updatedYaml.replace(/\n+$/, "\n") + formatYamlKey(key, value, options) + tail;
}
} else {
throw new Error(`cannot update YAML with value (${value}) of type ${typeof value}`);
}
return updatedYaml;
}
function simpleType(a: any): boolean {
const typ = typeof a;
return typ === "string" || typ === "boolean" || typ === "number" || Array.isArray(a);
}
function knownType(a: any): boolean {
return simpleType(a) || typeof a === "object";
}
class YamlLine {
constructor(public content: string, public indented: boolean) { }
}
/**
* Format a key and value into a YAML string.
*
* @param key key to serialize
* @param value value to serialize
* @param options settings for the formatting
*/
export function formatYamlKey(key: string, value: any, options: Options = { keepArrayIndent: false }): string {
const obj: any = {};
obj[key] = value;
let y: string;
try {
y = yaml.safeDump(obj);
} catch (e) {
throw new Error(`failed to create YAML for {${key}: ${value}}: ${(e as Error).message}`);
}
y = arrayIndent(y, options.keepArrayIndent);
return y;
}
/**
 * Format object into a YAML string.
 *
 * @param obj object to serialize
 * @param options settings for the formatting
 * @return YAML document as a string
 * @throws Error when the object cannot be serialized by js-yaml
 */
export function formatYaml(obj: any, options: Options = { keepArrayIndent: false }): string {
    let serialized: string;
    try {
        serialized = yaml.safeDump(obj);
    } catch (e) {
        throw new Error(`failed to create YAML for '${JSON.stringify(obj)}': ${(e as Error).message}`);
    }
    return arrayIndent(serialized, options.keepArrayIndent);
}
/**
 * Remove one level of superfluous indentation from array items when
 * `indentArray` is falsy; return the document unchanged otherwise.
 *
 * @param y YAML document as string
 * @param indentArray retain indented arrays if `true`
 * @return YAML document as string with arrays indented as desired.
 */
function arrayIndent(y: string, indentArray: boolean | undefined): string {
    if (indentArray) {
        return y;
    }
    // Drop exactly one space before each "- " list marker, per line.
    return y.replace(/^( *) - /gm, "$1- ");
}
|
# Sum a fixed vector of numbers and print the result.
arr <- c(5, 2, 7, 4, 3)
# Use `total`, not `sum`, so the base::sum function is not shadowed by a numeric.
total <- sum(arr)
print(total)
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Declare the encoding so the page renders consistently across browsers -->
<meta charset="utf-8">
<title>My Page</title>
<style>
.section {
margin: 10px 0;
}
</style>
</head>
<body>
<div class="section">
<h2>Section 1</h2>
<p>This is the first section.</p>
</div>
<div class="section">
<h2>Section 2</h2>
<p>This is the second section.</p>
</div>
<div class="section">
<h2>Section 3</h2>
<p>This is the third section.</p>
</div>
</body>
</html>
<filename>mbhd-core/src/main/java/org/multibit/hd/core/dto/PaymentSessionSummary.java<gh_stars>10-100
package org.multibit.hd.core.dto;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.ListenableFuture;
import org.bitcoin.protocols.payments.Protos;
import org.bitcoinj.core.Address;
import org.bitcoinj.core.Coin;
import org.bitcoinj.core.Transaction;
import org.bitcoinj.protocols.payments.PaymentProtocol;
import org.bitcoinj.protocols.payments.PaymentProtocolException;
import org.bitcoinj.protocols.payments.PaymentSession;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.io.IOException;
import java.security.KeyStoreException;
import java.security.cert.CertPathValidatorException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.TimeoutException;
/**
* <p>Value object to provide the following to Core API:</p>
* <ul>
* <li>Information about a payment session's status</li>
* </ul>
*
* @since 0.0.7
*/
public class PaymentSessionSummary {
private static final Logger log = LoggerFactory.getLogger(PaymentSessionSummary.class);
private final PaymentSessionStatus status;
private final Optional<PaymentSession> paymentSession;
private final Optional<PaymentProtocol.PkiVerificationData> pkiVerificationData;
private final RAGStatus severity;
private final CoreMessageKey messageKey;
private final Object[] messageData;
/**
* <p>The server has returned a well-formed payment request</p>
*
* @param paymentSession The payment session containing meta data (cannot be null to be OK)
* @param pkiVerificationData The PKI verification data containing identity information (cannot be null to be OK)
*
* @return A new "payment session OK" summary
*/
public static PaymentSessionSummary newPaymentSessionOK(PaymentSession paymentSession, PaymentProtocol.PkiVerificationData pkiVerificationData) {
Preconditions.checkNotNull(paymentSession, "'paymentSession' must be present");
return new PaymentSessionSummary(
Optional.of(paymentSession),
Optional.fromNullable(pkiVerificationData),
PaymentSessionStatus.TRUSTED,
RAGStatus.GREEN,
CoreMessageKey.PAYMENT_SESSION_OK,
new String[]{paymentSession.getMemo()}
);
}
/**
* <p>The server has returned a well-formed payment request that has failed PKI validation</p>
*
* <p>The user may want to proceed under these circumstances so we cater for it.</p>
*
* @param paymentSession The payment session containing meta data
*
* @return A new "payment session" summary with appropriate confidence level
*/
public static PaymentSessionSummary newPaymentSessionAlmostOK(PaymentSession paymentSession, Exception e) {
if (e instanceof PaymentProtocolException.InvalidPkiData) {
return new PaymentSessionSummary(
Optional.of(paymentSession),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_INVALID,
new String[]{paymentSession.getMemo(), e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidPkiType) {
return new PaymentSessionSummary(
Optional.of(paymentSession),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_INVALID_TYPE,
new String[]{paymentSession.getMemo(), e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.PkiVerificationException) {
return new PaymentSessionSummary(
Optional.of(paymentSession),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_MISSING,
new String[]{paymentSession.getMemo(), e.getMessage()}
);
}
if (e instanceof KeyStoreException) {
return new PaymentSessionSummary(
Optional.of(paymentSession),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_UNTRUSTED_CA,
new String[]{paymentSession.getMemo(), e.getMessage()}
);
}
// Assume the worst
return newPaymentSessionFromException(e, paymentSession.getMemo());
}
/**
* @param e The payment protocol exception (either an ERROR or a DOWN)
*
* @return A suitable payment session summary
*/
public static PaymentSessionSummary newPaymentSessionFromException(Exception e, String hostName) {
log.warn("Failed payment server: Host={} Failure={}", hostName, e.getMessage());
// Default handling is ERROR
if (e instanceof InterruptedException) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
null, PaymentSessionStatus.DOWN,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_DOWN,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof TimeoutException) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
null, PaymentSessionStatus.DOWN,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_DOWN,
new String[]{hostName, e.getMessage()}
);
}
// Use default response
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
null, PaymentSessionStatus.ERROR,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_ERROR,
new String[]{hostName, e.getMessage()}
);
}
/**
* @param e The payment protocol exception (specific problem)
* @param hostName The host name
*
* @return A suitable payment session summary
*/
public static PaymentSessionSummary newPaymentSessionFromException(PaymentProtocolException e, String hostName) {
log.warn("Failed payment session: Host={} Failure={}", hostName, e.getMessage());
// Default handling is ERROR
if (e instanceof PaymentProtocolException.Expired) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_EXPIRED,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidNetwork) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_INVALID_NETWORK,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidOutputs) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_INVALID_OUTPUTS,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidPaymentRequestURL) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_INVALID_REQUEST_URL,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidPaymentURL) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_INVALID_PAYMENT_URL,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidVersion) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_INVALID_VERSION,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidPkiData) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_INVALID,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.InvalidPkiType) {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_INVALID_TYPE,
new String[]{hostName, e.getMessage()}
);
}
if (e instanceof PaymentProtocolException.PkiVerificationException) {
// This is a bit lame but the only way to differentiate PKI failures from untrusted
if (e.getCause() != null && e.getCause() instanceof CertPathValidatorException) {
// Untrusted CA (user might want to add it to the trust store)
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_UNTRUSTED_CA,
new String[]{hostName, e.getMessage()}
);
} else {
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.UNTRUSTED,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_PKI_MISSING,
new String[]{hostName, e.getMessage()}
);
}
}
// Unknown
return new PaymentSessionSummary(
Optional.<PaymentSession>absent(),
Optional.<PaymentProtocol.PkiVerificationData>absent(),
PaymentSessionStatus.ERROR,
RAGStatus.AMBER,
CoreMessageKey.PAYMENT_SESSION_ERROR,
new String[]{hostName, e.getMessage()}
);
}
  /**
   * <p>See the utility factory methods for standard situations</p>
   *
   * @param paymentSession      The optional payment session (absent when the request failed outright)
   * @param pkiVerificationData The optional PKI verification data (absent unless PKI validation succeeded)
   * @param status              The payment session status (e.g. TRUSTED)
   * @param severity            The severity (Red, Amber, Green)
   * @param messageKey          The message key to allow localisation
   * @param messageData         The message data for insertion into the localised message
   */
  public PaymentSessionSummary(
    Optional<PaymentSession> paymentSession,
    Optional<PaymentProtocol.PkiVerificationData> pkiVerificationData,
    PaymentSessionStatus status,
    RAGStatus severity,
    CoreMessageKey messageKey,
    Object[] messageData) {
    this.paymentSession = paymentSession;
    this.pkiVerificationData = pkiVerificationData;
    this.status = status;
    this.severity = severity;
    this.messageKey = messageKey;
    // Defensive copy so later mutation by the caller cannot affect this summary
    this.messageData = Arrays.copyOf(messageData, messageData.length);
  }
  /**
   * @return True if a payment session was successfully established (the other
   *         accessors on this class return {@code absent()} when this is false)
   */
  public boolean hasPaymentSession() {
    return paymentSession.isPresent();
  }
/**
* @return optional boolean, which holds true if the payment session has outputs
*/
public Optional<Boolean> hasPaymentSessionOutputs() {
if (hasPaymentSession()) {
return Optional.of(!paymentSession.get().getOutputs().isEmpty());
} else {
return Optional.absent();
}
}
/**
* @return The memo from the payment session object
*/
public Optional<String> getPaymentSessionMemo() {
if (hasPaymentSession()) {
return Optional.fromNullable(paymentSession.get().getMemo());
} else {
return Optional.absent();
}
}
/**
* @return The value from the payment session object
*/
public Optional<Coin> getPaymentSessionValue() {
if (hasPaymentSession()) {
return Optional.fromNullable(paymentSession.get().getValue());
} else {
return Optional.absent();
}
}
/**
* @return The expires from the payment session object
*/
public Optional<DateTime> getPaymentSessionExpires() {
if (hasPaymentSession() && paymentSession.get().getExpires() != null) {
return Optional.of(new DateTime(paymentSession.get().getExpires()));
} else {
return Optional.absent();
}
}
/**
* @return The date from the payment session object
*/
public Optional<DateTime> getPaymentSessionDate() {
if (hasPaymentSession() && paymentSession.get().getDate() != null) {
return Optional.of(new DateTime(paymentSession.get().getDate()));
} else {
return Optional.absent();
}
}
/**
* @return The payment URL from the payment session object
*/
public Optional<String> getPaymentSessionPaymentUrl() {
if (hasPaymentSession()) {
return Optional.fromNullable(paymentSession.get().getPaymentUrl());
} else {
return Optional.absent();
}
}
/**
* @return The payment request from the payment session object
*/
public Optional<Protos.PaymentRequest> getPaymentSessionPaymentRequest() {
if (hasPaymentSession()) {
return Optional.fromNullable(paymentSession.get().getPaymentRequest());
} else {
return Optional.absent();
}
}
  /**
   * Send the signed transactions to the merchant's payment URL as a BIP70 Payment message.
   *
   * @param transactions The signed transactions fulfilling the payment request
   * @param refundAddr   An optional address for the merchant to use for refunds
   * @param memo         An optional memo to attach to the payment
   *
   * @return The Payment message plus the ACK future, absent when there is no payment session
   *
   * @throws IOException              If the network operation fails
   * @throws PaymentProtocolException If the payment cannot be constructed
   */
  public Optional<PaymentProtocolResponseDto> sendPaymentSessionPayment(List<Transaction> transactions, @Nullable Address refundAddr, @Nullable String memo)
    throws IOException, PaymentProtocolException {
    if (hasPaymentSession()) {
      log.debug("Sending payment details to requester at URL '{}'", paymentSession.get().getPaymentUrl());
      // NOTE(review): getPayment() and sendPayment() each build a Payment
      // message; the one returned in the DTO is constructed separately from the
      // one actually sent — confirm they are guaranteed identical
      Protos.Payment payment = paymentSession.get().getPayment(transactions, refundAddr, memo);
      ListenableFuture<PaymentProtocol.Ack> future = paymentSession.get().sendPayment(transactions, refundAddr, memo);
      return Optional.of(new PaymentProtocolResponseDto(payment, future));
    } else {
      return Optional.absent();
    }
  }
  /**
   * Just a data holder for the result when sending the payment: the final BIP70
   * Payment message that was built and the future that completes with the
   * merchant's ACK (or fails).
   */
  public static class PaymentProtocolResponseDto {
    // The Payment message built for the merchant
    private final Protos.Payment finalPayment;
    // Completes when the merchant acknowledges (or rejects) the payment
    private final ListenableFuture<PaymentProtocol.Ack> future;
    public PaymentProtocolResponseDto(Protos.Payment finalPayment, ListenableFuture<PaymentProtocol.Ack> future) {
      this.finalPayment = finalPayment;
      this.future = future;
    }
    public Protos.Payment getFinalPayment() {
      return finalPayment;
    }
    public ListenableFuture<PaymentProtocol.Ack> getFuture() {
      return future;
    }
  }
  /**
   * @return The PKI verification data based on a second pass through the Payment Request (accurate)
   */
  public Optional<PaymentProtocol.PkiVerificationData> getPkiVerificationData() {
    return pkiVerificationData;
  }
  /**
   * @return The severity (e.g. AMBER) associated with this summary
   */
  public RAGStatus getSeverity() {
    return severity;
  }
  /**
   * @return The payment session status (e.g. "TRUSTED")
   */
  public PaymentSessionStatus getStatus() {
    return status;
  }
  /**
   * @return A defensive copy of the arbitrary message objects, often for insertion into a resource bundle string
   */
  public Object[] getMessageData() {
    return Arrays.copyOf(messageData, messageData.length);
  }
  /**
   * @return The key identifying the localisable message for this summary
   */
  public CoreMessageKey getMessageKey() {
    return messageKey;
  }
  // Diagnostic representation only; not intended for localised user-facing output
  @Override
  public String toString() {
    return "PaymentSessionSummary{" +
      "messageData=" + Arrays.toString(messageData) +
      ", status=" + status +
      ", paymentSession=" + paymentSession +
      ", pkiVerificationData=" + pkiVerificationData +
      ", severity=" + severity +
      ", messageKey=" + messageKey +
      '}';
  }
}
|
import { Component, OnInit } from '@angular/core';
import { AngularFirestore,
AngularFirestoreCollection,
AngularFirestoreDocument,
DocumentChangeAction } from '@angular/fire/firestore';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
// Shape of a record in the Firestore `docs` collection.
interface Doc {
  date: any;         // timestamp of the document (type not narrowed here)
  id?: string;       // Firestore document id, attached client-side after reads
  verified: boolean; // whether a reviewer has accepted the document
}
// Admin view that streams documents from the Firestore `docs` collection and
// lets a reviewer toggle their verification state or delete them.
@Component({
  selector: 'app-document-validate',
  templateUrl: './document-validate.component.html',
  styleUrls: ['./document-validate.component.scss']
})
export class DocumentValidateComponent implements OnInit {
  // Handle to the Firestore `docs` collection.
  DocCollection: AngularFirestoreCollection<Doc>;
  // Live stream of documents; re-emits on every collection change.
  Docs: Observable<Doc[]>;
  // NOTE(review): MaxDocs is never read in this component — confirm whether a
  // pagination/limit feature was intended or whether it can be removed.
  MaxDocs = 5;
  // Counters recomputed on each snapshot emission.
  DocLength = 0;
  Verified = 0;
  // Local copy of the latest snapshot; accept() reads current state from here.
  currentDocs: Doc[];
  constructor(private db: AngularFirestore) { }
  ngOnInit() {
    this.DocCollection = this.db.collection<Doc>('docs');
    this.Docs = this.DocCollection.snapshotChanges().pipe(map(actions => {
      // Reset counters: this mapper runs in full for every emission.
      this.DocLength = 0;
      this.Verified = 0;
      this.currentDocs = [];
      return actions.map(action => {
        const data = action.payload.doc.data();
        const id = action.payload.doc.id;
        this.DocLength++;
        if (data.verified) {
          this.Verified++;
        }
        this.currentDocs.push({ id, ...data });
        return { id, ...data };
      });
    }));
  }
  // NOTE(review): despite the name, this *toggles* `verified` rather than only
  // setting it to true — confirm the intended semantics before renaming.
  accept(id) {
    const doc = this.currentDocs.find(item => item.id === id);
    this.DocCollection.doc(`/${id}`).update({verified: !doc.verified});
  }
  // Permanently removes the document from Firestore (no confirmation step here).
  delete(id) {
    this.DocCollection.doc(`/${id}`).delete();
  }
}
|
// Code generated by entc, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"github.com/blushft/strana/modules/sink/reporter/store/ent/connectivity"
"github.com/blushft/strana/modules/sink/reporter/store/ent/event"
"github.com/blushft/strana/modules/sink/reporter/store/ent/predicate"
"github.com/facebook/ent/dialect/sql"
"github.com/facebook/ent/dialect/sql/sqlgraph"
"github.com/facebook/ent/schema/field"
"github.com/google/uuid"
)
// ConnectivityUpdate is the builder for updating Connectivity entities.
type ConnectivityUpdate struct {
config
hooks []Hook
mutation *ConnectivityMutation
predicates []predicate.Connectivity
}
// Where adds a new predicate for the builder.
func (cu *ConnectivityUpdate) Where(ps ...predicate.Connectivity) *ConnectivityUpdate {
cu.predicates = append(cu.predicates, ps...)
return cu
}
// SetBluetooth sets the bluetooth field.
func (cu *ConnectivityUpdate) SetBluetooth(b bool) *ConnectivityUpdate {
cu.mutation.SetBluetooth(b)
return cu
}
// SetCellular sets the cellular field.
func (cu *ConnectivityUpdate) SetCellular(b bool) *ConnectivityUpdate {
cu.mutation.SetCellular(b)
return cu
}
// SetWifi sets the wifi field.
func (cu *ConnectivityUpdate) SetWifi(b bool) *ConnectivityUpdate {
cu.mutation.SetWifi(b)
return cu
}
// SetEthernet sets the ethernet field.
func (cu *ConnectivityUpdate) SetEthernet(b bool) *ConnectivityUpdate {
cu.mutation.SetEthernet(b)
return cu
}
// SetCarrier sets the carrier field.
func (cu *ConnectivityUpdate) SetCarrier(b bool) *ConnectivityUpdate {
cu.mutation.SetCarrier(b)
return cu
}
// SetIsp sets the isp field.
func (cu *ConnectivityUpdate) SetIsp(b bool) *ConnectivityUpdate {
cu.mutation.SetIsp(b)
return cu
}
// SetEventID sets the event edge to Event by id.
func (cu *ConnectivityUpdate) SetEventID(id uuid.UUID) *ConnectivityUpdate {
cu.mutation.SetEventID(id)
return cu
}
// SetNillableEventID sets the event edge to Event by id if the given value is not nil.
func (cu *ConnectivityUpdate) SetNillableEventID(id *uuid.UUID) *ConnectivityUpdate {
if id != nil {
cu = cu.SetEventID(*id)
}
return cu
}
// SetEvent sets the event edge to Event.
func (cu *ConnectivityUpdate) SetEvent(e *Event) *ConnectivityUpdate {
return cu.SetEventID(e.ID)
}
// Mutation returns the ConnectivityMutation object of the builder.
func (cu *ConnectivityUpdate) Mutation() *ConnectivityMutation {
return cu.mutation
}
// ClearEvent clears the event edge to Event.
func (cu *ConnectivityUpdate) ClearEvent() *ConnectivityUpdate {
cu.mutation.ClearEvent()
return cu
}
// Save executes the query and returns the number of rows/vertices matched by this operation.
func (cu *ConnectivityUpdate) Save(ctx context.Context) (int, error) {
var (
err error
affected int
)
if len(cu.hooks) == 0 {
affected, err = cu.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*ConnectivityMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
cu.mutation = mutation
affected, err = cu.sqlSave(ctx)
mutation.done = true
return affected, err
})
for i := len(cu.hooks) - 1; i >= 0; i-- {
mut = cu.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, cu.mutation); err != nil {
return 0, err
}
}
return affected, err
}
// SaveX is like Save, but panics if an error occurs.
func (cu *ConnectivityUpdate) SaveX(ctx context.Context) int {
affected, err := cu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (cu *ConnectivityUpdate) Exec(ctx context.Context) error {
_, err := cu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (cu *ConnectivityUpdate) ExecX(ctx context.Context) {
if err := cu.Exec(ctx); err != nil {
panic(err)
}
}
func (cu *ConnectivityUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: connectivity.Table,
Columns: connectivity.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: connectivity.FieldID,
},
},
}
if ps := cu.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := cu.mutation.Bluetooth(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldBluetooth,
})
}
if value, ok := cu.mutation.Cellular(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldCellular,
})
}
if value, ok := cu.mutation.Wifi(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldWifi,
})
}
if value, ok := cu.mutation.Ethernet(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldEthernet,
})
}
if value, ok := cu.mutation.Carrier(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldCarrier,
})
}
if value, ok := cu.mutation.Isp(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldIsp,
})
}
if cu.mutation.EventCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: connectivity.EventTable,
Columns: []string{connectivity.EventColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: event.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := cu.mutation.EventIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: connectivity.EventTable,
Columns: []string{connectivity.EventColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: event.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if n, err = sqlgraph.UpdateNodes(ctx, cu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{connectivity.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return 0, err
}
return n, nil
}
// ConnectivityUpdateOne is the builder for updating a single Connectivity entity.
type ConnectivityUpdateOne struct {
config
hooks []Hook
mutation *ConnectivityMutation
}
// SetBluetooth sets the bluetooth field.
func (cuo *ConnectivityUpdateOne) SetBluetooth(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetBluetooth(b)
return cuo
}
// SetCellular sets the cellular field.
func (cuo *ConnectivityUpdateOne) SetCellular(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetCellular(b)
return cuo
}
// SetWifi sets the wifi field.
func (cuo *ConnectivityUpdateOne) SetWifi(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetWifi(b)
return cuo
}
// SetEthernet sets the ethernet field.
func (cuo *ConnectivityUpdateOne) SetEthernet(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetEthernet(b)
return cuo
}
// SetCarrier sets the carrier field.
func (cuo *ConnectivityUpdateOne) SetCarrier(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetCarrier(b)
return cuo
}
// SetIsp sets the isp field.
func (cuo *ConnectivityUpdateOne) SetIsp(b bool) *ConnectivityUpdateOne {
cuo.mutation.SetIsp(b)
return cuo
}
// SetEventID sets the event edge to Event by id.
func (cuo *ConnectivityUpdateOne) SetEventID(id uuid.UUID) *ConnectivityUpdateOne {
cuo.mutation.SetEventID(id)
return cuo
}
// SetNillableEventID sets the event edge to Event by id if the given value is not nil.
func (cuo *ConnectivityUpdateOne) SetNillableEventID(id *uuid.UUID) *ConnectivityUpdateOne {
if id != nil {
cuo = cuo.SetEventID(*id)
}
return cuo
}
// SetEvent sets the event edge to Event.
func (cuo *ConnectivityUpdateOne) SetEvent(e *Event) *ConnectivityUpdateOne {
return cuo.SetEventID(e.ID)
}
// Mutation returns the ConnectivityMutation object of the builder.
func (cuo *ConnectivityUpdateOne) Mutation() *ConnectivityMutation {
return cuo.mutation
}
// ClearEvent clears the event edge to Event.
func (cuo *ConnectivityUpdateOne) ClearEvent() *ConnectivityUpdateOne {
cuo.mutation.ClearEvent()
return cuo
}
// Save executes the query and returns the updated entity.
func (cuo *ConnectivityUpdateOne) Save(ctx context.Context) (*Connectivity, error) {
var (
err error
node *Connectivity
)
if len(cuo.hooks) == 0 {
node, err = cuo.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*ConnectivityMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
cuo.mutation = mutation
node, err = cuo.sqlSave(ctx)
mutation.done = true
return node, err
})
for i := len(cuo.hooks) - 1; i >= 0; i-- {
mut = cuo.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, cuo.mutation); err != nil {
return nil, err
}
}
return node, err
}
// SaveX is like Save, but panics if an error occurs.
func (cuo *ConnectivityUpdateOne) SaveX(ctx context.Context) *Connectivity {
c, err := cuo.Save(ctx)
if err != nil {
panic(err)
}
return c
}
// Exec executes the query on the entity.
func (cuo *ConnectivityUpdateOne) Exec(ctx context.Context) error {
_, err := cuo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (cuo *ConnectivityUpdateOne) ExecX(ctx context.Context) {
if err := cuo.Exec(ctx); err != nil {
panic(err)
}
}
func (cuo *ConnectivityUpdateOne) sqlSave(ctx context.Context) (c *Connectivity, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: connectivity.Table,
Columns: connectivity.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: connectivity.FieldID,
},
},
}
id, ok := cuo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "ID", err: fmt.Errorf("missing Connectivity.ID for update")}
}
_spec.Node.ID.Value = id
if value, ok := cuo.mutation.Bluetooth(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldBluetooth,
})
}
if value, ok := cuo.mutation.Cellular(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldCellular,
})
}
if value, ok := cuo.mutation.Wifi(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldWifi,
})
}
if value, ok := cuo.mutation.Ethernet(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldEthernet,
})
}
if value, ok := cuo.mutation.Carrier(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldCarrier,
})
}
if value, ok := cuo.mutation.Isp(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: connectivity.FieldIsp,
})
}
if cuo.mutation.EventCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: connectivity.EventTable,
Columns: []string{connectivity.EventColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: event.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := cuo.mutation.EventIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.O2O,
Inverse: true,
Table: connectivity.EventTable,
Columns: []string{connectivity.EventColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: event.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
c = &Connectivity{config: cuo.config}
_spec.Assign = c.assignValues
_spec.ScanValues = c.scanValues()
if err = sqlgraph.UpdateNode(ctx, cuo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{connectivity.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return nil, err
}
return c, nil
}
|
#!/bin/bash
# Reconstruct the "kermit" PMVS dataset with EMVS, logging console output.
# Fail fast on any error (matching the project's other scripts) and use
# pipefail so `tee` cannot mask a non-zero exit status from emvs.
set -euo pipefail
rm -rf kermit.ply
../bin/emvs kermit/pmvs/ kermit.ply | tee kermit-emvs.log
|
<filename>src/slowstats.cc
/*****************************************************************************
* Copyright 2011 <NAME>, <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* *****************************************************************************/
/*****
* Authors: <NAME> <EMAIL>
* <NAME> <EMAIL>
*
* Slow HTTP attack vulnerability test tool
* https://github.com/shekyan/slowhttptest
*
* class StatsDumper and derived classes help to generate
* statistics of the test in CSV and Google Chart Tools
* based javascript.
*****/
#include "slowstats.h"
#include "slowlog.h"
#include <stdarg.h>
#include <string>
using std::string;
namespace {
const char* HTML_HEADER =
"<!-- SlowHTTPTest Analysis chart (c) <NAME>, <NAME> 2011 -->\n \
<html>\n \
<head>\n \
<style>\n \
body { font: 12px/18px \"Lucida Grande\", \"Lucida Sans Unicode\", Helvetica, Arial, Verdana, sans-serif; background-color: transparent; color: #333; -webkit-font-smoothing: antialiased; } \n \
.slow_results {font-size: 12px; } \n \
</style>\n \
<script type=\"text/javascript\" src=\"https://www.google.com/jsapi\"></script>\n \
<script type=\"text/javascript\">\n \
google.load(\"visualization\", \"1\", {packages:[\"corechart\"]});\n \
google.setOnLoadCallback(drawChart);\n \
function drawChart() {\n \
var data = new google.visualization.DataTable();\n \
data.addColumn('string', 'Seconds');\n \
data.addColumn('number', 'Closed');\n \
data.addColumn('number', 'Pending');\n \
data.addColumn('number', 'Connected');\n \
data.addColumn('number', 'Service available');\n \
data.addRows([\n";
const char* HTML_FOOTER =
" ]);\n \
var chart = new google.visualization.AreaChart(document.getElementById('chart_div'));\n \
chart.draw(data, {'width': 600, 'height': 360, 'title': 'Test results against %s', \
hAxis: {'title': 'Seconds', 'titleTextStyle': {color: '#FF0000'}},\n \
vAxis: {'title': 'Connections', 'titleTextStyle': {color: '#FF0000'}, 'viewWindowMode':'maximized'}\n \
});\n \
}\n \
</script>\n \
<title>SlowHTTPTest(tm) Connection Results</title>\n \
</head>\n \
<body>\n \
<p>%s</p>\n \
<div id=\"chart_div\"></div>\n \
</body>\n \
</html>\n";
}
namespace slowhttptest {
// Opens the output file for writing; returns false when the file cannot be
// created so callers can abort before any WriteStats() calls.
bool StatsDumper::Initialize() {
  file_ = fopen(file_name_.c_str(), "w");
  return file_ != NULL;
}
// Writes one printf-style record to the stats file. The format string is first
// transformed by the subclass hook ModifyFormatString() (e.g. HTMLDumper quotes
// the first conversion) and the record is bracketed by PreWrite()/PostWrite().
void StatsDumper::WriteStats(const char* format, ...) {
  CHECK_NOTNULL(file_);
  CHECK_NOTNULL(format);
  // Also must be non-empty.
  check(*format != 0, "Format string cannot be empty");
  PreWrite();
  const string new_format = ModifyFormatString(format);
  va_list va;
  va_start(va, format);
  // The va_list is applied to the modified format; ModifyFormatString() only
  // inserts quote characters, so the conversion specifiers still line up.
  vfprintf(file_, new_format.c_str(), va);
  va_end(va);
  PostWrite();
  // Keep the file current so partial stats survive an interrupted test run.
  fflush(file_);
}
// Default record terminator: a bare newline (CSV-style rows).
// HTMLDumper overrides this to close a JS array literal instead.
void StatsDumper::PostWrite() {
  fprintf(file_, "\n");
}
// Writes a raw, unformatted string to the output file.
// `str` must not be NULL; an empty string is a no-op.
void StatsDumper::WriteString(const char* str) {
  CHECK_NOTNULL(file_);
  CHECK_NOTNULL(str);
  if (*str == '\0') {
    return;
  }
  fputs(str, file_);
}
// CSV dumper with a header row: `header` is written once, at
// Initialize() time, before any data records.
CSVDumper::CSVDumper(const string& file_name, const string& header)
    : StatsDumper(file_name),
      header_(header) {
}
// Header-less variant: the file starts directly with data records.
CSVDumper::CSVDumper(const string& file_name)
    : StatsDumper(file_name) {
}
// Opens the output file and, on success, writes the CSV header first.
// Returns false if the file could not be opened.
bool CSVDumper::Initialize() {
  if (!StatsDumper::Initialize()) {
    return false;
  }
  WriteString(header_.c_str());
  return true;
}
// Dumper that renders stats as an HTML page with a Google area chart.
// `url` and `test_info` are substituted into the page footer
// (chart title and description paragraph respectively).
HTMLDumper::HTMLDumper(const std::string& file_name,
    const string& url, const string& test_info)
    : StatsDumper(file_name),
      url_(url),
      test_info_(test_info) {
}
// Opens the output file and, on success, emits the static HTML preamble.
// Returns false if the file could not be opened.
bool HTMLDumper::Initialize() {
  if (!StatsDumper::Initialize()) {
    return false;
  }
  WriteHeader();
  return true;
}
// Completes the document on destruction: the closing markup is only
// written if the file was successfully opened.
HTMLDumper::~HTMLDumper() {
  if (IsOpen()) {
    WriteFooter();
  }
}
// Writes `fmt` with two string arguments substituted (printf-style).
// NOTE(review): silently writes NOTHING when either string is empty --
// for HTML_FOOTER that means the closing markup is omitted entirely;
// confirm this guard is intentional.
void StatsDumper::WriteFormattedString(const char* fmt,
    const char* str1, const char* str2) {
  CHECK_NOTNULL(file_);
  CHECK_NOTNULL(str1);
  CHECK_NOTNULL(str2);
  CHECK_NOTNULL(fmt);
  if (*str1 && *str2) {
    fprintf(file_, fmt, str1, str2);
  }
}
// Emits the static page/chart preamble.
void HTMLDumper::WriteHeader() {
  WriteString(HTML_HEADER);
}
// Emits the closing markup; the first %s in HTML_FOOTER receives the
// target URL, the second the test description.
void HTMLDumper::WriteFooter() {
  WriteFormattedString(HTML_FOOTER, url_.c_str(), test_info_.c_str());
}
// Pre/PostWrite bracket each record so rows become JS array literals:
// [ <row data> ],
void HTMLDumper::PreWrite() {
  WriteString("[");
}
void HTMLDumper::PostWrite() {
  WriteString("],\n");
}
// Rewrites a data-row format string for JS output: wraps the FIRST
// conversion specification in single quotes so the first column
// ("Seconds") is emitted as a JavaScript string literal.
// NOTE(review): assumes the first specifier is exactly two characters
// (e.g. "%d"); a width- or precision-modified specifier would get the
// closing quote inserted mid-specifier -- confirm callers never pass one.
string HTMLDumper::ModifyFormatString(const char* format) {
  string new_format(format);
  string::size_type pos = new_format.find('%');
  if (pos != string::npos) {
    // There must be something after the first %.
    check(new_format.size() > pos + 1, "Incorrect format specification");
    // Insert the trailing quote first so `pos` stays valid for the
    // leading one.
    new_format.insert(pos + 2, 1, '\'');
    new_format.insert(pos, 1, '\'');
  }
  return new_format;
}
} // namespace slowhttptest
|
#!/bin/bash
# Copyright 2021 Authors of Cilium
# SPDX-License-Identifier: Apache-2.0

# Tears down a terraform-provisioned test cluster and deletes its local
# kubeconfig and module directory.
# usage: destroy.sh <cluster-name> <cloud-provider>

set -o errexit
set -o pipefail
set -o nounset

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

if [ "$#" -ne 2 ] ; then
  # FIX: typo "exactl" -> "exactly" in the usage message.
  echo "$0 supports exactly two arguments"
  echo "example: '$0 test-1 aws'"
  exit 1
fi

name="${1}"
cloud_provider="${2}"

kubeconfig_path="${script_dir}/${name}.kubeconfig"
module_path="$(pwd)/${name}"

cd "${module_path}"

# AWS credentials are required for deletion hooks to work properly (see aws/ensure-*.sh)
if [ "${cloud_provider}" = "aws" ] ; then
  if [ -z "${AWS_DEFAULT_REGION+x}" ] || [ -z "${AWS_ACCESS_KEY_ID+x}" ] || [ -z "${AWS_SECRET_ACCESS_KEY+x}" ] ; then
    echo "AWS_DEFAULT_REGION, AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be set"
    exit 3
  fi
fi

CLUSTER_KUBECONFIG="${kubeconfig_path}" terraform destroy -auto-approve

# Remove local state only after a successful destroy (errexit above).
rm -f "${kubeconfig_path}"
rm -rf "${module_path}"
|
/*
* Tencent is pleased to support the open source community by making Blueking Container Service available.
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package cache
import (
"context"
"time"
"github.com/Tencent/bk-bcs/bcs-services/bcs-user-manager/config"
"github.com/go-redis/redis/v8"
)
// RDB is the package-level cache handle, populated by InitRedis.
var RDB Cache

// InitRedis parses conf.RedisDSN and installs a redis-backed Cache
// implementation into RDB. It returns an error only when the DSN cannot
// be parsed; no connection is attempted here (go-redis presumably dials
// lazily on first use -- confirm against the driver version in go.mod).
func InitRedis(conf *config.UserMgrConfig) error {
	options, err := redis.ParseURL(conf.RedisDSN)
	if err != nil {
		return err
	}
	client := redis.NewClient(options)
	RDB = &redisCache{client: client}
	return nil
}
// Cache abstracts the small set of key/value operations used by
// bcs-user-manager. Methods return the driver's raw result plus error.
type Cache interface {
	// Set stores value under key with the given expiration.
	Set(key string, value interface{}, expiration time.Duration) (string, error)
	// SetNX stores value only if key does not already exist.
	SetNX(key string, value interface{}, expiration time.Duration) (bool, error)
	// SetEX stores value under key with the given expiration.
	SetEX(key string, value interface{}, expiration time.Duration) (string, error)
	// Get returns the string value stored at key.
	Get(key string) (string, error)
	// Del removes key and returns the number of keys deleted.
	Del(key string) (uint64, error)
	// Expire sets a TTL on an existing key and reports whether it was set.
	Expire(key string, expiration time.Duration) (bool, error)
}
// Compile-time assertion that *redisCache satisfies Cache.
var _ Cache = &redisCache{}

// redisCache is the go-redis backed implementation of Cache.
// Every call uses context.TODO(): no per-call timeout or cancellation.
type redisCache struct {
	client *redis.Client
}

// Set delegates to the client's SET command.
func (r *redisCache) Set(key string, value interface{}, expiration time.Duration) (string, error) {
	return r.client.Set(context.TODO(), key, value, expiration).Result()
}

// SetNX delegates to SETNX (set-if-absent).
func (r *redisCache) SetNX(key string, value interface{}, expiration time.Duration) (bool, error) {
	return r.client.SetNX(context.TODO(), key, value, expiration).Result()
}

// SetEX delegates to SETEX (set with expiry).
func (r *redisCache) SetEX(key string, value interface{}, expiration time.Duration) (string, error) {
	return r.client.SetEX(context.TODO(), key, value, expiration).Result()
}

// Get delegates to GET.
func (r *redisCache) Get(key string) (string, error) {
	return r.client.Get(context.TODO(), key).Result()
}

// Del delegates to DEL and returns the deleted-key count.
func (r *redisCache) Del(key string) (uint64, error) {
	return r.client.Del(context.TODO(), key).Uint64()
}

// Expire delegates to EXPIRE.
func (r *redisCache) Expire(key string, expiration time.Duration) (bool, error) {
	return r.client.Expire(context.TODO(), key, expiration).Result()
}
|
#!/bin/bash -e
# pi-gen build stage: installs base configuration into the image rootfs
# and performs user/SSH/service setup inside a chroot.
install -d "${ROOTFS_DIR}/etc/systemd/system/rc-local.service.d"
install -m 644 files/ttyoutput.conf "${ROOTFS_DIR}/etc/systemd/system/rc-local.service.d/"
install -m 644 files/50raspi "${ROOTFS_DIR}/etc/apt/apt.conf.d/"
install -m 644 files/console-setup "${ROOTFS_DIR}/etc/default/"
install -m 755 files/rc.local "${ROOTFS_DIR}/etc/"
# Optionally seed the first user's authorized_keys.
# NOTE(review): uid/gid 1000 is assumed to be the first user -- confirm
# against the image's user-creation stage.
if [ -n "${PUBKEY_SSH_FIRST_USER}" ]; then
install -v -m 0700 -o 1000 -g 1000 -d "${ROOTFS_DIR}"/home/"${FIRST_USER_NAME}"/.ssh
echo "${PUBKEY_SSH_FIRST_USER}" >"${ROOTFS_DIR}"/home/"${FIRST_USER_NAME}"/.ssh/authorized_keys
chown 1000:1000 "${ROOTFS_DIR}"/home/"${FIRST_USER_NAME}"/.ssh/authorized_keys
chmod 0600 "${ROOTFS_DIR}"/home/"${FIRST_USER_NAME}"/.ssh/authorized_keys
fi
# Key-only SSH: force PubkeyAuthentication on, PasswordAuthentication off.
if [ "${PUBKEY_ONLY_SSH}" = "1" ]; then
sed -i -Ee 's/^#?[[:blank:]]*PubkeyAuthentication[[:blank:]]*no[[:blank:]]*$/PubkeyAuthentication yes/
s/^#?[[:blank:]]*PasswordAuthentication[[:blank:]]*yes[[:blank:]]*$/PasswordAuthentication no/' "${ROOTFS_DIR}"/etc/ssh/sshd_config
fi
# Disable services not wanted at first boot; toggle sshd per ENABLE_SSH.
on_chroot << EOF
systemctl disable hwclock.sh
systemctl disable nfs-common
systemctl disable rpcbind
if [ "${ENABLE_SSH}" == "1" ]; then
systemctl enable ssh
else
systemctl disable ssh
fi
EOF
# Create hardware groups and add the first user to the standard group set.
on_chroot <<EOF
for GRP in input spi i2c gpio; do
groupadd -f -r "\$GRP"
done
for GRP in adm dialout cdrom audio users sudo video games plugdev input gpio spi i2c netdev; do
adduser $FIRST_USER_NAME \$GRP
done
EOF
# Lock the root password (no direct root password login).
on_chroot << EOF
usermod --pass='*' root
EOF
# Drop any host keys baked into the image; regenerated on first boot.
rm -f "${ROOTFS_DIR}/etc/ssh/"ssh_host_*_key*
|
module Neo4j::Driver
  module Internal
    # Synchronous wrapper around the Java driver's asynchronous transaction.
    # Each operation blocks on the corresponding *Async future; if the
    # calling thread is interrupted while waiting, the underlying
    # connection is terminated and released so it cannot be returned to
    # the pool in an unknown state.
    class InternalTransaction
      include Ext::ExceptionCheckable
      include Ext::RunOverride

      delegate :open?, to: :@tx

      # @param tx the underlying asynchronous transaction (Java driver object)
      def initialize(tx)
        @tx = tx
      end

      # Commits the transaction, blocking until the commit completes.
      def commit
        check do
          org.neo4j.driver.internal.util.Futures.blockingGet(@tx.commitAsync) do
            terminate_connection_on_thread_interrupt("Thread interrupted while committing the transaction")
          end
        end
      end

      # Rolls the transaction back, blocking until the rollback completes.
      def rollback
        check do
          org.neo4j.driver.internal.util.Futures.blockingGet(@tx.rollbackAsync) do
            terminate_connection_on_thread_interrupt("Thread interrupted while rolling back the transaction")
          end
        end
      end

      # Releases the transaction's resources, blocking until done.
      def close
        check do
          org.neo4j.driver.internal.util.Futures.blockingGet(@tx.closeAsync) do
            terminate_connection_on_thread_interrupt("Thread interrupted while closing the transaction")
          end
        end
      end

      # Runs a Cypher query inside this transaction.
      #
      # @param query [String] Cypher text
      # @param parameters [Hash] query parameters (defaults to none)
      # @return [InternalResult] result wrapping the blocking cursor
      def run(query, parameters = {})
        check do
          cursor = org.neo4j.driver.internal.util.Futures.blockingGet(@tx.runAsync(to_statement(query, parameters))) do
            terminate_connection_on_thread_interrupt("Thread interrupted while running query in transaction")
          end
          InternalResult.new(@tx.connection, cursor)
        end
      end

      private

      # Kills and releases the underlying connection; invoked when the
      # waiting thread is interrupted mid-operation.
      # Renamed from camelCase terminateConnectionOnThreadInterrupt to
      # follow Ruby naming conventions (private; no external callers).
      def terminate_connection_on_thread_interrupt(reason)
        @tx.connection.terminateAndRelease(reason)
      end
    end
  end
end
|
package com.binaryheap.testing.controllers;
import com.binaryheap.testing.dtos.BagDto;
import com.binaryheap.testing.services.BagService;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import java.util.Arrays;
import java.util.List;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.hamcrest.Matchers.*;
import static org.mockito.BDDMockito.given;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
// Web-layer slice test for BagController: only the MVC stack is started
// and the service layer is mocked, so the test exercises request routing
// and JSON serialization in isolation.
@RunWith(SpringRunner.class)
@WebMvcTest(BagController.class)
public class BagControllerTests {

    @Autowired
    private MockMvc mvcMock;

    // Replaces the real BagService bean in the sliced application context.
    @MockBean
    private BagService bagService;

    // GET /bags returns a one-element page containing the stubbed bag.
    @Test
    public void get_all_bags() throws Exception {
        // arrange
        BagDto bag = new BagDto();
        bag.setName("My Bag");
        bag.setId(1);
        List<BagDto> bags = Arrays.asList(bag);
        Page<BagDto> pagesOfBag = new PageImpl<>(bags);
        // Matches the controller's default paging (page 0, size 20).
        Pageable pageable = PageRequest.of(0, 20);
        // act
        given(bagService.getBags(pageable)).willReturn(pagesOfBag);
        // assert
        mvcMock.perform(get("/bags")
            .contentType(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.content", hasSize(1)))
            .andExpect(jsonPath("$.content[0].name", is(bag.getName())));
    }
}
|
#!/bin/bash
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

# Readiness probe: succeed only when the internal storage is mounted and
# contains the READY marker file.
# FIX: abort if cd fails; previously an unchecked cd meant 'ls READY'
# would run in the original working directory and could pass spuriously.
cd /paiInternalStorage || exit 1
ls READY || exit 1
|
#! /bin/sh
# Copyright (C) 2014 VA Linux Systems Japan K.K.
# Copyright (C) 2014 YAMAMOTO Takashi <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# The purpose of this script is to avoid casual introduction of more
# bash dependency. Please consider alternatives before committing code
# which uses bash specific features.
# Ignore comments, but include shebangs
OBSERVED=$(grep -E '^([^#]|#!).*bash' tox.ini tools/* | wc -l)
# Known/accepted number of bash references in the tree. Bump this count
# only when a new bash dependency has been deliberately reviewed.
EXPECTED=5
if [ ${EXPECTED} -ne ${OBSERVED} ]; then
    echo Unexpected number of bash usages are detected.
    echo Please read the comment in $0
    exit 1
fi
exit 0
|
#!/usr/bin/env bash
# Start the Livy server, then tail its log: keeps the process in the
# foreground (container-entrypoint pattern) and surfaces server output.
livy-server start
tail -f /usr/lib/livy/logs/livy-*-server.out
# Copyright (c) 2015 Oracle and/or its affiliates. All rights reserved. This
# code is released under a tri EPL/GPL/LGPL license. You can use it,
# redistribute it and/or modify it under the terms of the:
#
# Eclipse Public License version 1.0
# GNU General Public License version 2
# GNU Lesser General Public License version 2.1
require_relative '../../../ruby/spec_helper'
# Specs for the TruffleRuby-specific Truffle.binding_of_caller primitive.
describe "Truffle.binding_of_caller" do
  # Indirection so that, inside the helper, "the caller" is the example
  # block itself; each spec then inspects its own local variables.
  def binding_of_caller
    Truffle.binding_of_caller
  end
  #it "returns nil if there is no caller"
  #end
  it "returns a Binding" do
    binding_of_caller.should be_kind_of(Binding)
  end
  it "gives read access to local variables at the call site" do
    x = 14
    binding_of_caller.local_variable_get(:x).should == 14
  end
  it "gives write access to local variables at the call site" do
    x = 2
    binding_of_caller.local_variable_set(:x, 14)
    x.should == 14
  end
  it "works through #send" do
    x = 14
    Truffle.send(:binding_of_caller).local_variable_get(:x).should == 14
  end
end
|
#!/bin/bash
# This script will be sourced by the bmux command and the variables will be
# used by the bag command. It is imperative that each variable is in the bag_*
# namespace and contains a space delineated list of ROS topics. The only two
# exceptions to this rule are the BAG_ALWAYS and BAG_DIR variables.
# Define topics that should be in every bag
export BAG_ALWAYS="/odom /absodom /clock"
# Define the directory that the bags will be stored in
export BAG_DIR=~/bags
# Topic variables that can be used from the bag command
export bag_front_left_cam="/camera/front/left/camera_info /camera/front/left/image_raw"
export bag_front_right_cam="/camera/front/right/camera_info /camera/front/right/image_raw"
# Convenience alias: both front cameras.
export bag_front_cam="$bag_front_left_cam $bag_front_right_cam"
export bag_starboard_cam="/camera/starboard/image_raw /camera/starboard/camera_info"
export bag_down_cam="/camera/down/camera_info /camera/down/image_raw"
export bag_velodyne="/velodyne_points"
export bag_sick="/scan"
export bag_motors="/BL_motor/cmd /BL_motor/feedback /BL_motor/status /BR_motor/cmd /BR_motor/feedback /BR_motor/status /FL_motor/cmd /FL_motor/feedback /FL_motor/status /FR_motor/cmd /FR_motor/feedback /FR_motor/status"
export bag_status="/c3_trajectory_generator/waypoint /trajectory /wrench/cmd /battery_monitor /adaptation /diagnostics /learn /alarm /alarm_raise"
# NOTE(review): trailing space inside the list below is harmless when
# word-split, but tidy it if the value is ever compared verbatim.
export bag_hydrophones="/hydrophones/debug /hydrophones/ping /hydrophones/pose /hydrophones/processed "
# NOTE(review): "/lqrrt/effor" looks like a typo for "/lqrrt/effort" --
# confirm the live topic name before changing (must match exactly).
export bag_lqrrt="/ogrid_master /unclassified_markers /lqrrt/effor /lqrrt/focus /lqrrt/goal /lqrrt/impact /lqrrt/path /lqrrt/ref /lqrrt/tree /move_to/feedback /move_to/goal /move_to/result"
|
#!/bin/bash
# https://vaneyckt.io/posts/safer_bash_scripts_with_set_euxo_pipefail/
set -Eeuo pipefail

# Symlinks the PowerShell profile next to this script into the user's
# PowerShell configuration directory.

# Directory containing this script (profile.ps1 lives alongside it).
# Style: $() instead of backticks for consistent, nestable substitution.
THIS_DIR="$(dirname "$(readlink -f "$0")")"

POWERSHELL_CONFIG_DIR=~/.config/powershell

# FIX: -p also creates missing parents (~/.config may not exist on a
# fresh machine, where the bare mkdir would fail under errexit) and is a
# no-op when the directory already exists.
mkdir -p "$POWERSHELL_CONFIG_DIR"

# '|| true' keeps the script green when the link already exists.
ln -sv "$THIS_DIR/profile.ps1" "$POWERSHELL_CONFIG_DIR/profile.ps1" || true
|
#!/bin/bash
# Description: This script will test IDS upgrade and JIT user provisioning (See JIRA tickets AUTH-529 and MNT-21741 for details)
# Author : Jamal Kaabi-Mofrad
# Since : IDS-1.5.0
#======================================================
source "../../distribution/build.properties"
. "../scripts/common.func"
# Process-name fragment used to locate the IDS (Keycloak) server.
pgrep_name="keycloak"

# is_running: exit status 0 iff an IDS server process exists.
# pgrep itself exits 0 on a match and non-zero otherwise, so its status
# is returned directly; output is discarded.
is_running() {
    pgrep -f "$pgrep_name" > /dev/null
}
# stop_ids: gracefully stops the IDS (Keycloak) server.
# Sends SIGINT, polls for up to KILL_MAX_SECONDS, then escalates to
# SIGKILL if the process is still alive.
# NOTE(review): exits the whole script (exit 0) when no server is running.
stop_ids() {
    if ! is_running; then
        log_info "IDS server is not running."
        exit 0
    else
        pkill -SIGINT -f "${pgrep_name}"
        STOPPED="0"
        KILL_MAX_SECONDS=10
        i=0
        log_info "Waiting at most ${KILL_MAX_SECONDS} seconds for regular termination of IDS server."
        while [ "$i" -le "${KILL_MAX_SECONDS}" ]; do
            if is_running; then
                sleep 1
            else
                STOPPED="1"
                break
            fi
            i=$((i + 1))
        done
        if [ "$STOPPED" -ne "1" ]; then
            log_info "Regular shutdown of IDS server was not successful. Sending SIGKILL to process."
            pkill -KILL -f "${pgrep_name}"
            # SIGKILL cannot be trapped; a survivor here indicates a zombie
            # or permission problem, so only log the failure.
            if is_running; then
                log_error_no_exit "Error stopping IDS."
            else
                log_info "Stopped IDS."
            fi
        else
            log_info "Stopped IDS."
        fi
    fi
}
# This is required if upgrading from a version less than 1.5.0 to a version greater than or equal to 1.5.0
# See https://www.keycloak.org/docs/13.0/upgrading/#migrating-to-13-0-0 for details
# $1: the version being upgraded FROM.
extra_migration_step() {
    compare_versions "$1" "1.5.0"
    case $? in
    0) op="=" ;;
    1) op=">" ;;
    2) op="<" ;;
    esac
    # Only relevant when the source version predates 1.5.0 ...
    if [ "$op" = "<" ]; then
        compare_versions "1.5.0" "$IDENTITY_VERSION"
        case $? in
        0) op="=" ;;
        1) op=">" ;;
        2) op="<" ;;
        esac
        # ... and the target version is 1.5.0 or newer.
        if [ "$op" != ">" ]; then
            log_info "Removing 'SmallRye' modules references from 'standalone.xml' file as you are upgrading to a version greater than or equal to 1.5.0"
            # FIX: perform the removal only on an actual <1.5.0 -> >=1.5.0
            # upgrade; previously this sed executed unconditionally, i.e.
            # even when no boundary was crossed.
            sed '/smallrye/d' "${target}"/standalone/configuration/standalone.xml >standalone-temp.xml && mv standalone-temp.xml "${target}"/standalone/configuration/standalone.xml
        fi
    fi
}
############################
# Variables #
############################
# /saml directory
current_dir=$(pwd)
workspace="${current_dir}/target/distribution/workspace"
# Get the host IP
host_ip=$(ifconfig | grep -E '([0-9]{1,3}\.){3}[0-9]{1,3}' | grep -v 127.0.0.1 | awk '{ print $2 }' | cut -f2 -d: | head -n1)
# Keycloak default port
port=8080
protocol="http"
base_url="${protocol}://${host_ip}:${port}"
app_name_prefix="local"
if [ -n "${IDS_BUILD_NAME}" ]; then
app_name_prefix="${IDS_BUILD_NAME}"
fi
auth0_app_name="${app_name_prefix}-upgrade-to-${IDENTITY_VERSION}"
log_info "Building the current IDS version: ${IDENTITY_VERSION}"
make build -C ../../distribution
# Create a directory to copy the required IDS versions
mkdir -p "${workspace}"
from_version_zip=$(find target/distribution -name "alfresco-identity-service-*.zip")
# unzip the from version
log_info "Unzipping ${from_version_zip} ..."
unzip -oq -d "${workspace}" "$from_version_zip"
source=$(basename "${workspace}"/alfresco-identity-service-*)
source_version=$(echo "$source" | cut -d - -f4)
target="alfresco-identity-service-${IDENTITY_VERSION}"
##########################################
# Start the 'from' version and do a test #
##########################################
cd "${workspace}/${source}" || exit 1
# first add the admin user
log_info "Add the admin user ..."
bin/add-user-keycloak.sh -r master -u admin -p admin
log_info "Starting ${source} ..."
# Start the server in the background
nohup ./bin/standalone.sh -b "${host_ip}" >/dev/null 2>&1 &
# wait for the server to startup
sleep 20
cd "${current_dir}/../scripts" || exit 1
# Check the 'from' version
./check-keycloak-version.sh ids_base_url="${base_url}" ids_home="${workspace}/${source}"
# setup Auth0
log_info "Setup Auth0 ..."
./auth0-api.sh create "${auth0_app_name}" "${base_url}"
# Configure SAML
./configure-saml-ids.sh app_name="${auth0_app_name}" ids_base_url="${base_url}"
# cd to /saml dir
cd "${current_dir}" || exit 1
# Run the test
mvn test -Dkeycloak.protocol="${protocol}" -Dkeycloak.hostname="${host_ip}" -Dkeycloak.port="${port}"
log_info "The test was successful. Stopping IDS server..."
# Stop the 'from' version and do an upgrade
stop_ids
log_info "Upgrading from ${source} to ${target} ..."
log_info "Copy ${target} distro to workspace"
cp -r ../../distribution/"${target}" "${workspace}"
cd "${workspace}" || exit 1
log_info "Prepare the upgrade ..."
log_info "List of ${source} 'tx-object-store' before removal"
ls -lh "${source}"/standalone/data/tx-object-store
log_info "Remove ${source} tx-object-store ..."
rm -rf "${source}"/standalone/data/tx-object-store/*
log_info "List of ${source} tx-object-store after removal"
ls -lh "${source}"/standalone/data/tx-object-store
log_info "List all files of ${source}/standalone directory"
ls -lh "${source}"/standalone/
log_info "List all files of ${target}/standalone"
ls -lh "${target}"/standalone/
log_info "Copy all files/dirs of ${source}/standalone into ${target}/standalone directory"
cp -rf "${source}"/standalone/* "${target}"/standalone/
# if the source is required to be upgraded to 1.5.0 or greater then perform additional steps
extra_migration_step "${source_version}"
log_info "List all files of ${target}/standalone directory after copy of old IDS"
ls -lh "${target}"/standalone/
cd "${target}" || exit 1
log_info "Executing the Standalone Mode Upgrade Script..."
bin/jboss-cli.sh --file=bin/migrate-standalone.cli
# Start the server in the background
nohup sh bin/standalone.sh -b "${host_ip}" >/dev/null 2>&1 &
# wait for the server to startup
sleep 20
cd "${current_dir}/../scripts" || exit 1
# Check the 'to' version
./check-keycloak-version.sh ids_base_url="${base_url}" ids_home="${workspace}/${target}"
# cd to /saml dir
cd "${current_dir}" || exit 1
# Run the test with the existing user. The user created in the first test run above
mvn test -Dkeycloak.protocol="${protocol}" -Dkeycloak.hostname="${host_ip}" -Dkeycloak.port="${port}"
# Run the test with a new user. A user that does not exist in Keycloak yet
mvn test -Dkeycloak.protocol="${protocol}" -Dkeycloak.hostname="${host_ip}" -Dkeycloak.port="${port}" -Dsaml.username=user2 -Dsaml.password=Passw0rd
log_info "The tests were successful. Stopping IDS server..."
# Stop the 'to' version
stop_ids
# Delete Auth0 application
cd "${current_dir}/../scripts" || exit 1
log_info "Cleanup ..."
log_info "Deleting Auth0 application: ${auth0_app_name} ..."
./auth0-api.sh delete "${auth0_app_name}"
|
# Compile the std::string_view variant with C++17, all warnings, and -O2.
g++ string_view_version.cpp -std=c++17 -Wall -O2
|
const BridgeData = [
{
"orderId": 1,
"title": "Kalnsētas parka gājēju tiltiņš Nr. 1",
"description": "Garums 7,13m <br> Platums 2,36m <br> Augstums 1,2m <br> Pa vidus saliņa",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "<NAME>"
}
],
"activities": '31. jūnijā plkst. 22.30 – 23.30 Dzejas un mūzikas programma "Piepildījums".<br><br> <b>Piedalās Saldus novada dzejnieces:</b><br> <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>št<NAME>imante, Daiga Mickēviča<br><br><NAME>a muzikālais noformējums'
},
{
"orderId": 2,
"title": "Kalnsētas parka gājēju tiltiņš Nr. 2",
"description": "Garums 7,60m <br/> Platums 2,36m <br/> augstums 1,2m",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "<NAME>"
}
],
"activities": '31. jūnijā plkst. 22.30 – 23.30 Dzejas un mūzikas programma "Piepildījums".<br><br> <b>Piedalās Saldus novada dzejnieces:</b><br> <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Laimdote Vištarte Leimante, Daiga Mickēviča<br><br>Andra Akermaņa muzikālais noformējums'
},
{
"orderId": 3,
"title": "Kalnsētas parka gājēju tiltiņš Nr. 3",
"description": "",
"sponsors": [
{
"id": 1,
"title": "SIA <NAME>"
},
{
"id": 2,
"title": "SIA <NAME>"
}
],
"activities": '31. jūnijā plkst. 22.30 – 23.30 Dzejas un mūzikas programma "Piepildījums".<br><br> <b>Piedalās Saldus novada dzejnieces:</b><br> <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Laimdote Vištarte Leimante, Daiga Mickēviča<br><br><NAME>kermaņa muzikālais noformējums'
},
{
"orderId": 4,
"title": "Kalnmuižas parka gājēju tiltiņš",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "A/S R & C"
}
],
"activities": "plkst. 23.50 Laulību ceremonija"
},
{
"orderId": 5,
"title": "Tilts pār Cieceres upi uz Jelgavas ielas",
"description": "",
"sponsors": [
{
"id": 1,
"title": "A/S PATA Saldus"
}
],
"activities": "Pie J.Rozentāla akmens 31. maijā 00.10 – Mičošana"
},
{
"orderId": 6,
"title": "<NAME>",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "<NAME>"
},
{
"id": 3,
"title": "<NAME>"
},
{
"id": 4,
"title": "<NAME>"
},
{
"id": 5,
"title": "<NAME>"
},
],
"activities": 'pkst. 22.30 Saldus senioru akordeonistu ansamblis "Akords" <br>plkst 23.50 Laulību ceremonija'
},
{
"orderId": 7,
"title": "Sporta skolas gājēju tiltiņš",
"description": "Garums 18,60m <br> Platums 1,28 <br> Augstums 2m",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
}
],
"activities": '22.30 Gaismas ģimene "Baltās dūjas" un kapela "Strops"'
},
{
"orderId": 8,
"title": "<NAME>jēju tilts",
"description": "Garums 10.90<br>Platums 3.90<br>Augstums 1.90",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
}
],
"activities": "Tilta gaismošana"
},
{
"orderId": 9,
"title": "Kuldīgas ielas tilts",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "SIA North 66 Baltic"
}
],
"activities": "Tilta gaismošana"
},
{
"orderId": 10,
"title": "Tilts pie bērnu laukumiņa \"Pīļuks\"",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
}
],
"activities": "plkst. 22.30 Boksa mači (Saldus Boksa klubs)"
},
{
"orderId": 11,
"title": "<NAME>",
"description": "",
"sponsors": [
{
"id": 1,
"title": "SIA <NAME>"
}
],
"activities": "22.30 – 23.20 Pūtēju ansamblis “Saldus”"
},
{
"orderId": 12,
"title": "Tilts <NAME>",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
},
{
"id": 2,
"title": "A/S SEB banka"
}
],
"activities": "Tilta gaismošana"
},
{
"orderId": 13,
"title": "Dzirnavu ielas gājēju tilts",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
}
],
"activities": "Tilta gaismošana"
},
{
"orderId": 14,
"title": "Tilts no Čāpātāju kapiem uz Celtniekiem",
"description": "",
"sponsors": [
{
"id": 1,
"title": "<NAME>"
}
],
"activities": "Tilta gaismošana"
},
]
export default BridgeData; |
#!/bin/bash
set -e

# Prepares VASP displacement calculations over a list of lattice scales:
# for each scale, builds the primitive cell, a supercell (SPOSCAR), and a
# set of randomly displaced supercell directories. Displacements are
# generated once at the reference scale and re-applied at other scales.

if [ $# -lt 11 ]; then
    echo "Usage: $0 prim_poscar potcar prim_kpoints prim_incar sc.txt sc_kpoints sc_incar \"scale1(relative to prim_poscar) [scale2...]\" disp1 ndisp1 spec_disp1 [disp2 ndisp2 spec_disp2 ...]"
    echo "suggestion: mkkp.sh prim_poscar|sc_poscar 3000 > prim_kpoints|sc_kpoints"
    echo "Example:"
    echo '../prepare-volume.sh POSCAR POTCAR KPOINTS_prim INCAR_prim ../sc.txt KPOINTS INCAR "4.01 4.08 4.13" 0.01 2 "4 5.5" 0.05 1 "5 5.3" 0.12 3 "6 5.2" 0.3 2 "6 5.1"'
    exit 0
fi

# Positional arguments, consumed in the order documented above.
# FIX: POTCAR is the 2nd argument per the usage text and example, but was
# previously read 3rd -- swapping the KPOINTS and POTCAR files silently.
prim_poscar=$1; shift
potcar=$1; shift
prim_kpoints=$1; shift
prim_incar=$1; shift
sc=$1; shift
sc_kpoints=$1; shift
sc_incar=$1; shift
scale_list=$1; shift

# Reference scale: the first entry; its displacements are reused elsewhere.
a0=`echo $scale_list |awk '{print $1}'`
run0=a$a0

for a in $scale_list; do
    run=a$a
    echo "Generating displacements for a = $a"
    prim_dir=$run/prim
    mkdir -p $run
    mkdir -p $prim_dir
    # Primitive cell at this scale: rewrite the lattice constant (line 2).
    awk -v a=$a '{if (NR==2) $1=a; print}' $prim_poscar > $prim_dir/POSCAR
    ln -sr $prim_kpoints $prim_dir/KPOINTS
    ln -sr $prim_incar $prim_dir/INCAR
    ln -sr $potcar $prim_dir/POTCAR
    polaron_main --task supercell --p1 $sc --prim $prim_dir/POSCAR > $run/SPOSCAR
    if [ $a == $a0 ]; then
        # Reference scale: generate fresh random displacement directories.
        # Consumes the remaining (disp ndisp spec) argument triples.
        while (( "$#" )); do
            disp=$1; shift; ndisp=$1; shift; spec=$1; shift
            polaron_main --task rand_disp_dir -R $disp --npolaron $ndisp --p1 $run/SPOSCAR --p2 "$spec"
            mv dir_*/ $run
        done
        # Record each directory's displacement relative to SPOSCAR.
        for i in $run/dir_*; do
            polaron_main --task cmp_pos --p1 $run/SPOSCAR --p2 $i/POSCAR --summary > $i/disp.txt
        done
    else
        # Other scales: re-apply the reference displacements to this
        # scale's supercell.
        for i in $run0/dir_*; do
            dir=$run/${i#*/}
            mkdir -p $dir
            polaron_main --task add_disp --p1 $run/SPOSCAR --p2 $i/disp.txt > $dir/POSCAR
        done
    fi
    # FIX: link the VASP inputs into EVERY displacement directory.
    # Previously a single set of links used $dir, which is unset on the
    # reference pass and holds only the last directory otherwise.
    for d in $run/dir_*; do
        ln -sr $sc_kpoints $d/KPOINTS
        ln -sr $sc_incar $d/INCAR
        ln -sr $potcar $d/POTCAR
    done
done
|
#!/usr/bin/env bash
# Launches the 50 Salads detection tester on the selected GPU.
# usage: $0 <gpu-device-id>

# FIX: quote "$1" (unquoted, an argument containing whitespace breaks the
# test) and exit non-zero so callers can detect the missing argument
# (previously a bare 'exit' returned 0).
if [ -z "$1" ]; then
    echo "Please specify device ID to use"
    exit 1
fi
GPUID=$1

# Parameters to be changed
SPLIT="Split_5"
NET_NAME="resnet50_motionconstraint"
LVL="mid"
SNIPPET=16
STRIDE=4
FRAMESKIP=5
MIX=1
EXP_NAME=$NET_NAME"_nomotion_g0"

# Generated parameters
LABELSDESC="./data/50_salads_dataset/labels/actions_"$LVL"lvl.txt"
DATA_NAME=$SPLIT"_"$LVL"_snip"$SNIPPET"_stride"$STRIDE"_frs"$FRAMESKIP
if (( $MIX == 1 ))
then
    DATA_NAME=$DATA_NAME"_mix"
fi

# Main command
CUDA_VISIBLE_DEVICES=$GPUID python3 src/tester_fd.py \
--datasetname "50salads" \
--datadir "./data/50_salads_dataset/activity/"$LVL \
--trainlogdir "./logs/50_salads/"$DATA_NAME"/"$EXP_NAME \
--testlogdir "./logs/50_salads/"$DATA_NAME"/"$EXP_NAME"_test" \
--split_fn "./data/50_salads_dataset/splits/"$SPLIT"/test.txt" \
--labels_fname $LABELSDESC \
--ext "png" \
--netname $NET_NAME \
--snippet_len $SNIPPET \
--frameskip $FRAMESKIP \
--batch_size 50 \
--ckpt_fname "auto"
|
#!/bin/sh
#usage groupRes.sh [-add <gri> <numNewDisjointPaths>] [-sub <groupGRI> <subGRI_1> <subGRI_2> ... <subGRI_K>]
# FIX: forward arguments with "$@" instead of $* so that arguments
# containing whitespace keep their word boundaries when passed to Java.
java -cp .:./src:./lib/aopalliance-1.0.jar:./lib/asm-2.2.3.jar:./lib/bcprov-jdk15-1.43.jar:./lib/common-libs-0.0.1-SNAPSHOT.jar:./lib/common-logging-0.0.1-SNAPSHOT.jar:./lib/commons-codec-1.2.jar:./lib/commons-codec-1.2.jar:./lib/commons-httpclient-3.1.jar:./lib/commons-lang-2.4.jar:./lib/commons-logging-1.0.4.jar:./lib/common-soap-0.0.1-SNAPSHOT.jar:./lib/cxf-api-2.2.5.jar:./lib/cxf-common-schemas-2.2.5.jar:./lib/cxf-common-utilities-2.2.5.jar:./lib/cxf-rt-bindings-soap-2.2.5.jar:./lib/cxf-rt-bindings-xml-2.2.5.jar:./lib/cxf-rt-core-2.2.5.jar:./lib/cxf-rt-databinding-jaxb-2.2.5.jar:./lib/cxf-rt-frontend-jaxws-2.2.5.jar:./lib/cxf-rt-frontend-simple-2.2.5.jar:./lib/cxf-rt-transports-http-2.2.5.jar:./lib/cxf-rt-ws-addr-2.2.5.jar:./lib/cxf-rt-ws-policy-2.2.5.jar:./lib/cxf-rt-ws-security-2.2.5.jar:./lib/cxf-tools-common-2.2.5.jar:./lib/geronimo-activation_1.1_spec-1.0.2.jar:./lib/geronimo-annotation_1.0_spec-1.1.1.jar:./lib/geronimo-javamail_1.4_spec-1.6.jar:./lib/geronimo-jaxws_2.1_spec-1.0.jar:./lib/geronimo-stax-api_1.0_spec-1.0.1.jar:./lib/geronimo-ws-metadata_2.0_spec-1.1.2.jar:./lib/jaxb-api-2.1.jar:./lib/jaxb-impl-2.1.12.jar:./lib/jopt-simple-3.0.1.jar:./lib/jyaml-1.3.jar:./lib/log4j-1.2.17.jar:./lib/neethi-2.0.4.jar:./lib/netlogger-4.1.2.jar:./lib/oscars-client-0.0.1-SNAPSHOT.jar:./lib/saaj-api-1.3.jar:./lib/saaj-impl-1.3.2.jar:./lib/serializer-2.7.1.jar:./lib/spring-beans-2.5.5.jar:./lib/spring-context-2.5.5.jar:./lib/spring-core-2.5.5.jar:./lib/spring-web-2.5.5.jar:./lib/utils-0.0.1-SNAPSHOT.jar:./lib/wsdl4j-1.6.2.jar:./lib/wss4j-1.5.8.jar:./lib/wstx-asl-3.2.9.jar:./lib/xalan-2.7.1.jar:./lib/xml-resolver-1.2.jar:./lib/XmlSchema-1.4.5.jar:./lib/xmlsec-1.4.3.jar cli.MultipathCLIMain groupReservations "$@"
|
#!/bin/bash
# Helper for the Go Test Bench demo environment (docker-compose based).
# usage: $0 [reset|update]

# clean: stop and remove the demo containers.
# NOTE(review): 'docker-compose rm' without -f prompts for confirmation;
# confirm interactive use is intended before wiring this into CI.
function clean() {
    echo "Stopping Go Demo Containers"
    docker-compose -f docker-compose.demo.yml stop
    echo "Removing Go Demo Containers"
    docker-compose -f docker-compose.demo.yml rm
}

# rebuild: rebuild the demo images from scratch (no layer cache).
function rebuild() {
    echo "Re-Building Go Demo Environment"
    docker-compose -f docker-compose.demo.yml build --no-cache
}

if [[ "$1" == "reset" ]]
then
    clean
    rebuild
elif [[ "$1" == "update" ]] ; then
    clean
    echo "Updating Go Test Bench"
    # NOTE(review): 'git stash' without a later pop leaves local changes
    # stashed; confirm this is the intended behavior for "update".
    git stash
    git pull origin
    rebuild
else
    # Default: bring up the standard-library demo container in the foreground.
    echo "Starting Go Demo Container for standard library"
    docker-compose -f docker-compose.demo.yml up std
fi
import java.awt.Font;
// Class representing the GUI library
// Minimal demonstration of a default-font mechanism: widgets capture the
// library-wide default font at construction time and may override it later.
public class GUI {
    // Library-wide default; read by Widget's constructor.
    private Font defaultFont;

    // Method to set the default font
    public void setDefaultFont(Font font) {
        this.defaultFont = font;
    }

    // Widget class using the default font unless overridden
    // NOTE: the default is captured when the widget is created, so later
    // setDefaultFont calls do not affect already-created widgets.
    public class Widget {
        private Font font;

        public Widget() {
            this.font = defaultFont; // Use the default font
        }

        // Method to explicitly set a different font for the widget
        public void setFont(Font font) {
            this.font = font; // Override the default font
        }

        // Other widget methods and properties
    }

    // Example usage
    public static void main(String[] args) {
        GUI gui = new GUI();
        Font defaultFont = new Font("Arial", Font.PLAIN, 12);
        gui.setDefaultFont(defaultFont);
        Widget button1 = gui.new Widget();
        Widget button2 = gui.new Widget();
        button2.setFont(new Font("Times New Roman", Font.BOLD, 14));
        // Prints Font#toString; the outer class may legally read the
        // inner class's private field.
        System.out.println("Button 1 font: " + button1.font);
        System.out.println("Button 2 font: " + button2.font);
    }
}
<filename>app/assets/javascripts/hyrax/save_work/save_work_control.es6
// Hyrax 2.5 override to add front end validations for individual metadata fields. Added methods: getVDCFields, initializeVDCFields, validateVDCFields.
import { RequiredFields } from './required_fields'
import { RequiredProject } from 'vdc/save_work/required_project'
import { ChecklistItem } from './checklist_item'
import { UploadedFiles } from './uploaded_files'
import { DepositAgreement } from './deposit_agreement'
import VisibilityComponent from './visibility_component'
/**
* Polyfill String.prototype.startsWith()
*/
if (!String.prototype.startsWith) {
  // Polyfill for environments lacking ES6 String.prototype.startsWith:
  // compares the substring at `position` (default 0) against `searchString`.
  String.prototype.startsWith = function(searchString, position){
    position = position || 0;
    return this.substr(position, searchString.length) === searchString;
  };
}
export default class SaveWorkControl {
  /**
   * Initialize the save controls
   * @param {jQuery} element the jquery selector for the save panel
   * @param {AdminSetWidget} adminSetWidget the control for the adminSet dropdown
   */
  constructor(element, adminSetWidget) {
    if (element.length < 1) {
      return
    }
    this.element = element
    this.adminSetWidget = adminSetWidget
    this.form = element.closest('form')
    element.data('save_work_control', this)
    this.activate();
  }
  /**
   * Keep the form from submitting (if the return key is pressed)
   * unless the form is valid.
   *
   * This seems to occur when focus is on one of the visibility buttons
   */
  preventSubmitUnlessValid() {
    this.form.on('submit', (evt) => {
      if (!this.isValid())
        evt.preventDefault();
    })
  }
  /**
   * Keep the form from being submitted many times.
   *
   */
  preventSubmitIfAlreadyInProgress() {
    this.form.on('submit', (evt) => {
      if (this.isValid())
        this.saveButton.prop("disabled", true);
    })
  }
  /**
   * Keep the form from being submitted while uploads are running
   *
   */
  preventSubmitIfUploading() {
    this.form.on('submit', (evt) => {
      if (this.uploads.inProgress) {
        evt.preventDefault()
      }
    })
  }
  /**
   * Is the form for a new object (vs edit an existing object)
   */
  get isNew() {
    return this.form.attr('id').startsWith('new')
  }
  /*
   * Call this when the form has been rendered
   */
  activate() {
    if (!this.form) {
      return
    }
    // Wire up the validators, checklist indicators and visibility widget,
    // then run an initial validation pass.
    this.requiredFields = new RequiredFields(this.form, () => this.formStateChanged())
    this.requiredProject = new RequiredProject(this.form, () => this.formStateChanged())
    this.uploads = new UploadedFiles(this.form, () => this.formStateChanged())
    this.saveButton = this.element.find(':submit')
    this.depositAgreement = new DepositAgreement(this.form, () => this.formStateChanged())
    this.requiredMetadata = new ChecklistItem(this.element.find('#required-metadata'))
    this.requiredCollection = new ChecklistItem(this.element.find('#required-project'))
    this.requiredFiles = new ChecklistItem(this.element.find('#required-files'))
    this.requiredAgreement = new ChecklistItem(this.element.find('#required-agreement'))
    new VisibilityComponent(this.element.find('.visibility'), this.adminSetWidget)
    this.preventSubmit()
    this.watchMultivaluedFields()
    this.formChanged()
    this.addFileUploadEventListeners();
  }
  // Show the cancel button while a file upload is running, hide it when done.
  addFileUploadEventListeners() {
    let $uploadsEl = this.uploads.element;
    const $cancelBtn = this.uploads.form.find('#file-upload-cancel-btn');
    $uploadsEl.bind('fileuploadstart', () => {
      $cancelBtn.removeClass('hidden');
    });
    $uploadsEl.bind('fileuploadstop', () => {
      $cancelBtn.addClass('hidden');
    });
  }
  // Install all three submit guards defined above.
  preventSubmit() {
    this.preventSubmitUnlessValid()
    this.preventSubmitIfAlreadyInProgress()
    this.preventSubmitIfUploading()
  }
  // If someone adds or removes a field on a multivalue input, fire a formChanged event.
  watchMultivaluedFields() {
    $('.multi_value.form-group', this.form).bind('managed_field:add', () => this.formChanged())
    $('.multi_value.form-group', this.form).bind('managed_field:remove', () => this.formChanged())
  }
  // Called when a file has been uploaded, the deposit agreement is clicked or a form field has had text entered.
  formStateChanged() {
    this.saveButton.prop("disabled", !this.isSaveButtonEnabled);
  }
  // called when a new field has been added to the form.
  formChanged() {
    this.requiredFields.reload();
    this.requiredProject.reload();
    this.formStateChanged();
  }
  // Indicates whether the "Save" button should be enabled: a valid form and no uploads in progress
  get isSaveButtonEnabled() {
    return this.isValid() && !this.uploads.inProgress;
  }
  // Runs every validator (deliberately without short-circuiting) so each
  // checklist indicator is refreshed, then reports overall validity.
  isValid() {
    // avoid short circuit evaluation. The checkboxes should be independent.
    this.initializeVDCFields()
    let metadataValid = this.validateMetadata()
    let VDCFieldsValid = this.validateVDCFields()
    let collectionValid = this.validateCollection()
    let filesValid = this.validateFiles()
    let agreementValid = this.validateAgreement(filesValid)
    return metadataValid && VDCFieldsValid && filesValid && agreementValid && collectionValid
  }
  // sets the metadata indicator to complete/incomplete
  validateMetadata() {
    if (this.requiredFields.areComplete) {
      this.requiredMetadata.check()
      return true
    }
    this.requiredMetadata.uncheck()
    return false
  }
  // Collects each required form input (deduplicated by label text) as an
  // object describing its element, label, value and id.
  getVDCFields() { // return fields as objects. returns unique ID's
    let arr = []
    let labels = []
    $("*.required").filter(":input").each(function(index) {
      let normalLabel = $(this).siblings().filter("label").text() //select the text from the label for this form element
      let depositorLabel = $(this).parent().parent().siblings().filter('label').text() // gets the label if it's a depositor
      // NOTE(review): .match() returns null when the label text lacks
      // "required", which would make [0] throw — confirm labels always
      // contain the word "required".
      let label = (normalLabel || depositorLabel).match(/.+(?=required)/)[0].trim() // strips out the 'required' and white space
      let value = $(this).val()
      // NOTE(review): despite its name, this is true when the value is
      // MISSING (null or empty); validateVDCFields relies on that meaning.
      let isValuePresent = ($(this).val() === null) || ($(this).val().length < 1)
      let id = $(this)[0].id.split('_').slice(1).join('_')
      if(!labels.includes(label)){
        labels.push(label)
        let formItem = {
          element: $(this),
          label: label,
          value: value,
          isValuePresent: isValuePresent,
          id: id
        }
        arr.push(formItem)
      }
    })
    return arr; // return array as objects
  }
  // Rebuilds the per-field checklist items under #metadata-data.
  initializeVDCFields() { // creates the li's
    const that = this
    $('#metadata-data').html('') // clears previous items
    let fields = this.getVDCFields()
    fields.forEach(field => {
      $(`<li class='incomplete' style="list-style: none;" id=${field.id}>${field.label}</li>`).appendTo($('#metadata-data'))
      field.checklistItem = new ChecklistItem(that.element.find(`#${field.id}`))
    })
  }
  // Marks each per-field checklist item complete/incomplete; returns true
  // only when every required field has a value.
  validateVDCFields() { // find the element and check/uncheck based on t/f
    let allFilled = true
    const fields = this.getVDCFields()
    fields.forEach(field => {
      let checklistItem = $(`#${field.id}`)
      if(field.isValuePresent) {
        checklistItem.removeClass('complete')
        checklistItem.addClass('incomplete')
        allFilled = false
      }
      else {
        checklistItem.removeClass('incomplete')
        checklistItem.addClass('complete')
      }
    })
    return allFilled
  }
  // sets the collection indicator to complete/incomplete
  validateCollection() {
    if (this.requiredProject.areComplete) {
      this.requiredCollection.check()
      return true
    }
    this.requiredCollection.uncheck()
    return false
  }
  // sets the files indicator to complete/incomplete
  validateFiles() {
    if (!this.uploads.hasFileRequirement) {
      return true
    }
    if (!this.isNew || this.uploads.hasFiles) {
      this.requiredFiles.check()
      return true
    }
    this.requiredFiles.uncheck()
    return false
  }
  // Sets the agreement indicator; when new files arrive the user must
  // re-accept the deposit agreement.
  validateAgreement(filesValid) {
    if (filesValid && this.uploads.hasNewFiles && this.depositAgreement.mustAgreeAgain) {
      // Force the user to agree again
      this.depositAgreement.setNotAccepted()
      this.requiredAgreement.uncheck()
      return false
    }
    if (!this.depositAgreement.isAccepted) {
      this.requiredAgreement.uncheck()
      return false
    }
    this.requiredAgreement.check()
    return true
  }
}
|
<reponame>yuanlida/ant-design-pro
import { withPropsAPI, RegisterCommand } from 'gg-editor';
import React from 'react';
// The command becomes available after onAfterExecuteCommand (translated from Chinese).
// Registers a "save" command on the gg-editor canvas that serializes the
// current chart to the console when executed.
class SaveCommand extends React.Component<any, any> {
  componentDidMount(): void {
    const { propsAPI } = this.props;
    console.log(propsAPI);
  }
  render() {
    const { propsAPI } = this.props;
    const { save } = propsAPI;
    const config = {
      queue: true,
      // Command is always enabled.
      enable() {
        return true;
      },
      // Serialize the chart on execution.
      execute(): void {
        const chart = save();
        // const selectedNodes = getSelected();
        console.log(JSON.stringify(chart));
        // console.log(selectedNodes);
      },
      // Undo handler (currently only logs).
      back(): void {
        console.log('execute back command!');
      },
      shortcutCodes: [['ArrowLeft'], ['ArrowRight']],
    };
    return <RegisterCommand name="save" config={config}></RegisterCommand>;
  }
}
export default withPropsAPI(SaveCommand);
|
package com.scout24.ha.meross.mqtt;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.scout24.ha.meross.rest.AttachedDevice;
import com.scout24.ha.meross.rest.NetworkDevice;
import lombok.Synchronized;
import lombok.extern.slf4j.Slf4j;
import java.io.Serializable;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static com.scout24.ha.meross.mqtt.Abilities.TOGGLE;
import static com.scout24.ha.meross.mqtt.Abilities.TOGGLEX;
@Slf4j
public class MerossDevice {
private static ObjectMapper mapper = new ObjectMapper();
private static final int CHANNEL_0 = 0;
private final AttachedDevice device;
private final MqttConnection connection;
protected final String clientRequestTopic;
//MQTT device
private NetworkDevice mqttDevice;
//Cached list of abilities
private List<String> abilities;
private List<Map> channels;
private boolean[] state;
public void initialize() throws MQTTException {
this.abilities = getAbilities();
this.mqttDevice = getSysData();
this.channels = mqttDevice.getAll().getDigest().getTogglex();
initStatesFromTogglexList(channels);
}
public MerossDevice(AttachedDevice device, MqttConnection connection) {
this.device = device;
this.connection = connection;
this.clientRequestTopic = "/appliance/" + device.getUuid() + "/subscribe";
state = new boolean[device.getChannels().size()];
}
public Map toggle(boolean enabled) {
ImmutableMap<String, Serializable> payload = ImmutableMap.of(
"channel", 0,
"toggle", ImmutableMap.of("onoff", enabled? 1 : 0)
);
return connection.executecmd("SET", TOGGLE.getNamespace(), payload, clientRequestTopic);
}
public Map togglex(int channel, boolean enabled) {
Map<String, Serializable> payload = ImmutableMap.of(
"togglex", ImmutableMap.of(
"onoff", enabled? 1:0,
"channel", channel,
"lmTime", System.currentTimeMillis()/1000)
);
return connection.executecmd("SET", Abilities.TOGGLEX.getNamespace(), payload, clientRequestTopic);
}
private Map toggleChannel(int channel, boolean status) throws MQTTException, CommandTimeoutException, InterruptedException {
if (this.getAbilities().contains(TOGGLE.getNamespace())) {
return this.toggle(status);
} else if (this.getAbilities().contains(Abilities.TOGGLEX.getNamespace())) {
return this.togglex(channel, status);
} else {
throw new MQTTException("The current device does not support neither TOGGLE nor TOGGLEX.");
}
}
private int getChannelId(String channel) throws MQTTException {
//Otherwise, if the passed channel looks like the channel spec, lookup its array indexindex
//if a channel name is given, lookup the channel id from the name
int i = 0;
for (Map map : channels) {
if (map.get("devName").equals(channel)) {
return i;
}
i++;
}
throw new MQTTException("Invalid channel specified.");
}
private NetworkDevice getSysData() throws MQTTException {
connection.executecmd("GET", "Appliance.System.All", ImmutableMap.of(), clientRequestTopic);
try {
final Map map = connection.receiveMessage();
return mapper.convertValue(map, NetworkDevice.class);
} catch (Exception e) {
throw new MQTTException("error retrieving state: " + e.getMessage());
}
}
List<Map> getChannels() {
return this.channels;
}
@Synchronized
private List<String> getAbilities() throws MQTTException {
if (abilities == null) {
try {
connection.executecmd("GET", "Appliance.System.Ability", ImmutableMap.of(), clientRequestTopic);
final Map map = connection.receiveMessage();
return Lists.newArrayList(((Map<String, ?>)map.get("ability")).keySet());
} catch (Exception e) {
throw new MQTTException("error fetching device abilities: " + e.getMessage());
}
}
return abilities;
}
public void consumeMessage(Map payload) throws MQTTException {
this.handleNamespacePayload(payload);
}
Map getReport() {
return connection.executecmd("GET", "Appliance.System.Report", ImmutableMap.of(), clientRequestTopic);
}
boolean getChannelStatus(String channel) throws MQTTException {
int c = this.getChannelId(channel);
return this.state[c];
}
public Map turnOnChannel(int channel) throws MQTTException, CommandTimeoutException, InterruptedException {
return this.toggleChannel(channel, true);
}
public Map turnOffChannel(int channel) throws MQTTException, CommandTimeoutException, InterruptedException {
return this.toggleChannel(channel, false);
}
Map turnOn(String channel) throws InterruptedException, CommandTimeoutException, MQTTException {
int c = this.getChannelId(channel);
return this.toggleChannel(c, true);
}
Map turnOff(String channel) throws InterruptedException, CommandTimeoutException, MQTTException {
int c = this.getChannelId(channel);
return this.toggleChannel(c, false);
}
boolean supportsConsumptionReading() throws MQTTException {
return getAbilities().contains(Abilities.CONSUMPTIONX.getNamespace());
}
boolean supportsElectricityReading() throws MQTTException {
return getAbilities().contains(Abilities.ELECTRICITY.getNamespace());
}
Map getPowerConsumption() throws MQTTException {
if (getAbilities().contains(Abilities.CONSUMPTIONX.getNamespace())) {
return connection.executecmd("GET", Abilities.CONSUMPTIONX.getNamespace(), ImmutableMap.of(), clientRequestTopic);
} else return null;
}
Map getElectricity() throws MQTTException {
if (getAbilities().contains(Abilities.ELECTRICITY.getNamespace())) {
return connection.executecmd("GET", Abilities.ELECTRICITY.getNamespace(), ImmutableMap.of(), clientRequestTopic);
} else return null;
}
protected void handleNamespacePayload(Map payload) throws MQTTException {
if (getAbilities().contains(TOGGLE.getNamespace())) {
final Map<String, ?> toggle = (Map<String, ?>) payload.get("toggle");
this.state[MerossDevice.CHANNEL_0] = Boolean.parseBoolean(toggle.get("onoff").toString());
} else if (getAbilities().contains(TOGGLEX.getNamespace())) {
if (payload.get("togglex") instanceof List) {
initStatesFromTogglexList((List) payload.get("togglex"));
} else if (payload.get("togglex") instanceof Map) {
initStatesFromTogglexMap((Map) payload.get("togglex"));
}
} else if ("".equals(Abilities.ONLINE.getNamespace())) {
log.info("Online keep alive received: " + payload);
} else {
log.error("Unknown/Unsupported namespace: {} for device {}", "", device.getDevName());
}
}
private void initStatesFromTogglexMap(Map map) {
int channelindex = Integer.parseInt(map.get("channel").toString());
this.state[channelindex] = map.get("onoff").toString().equals("1");
}
private void initStatesFromTogglexList(List list) {
for (Map map : (List<Map>) list) {
initStatesFromTogglexMap(map);
}
log.info("Initialized state: " + Arrays.toString(state));
}
//
// Map getWifilist() throws InterruptedException, CommandTimeoutException, MQTTException {
// return executecmd("GET", "Appliance.Config.WifiList", ImmutableMap.of(), LONG_TIMEOUT, clientresponsetopic, clientRequestTopic);
// }
//
// Map getTrace() throws InterruptedException, CommandTimeoutException, MQTTException {
// return executecmd("GET", "Appliance.Config.Trace", ImmutableMap.of(), SHORT_TIMEOUT, clientresponsetopic, clientRequestTopic);
// }
//
// Map getDebug() throws InterruptedException, CommandTimeoutException {
// return executecmd("GET", "Appliance.System.Debug", ImmutableMap.of(), SHORT_TIMEOUT, clientresponsetopic, clientRequestTopic);
// }
} |
<gh_stars>0
package lights
import "github.com/mickael-carl/hue_exporter/pkg/common"
// LightState mirrors the "state" object the Hue bridge returns for a light;
// json tags map the bridge's abbreviated field names.
type LightState struct {
	On bool
	Brightness int `json:"bri"`
	Hue int
	Saturation int `json:"sat"`
	Effect string
	XY []float64
	ColorTemperature int `json:"ct"`
	Alert string
	ColorMode string
	Mode string
	Reachable bool
}

// Gamut is one color-gamut coordinate entry.
type Gamut []float32

// ColorTemperatureLimits is the supported color-temperature range.
type ColorTemperatureLimits struct {
	Min int
	Max int
}

// LightControl describes the light's controllable capabilities.
type LightControl struct {
	MinDimLevel int
	MaxLumen int
	ColorGamutType string
	ColorGamut []Gamut
	ColorTemperatureLimits ColorTemperatureLimits `json:"ct"`
}

// LightStreaming describes entertainment-streaming capabilities.
type LightStreaming struct {
	Renderer bool
	Proxy bool
}

// LightCapabilities groups certification, control and streaming info.
type LightCapabilities struct {
	Certified bool
	Control LightControl
	Streaming LightStreaming
}

// LightStartup describes the configured power-on behavior.
type LightStartup struct {
	Mode string
	Configured bool
}

// LightConfig is the static configuration block for a light.
type LightConfig struct {
	Archetype string
	Function string
	Direction string
	Startup LightStartup
}

// LightAttributes is the full per-light record returned by the bridge.
type LightAttributes struct {
	State LightState
	SoftwareUpdate common.SoftwareUpdate `json:"swupdate"`
	Type string
	Name string
	ModelId string
	ManufacturerName string
	ProductName string
	Capabilities LightCapabilities
	Config LightConfig
	UniqueId string
	SoftwareVersion string `json:"swversion"`
	SoftwareConfigId string `json:"swconfigid"`
	ProductId string
}

// Lights maps the bridge's numeric light id to its attributes.
type Lights map[int]LightAttributes
|
import React, { useState, useEffect, Suspense } from 'react';
import { BrowserRouter as Router, Route, Switch } from 'react-router-dom'
import { useTranslation } from 'react-i18next';
import './App.css';
import MsgBox from './components/MsgBox/MsgBox';
import Header from './components/Header/Header';
import Footer from './components/Footer/Footer';
import Home from './views/Home';
import Menu from './views/Menu/Menu';
import Detail from './views/Detail/Detail';
import NoMatch from './views/NoMatch/NoMatch';
import wallet from './tools/ethWallet';
// TODO: write a proper loading page — the simpler the better (translated from Chinese).
const Loader = () => (
  <div>loading...</div>
);
// Root component: wires i18n, the TronLink-install warning box, the wallet
// initialisation timer and the app's routes.
function App() {
  const { t } = useTranslation();
  const [isWarning, setWarning] = useState(false);
  // TODO: extract this into a separate component (translated from Chinese).
  let msg = <div className="msg-box-body">
    <div className="msg-box-icon-area">
      <i className="msg-box-icon"></i>
    </div>
    <div className="download-msg">{t('app.install')} <a target="new_tab" href="https://chrome.google.com/webstore/detail/tronlink%EF%BC%88%E6%B3%A2%E5%AE%9D%E9%92%B1%E5%8C%85%EF%BC%89/ibnejdfjmmkpcnlpebklmnkoeoihofec">TronLink</a>。</div>
  </div>
  useEffect(() => {
    // Initialise the wallet 2 seconds after mount.
    const timerRec = setTimeout(() => {
      wallet._init();
      // NOTE(review): clearing a timeout from inside its own fired callback
      // is a no-op; the cleanup below already handles unmount-before-fire.
      clearTimeout(timerRec);
    }, 2 * 1000);
    return () => {
      clearTimeout(timerRec);
    }
  }, []);
  return (
    <Suspense fallback={<Loader />}>
      < div className="App" >
        <div className="down-box">
          {/* <MsgBox show={!hasBom} msg={msg} /> */}
          {isWarning && <MsgBox msg={msg} title={t('app.warning')} confirmCallBack={() => setWarning(false)} />}
        </div>
        <Router>
          <Header />
          <Switch>
            <Route exact path="/" component={Home}></Route>
            <Route exact path="/menu" component={Menu}></Route>
            <Route exact path="/detail" component={Detail}></Route>
            <Route component={NoMatch} exact></Route>
          </Switch>
        </Router>
        <Footer />
      </div >
    </Suspense>
  );
}
export default App;
#!/bin/bash
# SLURM job: train double-DDPG with action noise on BipedalWalkerHardcore-v2
# (seed 5, run 8) using the CPU TensorFlow virtualenv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M			    # memory per node
#SBATCH --time=23:00:00		    # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_BipedalWalkerHardcore-v2_doule_ddpg_softcopy_action_noise_seed5_run8_%N-%j.out  # %N for node name, %j for jobID
# Load the toolchain modules, activate the virtualenv, then launch training.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09  gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env BipedalWalkerHardcore-v2  --random-seed 5 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/continuous/BipedalWalkerHardcore-v2/doule_ddpg_softcopy_action_noise_seed5_run8 --continuous-act-space-flag
|
<gh_stars>0
import {
Answerable,
AnswersQuestions,
Question,
UsesAbilities,
} from '@serenity-js/core';
import { formatted } from '@serenity-js/core/lib/io';
import { ElementHandle } from 'playwright';
export class Attribute extends Question<Promise<string>> {
  /**
   * @deprecated Use Attribute.called(name).of(target)
   *
   * @param {Answerable<ElementHandle>} target
   * @returns
   */
  static of(target: Answerable<ElementHandle>): { called: (name: Answerable<string>) => Attribute; } {
    return {
      called: (name: Answerable<string>) => new Attribute(target, name),
    };
  }

  /**
   * @param {Answerable<string>} name of attribute
   * @returns {{ of(Answerable<ElementHandle>): Attribute }}
   */
  static called(name: Answerable<string>): {
    /**
     * @param {Answerable<ElementHandle>} target
     * @returns {Attribute}
     */
    of: (target: Answerable<ElementHandle>) => Attribute;
  } {
    return {
      /**
       * @param {Answerable<ElementHandle>} target
       * @returns {Attribute}
       */
      of: (target: Answerable<ElementHandle>) => new Attribute(target, name),
    };
  }

  protected constructor(
    private readonly target: Answerable<ElementHandle>,
    private readonly name: Answerable<string>
  ) {
    super(formatted`the value of the ${name} attribute of ${target}`);
  }

  /**
   * @desc
   *  Makes the provided {@link @serenity-js/core/lib/screenplay/actor~Actor}
   *  answer this {@link @serenity-js/core/lib/screenplay~Question}.
   *
   * @param {AnswersQuestions & UsesAbilities} actor
   * @returns {Promise<void>}
   *
   * @see {@link @serenity-js/core/lib/screenplay/actor~Actor}
   * @see {@link @serenity-js/core/lib/screenplay/actor~AnswersQuestions}
   * @see {@link @serenity-js/core/lib/screenplay/actor~UsesAbilities}
   */
  async answeredBy(actor: AnswersQuestions & UsesAbilities): Promise<string> {
    const element = await actor.answer(this.target);
    const name = await actor.answer(this.name);
    // NOTE(review): Playwright's ElementHandle#getAttribute resolves to
    // string | null; a missing attribute yields null despite the declared
    // Promise<string> — TODO confirm callers handle that.
    return element.getAttribute(name);
  }
}
|
#! /bin/sh
#
# Copyright by The HDF Group.
# All rights reserved.
#
# This file is part of HDF5. The full HDF5 copyright notice, including
# terms governing use, modification, and redistribution, is contained in
# the COPYING file, which can be found at the root of the source code
# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.
# If you do not have access to either file, you may request a copy from
# help@hdfgroup.org.
#
# This file: run-c-ex.sh
# Written by: Larry Knox
# Date: May 11, 2010
#
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# #
# This script will compile and run the c examples from source files installed #
# in .../share/hdf5_examples/c using h5cc or h5pc. The order for running #
# programs with RunTest in the MAIN section below is taken from the Makefile. #
# The order is important since some of the test programs use data files created #
# by earlier test programs. Any future additions should be placed accordingly. #
# #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Initializations
EXIT_SUCCESS=0
EXIT_FAILURE=1

# Where the tool is installed.
# default is relative path to installed location of the tools
prefix="${prefix:-/usr/local/Cellar/hdf5/1.10.1}"
PARALLEL=no             # Am I in parallel mode?
AR="ar"
RANLIB="ranlib"
# Choose the serial or parallel compiler wrapper.
if [ "$PARALLEL" = no ]; then
    H5TOOL="h5cc"               # The tool name
else
    H5TOOL="h5pcc"               # The tool name
fi
H5TOOL_BIN="${prefix}/bin/${H5TOOL}"   # The path of the tool binary

#### Run test ####
# Compile $1.c with the h5cc/h5pcc wrapper, abort on compile failure, then
# run the resulting binary (its exit status is what the && chain below sees).
RunTest()
{
   TEST_EXEC=$1
   Test=$1".c"

   echo
   echo "#################  $1  #################"
   ${H5TOOL_BIN} -o $TEST_EXEC $Test
   if [ $? -ne 0 ]
   then
      echo "messed up compiling $Test"
      exit 1
   fi
   ./$TEST_EXEC
}



##################  MAIN  ##################

# Create the working directories some examples expect.
if ! test -d red; then
   mkdir red
fi
if ! test -d blue; then
   mkdir blue
fi
if ! test -d u2w; then
   mkdir u2w
fi

# Run tests
# NOTE(review): $? here reflects the directory checks above and is
# effectively always 0 — TODO confirm this guard is intentional.
if [ $? -eq 0 ]
then
    # The order matters: some programs read data files written by earlier ones.
    if (RunTest h5_crtdat &&\
        rm h5_crtdat &&\
        RunTest h5_extend &&\
        rm h5_extend &&\
        RunTest h5_rdwt &&\
        rm h5_rdwt &&\
        RunTest h5_crtatt &&\
        rm h5_crtatt &&\
        RunTest h5_crtgrp &&\
        rm h5_crtgrp &&\
        RunTest h5_crtgrpar &&\
        rm h5_crtgrpar &&\
        RunTest h5_crtgrpd &&\
        rm h5_crtgrpd &&\
        RunTest h5_subset &&\
        rm h5_subset &&\
        RunTest h5_cmprss &&\
        rm h5_cmprss &&\
        RunTest h5_write &&\
        rm h5_write &&\
        RunTest h5_read &&\
        rm h5_read &&\
        RunTest h5_extend_write &&\
        rm h5_extend_write &&\
        RunTest h5_chunk_read &&\
        rm h5_chunk_read &&\
        RunTest h5_compound &&\
        rm h5_compound &&\
        RunTest h5_group &&\
        rm h5_group &&\
        RunTest h5_select &&\
        rm h5_select &&\
        RunTest h5_attribute &&\
        rm h5_attribute &&\
        RunTest h5_mount &&\
        rm h5_mount &&\
        RunTest h5_reference &&\
        rm h5_reference &&\
        RunTest h5_drivers &&\
        rm h5_drivers &&\
        RunTest h5_ref2reg &&\
        rm h5_ref2reg &&\
        RunTest h5_extlink &&\
        rm h5_extlink &&\
        RunTest h5_elink_unix2win &&\
        rm h5_elink_unix2win &&\
        RunTest h5_shared_mesg &&\
        rm h5_shared_mesg &&\
        RunTest h5_vds-eiger &&\
        rm h5_vds-eiger &&\
        RunTest h5_vds-exclim &&\
        rm h5_vds-exclim &&\
        RunTest h5_vds-exc &&\
        rm h5_vds-exc &&\
        RunTest h5_vds-simpleIO &&\
        rm h5_vds-simpleIO &&\
        RunTest h5_vds-percival &&\
        rm h5_vds-percival &&\
        RunTest h5_vds-percival-unlim &&\
        rm h5_vds-percival-unlim &&\
        RunTest h5_vds-percival-unlim-maxmin&&\
        rm h5_vds-percival-unlim-maxmin &&\
        RunTest h5_vds &&\
        rm h5_vds); then
        EXIT_VALUE=${EXIT_SUCCESS}
    else
        EXIT_VALUE=${EXIT_FAILURE}
    fi
fi

# Cleanup
# NOTE(review): these rm calls complain if no matching files exist.
rm *.o
rm *.h5
rm -rf red blue u2w
echo

exit $EXIT_VALUE
#!/bin/bash
# Script - download codeforces dataset from s3
# options:
# $1 specify a percentage of dataset used as train set
# $2 specify a percentage of dataset used as test set
# $3 specify a percentage of dataset used as validation set
# $4 specify if developer mode is on, default: false
# $5 specify a path to astminer .jar file
# $6 specify a path to splitiing script
# $7 specify if splitted dataset needs to be downloaded
TRAIN_SPLIT_PART=$1
VAL_SPLIT_PART=$2
TEST_SPLIT_PART=$3
DEV=$4
ASTMINER_PATH=$5
SPLIT_SCRIPT=$6
LOAD_SPLITTED=$7

DATA_DIR=./data
DATASET_NAME=poj_104
DATA_PATH=${DATA_DIR}/${DATASET_NAME}

if [ ! -d $DATA_DIR ]
then
  mkdir $DATA_DIR
fi

# Download and unpack the dataset (pre-split archive or the original one),
# unless it was already unpacked on a previous run.
if [ -d "$DATA_PATH" ]
then
  echo "$DATA_PATH exists."
else
  if $LOAD_SPLITTED
  then
    if [ ! -f "$DATA_DIR/poj-104-splitted.tar.gz" ]
    then
      echo "Downloading splitted dataset ${DATASET_NAME}"
      wget https://s3-eu-west-1.amazonaws.com/datasets.ml.labs.aws.intellij.net/poj-104/poj-104-splitted.tar.gz -P $DATA_DIR/
    fi
    echo "Unzip splitted dataset"
    tar -C $DATA_DIR/ -xvf "$DATA_DIR/poj-104-splitted.tar.gz"
  else
    if [ ! -f "$DATA_DIR/poj-104-original.tar.gz" ]
    then
      echo "Downloading dataset ${DATASET_NAME}"
      wget https://s3-eu-west-1.amazonaws.com/datasets.ml.labs.aws.intellij.net/poj-104/poj-104-original.tar.gz -P $DATA_DIR/
    fi
    echo "Unzip dataset"
    # In the developer mode we leave only several classes
    if $DEV
    then
      echo "Dev mode"
      tar -C $DATA_DIR/ -xvf "$DATA_DIR/poj-104-original.tar.gz" "ProgramData/[1-3]"
    else
      tar -C $DATA_DIR/ -xvf "$DATA_DIR/poj-104-original.tar.gz"
    fi
  fi
  mv "$DATA_DIR"/ProgramData "$DATA_PATH"
fi

# To prepare our dataset for astminer we need to rename all .txt files to .c files
echo "Renaming files"
find "$DATA_PATH"/*/* -name "*.txt" -type f -exec sh -c 'mv "$0" "${0%.txt}.c"' {} \;

if [ ! -d "$DATA_PATH"/train ] || [ ! -d "$DATA_PATH"/test ] || [ ! -d "$DATA_PATH"/val ]
then
  # Splitting dataset on train/test/val parts
  echo "Splitting on train/test/val"
  sh "$SPLIT_SCRIPT" "$DATA_PATH" "$DATA_PATH"_split "$TRAIN_SPLIT_PART" "$TEST_SPLIT_PART" "$VAL_SPLIT_PART"
  rm -rf "$DATA_PATH"
  mv "$DATA_PATH"_split "$DATA_PATH"
fi

echo "Extracting paths using astminer. You need to specify the path to .jar in \"ASTMINER_PATH\" variable first"
if [ -d "$DATA_PATH"_parsed ]
then
  rm -rf "$DATA_PATH"_parsed
fi
mkdir "$DATA_PATH"_parsed
# NOTE(review): -Xmx200g assumes a very large-memory host — confirm for your machine.
java -jar -Xmx200g $ASTMINER_PATH code2vec --lang c --project "$DATA_PATH"/train --output "$DATA_PATH"_parsed/train --maxL 8 --maxW 2 --granularity file --folder-label --split-tokens
java -jar -Xmx200g $ASTMINER_PATH code2vec --lang c --project "$DATA_PATH"/test --output "$DATA_PATH"_parsed/test --maxL 8 --maxW 2 --granularity file --folder-label --split-tokens
java -jar -Xmx200g $ASTMINER_PATH code2vec --lang c --project "$DATA_PATH"/val --output "$DATA_PATH"_parsed/val --maxL 8 --maxW 2 --granularity file --folder-label --split-tokens

# Flatten astminer's per-split output into suffixed csv files and drop the
# intermediate directories.
for folder in $(find "$DATA_PATH"_parsed/*/c -type d)
do
  for file in "$folder"/*
  do
    type="$(basename -s .csv "$(dirname "$folder")")"
    mv "$file" "$DATA_PATH"_parsed/"$(basename "${file%.csv}.$type.csv")"
  done
  rm -rf "$(dirname "$folder")"
done
mv "$DATA_PATH" "$DATA_PATH"_pure_files
mv "$DATA_PATH"_parsed "$DATA_PATH"
// vim: set et ts=2 sw=2 :
// Builds the relative tile URL "prefix + Z<zoom>/<y>_<x>.png" for tiles that
// fall inside the covered area at the given zoom level; returns undefined
// (no URL) for any tile outside the covered ranges, as before.
function CustomGetRelTileUrl (prefix, a, b) {
  // Covered tile ranges per zoom level: [minX, maxX, minY, maxY].
  var bounds = {
    7:  [76,    78,    39,   40],
    8:  [153,   156,   78,   81],
    9:  [307,   312,   157,  163],
    10: [614,   625,   314,  326],
    11: [1228,  1251,  629,  652],
    12: [2457,  2503,  1258, 1305],
    13: [4915,  5006,  2516, 2610],
    14: [9830,  10013, 5033, 5221]
  };
  var range = bounds[b];
  if (range && a.x >= range[0] && a.x <= range[1] && a.y >= range[2] && a.y <= range[3]) {
    return prefix + "Z" + b + "/" + a.y + "_" + a.x + ".png";
  }
  // Implicit undefined for uncovered zooms/tiles — matches the original.
}
// Lazily-built map-type singletons (Google Maps v2 API objects).
var G_SLAZAV_MAP = null;
var G_ARBALET_SLAZAV_MAP = null;

// Returns the Slazav map type, creating it on first call: the normal base
// layer plus a PNG overlay served via CustomGetRelTileUrl.
function GetSlazavMap () {
  if (G_SLAZAV_MAP == null) {
    var prefixSlazav = 'http://aparshin.ru/maps/slazav/';
    var slazavLayer = new GTileLayer(new GCopyrightCollection('Slazav'), 0, 14, { isPng:true, opacity:1 });
    slazavLayer.getTileUrl = function (a, b) { return CustomGetRelTileUrl(prefixSlazav, a, b); };
    G_SLAZAV_MAP = new GMapType([G_NORMAL_MAP.getTileLayers()[0], slazavLayer], new GMercatorProjection(22), 'Slazav', { textColor:'black', maxResolution:14 });
  }
  return G_SLAZAV_MAP;
}

// Returns the combined Arbalet-base + Slazav-overlay map type (lazy singleton).
function GetArbaletSlazavMap () {
  if (G_ARBALET_SLAZAV_MAP == null) {
    G_ARBALET_SLAZAV_MAP = new GMapType([GetArbaletMap().getTileLayers()[0], GetSlazavMap().getTileLayers()[1]], new GMercatorProjection(22), 'Arbalet, Slazav', { textColor:'black' });
  }
  return G_ARBALET_SLAZAV_MAP;
}
|
#!/bin/sh
# Launch the cryptopulse Keystone app under nodemon from its checkout directory.
cd /home/pi/GitHub/cryptopulse
nodemon keystone
import React, { Component/*, PropTypes */} from 'react'
import { connect } from 'react-redux'
import Helmet from 'react-helmet'
import loadMainPage from './main'
import jQuery from 'jquery'
import Header from './header'
import Home from './home'
import About from './about'
import Events from './events'
import Photos from './photos'
import Donate from './donate'
import Footer from './footer'
// Landing page: composes the static site sections and passes the current
// user id / auth flag (from Redux) to the header.
class Landing extends Component {
  componentDidMount () {
    // Run the legacy jQuery page initialisation once the DOM is mounted.
    loadMainPage(jQuery)
  }
  render () {
    return (
      <div>
        <Helmet
          title='SocialWorks'
        />
        <Header user = {this.props.user} authed={this.props.loggedIn} />
        <Home />
        <About />
        <Events />
        <Photos />
        <Donate />
        <Footer />
      </div>
    )
  }
}
// Maps the Immutable.js store onto props: `user` is the uid (if present),
// `loggedIn` the auth flag.
const mapStateToProps = (state) => {
  return {
    user: state.user.getIn(['user','uid']),
    loggedIn: state.user.getIn(['loggedIn'])
  }
}

export { Landing }
export default connect(mapStateToProps, null)(Landing)
|
#!/bin/bash
# Convert every atlas robot xacro into URDF and SDF, and install each SDF as
# a Gazebo model under ~/.gazebo/models/<robot>.
source /opt/ros/indigo//setup.bash
export ROS_PACKAGE_PATH=$PWD/../:$ROS_PACKAGE_PATH
#rosrun xacro xacro.py ../atlas_description/robots/atlas_v3.urdf.xacro > atlas_v3.urdf
#gzsdf print atlas_v3.urdf  > atlas_v3.sdf
#rosparam set /robot_description -t atlas_v3.urdf

#DO NOT use, as robot state publisher doesn't know it's updated
#rosrun xacro xacro.py ../atlas_description/robots/atlas_v3_irobot_hands.urdf.xacro > atlas_v3_irobot_hands.urdf
#gzsdf print atlas_v3_irobot_hands.urdf|sed 's/\[\([[:digit:]]\)\]/_\1/g' > atlas_v3_irobot_hands.sdf
#rosparam set /robot_description_irobot_hands -t atlas_v3_irobot_hands.urdf

#rosrun xacro xacro.py atlas_v3_hook.urdf.xacro > sdf/atlas_v3_hook.urdf
#gzsdf print sdf/atlas_v3_hook.urdf > sdf/atlas_v3_hook.sdf

for INPUT_XACRO in robots/*urdf.xacro
do
	INPUT=${INPUT_XACRO##robots/}
	INPUT_URDF=urdf/${INPUT%.xacro}
	INPUT_SDF=sdf/${INPUT%.urdf.xacro}.sdf
	echo $INPUT_XACRO to $INPUT_URDF to $INPUT_SDF
	# Expand the xacro, rewriting "[N]/" index syntax to "_N_" for gzsdf.
	rosrun xacro xacro.py $INPUT_XACRO |sed 's/\[\([[:digit:]]\)\]\//_\1_/g' > $INPUT_URDF
	gzsdf print $INPUT_URDF > $INPUT_SDF
	# Register the generated SDF as a Gazebo model.
	ROBOT=${INPUT%.urdf.xacro}
	MODELDIR=$HOME/.gazebo/models/$ROBOT
	mkdir -p $MODELDIR
	echo '<?xml version="1.0"?>' > $MODELDIR/model.config
	echo '<model>' >> $MODELDIR/model.config
	echo "<name>${ROBOT}</name>" >> $MODELDIR/model.config
	echo "<sdf version='1.4'>model.sdf</sdf>" >> $MODELDIR/model.config
	echo "</model>" >> $MODELDIR/model.config
	cp $INPUT_SDF $MODELDIR/model.sdf
done

#echo cp sdf/*sdf ../../src/us/ihmc/darpaRoboticsChallenge/models/GFE/
echo cp sdf/*sdf ../../../Atlas/resources/models/GFE/
# FIX: the line below was a stray bare path that the shell tried to execute
# as a command ("command not found"); it is clearly leftover text, so it is
# commented out rather than run.
#Atlas/resources/models/GFE/drc_no_hands.sdf
#roslauch ./show_in_rviz.launch &
#rosparam set /robot_description -t sdf/atlas_v3_hook.urdf
#     <visual name='l_box'>
#          <pose>0.15 +0.25 0.05 -1.57 0 -2.36 </pose>
#        <geometry>
#          <mesh>
#            <scale>1 1 1</scale>
#            <uri>file://ihmc/calibration_cube.dae</uri>
#          </mesh>
#        </geometry>
#      </visual>
#
def Fibonacci(n):
if n<0:
print("Incorrect input")
elif n==1:
return 0
elif n==2:
return 1
else:
return Fibonacci(n-1)+Fibonacci(n-2) |
const setup = require('../index.js')
const QUnit = require('qunit')
const timeout = function (ms) {
return new Promise(resolve => setTimeout(resolve, ms))
}
const retry = setup(QUnit.test)
QUnit.module('test retries and result message', hooks => {
hooks.after(assert => {
assert.equal(QUnit.config.current.assertions[0].message, '(Retried 5 times)', 'message shows retries')
})
retry('test retry five times', function (assert, currentRun) {
assert.equal(currentRun, 5)
}, 5)
})
QUnit.module('test retries, should stop at 3 retries', hooks => {
hooks.after(assert => {
assert.equal(QUnit.config.current.assertions[0].message, '(Retried 3 times)', 'message shows retries')
})
retry('test retry five times', function (assert, currentRun) {
assert.equal(currentRun, 3)
}, 5)
})
retry('test default: retry runs twice - initial attempt plus one retry', function (assert, currentRun) {
assert.expect(1)
assert.equal(currentRun, 2)
})
retry('test retry five times', function (assert, currentRun) {
assert.expect(1)
assert.equal(currentRun, 5)
}, 5)
retry('test retry async', async function (assert, currentRun) {
assert.expect(1)
await timeout(100)
assert.equal(currentRun, 4)
}, 4)
retry('promise reject', async function (assert, currentRun) {
if (currentRun === 2) {
await Promise.reject(new Error('should be handled'))
}
assert.equal(currentRun, 5)
}, 5)
QUnit.module('hook context', function (hooks) {
hooks.beforeEach(function () {
this.sharedValue = 'myContext'
})
QUnit.test('qunit test', function (assert) {
assert.equal(this.sharedValue, 'myContext')
})
retry('retry matches qunit test behaviour', function (assert, currentRun) {
assert.equal(this.sharedValue, 'myContext')
assert.equal(currentRun, 2)
})
})
QUnit.module('currentRun count', function () {
// tests are order dependent
// count retries in retryTest
// assert correct count in another test
let execCount = 0
retry('count retries', function (assert, currentRun) {
execCount = execCount + 1
assert.equal(currentRun, 5)
}, 5)
QUnit.test('execCount for retryTest', function (assert) {
assert.equal(execCount, 5)
})
})
QUnit.module('hooks count', function () {
// tests are order dependent
// count retries in retryTest
// assert correct count in another test
let execCount = 0
let beforeCount = 0
let afterCount = 0
QUnit.module('count hooks async', function (hooks) {
hooks.beforeEach(async function () {
await timeout(100)
beforeCount++
})
hooks.afterEach(async function () {
await timeout(100)
afterCount++
})
retry('count retries', function (assert, currentRun) {
execCount++
assert.equal(beforeCount, currentRun, 'beforeCount should match currentRun')
assert.equal(afterCount, currentRun - 1, 'afterCount one less than currentRun')
assert.equal(currentRun, 5)
}, 5)
})
QUnit.test('test hooks count', function (assert) {
assert.equal(execCount, 5)
assert.equal(beforeCount, 5)
assert.equal(afterCount, 5)
})
})
|
#!/bin/bash
#SBATCH --job-name=parse_global_rdf_adf
#SBATCH --mail-type=ALL
#SBATCH --mail-use=salil.bavdekar@ufl.edu
#SBATCH --output=parse_global_rdf_adf/job_%j.out
#SBATCH --error=parse_global_rdf_adf/job_%j.err
#SBATCH --account=subhash
#SBATCH --qos=subhash
#SBATCH --ntasks=1
#SBATCH --nodes=1
#SBATCH --mem=4GB
#SBATCH --time=24:00:00
pwd;
echo 'Job Started: parse_global_rdf_adf'
date;
echo 'Working directory:' $1
echo 'Elements:' $2 $3
export PATH=/home/salil.bavdekar/.conda/envs/ai_gasp/bin:$PATH
python parse_global_rdf_adf.py $1 $2 $3
echo 'Done.'
date;
|
#!/bin/bash
# We need PWD to be set by the shel before invoking tusk else launchdir is
# empty
exec /usr/local/bin/tusk -qf /bbench/tusk.yml "$@"
|
#!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
if test ! -f "${KUBECONFIG}"
then
echo "No kubeconfig, so no point in calling must-gather."
exit 0
fi
# For disconnected or otherwise unreachable environments, we want to
# have steps use an HTTP(S) proxy to reach the API server. This proxy
# configuration file should export HTTP_PROXY, HTTPS_PROXY, and NO_PROXY
# environment variables, as well as their lowercase equivalents (note
# that libcurl doesn't recognize the uppercase variables).
if test -f "${SHARED_DIR}/proxy-conf.sh"
then
# shellcheck disable=SC1090
source "${SHARED_DIR}/proxy-conf.sh"
fi
# Allow a job to override the must-gather image, this is needed for
# disconnected environments prior to 4.8.
if test -f "${SHARED_DIR}/must-gather-image.sh"
then
# shellcheck disable=SC1090
source "${SHARED_DIR}/must-gather-image.sh"
else
MUST_GATHER_IMAGE=${MUST_GATHER_IMAGE:-""}
fi
echo "Running must-gather..."
mkdir -p ${ARTIFACT_DIR}/must-gather
oc --insecure-skip-tls-verify adm must-gather $MUST_GATHER_IMAGE --dest-dir ${ARTIFACT_DIR}/must-gather > ${ARTIFACT_DIR}/must-gather/must-gather.log
[ -f "${ARTIFACT_DIR}/must-gather/event-filter.html" ] && cp "${ARTIFACT_DIR}/must-gather/event-filter.html" "${ARTIFACT_DIR}/event-filter.html"
tar -czC "${ARTIFACT_DIR}/must-gather" -f "${ARTIFACT_DIR}/must-gather.tar.gz" .
rm -rf "${ARTIFACT_DIR}"/must-gather
function queue() {
local TARGET="${1}"
shift
local LIVE
LIVE="$(jobs | wc -l)"
while [[ "${LIVE}" -ge 45 ]]; do
sleep 1
LIVE="$(jobs | wc -l)"
done
echo "${@}"
if [[ -n "${FILTER:-}" ]]; then
"${@}" | "${FILTER}" >"${TARGET}" &
else
"${@}" >"${TARGET}" &
fi
}
export PATH=$PATH:/tmp/shared
echo "Gathering artifacts ..."
mkdir -p ${ARTIFACT_DIR}/pods ${ARTIFACT_DIR}/nodes ${ARTIFACT_DIR}/metrics ${ARTIFACT_DIR}/bootstrap ${ARTIFACT_DIR}/network ${ARTIFACT_DIR}/oc_cmds
oc --insecure-skip-tls-verify --request-timeout=5s get nodes -o jsonpath --template '{range .items[*]}{.metadata.name}{"\n"}{end}' > /tmp/nodes
oc --insecure-skip-tls-verify --request-timeout=5s get pods --all-namespaces --template '{{ range .items }}{{ $name := .metadata.name }}{{ $ns := .metadata.namespace }}{{ range .spec.containers }}-n {{ $ns }} {{ $name }} -c {{ .name }}{{ "\n" }}{{ end }}{{ range .spec.initContainers }}-n {{ $ns }} {{ $name }} -c {{ .name }}{{ "\n" }}{{ end }}{{ end }}' > /tmp/containers
oc --insecure-skip-tls-verify --request-timeout=5s get pods -l openshift.io/component=api --all-namespaces --template '{{ range .items }}-n {{ .metadata.namespace }} {{ .metadata.name }}{{ "\n" }}{{ end }}' > /tmp/pods-api
queue ${ARTIFACT_DIR}/config-resources.json oc --insecure-skip-tls-verify --request-timeout=5s get apiserver.config.openshift.io authentication.config.openshift.io build.config.openshift.io console.config.openshift.io dns.config.openshift.io featuregate.config.openshift.io image.config.openshift.io infrastructure.config.openshift.io ingress.config.openshift.io network.config.openshift.io oauth.config.openshift.io project.config.openshift.io scheduler.config.openshift.io -o json
queue ${ARTIFACT_DIR}/apiservices.json oc --insecure-skip-tls-verify --request-timeout=5s get apiservices -o json
queue ${ARTIFACT_DIR}/oc_cmds/apiservices oc --insecure-skip-tls-verify --request-timeout=5s get apiservices
queue ${ARTIFACT_DIR}/clusteroperators.json oc --insecure-skip-tls-verify --request-timeout=5s get clusteroperators -o json
queue ${ARTIFACT_DIR}/oc_cmds/clusteroperators oc --insecure-skip-tls-verify --request-timeout=5s get clusteroperators
queue ${ARTIFACT_DIR}/clusterversion.json oc --insecure-skip-tls-verify --request-timeout=5s get clusterversion -o json
queue ${ARTIFACT_DIR}/oc_cmds/clusterversion oc --insecure-skip-tls-verify --request-timeout=5s get clusterversion
queue ${ARTIFACT_DIR}/configmaps.json oc --insecure-skip-tls-verify --request-timeout=5s get configmaps --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/configmaps oc --insecure-skip-tls-verify --request-timeout=5s get configmaps --all-namespaces
queue ${ARTIFACT_DIR}/credentialsrequests.json oc --insecure-skip-tls-verify --request-timeout=5s get credentialsrequests --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/credentialsrequests oc --insecure-skip-tls-verify --request-timeout=5s get credentialsrequests --all-namespaces
queue ${ARTIFACT_DIR}/csr.json oc --insecure-skip-tls-verify --request-timeout=5s get csr -o json
queue ${ARTIFACT_DIR}/endpoints.json oc --insecure-skip-tls-verify --request-timeout=5s get endpoints --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/endpoints oc --insecure-skip-tls-verify --request-timeout=5s get endpoints --all-namespaces
FILTER=gzip queue ${ARTIFACT_DIR}/deployments.json.gz oc --insecure-skip-tls-verify --request-timeout=5s get deployments --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/deployments oc --insecure-skip-tls-verify --request-timeout=5s get deployments --all-namespaces -o wide
FILTER=gzip queue ${ARTIFACT_DIR}/daemonsets.json.gz oc --insecure-skip-tls-verify --request-timeout=5s get daemonsets --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/daemonsets oc --insecure-skip-tls-verify --request-timeout=5s get daemonsets --all-namespaces -o wide
queue ${ARTIFACT_DIR}/events.json oc --insecure-skip-tls-verify --request-timeout=5s get events --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/events oc --insecure-skip-tls-verify --request-timeout=5s get events --all-namespaces
queue ${ARTIFACT_DIR}/kubeapiserver.json oc --insecure-skip-tls-verify --request-timeout=5s get kubeapiserver -o json
queue ${ARTIFACT_DIR}/oc_cmds/kubeapiserver oc --insecure-skip-tls-verify --request-timeout=5s get kubeapiserver
queue ${ARTIFACT_DIR}/kubecontrollermanager.json oc --insecure-skip-tls-verify --request-timeout=5s get kubecontrollermanager -o json
queue ${ARTIFACT_DIR}/oc_cmds/kubecontrollermanager oc --insecure-skip-tls-verify --request-timeout=5s get kubecontrollermanager
queue ${ARTIFACT_DIR}/machineconfigpools.json oc --insecure-skip-tls-verify --request-timeout=5s get machineconfigpools -o json
queue ${ARTIFACT_DIR}/oc_cmds/machineconfigpools oc --insecure-skip-tls-verify --request-timeout=5s get machineconfigpools
queue ${ARTIFACT_DIR}/machineconfigs.json oc --insecure-skip-tls-verify --request-timeout=5s get machineconfigs -o json
queue ${ARTIFACT_DIR}/oc_cmds/machineconfigs oc --insecure-skip-tls-verify --request-timeout=5s get machineconfigs
queue ${ARTIFACT_DIR}/machinesets.json oc --insecure-skip-tls-verify --request-timeout=5s get machinesets -A -o json
queue ${ARTIFACT_DIR}/oc_cmds/machinesets oc --insecure-skip-tls-verify --request-timeout=5s get machinesets -A
queue ${ARTIFACT_DIR}/machines.json oc --insecure-skip-tls-verify --request-timeout=5s get machines -A -o json
queue ${ARTIFACT_DIR}/oc_cmds/machines oc --insecure-skip-tls-verify --request-timeout=5s get machines -A -o wide
queue ${ARTIFACT_DIR}/namespaces.json oc --insecure-skip-tls-verify --request-timeout=5s get namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/namespaces oc --insecure-skip-tls-verify --request-timeout=5s get namespaces
queue ${ARTIFACT_DIR}/nodes.json oc --insecure-skip-tls-verify --request-timeout=5s get nodes -o json
queue ${ARTIFACT_DIR}/oc_cmds/nodes oc --insecure-skip-tls-verify --request-timeout=5s get nodes -o wide
queue ${ARTIFACT_DIR}/openshiftapiserver.json oc --insecure-skip-tls-verify --request-timeout=5s get openshiftapiserver -o json
queue ${ARTIFACT_DIR}/oc_cmds/openshiftapiserver oc --insecure-skip-tls-verify --request-timeout=5s get openshiftapiserver
queue ${ARTIFACT_DIR}/pods.json oc --insecure-skip-tls-verify --request-timeout=5s get pods --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/pods oc --insecure-skip-tls-verify --request-timeout=5s get pods --all-namespaces -o wide
queue ${ARTIFACT_DIR}/persistentvolumes.json oc --insecure-skip-tls-verify --request-timeout=5s get persistentvolumes --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/persistentvolumes oc --insecure-skip-tls-verify --request-timeout=5s get persistentvolumes --all-namespaces -o wide
queue ${ARTIFACT_DIR}/persistentvolumeclaims.json oc --insecure-skip-tls-verify --request-timeout=5s get persistentvolumeclaims --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/persistentvolumeclaims oc --insecure-skip-tls-verify --request-timeout=5s get persistentvolumeclaims --all-namespaces -o wide
FILTER=gzip queue ${ARTIFACT_DIR}/replicasets.json.gz oc --insecure-skip-tls-verify --request-timeout=5s get replicasets --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/replicasets oc --insecure-skip-tls-verify --request-timeout=5s get replicasets --all-namespaces -o wide
queue ${ARTIFACT_DIR}/rolebindings.json oc --insecure-skip-tls-verify --request-timeout=5s get rolebindings --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/rolebindings oc --insecure-skip-tls-verify --request-timeout=5s get rolebindings --all-namespaces
queue ${ARTIFACT_DIR}/roles.json oc --insecure-skip-tls-verify --request-timeout=5s get roles --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/roles oc --insecure-skip-tls-verify --request-timeout=5s get roles --all-namespaces
queue ${ARTIFACT_DIR}/services.json oc --insecure-skip-tls-verify --request-timeout=5s get services --all-namespaces -o json
queue ${ARTIFACT_DIR}/oc_cmds/services oc --insecure-skip-tls-verify --request-timeout=5s get services --all-namespaces
FILTER=gzip queue ${ARTIFACT_DIR}/statefulsets.json.gz oc --insecure-skip-tls-verify --request-timeout=5s get statefulsets --all-namespaces -o json
queue ${ARTIFACT_DIR}/statefulsets oc --insecure-skip-tls-verify --request-timeout=5s get statefulsets --all-namespaces
FILTER=gzip queue ${ARTIFACT_DIR}/openapi.json.gz oc --insecure-skip-tls-verify --request-timeout=5s get --raw /openapi/v2
# gather nodes first in parallel since they may contain the most relevant debugging info
while IFS= read -r i; do
mkdir -p ${ARTIFACT_DIR}/nodes/$i
queue ${ARTIFACT_DIR}/nodes/$i/heap oc --insecure-skip-tls-verify get --request-timeout=20s --raw /api/v1/nodes/$i/proxy/debug/pprof/heap
FILTER=gzip queue ${ARTIFACT_DIR}/nodes/$i/journal.gz oc --insecure-skip-tls-verify adm node-logs $i --unify=false
FILTER=gzip queue ${ARTIFACT_DIR}/nodes/$i/journal-previous.gz oc --insecure-skip-tls-verify adm node-logs $i --unify=false --boot=-1
FILTER=gzip queue ${ARTIFACT_DIR}/nodes/$i/audit.gz oc --insecure-skip-tls-verify adm node-logs $i --unify=false --path=audit/audit.log
done < /tmp/nodes
# Snapshot iptables-save on each node for debugging possible kube-proxy issues
oc --insecure-skip-tls-verify get --request-timeout=20s -n openshift-sdn -l app=sdn pods --template '{{ range .items }}{{ .metadata.name }}{{ "\n" }}{{ end }}' > /tmp/sdn-pods
while IFS= read -r i; do
queue ${ARTIFACT_DIR}/network/iptables-save-$i oc --insecure-skip-tls-verify rsh --timeout=20 -n openshift-sdn -c sdn $i iptables-save -c
done < /tmp/sdn-pods
while IFS= read -r i; do
file="$( echo "$i" | cut -d ' ' -f 3 | tr -s ' ' '_' )"
queue ${ARTIFACT_DIR}/metrics/${file}-heap oc --insecure-skip-tls-verify exec $i -- /bin/bash -c 'oc --insecure-skip-tls-verify get --raw /debug/pprof/heap --server "https://$( hostname ):8443" --config /etc/origin/master/admin.kubeconfig'
queue ${ARTIFACT_DIR}/metrics/${file}-controllers-heap oc --insecure-skip-tls-verify exec $i -- /bin/bash -c 'oc --insecure-skip-tls-verify get --raw /debug/pprof/heap --server "https://$( hostname ):8444" --config /etc/origin/master/admin.kubeconfig'
done < /tmp/pods-api
while IFS= read -r i; do
file="$( echo "$i" | cut -d ' ' -f 2,3,5 | tr -s ' ' '_' )"
FILTER=gzip queue ${ARTIFACT_DIR}/pods/${file}.log.gz oc --insecure-skip-tls-verify logs --request-timeout=20s $i
FILTER=gzip queue ${ARTIFACT_DIR}/pods/${file}_previous.log.gz oc --insecure-skip-tls-verify logs --request-timeout=20s -p $i
done < /tmp/containers
echo "Snapshotting prometheus (may take 15s) ..."
# Snapshot the prometheus data from the replica that has the oldest
# PVC. If persistent storage isn't enabled, it uses the last
# prometheus instances by default to catch issues that occur when the
# first prometheus pod upgrades.
prometheus="$( oc --insecure-skip-tls-verify get pods -n openshift-monitoring -l app.kubernetes.io/name=prometheus | tail -1 )"
if [[ -n "$( oc --insecure-skip-tls-verify get pvc -n openshift-monitoring -l app.kubernetes.io/name=prometheus --ignore-not-found=true )" ]]; then
pvc="$( oc --insecure-skip-tls-verify get pvc -n openshift-monitoring -l app.kubernetes.io/name=prometheus --sort-by=.metadata.creationTimestamp -o jsonpath='{.items[0].metadata.name}' )"
prometheus="${pvc##prometheus-data-}"
fi
queue ${ARTIFACT_DIR}/metrics/prometheus.tar.gz oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- tar cvzf - -C /prometheus .
cat >> ${SHARED_DIR}/custom-links.txt << EOF
<script>
let a = document.createElement('a');
a.href="https://promecieus.dptools.openshift.org/?search="+document.referrer;
a.innerHTML="PromeCIeus";
document.getElementById("wrapper").append(a);
</script>
EOF
FILTER=gzip queue ${ARTIFACT_DIR}/metrics/prometheus-target-metadata.json.gz oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- /bin/bash -c "curl -G http://localhost:9090/api/v1/targets/metadata --data-urlencode 'match_target={instance!=\"\"}'"
FILTER=gzip queue ${ARTIFACT_DIR}/metrics/prometheus-config.json.gz oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- /bin/bash -c "curl -G http://localhost:9090/api/v1/status/config"
queue ${ARTIFACT_DIR}/metrics/prometheus-tsdb-status.json oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- /bin/bash -c "curl -G http://localhost:9090/api/v1/status/tsdb"
queue ${ARTIFACT_DIR}/metrics/prometheus-runtimeinfo.json oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- /bin/bash -c "curl -G http://localhost:9090/api/v1/status/runtimeinfo"
queue ${ARTIFACT_DIR}/metrics/prometheus-targets.json oc --insecure-skip-tls-verify exec -n openshift-monitoring "${prometheus}" -- /bin/bash -c "curl -G http://localhost:9090/api/v1/targets"
# Calculate metrics suitable for apples-to-apples comparison across CI runs.
# Load whatever timestamps we can, generate the metrics script, and then send it to the
# thanos-querier pod on the cluster via exec (so we don't need to have a route exposed).
echo "Saving job metrics"
cat >/tmp/generate.sh <<'GENERATE'
#!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
# CI job metrics extraction
#
# This script gathers a number of important query metrics from the metrics
# stack in a cluster after tests are complete. It gathers metrics related to
# three phases - install, test, and overall (install start to test end).
#
# Prometheus may not have data from early in an install, and some runs may
# result in outage to prometheus, so queries have to look at measurements
# that may have gaps or be incomplete.
#
# A metric belongs in this set if it is useful in tracking a trend over time
# in the behavior of the cluster at install time or over the test run - for
# instance, by looking at the total CPU usage of the control plane, we can
# perform apples to apples comparisons between two cloud platforms and look
# for places where we are inadequate. The metrics are output to the artifacts
# dir and then are processed by the ci-search indexer cloud functions to be
# visualized by ci-search.
#
# The output of the script is a file with one JSON object per line consisting
# of:
#
# {"<name_of_metric>":<prometheus query result object>}
#
# The prometheus query result object is described here:
# https://prometheus.io/docs/prometheus/latest/querying/api/
#
# Metrics are expected to return a scalar, a vector with a single entry and
# no labels, or a vector with a single label and a single entry.
#
# This script outputs a script that is intended to be invoked against a local
# prometheus instance. In the CI environment we run this script inside the
# pod that contains the Thanos querier, but it can be used locally for testing
# against a prometheus instance running at localhost:9090.
#########
# Take as arguments a set of env vars for the phases (install, test, all) that
# contain the unix timestamp of the start and end of the two main phases, then
# calculate what we can. If a phase is missing, that may mean the test script
# could not run to completion, in which case we will not define the variable
# and some metrics will not be calculated or output. Omitting a query if it
# can't be calculate is important, because the zero value may be meaningful.
#
# - t_* is the unix timestamp at the end
# - s_* is the number of seconds the phase took
# - d_* is a prometheus duration of the phase as "<seconds>s"
t_now=$(date +%s)
if [[ -n "${TEST_TIME_INSTALL_END-}" ]]; then
t_install=${TEST_TIME_INSTALL_END}
if [[ -n "${TEST_TIME_INSTALL_START-}" ]]; then
s_install="$(( TEST_TIME_INSTALL_END - TEST_TIME_INSTALL_START ))"
d_install="${s_install}s"
fi
fi
if [[ -n "${TEST_TIME_TEST_END-}" ]]; then
t_test=${TEST_TIME_TEST_END}
if [[ -n "${TEST_TIME_TEST_START-}" ]]; then
s_test="$(( TEST_TIME_TEST_END - TEST_TIME_TEST_START ))"
d_test="${s_test}s"
fi
fi
if [[ -n "${TEST_TIME_TEST_START-}" || "${TEST_TIME_INSTALL_START-}" ]]; then
t_start=${TEST_TIME_INSTALL_START:-${TEST_TIME_TEST_START}}
fi
t_all=${t_test:-${t_install:-${t_now}}}
if [[ -n "${t_start-}" ]]; then
s_all="$(( t_all - t_start ))"
d_all="${s_all}s"
fi
# We process this query file one line at a time - if a variable is undefined we'll skip the
# entire query.
cat > /tmp/queries <<'END'
${t_install} cluster:capacity:cpu:total:cores sum(cluster:capacity_cpu_cores:sum)
${t_install} cluster:capacity:cpu:control_plane:cores max(cluster:capacity_cpu_cores:sum{label_node_role_kubernetes_io="master"})
${t_all} cluster:usage:cpu:total:seconds sum(increase(container_cpu_usage_seconds_total{id="/"}[${d_all}]))
${t_install} cluster:usage:cpu:install:seconds sum(increase(container_cpu_usage_seconds_total{id="/"}[${d_install}]))
${t_test} cluster:usage:cpu:test:seconds sum(increase(container_cpu_usage_seconds_total{id="/"}[${d_test}]))
${t_all} cluster:usage:cpu:total:rate sum(rate(container_cpu_usage_seconds_total{id="/"}[${d_all}]))
${t_install} cluster:usage:cpu:install:rate sum(rate(container_cpu_usage_seconds_total{id="/"}[${d_install}]))
${t_test} cluster:usage:cpu:test:rate sum(rate(container_cpu_usage_seconds_total{id="/"}[${d_test}]))
${t_all} cluster:usage:cpu:control_plane:total:avg avg(rate(container_cpu_usage_seconds_total{id="/"}[${d_all}]) * on(node) group_left() group by (node) (kube_node_role{role="master"}))
${t_install} cluster:usage:cpu:control_plane:install:avg avg(rate(container_cpu_usage_seconds_total{id="/"}[${d_install}]) * on(node) group_left() group by (node) (kube_node_role{role="master"}))
${t_test} cluster:usage:cpu:control_plane:test:avg avg(rate(container_cpu_usage_seconds_total{id="/"}[${d_test}]) * on(node) group_left() group by (node) (kube_node_role{role="master"}))
${t_all} cluster:usage:mem:rss:control_plane:quantile label_replace(max(quantile_over_time(0.99, ((container_memory_rss{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.99", "", "")
${t_all} cluster:usage:mem:rss:control_plane:quantile label_replace(max(quantile_over_time(0.9, ((container_memory_rss{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.9", "", "")
${t_all} cluster:usage:mem:rss:control_plane:quantile label_replace(max(quantile_over_time(0.5, ((container_memory_rss{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.5", "", "")
${t_all} cluster:usage:mem:working_set:control_plane:quantile label_replace(max(quantile_over_time(0.99, ((container_memory_working_set_bytes{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.99", "", "")
${t_all} cluster:usage:mem:working_set:control_plane:quantile label_replace(max(quantile_over_time(0.9, ((container_memory_working_set_bytes{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.9", "", "")
${t_all} cluster:usage:mem:working_set:control_plane:quantile label_replace(max(quantile_over_time(0.5, ((container_memory_working_set_bytes{id="/"} * on(node) group_left() group by (node) (kube_node_role{role="master"})))[${d_all}:1s] )), "quantile", "0.5", "", "")
${t_all} cluster:alerts:total:firing:distinct:severity count by (severity) (count by (alertname,severity) (count_over_time(ALERTS{alertstate="firing",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}[${d_all}])))
${t_test} cluster:alerts:total:firing:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="firing",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_test}:1s]))
${t_install} cluster:alerts:install:firing:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="firing",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_install}:1s]))
${t_test} cluster:alerts:test:firing:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="firing",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_test}:1s]))
${t_test} cluster:alerts:total:pending:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="pending",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_test}:1s]))
${t_install} cluster:alerts:install:pending:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="pending",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_install}:1s]))
${t_test} cluster:alerts:test:pending:seconds:severity count_over_time((sum by (severity) (count by (alertname,severity) (ALERTS{alertstate="pending",alertname!~"AlertmanagerReceiversNotConfigured|Watchdog"}))[${d_test}:1s]))
${t_all} cluster:api:total:requests sum(increase(apiserver_request_total[${d_all}]))
${t_install} cluster:api:install:requests sum(increase(apiserver_request_total[${d_install}]))
${t_test} cluster:api:requests:test sum(increase(apiserver_request_total[${d_test}]))
${t_all} cluster:api:read:total:requests sum(increase(apiserver_request_total{verb=~"GET|LIST|WATCH"}[${d_all}]))
${t_install} cluster:api:read:install:requests sum(increase(apiserver_request_total{verb=~"GET|LIST|WATCH"}[${d_install}]))
${t_test} cluster:api:read:test:requests sum(increase(apiserver_request_total{verb=~"GET|LIST|WATCH"}[${d_test}]))
${t_all} cluster:api:write:total:requests sum(increase(apiserver_request_total{verb!~"GET|LIST|WATCH"}[${d_all}]))
${t_install} cluster:api:write:install:requests sum(increase(apiserver_request_total{verb!~"GET|LIST|WATCH"}[${d_install}]))
${t_test} cluster:api:write:test:requests sum(increase(apiserver_request_total{verb!~"GET|LIST|WATCH"}[${d_test}]))
${t_all} cluster:api:errors:total:requests sum(increase(apiserver_request_total{code=~"5\\\\d\\\\d|0"}[${d_all}]))
${t_install} cluster:api:errors:install:requests sum(increase(apiserver_request_total{code=~"5\\\\d\\\\d|0"}[${d_install}]))
${t_install} cluster:resource:install:count sort_desc(max by(resource) (etcd_object_counts)) > 1
${t_test} cluster:resource:test:delta sort_desc(max by(resource) (delta(etcd_object_counts[${d_test}]))) != 0
${t_all} cluster:node:total:boots sum(increase(node_boots_total[${d_all}]))
${t_test} cluster:node:test:boots sum(increase(node_boots_total[${d_test}]))
${t_all} cluster:pod:openshift:unready:total:fraction 1-max(avg_over_time(cluster:usage:openshift:kube_running_pod_ready:avg[${d_all}]))
${t_install} cluster:pod:openshift:unready:install:fraction 1-max(avg_over_time(cluster:usage:openshift:kube_running_pod_ready:avg[${d_install}]))
${t_test} cluster:pod:openshift:unready:test:fraction 1-max(avg_over_time(cluster:usage:openshift:kube_running_pod_ready:avg[${d_test}]))
${t_all} cluster:pod:openshift:started:total:count sum(changes(kube_pod_start_time{namespace=~"openshift-.*"}[${d_all}]) + 1)
${t_install} cluster:pod:openshift:started:install:count sum(changes(kube_pod_start_time{namespace=~"openshift-.*"}[${d_install}]) + 1)
${t_test} cluster:pod:openshift:started:test:count sum(changes(kube_pod_start_time{namespace=~"openshift-.*"}[${d_test}]))
${t_all} cluster:container:total:started count(count_over_time((count without(container,endpoint,name,namespace,pod,service,job,metrics_path,instance,image) (container_start_time_seconds{container!="",container!="POD",pod!=""}))[${d_all}:30s]))
${t_install} cluster:container:install:started count(count_over_time((count without(container,endpoint,name,namespace,pod,service,job,metrics_path,instance,image) (container_start_time_seconds{container!="",container!="POD",pod!=""}))[${d_install}:30s]))
${t_test} cluster:container:test:started count(count_over_time((count without(container,endpoint,name,namespace,pod,service,job,metrics_path,instance,image) (container_start_time_seconds{container!="",container!="POD",pod!=""} > (${t_test}-${s_test})))[${d_test}:30s]))
${t_all} cluster:version:info:total topk(1, max by (version) (max_over_time(cluster_version{type="completed"}[${d_all}])))*0+1
${t_install} cluster:version:info:install topk(1, max by (version) (max_over_time(cluster_version{type="completed"}[${d_install}])))*0+1
${t_all} cluster:version:current:seconds count_over_time(max by (version) ((cluster_version{type="current"}))[${d_all}:1s])
${t_test} cluster:version:updates:seconds count_over_time(max by (version) ((cluster_version{type="updating",from_version!=""}))[${d_test}:1s])
${t_all} job:duration:total:seconds vector(${s_all})
${t_install} job:duration:install:seconds vector(${s_install})
${t_test} job:duration:test:seconds vector(${s_test})
${t_all} cluster:promtail:failed_targets sum by (pod) (promtail_targets_failed_total{reason!="exists"})
${t_all} cluster:promtail:dropped_entries sum by (pod) (promtail_dropped_entries_total)
${t_all} cluster:promtail:request:duration sum by (status_code) (rate(promtail_request_duration_seconds_count[${d_all}]))
END
# topk(1, max by (image, version) (max_over_time(cluster_version{type="completed"}[30m])))
# Perform variable replacement by putting each line of the query file through an eval and then outputting
# it back to a file.
# glob expansion is disabled because we use '*' in queries for multiplication
set -f
# clear the file
echo > /tmp/queries_resolved
while IFS= read -r i; do
if [[ -z "${i}" ]]; then continue; fi
# Try to convert the line of the file into a query, performing bash substitution AND catch undefined variables
# The heredoc is necessary because bash will perform quote evaluation on labels in queries (pod="x" becomes pod=x)
if ! q=$( eval $'cat <<END\n'$i$'\nEND\n' 2>/dev/null ); then
# evaluate the errors and output them to stderr
(
set +e
set +x
q=$( eval $'cat <<END\n'$i$'\nEND\n' 2>&1 1>/dev/null )
echo "error: Query '${i}' was not valid:$(echo "${q}" | cut -f 3- -d ':')" 1>&2
)
continue
fi
echo "${q}" >> /tmp/queries_resolved
done < /tmp/queries
set +f
# Output the script to execute. The first part embeds the evaluated queries and will write them to /tmp
# on the remote system.
cat <<SCRIPT
#!/bin/bash
set -euo pipefail
cat > /tmp/queries <<'END'
$( cat /tmp/queries_resolved )
END
SCRIPT
# The second part of the script iterates over the evaluated queries and queries a local prometheus.
# Variables are not expanded in this section.
cat <<'SCRIPT'
while IFS= read -r q; do
if [[ -z "${q}" ]]; then continue; fi
# part up the line '<unix_timestamp_query_time> <name> <query>'
timestamp=${q%% *}
q=${q#* }
name=${q%% *}
query="${q#* }"
# perform the query against the local prometheus instance
if ! out=$( curl -f --silent http://localhost:9090/api/v1/query --data-urlencode "time=${timestamp}" --data-urlencode "query=${query}" ); then
echo "error: Query ${name} failed at ${timestamp}: ${query}" 1>&2
continue
fi
# wrap the
echo "{\"${name}\":${out}}"
done < /tmp/queries
SCRIPT
GENERATE
script="$(
TEST_TIME_INSTALL_START="$( cat ${SHARED_DIR}/TEST_TIME_INSTALL_START || true )" \
TEST_TIME_INSTALL_END="$( cat ${SHARED_DIR}/TEST_TIME_INSTALL_END || true )" \
TEST_TIME_TEST_START="$( cat ${SHARED_DIR}/TEST_TIME_TEST_START || true )" \
TEST_TIME_TEST_END="$( cat ${SHARED_DIR}/TEST_TIME_TEST_END || true )" \
bash /tmp/generate.sh
)"
queue ${ARTIFACT_DIR}/metrics/job_metrics.json oc --insecure-skip-tls-verify rsh -T -n openshift-monitoring -c thanos-query deploy/thanos-querier /bin/bash -c "${script}"
wait
# This is a temporary conversion of cluster operator status to JSON matching the upgrade - may be moved to code in the future
curl -sL https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 >/tmp/jq && chmod ug+x /tmp/jq
mkdir -p ${ARTIFACT_DIR}/junit/
<${ARTIFACT_DIR}/clusteroperators.json /tmp/jq -r 'def one(condition; t): t as $t | first([.[] | select(condition)] | map(.type=t)[]) // null; def msg: "Operator \(.type) (\(.reason)): \(.message)"; def xmlfailure: if .failure then "<failure message=\"\(.failure | @html)\">\(.failure | @html)</failure>" else "" end; def xmltest: "<testcase name=\"\(.name | @html)\">\( xmlfailure )</testcase>"; def withconditions: map({name: "operator conditions \(.metadata.name)"} + ((.status.conditions // [{type:"Available",status: "False",message:"operator is not reporting conditions"}]) | (one(.type=="Available" and .status!="True"; "unavailable") // one(.type=="Degraded" and .status=="True"; "degraded") // one(.type=="Progressing" and .status=="True"; "progressing") // null) | if . then {failure: .|msg} else null end)); .items | withconditions | "<testsuite name=\"Operator results\" tests=\"\( length )\" failures=\"\( [.[] | select(.failure)] | length )\">\n\( [.[] | xmltest] | join("\n"))\n</testsuite>"' >${ARTIFACT_DIR}/junit/junit_install_status.xml
# This is an experimental wiring of autogenerated failure detection.
echo "Detect known failures from symptoms (experimental) ..."
curl -f https://gist.githubusercontent.com/smarterclayton/03b50c8f9b6351b2d9903d7fb35b342f/raw/symptom.sh 2>/dev/null | bash -s ${ARTIFACT_DIR} > ${ARTIFACT_DIR}/junit/junit_symptoms.xml
# Create custom-link-tools.html from custom-links.txt
REPORT="${ARTIFACT_DIR}/custom-link-tools.html"
cat >> ${REPORT} << EOF
<link rel="stylesheet" type="text/css" href="/static/style.css">
<link rel="stylesheet" type="text/css" href="/static/extensions/style.css">
<link href="https://fonts.googleapis.com/css?family=Roboto:400,700" rel="stylesheet">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<link rel="stylesheet" type="text/css" href="/static/spyglass/spyglass.css">
EOF
cat ${SHARED_DIR}/custom-links.txt >> ${REPORT}
|
import { resolve } from 'path';
import { EnvironmentConfig } from '../models/index';
import { absolutePath } from './file-system';
const absolutePathCheck = /^(\~?\/)/;
export function absoluteConfigPath(config: EnvironmentConfig, path: string): string {
return (absolutePathCheck.test(path))
? absolutePath(path)
: resolve(config.fileRoot || '', path);
} |
package ca.antaki.www.cat.producer.model;
public class Cat {

    /** Immutable name; this is also the sole identity used by equals/hashCode. */
    private final String name;

    // using a byte (primitive type) in order to save space if we want to have millions of Cats
    private byte mood = CatMood.MIAW.getId();

    public Cat(String name) {
        super();
        this.name = name;
    }

    /** Returns the current mood id, widened from the internal byte. */
    public int getMood() {
        return mood;
    }

    public void setMood(byte mood) {
        this.mood = mood;
    }

    public String getName() {
        return name;
    }

    @Override
    public String toString() {
        return new StringBuilder(name).append(" ").append(mood).toString();
    }

    @Override
    public int hashCode() { // I could use HashCodeBuilder but this is faster
        final int prime = 31;
        int result = 1;
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) { // I could use EqualsBuilder but this is faster
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Cat other = (Cat) obj;
        // BUG FIX: hashCode() tolerates a null name but the old equals() threw
        // a NullPointerException for it; keep the two contracts consistent.
        return (name == null) ? other.name == null : name.equals(other.name);
    }
}
|
<gh_stars>1-10
import { MongoClient, Db, Collection } from "mongodb";
// json schema for contact us info
// Shape of one stored "contact us" form submission.
export interface ContactUsData{
    name:string;        // submitter's name
    email:string;       // submitter's email address
    comment:string;     // free-form message body
    timestamp:string;   // locale-formatted time the submission was stored
    viewed:boolean;     // true once an admin has fetched the submission
}

// Thin data-access layer over the "contact_us" MongoDB collection.
export class DatabaseManager{
    private _mongoClient:MongoClient; // mongodb connection
    private _db:Db; // connection database

    constructor(mongoClient:MongoClient){
        this._mongoClient = mongoClient;
        // Uses the default database named in the connection string.
        this._db = this._mongoClient.db();
    }

    // creates the mongo collection (table)
    public createCollections():Promise<Collection[]>{
        return Promise.all([
            this._db.createCollection("contact_us")
        ]);
    }

    // inserts a new submission
    // @param doc document to insert
    // Resolves with 1 on success; rejects with the driver error otherwise.
    public insert(name:string, email:string, comment:string):Promise<number>{
        return new Promise((resolve, reject) => {
            // create document from parameters, generate timestamp
            let doc:ContactUsData = {
                name, email, comment, timestamp: new Date().toLocaleString(), viewed: false
            };
            // insert into database
            this.contactUsCollection.insertOne(doc, err => {
                // error or success
                err ? reject(err) : resolve(1);
            });
        });
    }

    // gets every submission
    // NOTE: has a side effect — all unviewed submissions are marked viewed
    // before the results are returned.
    public getAll():Promise<ContactUsData[]>{
        return new Promise((resolve, reject) => {
            // find everything
            this.contactUsCollection.find().toArray()
                .then(results => {
                    this.markAllViewed()
                        .then(() => resolve(results))
                        .catch(err => reject(err));
                })
                .catch(err => reject(err));
        });
    }

    // marks every unviewed document as viewed
    // Resolves with the number of documents actually modified.
    private markAllViewed():Promise<number>{
        return new Promise((resolve, reject) => {
            // find any not viewed
            let filter = {viewed: {$ne: true}};
            // viewed to true
            let update = {$set: {viewed: true}};
            // find and update
            this.contactUsCollection.updateMany(filter, update)
                .then(result => resolve(result.result.nModified))
                .catch(err => reject(err));
        });
    }

    // getter for the "contact_us" collection
    private get contactUsCollection():Collection{
        return this._db.collection("contact_us");
    }
}
<filename>ProductCheck/product_check/utilities.py
from django.core.mail import EmailMultiAlternatives, BadHeaderError
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
from smtplib import SMTPException
def mail_user(product_data):
    """
    Notify the customer about a product price change via email.

    Args:
        product_data: Latest scraped product data; must contain the keys
            ``product_url`` and ``product_price``.

    Returns:
        None. (The original docstring claimed scraped data and an HTTP code
        were returned, which was never true.) All errors are reported to
        stdout and swallowed so a mail failure cannot break the caller.
    """
    try:
        subject = 'ProductCheck price alert'
        from_email = settings.DEFAULT_FROM
        to_email = settings.DEFAULT_TO
        html_content = render_to_string(
            'mail_template.html',
            {'product_url': product_data['product_url'], 'price': product_data['product_price']},
        )
        # Plain-text fallback for clients that do not render HTML.
        text_content = strip_tags(html_content)
        msg = EmailMultiAlternatives(subject, text_content, from_email, [to_email])
        msg.attach_alternative(html_content, "text/html")
        msg.send()
    except BadHeaderError as error:
        # Raised by Django to prevent email header injection.
        print('Subject was not properly formatted: ' + str(error))
    except SMTPException as error:
        print('There was an error sending an email: ' + str(error))
    except Exception as error:
        # Last-resort guard (template/settings problems); distinct message so
        # it is distinguishable from SMTP failures in the logs.
        print('There was an unexpected error sending an email: ' + str(error))
#!/bin/bash
# Build a Docker image from the pipenv-managed project and run its default
# command with a named volume mounted for log persistence.
set -euxo pipefail
# Export the locked dependency set so the Docker build can pip-install it.
python3 -m pipenv run pip freeze > requirements.txt
image_name="python:pipenv"
docker build --rm --pull --file "Dockerfile" --label "python-pip-env" --tag "${image_name}" .
# Named volume so logs survive across container runs.
vol_name="python-pipenv-vol"
docker volume create "${vol_name}"
# Run the default CMD / RUN in Dockerfile
workdir="app"
docker run \
--rm \
--workdir /${workdir} \
--mount type=volume,source="${vol_name}",destination=/${workdir}/logs \
${image_name}
<reponame>wagnerjt/halin
import React, { Component } from 'react';
import { Button, Icon, Table } from 'semantic-ui-react';
import status from '../../../api/status/index';
import datautil from '../../../api/data/util';
import sentry from '../../../api/sentry/index';
import SignalMeter from '../../data/SignalMeter/SignalMeter';
import Spinner from '../../ui/scaffold/Spinner/Spinner';
import NodeLabel from '../../ui/scaffold/NodeLabel/NodeLabel';
import Explainer from '../../ui/scaffold/Explainer/Explainer';
export default class Ping extends Component {
state = {
allResults: [],
pingResults: null,
message: null,
error: null,
};
componentWillMount() {
this.mounted = true;
this.ping();
}
componentWillUnmount() {
this.mounted = false;
}
ping() {
if (!this.mounted) { return false; }
const ctx = window.halinContext;
const promises = ctx.members().map(node => ctx.ping(node));
if (this.state.pingResults) {
this.state.allResults.push(this.state.pingResults);
}
this.setState({
pingResults: null,
message: null,
error: null,
});
return Promise.all(promises)
.then(pingResults => {
if (this.mounted) {
this.setState({
pingResults,
message: null,
error: null,
});
}
})
.catch(err => {
sentry.reportError(err);
if (this.mounted) {
this.setState({
error: status.message('Failed to ping Neo4j', `${err}`),
});
}
});
}
haveErrors() {
if (!this.state.pingResults) { return false; }
return this.state.pingResults.filter(pr => pr.err).length > 0;
}
render() {
let message = status.formatStatusMessage(this);
let rows = [
{ header: 'Machine', show: true, render: r => <NodeLabel node={r.clusterMember} /> },
{ header: 'Role', show: true, render: r => r.clusterMember.role },
{ header: 'Result (ms)', show: true, render: r => r.elapsedMs },
{ header: 'Status', show: true, render: r => <SignalMeter strength={datautil.signalStrengthFromPing(r.elapsedMs, r.err) } /> },
{
header: 'Other Information',
show: this.haveErrors(),
render: r => r.err ? `${r.err}` : 'none',
},
];
return this.state.pingResults ? (
<div className='Ping'>
<h3>Ping <Explainer knowledgebase='Ping'/></h3>
{ message }
<Table celled>
<Table.Header>
<Table.Row>{
rows.map((r, i) => <Table.HeaderCell key={i}>{r.header}</Table.HeaderCell>)
}</Table.Row>
</Table.Header>
<Table.Body>
{
this.state.pingResults.map((r, resultIdx) =>
<Table.Row key={resultIdx}>{
rows.map((row, rowIdx) =>
<Table.Cell key={rowIdx}>{row.render(r)}</Table.Cell>)
}</Table.Row>)
}
</Table.Body>
</Table>
<Button basic onClick={() => this.ping()}>
<Icon name='cogs'/>
Ping Again
</Button>
</div>
) : <Spinner active={true}/>;
}
} |
#!/bin/bash

# Required parameters:
# @raycast.schemaVersion 1
# @raycast.title Whois of Clipboard URL
# @raycast.mode silent

# Optional parameters:
# @raycast.author Caleb Stauffer
# @raycast.authorURL https://github.com/crstauf
# @raycast.description Whois of clipboard URL.
# @raycast.packageName Internet
# @raycast.needsConfirmation true
# @raycast.icon 🌐

clipboard=$(pbpaste)

# FIX: quote the expansion so clipboard text containing spaces or shell
# metacharacters is passed to `open` as a single argument.
open "https://who.is/whois/$clipboard"
#!/bin/bash
# Rebuild the compose image from scratch and smoke-test the python service.
set -euo pipefail
# --no-cache forces every layer to rebuild.
docker-compose build --no-cache
# Run the service's default command once...
docker-compose run --rm python
# ...then verify the interpreter itself responds.
docker-compose run --rm python python -c 'print("1>Hello, world!")'
<filename>src/sentry/static/sentry/app/components/seenByList.tsx
import React from 'react';
import classNames from 'classnames';
import moment from 'moment';
import styled from '@emotion/styled';
import {t} from 'app/locale';
import {User} from 'app/types';
import {IconShow} from 'app/icons';
import {userDisplayName} from 'app/utils/formatters';
import AvatarList from 'app/components/avatar/avatarList';
import ConfigStore from 'app/stores/configStore';
import Tooltip from 'app/components/tooltip';
import space from 'app/styles/space';
type Props = {
  // Avatar size
  avatarSize?: number;
  // List of *all* users that have seen something
  seenBy?: User[];
  // Tooltip message for the "Seen By" icon
  iconTooltip?: string;
  // Max avatars to display
  maxVisibleAvatars?: number;
  // Which side of the avatar list the "eye" icon is rendered on
  iconPosition?: 'left' | 'right';
  className?: string;
};

// Renders a row of avatars for everyone (except the current user) who has
// viewed an item, plus an "eye" icon with an explanatory tooltip.
const SeenByList = ({
  avatarSize = 28,
  seenBy = [],
  iconTooltip = t('People who have viewed this'),
  maxVisibleAvatars = 10,
  iconPosition = 'left',
  className,
}: Props) => {
  const activeUser = ConfigStore.get('user');

  // The current user always "has seen" the item, so showing them is noise.
  const displayUsers = seenBy.filter(user => activeUser.id !== user.id);

  // Render nothing at all when nobody else has viewed the item.
  if (displayUsers.length === 0) {
    return null;
  }

  // Note className="seen-by" is required for responsive design
  return (
    <SeenByWrapper
      iconPosition={iconPosition}
      className={classNames('seen-by', className)}
    >
      <AvatarList
        users={displayUsers}
        avatarSize={avatarSize}
        maxVisibleAvatars={maxVisibleAvatars}
        renderTooltip={user => (
          <React.Fragment>
            {userDisplayName(user)}
            <br />
            {moment(user.lastSeen).format('LL')}
          </React.Fragment>
        )}
      />
      <IconWrapper iconPosition={iconPosition}>
        <Tooltip title={iconTooltip}>
          <IconShow size="sm" color="gray400" />
        </Tooltip>
      </IconWrapper>
    </SeenByWrapper>
  );
};

// Reverses flex order when the icon sits on the left of the avatar row.
const SeenByWrapper = styled('div')<{iconPosition: Props['iconPosition']}>`
  display: flex;
  margin-top: 15px;
  float: right;
  ${p => (p.iconPosition === 'left' ? 'flex-direction: row-reverse' : '')};
`;

const IconWrapper = styled('div')<{iconPosition: Props['iconPosition']}>`
  background-color: transparent;
  color: ${p => p.theme.gray700};
  height: 28px;
  width: 24px;
  line-height: 26px;
  text-align: center;
  padding-top: ${space(0.5)};
  ${p => (p.iconPosition === 'left' ? 'margin-right: 10px' : '')};
`;

export default SeenByList;
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
 * Query model for looking up a renter's Zhima (Sesame) Credit level.
 *
 * @author auto create
 * @since 1.0, 2018-06-25 14:48:31
 */
public class AlipayEcoRenthouseRenterZhimaQueryModel extends AlipayObject {

    private static final long serialVersionUID = 7654945488145129521L;

    /**
     * National ID card number of the renter.
     */
    @ApiField("card_no")
    private String cardNo;

    /**
     * Full name of the user.
     */
    @ApiField("user_name")
    private String userName;

    public String getCardNo() {
        return this.cardNo;
    }

    public void setCardNo(String cardNo) {
        this.cardNo = cardNo;
    }

    public String getUserName() {
        return this.userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }
}
|
from typing import Any
import yaml
class ConfigurationParser:
    """Loads a YAML configuration file and exposes simple accessors."""

    def __init__(self, configuration):
        # Parse the whole document eagerly so later accessors are cheap.
        with open(configuration, 'r') as stream:
            self.configuration = yaml.safe_load(stream)

    def validate(self):
        """Run every section validator (currently only istio)."""
        self.validate_istio()

    def validate_istio(self):
        """Placeholder for istio-specific validation rules."""

    def get(self):
        """Return the full parsed configuration mapping."""
        return self.configuration

    def get_istio(self):
        """Return the istio config subsection."""
        return self.configuration["istio"]["config"]
|
#!/bin/bash
# Installs the Cutelyst web framework (with uWSGI and Qt) for the benchmark
# suite; idempotent thanks to the fw_installed marker check.
fw_installed cutelyst && return 0

CUTELYST_VER=1.5.0
QT_VERSION_MM=56
QT_VERSION_FULL=562-trusty
CROOT=${IROOT}/cutelyst

# PPAs for a recent CMake and the matching Qt build.
sudo apt-add-repository --yes ppa:george-edison55/cmake-3.x
sudo apt-add-repository --yes ppa:beineri/opt-qt$QT_VERSION_FULL
sudo apt-get update -qq
sudo apt-get install -qqy \
cmake \
uwsgi \
uuid-dev \
libcap-dev \
libzmq3-dev \
clearsilver-dev \
libjemalloc-dev \
qt${QT_VERSION_MM}base \
qt${QT_VERSION_MM}script \
qt${QT_VERSION_MM}tools

# Let CMake find the PPA-installed Qt.
export CMAKE_PREFIX_PATH=/opt/qt${QT_VERSION_MM};

mkdir -p ${CROOT} || true

# Fetch, unpack and build Cutelyst from the release tarball.
cd ${CROOT}
fw_get -O https://github.com/cutelyst/cutelyst/archive/v$CUTELYST_VER.tar.gz
fw_untar v$CUTELYST_VER.tar.gz

cd cutelyst-$CUTELYST_VER
mkdir build && cd build
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=$CROOT \
-DUWSGI_PLUGINS_DIR=${CROOT}/lib/uwsgi/plugins \
-DUSE_JEMALLOC=on
make -j $MAX_THREADS && sudo make install

# Marker file consumed by fw_installed on the next run.
echo "QT_VERSION_MM=${QT_VERSION_MM}" > $IROOT/cutelyst.installed
<gh_stars>0
package com.kushal.orctotext.mapreduce;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
// Reducer for the ORC-to-text job. Currently a skeleton: every override
// either delegates to the base class or does nothing.
public class OrcToTextReducer extends Reducer<Text, Text, Text, Text> {

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // No extra resources to release; delegate to the default implementation.
        super.cleanup(context);
    }

    @Override
    public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
        // Intentionally a no-op: nothing is emitted from the reduce phase.
        // NOTE(review): presumably the job's output comes entirely from the
        // map phase — confirm against the driver configuration.
    }

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // No per-task initialization needed beyond the default.
        super.setup(context);
    }
}
|
public static int gcd(int a,int b)
{
// Everything divides 0
if (a == 0)
return b;
if (b == 0)
return a;
// base case
if (a == b)
return a;
// a is greater
if (a > b)
return gcd(a-b, b);
return gcd(a, b-a);
} |
package ddbt.tpcc.loadtest.load
import java.sql.Connection
import java.sql.PreparedStatement
import java.sql.SQLException
/**
* Data loader using prepared statements and batches. This is slower than the JdbcStatementLoader which uses
* bulk inserts.
*/
class JdbcPreparedStatementLoader(var conn: Connection,
    var tableName: String,
    var columnName: Array[String],
    var ignore: Boolean,
    var maxBatchSize: Int) extends RecordLoader {

  var pstmt: PreparedStatement = _
  // Number of rows queued in the current JDBC batch, flushed at maxBatchSize.
  var currentBatchSize: Int = _

  // Build "INSERT [IGNORE] INTO `table` (c1,c2,...) VALUES (?,?,...)" once
  // at construction time; the same prepared statement is reused for all rows.
  val b = new StringBuilder()
  b.append("INSERT ")
  if (ignore) {
    b.append("IGNORE ")
  }
  b.append("INTO `").append(tableName).append("` (")
  for (i <- 0 until columnName.length) {
    if (i > 0) {
      b.append(',')
    }
    b.append(columnName(i).trim())
  }
  b.append(") VALUES (")
  for (i <- 0 until columnName.length) {
    if (i > 0) {
      b.append(',')
    }
    b.append('?')
  }
  b.append(')')
  val sql = b.toString

  // Manual commit so a whole batch is atomic; statement prepared up front.
  {
    this.conn.setAutoCommit(false)
    this.pstmt = conn.prepareStatement(sql)
  }

  // Bind one record's fields (positionally) and queue it; flushes the batch
  // automatically once maxBatchSize rows are queued.
  def load(r: Record) {
    for (i <- 0 until columnName.length) {
      pstmt.setObject(i + 1, r.getField(i))
    }
    pstmt.addBatch()
    currentBatchSize += 1
    if (currentBatchSize == maxBatchSize) {
      executeCurrentBatch()
    }
  }

  private def executeCurrentBatch() {
    pstmt.executeBatch()
    currentBatchSize = 0
  }

  def commit() {
    conn.commit()
  }

  // Flushes any partial batch, closes the statement and commits.
  def close() {
    executeCurrentBatch()
    pstmt.close()
    conn.commit()
  }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check for circular dependencies
# Fails (exit 1) when a NEW cycle appears, and also when a cycle listed
# below has disappeared (so the allowlist stays minimal).

export LC_ALL=C

# Known, tolerated cycles. Each entry must match the tool's output exactly.
EXPECTED_CIRCULAR_DEPENDENCIES=(
"chainparamsbase -> util -> chainparamsbase"
"checkpoints -> validation -> checkpoints"
"index/txindex -> validation -> index/txindex"
"policy/fees -> txmempool -> policy/fees"
"policy/policy -> validation -> policy/policy"
"qt/addresstablemodel -> qt/walletmodel -> qt/addresstablemodel"
"qt/bantablemodel -> qt/clientmodel -> qt/bantablemodel"
"qt/bitcoinb3gui -> qt/utilitydialog -> qt/bitcoinb3gui"
"qt/bitcoinb3gui -> qt/walletframe -> qt/bitcoinb3gui"
"qt/bitcoinb3gui -> qt/walletview -> qt/bitcoinb3gui"
"qt/clientmodel -> qt/peertablemodel -> qt/clientmodel"
"qt/paymentserver -> qt/walletmodel -> qt/paymentserver"
"qt/recentrequeststablemodel -> qt/walletmodel -> qt/recentrequeststablemodel"
"qt/sendcoinsdialog -> qt/walletmodel -> qt/sendcoinsdialog"
"qt/transactiontablemodel -> qt/walletmodel -> qt/transactiontablemodel"
"qt/walletmodel -> qt/walletmodeltransaction -> qt/walletmodel"
"rpc/rawtransaction -> wallet/rpcwallet -> rpc/rawtransaction"
"txmempool -> validation -> txmempool"
"validation -> validationinterface -> validation"
"wallet/coincontrol -> wallet/wallet -> wallet/coincontrol"
"wallet/fees -> wallet/wallet -> wallet/fees"
"wallet/rpcwallet -> wallet/wallet -> wallet/rpcwallet"
"wallet/wallet -> wallet/walletdb -> wallet/wallet"
"policy/fees -> policy/policy -> validation -> policy/fees"
"policy/rbf -> txmempool -> validation -> policy/rbf"
"qt/addressbookpage -> qt/bitcoinb3gui -> qt/walletview -> qt/addressbookpage"
"qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/guiutil"
"txmempool -> validation -> validationinterface -> txmempool"
"qt/addressbookpage -> qt/bitcoinb3gui -> qt/walletview -> qt/receivecoinsdialog -> qt/addressbookpage"
"qt/addressbookpage -> qt/bitcoinb3gui -> qt/walletview -> qt/signverifymessagedialog -> qt/addressbookpage"
"qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/intro -> qt/guiutil"
"qt/addressbookpage -> qt/bitcoinb3gui -> qt/walletview -> qt/sendcoinsdialog -> qt/sendcoinsentry -> qt/addressbookpage"
)

EXIT_CODE=0

CIRCULAR_DEPENDENCIES=()

# Collect the cycles currently present in the source tree, one per line.
IFS=$'\n'
for CIRC in $(cd src && ../contrib/devtools/circular-dependencies.py {*,*/*,*/*/*}.{h,cpp} | sed -e 's/^Circular dependency: //'); do
CIRCULAR_DEPENDENCIES+=($CIRC)
IS_EXPECTED_CIRC=0
for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do
if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then
IS_EXPECTED_CIRC=1
break
fi
done
# A cycle not on the allowlist is a regression.
if [[ ${IS_EXPECTED_CIRC} == 0 ]]; then
echo "A new circular dependency in the form of \"${CIRC}\" appears to have been introduced."
echo
EXIT_CODE=1
fi
done

# Conversely, a listed cycle that no longer occurs must be removed from the
# allowlist so it cannot be silently reintroduced.
for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do
IS_PRESENT_EXPECTED_CIRC=0
for CIRC in "${CIRCULAR_DEPENDENCIES[@]}"; do
if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then
IS_PRESENT_EXPECTED_CIRC=1
break
fi
done
if [[ ${IS_PRESENT_EXPECTED_CIRC} == 0 ]]; then
echo "Good job! The circular dependency \"${EXPECTED_CIRC}\" is no longer present."
echo "Please remove it from EXPECTED_CIRCULAR_DEPENDENCIES in $0"
echo "to make sure this circular dependency is not accidentally reintroduced."
echo
EXIT_CODE=1
fi
done

exit ${EXIT_CODE}
# 6.00.2x Problem Set 2: Simulating robots
import math
import random
import numpy as np
import ps2_visualize
import pylab
##################
## Comment/uncomment the relevant lines, depending on which version of Python you have
##################
# For Python 3.5:
# from ps2_verify_movement35 import testRobotMovement
# If you get a "Bad magic number" ImportError, you are not using Python 3.5
# For Python 3.6:
# from ps2_verify_movement36 import testRobotMovement
# If you get a "Bad magic number" ImportError, you are not using Python 3.6
# === Provided class Position
class Position(object):
    """
    A Position represents a location in a two-dimensional room.
    """

    def __init__(self, x, y):
        """
        Initializes a position with coordinates (x, y).
        """
        self.x = x
        self.y = y

    def getX(self):
        """Return the x coordinate."""
        return self.x

    def getY(self):
        """Return the y coordinate."""
        return self.y

    def getNewPosition(self, angle, speed):
        """
        Computes and returns the new Position after a single clock-tick has
        passed, starting from this position and travelling at `speed` in the
        direction `angle` (degrees; 0 points along the +y axis).

        Does NOT test whether the returned position fits inside the room.

        angle: number representing angle in degrees, 0 <= angle < 360
        speed: positive float representing speed

        Returns: a Position object representing the new position.
        """
        radians = math.radians(float(angle))
        # Displacement per tick, decomposed onto the two axes.
        dx = speed * math.sin(radians)
        dy = speed * math.cos(radians)
        return Position(self.x + dx, self.y + dy)

    def __str__(self):
        return "(%0.2f, %0.2f)" % (self.x, self.y)
# === Problem 1
class RectangularRoom(object):
    """
    A RectangularRoom represents a rectangular region containing clean or dirty
    tiles.

    A room has a width and a height and contains (width * height) tiles. At any
    particular time, each of these tiles is either clean or dirty.
    """

    def __init__(self, width, height):
        """
        Initializes a rectangular room with the specified width and height.
        Initially, no tiles in the room have been cleaned.

        width: an integer > 0
        height: an integer > 0
        """
        # FIX: a set gives O(1) membership tests and de-duplication; the
        # original list made every clean/isTileCleaned call O(n).
        self.clean_tiles = set()
        self.width = width
        self.height = height

    def cleanTileAtPosition(self, pos):
        """
        Mark the tile under the position POS as cleaned.
        Assumes that POS represents a valid position inside this room.

        pos: a Position
        """
        # Tiles are addressed by the integer part of the coordinates.
        self.clean_tiles.add((int(pos.getX()), int(pos.getY())))

    def isTileCleaned(self, m, n):
        """
        Return True if the tile (m, n) has been cleaned.
        Assumes that (m, n) represents a valid tile inside the room.

        m: an integer
        n: an integer
        returns: True if (m, n) is cleaned, False otherwise
        """
        return (m, n) in self.clean_tiles

    def getNumTiles(self):
        """
        Return the total number of tiles in the room.

        returns: an integer
        """
        return self.width * self.height

    def getNumCleanedTiles(self):
        """
        Return the total number of clean tiles in the room.

        returns: an integer
        """
        return len(self.clean_tiles)

    def getRandomPosition(self):
        """
        Return a random position inside the room.

        returns: a Position object.
        """
        posX = np.random.uniform(0, self.width)
        posY = np.random.uniform(0, self.height)
        return Position(posX, posY)

    def isPositionInRoom(self, pos):
        """
        Return True if pos is inside the room.

        pos: a Position object.
        returns: True if pos is in the room, False otherwise.
        """
        return 0 <= pos.getX() < self.width and 0 <= pos.getY() < self.height
# Quick manual smoke test: a 5x5 room should report 25 tiles.
room = RectangularRoom(5,5)
print(room.getNumTiles())
|
package ch15;
import javax.swing.*;
import java.awt.*;
import java.net.URL;
import static java.awt.Color.*;
/**
 * Project: ch15.ex10
 * Date: 2/27/2018
 *
 * Applet that mimics a search-engine banner: a logo image, two checkboxes,
 * a query field and a Search button, with a background colour that cycles
 * once per second on a daemon-less worker thread.
 *
 * @author <NAME>
 */
public class ex1510 extends JApplet
{
    private Image logo;
    // Index into bground for the next background colour.
    private int bg = 0;
    private URL base;
    // Background colours cycled through once per second.
    private Color[] bground = {YELLOW, PINK, LIGHT_GRAY, CYAN};

    private static final int APP_WIDTH = 800;
    private static final int APP_HEIGHT = 150;

    @Override
    public void init()
    {
        setSize(APP_WIDTH, APP_HEIGHT);
        Container c = getContentPane();
        // Absolute positioning: every component gets explicit bounds below.
        c.setLayout(null);
        c.setBackground(bground[bg++]);
        c.add(new JCheckBox("Web") {{
            setBackground(null);
            doClick();  // "Web" starts selected
            setBounds(320, 40, 80, 20);
        }});
        c.add(new JCheckBox("Image") {{
            setBackground(null);
            setBounds(420, 40, 80, 20);
        }});
        c.add(new TextField(100) {{
            setBounds(320, 70, 320, 25);
        }});
        c.add(new JButton("Search") {{
            setBounds(650, 70, 80, 25);
        }});
        try
        {
            UIManager.setLookAndFeel("com.sun.java.swing.plaf.windows.WindowsLookAndFeel");
            SwingUtilities.updateComponentTreeUI(this);
            // You may edit this. The getgetCodeBase() and getDocumentBase() doesn't work in intelliJ
            base = new URL("file:C:\\Users\\devtoor\\Documents\\IdeaProjects\\ciss241\\src\\ch15\\");
            logo = getImage(base, "lycos.png");
        }
        catch (Exception ignore){ignore.getMessage();}
        // Background-cycling thread: advance bg (wrapping), repaint, sleep 1s.
        new Thread(() ->
        {
            while (true)
            {
                try
                {
                    c.setBackground(bground[bg = bg < bground.length - 1 ? ++bg : 0]);
                    repaint();
                    Thread.sleep(1000);
                }
                catch (InterruptedException e) {}
            }
        }).start();
    }

    @Override
    public void paint(Graphics g)
    {
        super.paint(g);
        // Draw the logo on every repaint (background changes clear it).
        g.drawImage(logo, 50, 30, this);
    }
}
<filename>main.js<gh_stars>0
/*var hhead=document.querySelector('h1');
hhead.onclick=display;
function display()
{
hhead.textContent=welcome;
alert('text have been changed');
}*/
/*var myimage=document.querySelector("img");
myimage.onclick=imagechange;
function imagechange()
{
var imgsrc=myimage.getAttribute('src');
if(imgsrc==='android.png')
{
myimage.setAttribute('src','bb.jpeg');
var para=document.querySelector('p');
para.textContent="blackberry";
}
else{
myimage.setAttribute('src','android.png');
}
}*/
var mybutton = document.querySelector('button');
var myheading = document.querySelector('h1');

// Prompt for the visitor's name, persist it, and show the greeting.
function setusername()
{
    var uname = prompt("enter your name");
    localStorage.setItem('name', uname);
    myheading.textContent = "welcome to this web " + uname;
}

// FIX: the original condition was inverted — it re-prompted when a name was
// already stored and greeted with null when none existed.
if (!localStorage.getItem('name'))
{
    setusername();
}
else
{
    var storedname = localStorage.getItem('name');
    myheading.textContent = "welcome to this web " + storedname;
}

mybutton.onclick = function ()
{
    setusername();
};

var show = document.getElementById('applee');
var app = document.getElementById('androidd');
var win = document.getElementById('windowss');

// Shared handler: highlight the clicked element in red and reset the other
// two to white; clicking an already-red element resets it to white.
// (Replaces three near-identical functions; also fixes the apple handler,
// which set text color instead of background color in its else branch.)
function highlight(selected, otherA, otherB)
{
    if (selected.style.backgroundColor != 'red')
    {
        selected.style.backgroundColor = 'red';
        otherA.style.backgroundColor = 'white';
        otherB.style.backgroundColor = 'white';
    }
    else
    {
        selected.style.backgroundColor = 'white';
    }
}

show.onclick = function () { highlight(show, app, win); };
app.onclick = function () { highlight(app, show, win); };
win.onclick = function () { highlight(win, show, app); };
|
<gh_stars>1-10
const express = require('express');

// Router is never reassigned, so use const instead of let.
const router = express.Router();

// Require controller modules
const grid_controller = require('../controllers/griddedProductController');

// GET / — handled by the controller's get_window action.
router.get('/', grid_controller.get_window);
// GET /find — handled by the controller's find_one action.
router.get('/find', grid_controller.find_one);

module.exports = router;
import { createContext } from "react";

// Shared context for the app's active language; the value is null until a
// provider higher in the tree supplies one.
const LanguageContext = createContext(null);

export default LanguageContext;
import { Component, Input, Output, EventEmitter, TemplateRef } from '@angular/core';
import { defaultLibraryCardsGrid } from '../library-cards.data';
import { IContent, LibraryCardTypes, LibraryCardGridTypes } from '../../card/models';
import { IViewMoreClick, ICardClick } from '../models';
// Grid of library content cards with an optional "View More" action,
// per-card menus, and a hover template projected by the consumer.
@Component({
  selector: 'sb-library-cards-grid',
  templateUrl: './library-cards-grid.component.html',
  styleUrls: ['./library-cards-grid.component.scss']
})
export class LibraryCardsGridComponent {
  /* Title for the grid */
  @Input() title: string = defaultLibraryCardsGrid.title;
  // Contents to render, one card each.
  @Input() contentList: Array<IContent|any> = defaultLibraryCardsGrid.contentList;
  // Visual variant of the grid.
  @Input() type: LibraryCardGridTypes;
  // Actions shown in the hover overlay.
  @Input() hoverData = [];
  @Input() layoutConfig: any;
  /* Max card count to be shown */
  @Input() maxCardCount = defaultLibraryCardsGrid.maxCardCount;
  @Input() viewMoreButtonText = defaultLibraryCardsGrid.viewMoreButtonText;
  // Template projected by the consumer for the card hover state.
  @Input('hover-template') gridTemplate: TemplateRef<any>;
  // When true, skeleton placeholders are rendered instead of cards.
  @Input() isLoading:boolean;
  // Whether to show the "View More" button.
  @Input() viewMore = false;
  /* Show Menu on each card */
  @Input() isMenu = false;
  @Output() viewMoreClick: EventEmitter<IViewMoreClick> = new EventEmitter<IViewMoreClick>();
  @Output() cardClick: EventEmitter<ICardClick> = new EventEmitter<ICardClick>();
  @Output() hoverActionClick: EventEmitter<any> = new EventEmitter<any>();
  @Output() menuClick: EventEmitter<ICardClick> = new EventEmitter();

  // Enum accessors so the template can reference the enums directly.
  get LibraryCardTypes() { return LibraryCardTypes; }
  get LibraryCardGridTypes() { return LibraryCardGridTypes; }

  /**
   * Triggers event on `View More` Click
   * @param event HTML click event
   */
  onViewMoreClick(event: MouseEvent) {
    this.viewMoreClick.emit({ event, data: this.contentList });
  }

  /**
   * Triggers event on card click
   * @param event HTML Click event
   * @param data Content data for selected card
   */
  onCardClick(event: MouseEvent, data: IContent) {
    this.cardClick.emit({ event, data });
  }

  // Re-emit a hover-overlay action to the consumer.
  hoverActionClicked(event) {
    this.hoverActionClick.emit(event);
  }

  // Helper for *ngFor skeleton placeholders while loading.
  range(maxCardCounter) {
    return new Array(maxCardCounter);
  }

  // Re-emit a card menu interaction to the consumer.
  onCardMenuClick(event) {
    this.menuClick.emit(event);
  }
}
|
<filename>package/spack-gnupg/package.py
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Gnupg(AutotoolsPackage):
    """GnuPG is a complete and free implementation of the OpenPGP
    standard as defined by RFC4880 """

    homepage = "https://gnupg.org/index.html"
    url = "https://gnupg.org/ftp/gcrypt/gnupg/gnupg-2.2.3.tar.bz2"

    version('2.2.3', '6911c0127e4231ce52d60f26029dba68')
    version('2.1.21', '685ebf4c3a7134ba0209c96b18b2f064')

    depends_on('libgcrypt')
    depends_on('libassuan')
    depends_on('libksba')
    depends_on('libgpg-error')
    depends_on('npth')

    def configure_args(self):
        """Point ./configure at the Spack-provided dependency prefixes.

        FIX: two flag-name typos that configure would have silently ignored —
        '--with-libksba-prefixx' (doubled 'x') and '--with-libpgp-error-prefix'
        ('pgp' instead of 'gpg').
        """
        args = ['--with-npth-prefix=%s' % self.spec['npth'].prefix,
                '--with-libgcrypt-prefix=%s' % self.spec['libgcrypt'].prefix,
                '--with-libksba-prefix=%s' % self.spec['libksba'].prefix,
                '--with-libassuan-prefix=%s' % self.spec['libassuan'].prefix,
                '--with-libgpg-error-prefix=%s' %
                self.spec['libgpg-error'].prefix]
        return args
|
# Specs for the Rack middleware that injects a per-tab subsession ID into
# each request's params (unless already present, supplied via header, or the
# path is excluded from subsession management).
RSpec.describe MultiSessionStore::SubsessionGeneratorMiddleware do
  subject(:middleware) { described_class.new app, config }

  let(:app) { double 'application' }
  let(:config) { {} }

  describe '#call' do
    subject(:call) { middleware.call env }

    let(:call_result) { 'status, headers and body' }
    # Minimal Rack env for a GET /healthcheck request.
    let(:env) do
      {
        'REQUEST_METHOD' => 'GET',
        'QUERY_STRING' => query_string,
        'PATH_INFO' => '/healthcheck',
        'action_dispatch.remote_ip' => '127.0.0.1',
        'rack.input' => StringIO.new('')
      }
    end
    let(:query_string) { '' }
    # Re-reads the (possibly mutated) env, so params reflect what the
    # middleware injected.
    let(:request) { Rack::Request.new env }

    before do
      allow(app).to receive(:call).with(env).and_return(call_result)
    end

    it 'calls the next middleware in the stack and returns the results' do
      expect(call).to eq call_result
    end

    it 'generates a subsession ID into the request' do
      allow(SecureRandom).to receive(:hex).and_return('subsession_ID_hash')
      call
      expect(request.params).to include 'subsession_id' => 'subsession_ID_hash'
    end

    context 'when the request already contains a subsession ID' do
      let(:query_string) { 'subsession_id=subsession_ID_hash' }

      it 'does not overwrite the existing ID' do
        call
        expect(request.params).to include 'subsession_id' => 'subsession_ID_hash'
      end
    end

    context 'when the request has an HTTP_X_SUBSESSIONID header' do
      before { env['HTTP_X_SUBSESSIONID'] = 'subsession_ID_hash' }

      it 'stores the header value into the request params and does not generate a new ID' do
        call
        expect(request.params).to include 'subsession_id' => 'subsession_ID_hash'
      end
    end

    context 'when the path is excluded from subsession management' do
      let(:config) { {exclude_paths: ['/some_path', '/healthcheck']} }

      it 'does not generate a subsession ID into the request' do
        call
        expect(request.params).not_to include 'subsession_id'
      end

      # Exclusions may be given as regexps as well as literal strings.
      context 'and the exclude path is a regexp' do
        let(:config) { {exclude_paths: [%r'/health.*']} }

        it 'does not generate a subsession ID into the request' do
          call
          expect(request.params).not_to include 'subsession_id'
        end
      end
    end
  end
end
|
#include "TestGameWorldGenerator.h"
#include <Engine/Modules/Graphics/GraphicsSystem/TransformComponent.h>
#include <Engine/Modules/LevelsManagement/LevelsManager.h>
#include <Engine/Modules/ResourceManagement/ResourcesManagement.h>
#include <Game/Game/Inventory/InventoryControlSystem.h>
#include <Game/Game/Dynamic/QuestsSystem.h>
#include <Game/Game/Dynamic/GameLogicConditionsManager.h>
#include <Game/Game/Dynamic/ActorComponent.h>
#include <Game/Game/Inventory/InventoryComponent.h>
#include <Game/Game/PlayerComponent.h>
// Builds a minimal, self-contained game world for tests: core gameplay
// systems, a player, one NPC, one inventory item, plus the infoportions and
// a two-task "find a medkit" quest that reference them.
// NOTE(review): system registration order is preserved as-is; it may matter
// to GameSystemsGroup — confirm before reordering.
// NOTE(review): "objet_class_medkit" looks like a typo for "object_class_…",
// but it is used consistently here and in the quest conditions, so it is a
// runtime identifier and must stay unchanged.
std::shared_ptr<GameWorld> TestGameWorldGenerator::buildTestGameWorld()
{
  // Initialize game world and general gameplay systems
  auto gameWorld = GameWorld::createInstance();
  auto resourcesManager = std::make_shared<ResourcesManager>();
  auto levelsManager = std::make_shared<LevelsManager>(gameWorld, resourcesManager);
  auto infoportionsSystem = std::make_shared<InfoportionsSystem>();
  gameWorld->getGameSystemsGroup()->addGameSystem(infoportionsSystem);
  auto inventoryControlSystem = std::make_shared<InventoryControlSystem>(levelsManager);
  gameWorld->getGameSystemsGroup()->addGameSystem(inventoryControlSystem);
  auto conditionsManager = std::make_shared<GameLogicConditionsManager>(gameWorld);
  auto questsStorage = std::make_shared<QuestsStorage>();
  auto questsSystem = std::make_shared<QuestsSystem>(conditionsManager, questsStorage);
  gameWorld->getGameSystemsGroup()->addGameSystem(questsSystem);
  // Initialize player (actor + inventory so quest conditions can inspect it)
  GameObject player = gameWorld->createGameObject("player");
  player.addComponent<TransformComponent>();
  player.addComponent<PlayerComponent>(1.0f);
  player.addComponent<InventoryComponent>();
  auto playerActorComponent = player.addComponent<ActorComponent>();
  playerActorComponent->setName("Player");
  // Initialize NPC (the comment previously read "NCP")
  GameObject npc1 = gameWorld->createGameObject("npc1");
  npc1.addComponent<TransformComponent>();
  auto npcActorComponent1 = npc1.addComponent<ActorComponent>();
  npcActorComponent1->setName("npc1");
  npc1.addComponent<InventoryComponent>();
  // Initialize misc game objects: the medkit the quest revolves around
  GameObject medkit = gameWorld->createGameObject("medkit");
  medkit.addComponent<TransformComponent>();
  medkit.addComponent<InventoryItemComponent>(ResourceHandle<GLTexture>(), "objet_class_medkit", "medkit");
  // Initialize infoportions (flags the quest conditions below check)
  infoportionsSystem->addInfoportion("infoportion_id_test_1");
  infoportionsSystem->addInfoportion("infoportion_id_test_2");
  infoportionsSystem->addInfoportion("infoportion_id_quest_find_medkit_started");
  infoportionsSystem->addInfoportion("infoportion_id_quest_find_medkit_completed");
  // Initialize quests
  // Find a medkit quest: task 1 — obtain the medkit (auto-starts on the
  // "started" infoportion, completes once the medkit is in inventory) ...
  Quest findMedkitQuest("quest_id_find_medkit", "find_medkit", "find_medkit_desc");
  QuestTask findMedkitTask("quest_id_find_medkit_task_id_find", "find_medkit", "find_medkit_desc");
  findMedkitTask.setAutostartCondition(std::make_shared<GameLogicConditionHasInfoportion>(
    conditionsManager.get(), "infoportion_id_quest_find_medkit_started"));
  findMedkitTask.setActiveCondition(std::make_shared<GameLogicConditionHasNotObject>(
    conditionsManager.get(), "objet_class_medkit"));
  findMedkitTask.setCompleteCondition(std::make_shared<GameLogicConditionHasObject>(
    conditionsManager.get(), "objet_class_medkit"));
  findMedkitQuest.addTask(findMedkitTask);
  // ... task 2 — hand the medkit over (completes on the "completed" infoportion).
  QuestTask transferMedkitTask("quest_id_find_medkit_task_id_transfer", "transfer_medkit",
    "transfer_medkit_desc");
  transferMedkitTask.setActiveCondition(std::make_shared<GameLogicConditionHasObject>(
    conditionsManager.get(), "objet_class_medkit"));
  transferMedkitTask.setCompleteCondition(std::make_shared<GameLogicConditionHasInfoportion>(
    conditionsManager.get(), "infoportion_id_quest_find_medkit_completed"));
  findMedkitQuest.addTask(transferMedkitTask);
  questsSystem->registerQuest(findMedkitQuest);
  return gameWorld;
}
|
def calculate_weighted_average(num1: float, num2: float, num3: float) -> float:
    """Return the weighted average of three values (weights 2, 3 and 5).

    The weights sum to 10, so the result is (2*num1 + 3*num2 + 5*num3) / 10,
    rounded to one decimal place.
    """
    weights = (2, 3, 5)
    values = (num1, num2, num3)
    total = sum(w * v for w, v in zip(weights, values))
    return round(total / sum(weights), 1)
<filename>open-sphere-plugins/geopackage/src/main/java/io/opensphere/geopackage/export/ui/UserAsker.java<gh_stars>10-100
package io.opensphere.geopackage.export.ui;
/**
 * Asks the user a yes/no question and returns the result.
 * Implementations decide how the question is actually presented
 * (e.g. a modal dialog during a GeoPackage export).
 */
public interface UserAsker
{
    /**
     * Asks the user the given question and returns the answer.
     *
     * @param question The question to ask.
     * @param title The title of the question.
     * @return True if the user answered yes, false if the user answered no.
     */
    boolean askYesNo(String question, String title);
}
|
import sqlite3
from contextlib import closing

# Report script: prints one "<name> earns $<salary> in <department>" line per
# employee row in employee_records.db.
#
# FIX: the original never closed the connection when the query or report
# generation raised; closing() guarantees release on every path.
with closing(sqlite3.connect('employee_records.db')) as conn:
    cursor = conn.cursor()
    # Query Database
    cursor.execute("SELECT name, salary, department FROM employees")
    # Generate Report (tuple unpacking instead of positional row indexing)
    for name, salary, department in cursor.fetchall():
        print(f'{name} earns ${salary} in {department}')
#!/bin/bash
# Builds the bc-crypto-base JNI shared library for the host platform.
# Produces build/release/libbc-crypto-base-jni.{dylib|so}.
source scripts/helper.sh

# Platform-specific settings: library suffix, JNI machine-dependent header
# directory, and the clang toolchain to use. is_osx comes from helper.sh.
sskr_lib_name="libbc-crypto-base-jni.dylib"
out_dir=build/release
jni_md_dir="darwin"
if is_osx; then
  export CC="clang"
  export CXX="clang++"
else
  sskr_lib_name="libbc-crypto-base-jni.so"
  jni_md_dir="linux"
  export CC="clang-10"
  export CXX="clang++-10"
fi

# Locate a JDK: respect a pre-set JAVA_HOME, otherwise fall back to a fixed
# Linux path or macOS's java_home helper.
java_home="/usr/java/jdk8u265-b01"
if is_osx; then
  java_home=$(/usr/libexec/java_home 2>/dev/null)
fi
if [ "$JAVA_HOME" == "" ]; then
  export JAVA_HOME=$java_home
fi

# Install bc-crypto-base (static lib, built with -fPIC so it can be linked
# into the shared JNI library below).
pushd ../../deps/bc-crypto-base || exit
./configure
make clean
make CFLAGS=-fPIC check
sudo make CFLAGS=-fPIC install
popd || exit

# Install jni lib: compile the JNI glue sources and link the static
# bc-crypto-base archive into one shared library.
echo "Building $sskr_lib_name..."
mkdir -p $out_dir
$CC -I$JAVA_HOME/include -I$JAVA_HOME/include/$jni_md_dir -fexceptions -frtti -shared -fPIC src/main/jniLibs/*.c ../../base-jni/*.c ../../deps/bc-crypto-base/src/libbc-crypto-base.a -o $out_dir/$sskr_lib_name || exit
echo "Done. Checkout the release file at $out_dir/$sskr_lib_name"
|
-- Return the three employees with the earliest dates of birth
-- (i.e. presumably the three oldest employees — confirm intent).
SELECT *
FROM employees
ORDER BY DATE_OF_BIRTH
LIMIT 3
<reponame>thomastay/collectable
import test from 'ava';
import { empty as emptyMap } from '@collectable/map';
import { empty, isSet } from '../../src';
test('returns true if the argument is an instance of a Collectable.js Set class', t => {
t.true(isSet(empty()));
});
test('returns false if the argument is not an instance of a Collectable.js Set class', t => {
t.false(isSet(emptyMap()));
});
|
package cyclops.container.immutable.impl;
import cyclops.container.control.Option;
import cyclops.container.immutable.ImmutableList;
import cyclops.container.immutable.tuple.Tuple;
import cyclops.container.immutable.tuple.Tuple2;
import cyclops.container.immutable.tuple.Tuple3;
import cyclops.function.higherkinded.DataWitness.zipper;
import cyclops.function.higherkinded.Higher;
import cyclops.reactive.ReactiveSeq;
import java.util.Iterator;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Predicate;
import lombok.Getter;
import lombok.With;
@Getter
@With
/**
 * An immutable list zipper: a focused element ({@code point}) together with
 * the elements to its left and to its right. Every navigation / mutation
 * method returns a new {@code Zipper}; the receiver is never modified.
 *
 * FIXES over the previous revision (no behavior change):
 * - {@code of(ReactiveSeq,...)} previously returned the raw type {@code Zipper};
 *   it is now fully parameterized.
 * - raw {@code new Zipper(...)} instantiations in {@link #next()} and
 *   {@link #previous()} now use the diamond operator.
 * - unused {@code <R>} type parameters on next/previous were removed.
 *
 * @param <T> element type
 */
public class Zipper<T> implements Iterable<T>, Higher<zipper, T> {

    private final ImmutableList<T> left;   // elements before the focus
    private final T point;                 // the focused element
    private final ImmutableList<T> right;  // elements after the focus

    private Zipper(ImmutableList<T> left, T point, ImmutableList<T> right) {
        this.left = left;
        this.point = point;
        this.right = right;
    }

    /** Creates a zipper from explicit left / focus / right parts. */
    public static <T> Zipper<T> of(ImmutableList<T> left, T value, ImmutableList<T> right) {
        return new Zipper<>(left, value, right);
    }

    /** Creates a zipper from streams (materialized lazily via LazySeq). */
    public static <T> Zipper<T> of(ReactiveSeq<T> left, T value, ReactiveSeq<T> right) {
        return new Zipper<>(LazySeq.fromStream(left), value, LazySeq.fromStream(right));
    }

    /** True when the focus is on the first element (nothing to the left). */
    public boolean isStart() {
        return left.isEmpty();
    }

    /** True when the focus is on the last element (nothing to the right). */
    public boolean isEnd() {
        return right.isEmpty();
    }

    /** Maps every element (left, focus and right) through {@code fn}. */
    public <R> Zipper<R> map(Function<? super T, ? extends R> fn) {
        return of(left.map(fn), fn.apply(point), right.map(fn));
    }

    /** Zips this zipper element-wise with another, combining with {@code fn}. */
    public <R> Zipper<R> zip(Zipper<T> zipper, BiFunction<? super T, ? super T, ? extends R> fn) {
        ImmutableList<R> newLeft = left.zip(zipper.left.stream(), fn);
        R newPoint = fn.apply(point, zipper.point);
        ImmutableList<R> newRight = right.zip(zipper.right.stream(), fn);
        return of(newLeft, newPoint, newRight);
    }

    /** Zips this zipper element-wise with another into tuples. */
    public Zipper<Tuple2<T, T>> zip(Zipper<T> zipper) {
        return zip(zipper, Tuple::tuple);
    }

    /** Moves the focus to the first element. */
    public Zipper<T> start() {
        Option<Zipper<T>> result = Option.some(this);
        Option<Zipper<T>> next = result;
        while (next.isPresent()) {
            next = result.flatMap(p -> p.previous());
            if (next.isPresent()) {
                result = next;
            }
        }
        return result.orElse(this);
    }

    /** Moves the focus to the last element. */
    public Zipper<T> end() {
        Option<Zipper<T>> result = Option.some(this);
        Option<Zipper<T>> next = result;
        while (next.isPresent()) {
            next = result.flatMap(p -> p.next());
            if (next.isPresent()) {
                result = next;
            }
        }
        return result.orElse(this);
    }

    /** Zero-based index of the focus (= number of elements to its left). */
    public int index() {
        return left.size();
    }

    /** Moves the focus to {@code index}, or none when out of range. */
    public Option<Zipper<T>> position(int index) {
        Zipper<T> result = this;
        while (index != result.index()) {
            if (result.index() < index && !result.isEnd()) {
                result = result.next(result);
            } else if (result.index() > index && !result.isStart()) {
                result = result.previous(result);
            } else {
                return Option.none();
            }
        }
        return Option.some(result);
    }

    /** Focus on the next element, or none when already at the end. */
    public Option<Zipper<T>> next() {
        return right.fold(c -> Option.some(new Zipper<>(left.append(point),
                                                        c.head(),
                                                        c.tail())),
                          nil -> Option.none());
    }

    /** Focus on the next element, or {@code alt} when at the end. */
    public Zipper<T> next(Zipper<T> alt) {
        return next().orElse(alt);
    }

    /** Focus on the previous element, or {@code alt} when at the start. */
    public Zipper<T> previous(Zipper<T> alt) {
        return previous().orElse(alt);
    }

    /**
     * Moves to the next element, wrapping to the start when at the end.
     * NOTE(review): when {@code left} is empty this returns {@code this}
     * rather than advancing — confirm this asymmetry is intended.
     */
    public Zipper<T> cycleNext() {
        return left.fold(cons -> right.fold(c -> next().orElse(this),
                                            nil -> {
                                                return of(LazySeq.empty(),
                                                          cons.head(),
                                                          cons.tail()
                                                              .append(point));
                                            }),
                         nil -> this);
    }

    /**
     * Moves to the previous element, wrapping to the end when at the start.
     * NOTE(review): when {@code right} is empty this returns {@code this}
     * rather than moving back — confirm this asymmetry is intended.
     */
    public Zipper<T> cyclePrevious() {
        return right.fold(cons -> left.fold(c -> previous().orElse(this),
                                            nil -> {
                                                ImmutableList.Some<T> reversed = cons.reverse();
                                                return of(reversed.tail()
                                                                  .reverse()
                                                                  .prepend(point),
                                                          reversed.head(),
                                                          LazySeq.empty());
                                            }),
                          nil -> this);
    }

    /** Focus on the previous element, or none when already at the start. */
    public Option<Zipper<T>> previous() {
        return left.fold(c -> Option.some(new Zipper<>(c.take(c.size() - 1),
                                                       c.last(null),
                                                       right.prepend(point))),
                         nil -> Option.none());
    }

    /** Inserts {@code value} as the new focus, pushing the old focus right. */
    public Zipper<T> left(T value) {
        return new Zipper<>(left, value, right.prepend(point));
    }

    /** Inserts {@code value} as the new focus, pushing the old focus left. */
    public Zipper<T> right(T value) {
        return new Zipper<>(left.append(point), value, right);
    }

    /** Drops everything except the focus. */
    public Zipper<T> deleteAllLeftAndRight() {
        return new Zipper<>(LazySeq.empty(), point, LazySeq.empty());
    }

    /**
     * Deletes the focus, preferring the element to the left as the new focus,
     * falling back to the right; none when this is the only element.
     */
    public Option<Zipper<T>> deleteLeft() {
        return left.fold(c -> right.fold(c2 -> Option.some(of(c.dropRight(1),
                                                              c.last(null),
                                                              right)),
                                         n -> Option.some(of(c.dropRight(1),
                                                             c.last(null),
                                                             right))),
                         n -> right.fold(c -> Option.some(of(left,
                                                             c.head(),
                                                             c.tail())),
                                         n2 -> Option.none()));
    }

    /**
     * Deletes the focus, preferring the element to the right as the new focus,
     * falling back to the left; none when this is the only element.
     */
    public Option<Zipper<T>> deleteRight() {
        return right.fold(c -> left.fold(c2 -> Option.some(of(left,
                                                              c.head(),
                                                              c.tail())),
                                         n -> Option.some(of(left,
                                                             c.head(),
                                                             c.tail()))),
                          n -> left.fold(c -> Option.some(of(c.tail(),
                                                             c.head(),
                                                             right)),
                                         n2 -> Option.none()));
    }

    /** Filters only the elements left of the focus. */
    public Zipper<T> filterLeft(Predicate<? super T> predicate) {
        return of(left.filter(predicate), point, right);
    }

    /** Filters only the elements right of the focus. */
    public Zipper<T> filterRight(Predicate<? super T> predicate) {
        return of(left, point, right.filter(predicate));
    }

    /** The (left, focus, right) decomposition as a tuple. */
    public Tuple3<ImmutableList<T>, T, ImmutableList<T>> split() {
        return Tuple.tuple(left, point, right);
    }

    /** All elements in order as an immutable list. */
    public ImmutableList<T> list() {
        return right.prepend(point)
                    .prependAll(left);
    }

    /** All elements in order as a stream. */
    public ReactiveSeq<T> stream() {
        return left.stream()
                   .append(point)
                   .appendStream(right.stream());
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Zipper<?> zipper = (Zipper<?>) o;
        return Objects.equals(left,
                              zipper.left) && Objects.equals(point,
                                                             zipper.point) && Objects.equals(right,
                                                                                             zipper.right);
    }

    @Override
    public int hashCode() {
        return Objects.hash(left, point, right);
    }

    @Override
    public String toString() {
        String l = left.stream()
                       .join(", ",
                             "[",
                             "");
        String p = ",>>" + point.toString() + "<<";
        String r = right.stream()
                        .join(", ",
                              ",",
                              "]");
        return l + p + r;
    }

    @Override
    public Iterator<T> iterator() {
        return stream().iterator();
    }
}
|
var app = getApp();

// WeChat mini-program payment page.
// FIX: removed the empty success/fail/complete no-op callbacks from both
// wx.navigateTo calls — they were dead boilerplate with no effect.
Page({
  data: {
    img_url: 'http://appuat.huihuishenghuo.com/img/', // image CDN base URL
    checkType: true,   // agreement checkbox state
    focus: true,       // whether the payment input is focused
    height: '440rpx'   // payment panel height ('0rpx' hides it)
  },

  // Toggle the agreement checkbox.
  checkTap: function () {
    var check = this.data.checkType;
    this.setData({
      checkType: !check
    });
  },

  // Show the payment panel.
  paymentTap: function () {
    this.setData({
      height: '440rpx'
    });
  },

  // Hide the payment panel and drop input focus.
  payloseTap: function () {
    this.setData({
      focus: false,
      height: '0rpx'
    });
  },

  // Navigate to the privilege page.
  privilegeTap: function () {
    wx.navigateTo({
      url: 'privilege'
    });
  },

  // Navigate to the payment-comment page.
  paycommentTap: function () {
    wx.navigateTo({
      url: 'paycomment'
    });
  }
})
/*
* @Author: liuyr
* @Date: 2019-09-13 09:04:33
* @Last Modified by: liuyr
* @Last Modified time: 2019-09-13 09:06:03
*/
import axios from '@/utils/axios';
/**
 * Fetch all waiter records from the backend.
 * (Original comment: 查找所有的服务员信息 — "find all waiter information".)
 *
 * @export
 * @returns {Promise} resolves with the response of GET /waiter/findAllWaiter
 */
export async function getWaiterDataS() {
  return axios.get('/waiter/findAllWaiter');
}
// Shared DOM references and drag state for the image-comparison slider.
// Populated in init() and onStart().
let separator;       // the draggable divider element (.separator)
let resize;          // the resizable overlay element (.resize)
let dragWidth;       // separator width captured at drag start
let pos;             // pointer offset within the separator at drag start
let containerOffset; // document.body left offset at drag start
let containerWidth;  // document.body width at drag start
let minLeft;         // leftmost allowed separator position (px)
let maxLeft;         // rightmost allowed separator position (px)
let water;           // the .water element
let beer;            // the .beer element
let width;           // separator position as a CSS percentage, applied each frame
// User settled on "water": mark the page as chosen and hide the beer side
// and the divider.
function onWater() {
  document.body.classList.add('chosen');
  beer.style.display = 'none';
  separator.style.display = 'none';
}
// User settled on "beer": mark the page as chosen and hide the resize
// overlay, the water side and the divider.
function onBeer() {
  document.body.classList.add('chosen');
  resize.style.display = 'none';
  water.style.display = 'none';
  separator.style.display = 'none';
}
// Leave the "chosen" state: restore both sides, animate the divider back to
// the middle, then re-show it once the transition has finished.
function goBack() {
  document.body.classList.remove('chosen');
  resize.style.display = '';
  beer.style.display = '';
  water.style.display = '';
  document.body.classList.add('animate');
  window.requestAnimationFrame(function() {
    width = '50%';
    setTimeout(function() {
      document.body.classList.remove('animate');
      separator.style.display = '';
    }, 300); // presumably matches the CSS transition duration — confirm
  });
}
// Copy the link preceding the clicked button to the clipboard, clearing any
// prior success/error state on all copy buttons, then flagging this one.
function copyToClipboard(e) {
  const link = e.target.previousElementSibling.href;
  document.querySelectorAll('button.copy').forEach((button) => {
    button.classList.remove('success');
    button.classList.remove('error');
  });
  navigator.clipboard.writeText(link).then(
    () => e.target.classList.add('success'),
    () => e.target.classList.add('error')
  );
}
// Begin dragging the separator (mouse or touch). Captures the geometry used
// by onMove and attaches the move handlers.
function onStart(e) {
  separator.classList.add('draggable');
  document.body.classList.add('dragging');
  // Check if it's a mouse or touch event and pass along the correct value
  let start = (e.pageX) ? e.pageX : e.changedTouches[0].pageX;
  // Get the initial position
  dragWidth = separator.offsetWidth;
  pos = separator.offsetLeft + dragWidth - start;
  containerOffset = document.body.offsetLeft;
  containerWidth = document.body.offsetWidth;
  // Set limits: keep the separator at least 10px inside the container
  minLeft = containerOffset + 10;
  maxLeft = containerOffset + containerWidth - dragWidth - 10;
  document.body.onmousemove = onMove;
  document.body.ontouchmove = onMove;
  e.preventDefault();
}
// Track a drag in progress: clamp the separator within its limits, convert
// its position to a percentage, and commit to water/beer when the pointer
// reaches either extreme.
function onMove(e) {
  // Check if it's a mouse or touch event and pass along the correct value
  let move = (e.pageX) ? e.pageX : e.changedTouches[0].pageX;
  let left = move + pos - dragWidth;
  // Prevent going off limits
  if (left < minLeft) {
    left = minLeft;
  } else if (left > maxLeft) {
    left = maxLeft;
  }
  // Translate the separator's left value to masked divs width.
  let fac = (left + dragWidth / 2 - containerOffset) * 100 / containerWidth;
  width = fac + '%';
  // Dragging past 10% / 90% counts as a final choice.
  if (fac < 10) {
    onEnd();
    onBeer();
  } else if (fac > 90) {
    onEnd();
    onWater();
  }
}
// Finish a drag: clear the dragging styles and detach the move handlers.
function onEnd() {
  separator.classList.remove('draggable');
  document.body.classList.remove('dragging');
  document.body.onmousemove = null;
  document.body.ontouchmove = null;
}
// Wire up the page: cache DOM references, attach drag / click handlers, and
// start a ~60fps loop that applies the latest separator position.
function init() {
  separator = document.querySelector('.separator');
  resize = document.querySelector('.resize');
  water = document.querySelector('.water');
  beer = document.querySelector('.beer');
  separator.onmousedown = onStart;
  separator.ontouchstart = onStart;
  document.body.onmouseup = onEnd;
  document.body.ontouchend = onEnd;
  document.body.onmouseleave = onEnd;
  let backButtons = document.querySelectorAll('button.back');
  for (let button of backButtons) {
    button.onclick = goBack;
  }
  let copyButtons = document.querySelectorAll('button.copy');
  for (let button of copyButtons) {
    button.onclick = copyToClipboard;
  }
  // Apply the pending width outside the event handlers, once per frame.
  setInterval(function () {
    if (!!width) {
      // Set the new values for the slider and the separator.
      separator.style.left = width;
      resize.style.width = width;
    }
  }, 1000 / 60);
}
// Run init() immediately if the document has already loaded, otherwise
// defer until DOMContentLoaded.
if (document.readyState === 'complete') {
  init();
} else {
  document.addEventListener('DOMContentLoaded', init);
}
|
/*
* Copyright 2017-2021 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micronaut.data.model.jpa.criteria;
import io.micronaut.core.annotation.Experimental;
import io.micronaut.core.annotation.NonNull;
import io.micronaut.core.annotation.Nullable;
import io.micronaut.data.model.Association;
import jakarta.persistence.criteria.Expression;
import jakarta.persistence.criteria.From;
import jakarta.persistence.criteria.Join;
import jakarta.persistence.criteria.JoinType;
import jakarta.persistence.criteria.Predicate;
import jakarta.persistence.metamodel.Attribute;
import jakarta.persistence.metamodel.Bindable;
import java.util.ArrayList;
import java.util.List;
import static io.micronaut.data.model.jpa.criteria.impl.CriteriaUtils.notSupportedOperation;
/**
* The persistent entity association path.
*
* @param <OwnerType> The association owner type
* @param <AssociatedEntityType> The association entity type
* @author <NAME>
* @since 3.2
*/
@Experimental
public interface PersistentAssociationPath<OwnerType, AssociatedEntityType> extends PersistentEntityJoin<OwnerType, AssociatedEntityType>,
    PersistentPropertyPath<AssociatedEntityType> {

    /** @return The association property this path represents */
    @NonNull
    @Override
    Association getProperty();

    /** @return The association */
    @NonNull
    Association getAssociation();

    /**
     * @return The join type
     */
    @Nullable
    io.micronaut.data.annotation.Join.Type getAssociationJoinType();

    /**
     * Set join type.
     *
     * @param type The join type
     */
    void setAssociationJoinType(@Nullable io.micronaut.data.annotation.Join.Type type);

    /**
     * Set join alias.
     *
     * @param alias The alias
     */
    void setAlias(String alias);

    /**
     * @return This path as a list of associations: the parent associations
     *         followed by this path's own association.
     */
    @NonNull
    default List<Association> asPath() {
        List<Association> associations = getAssociations();
        List<Association> newAssociations = new ArrayList<>(associations.size() + 1);
        newAssociations.addAll(associations);
        newAssociations.add(getAssociation());
        return newAssociations;
    }

    // JPA criteria ON clauses are not supported for association paths:
    // both overloads below always throw (see CriteriaUtils.notSupportedOperation).
    @Override
    @NonNull
    default Join<OwnerType, AssociatedEntityType> on(Expression<Boolean> restriction) {
        throw notSupportedOperation();
    }

    @Override
    @NonNull
    default Join<OwnerType, AssociatedEntityType> on(Predicate... restrictions) {
        throw notSupportedOperation();
    }

    /** Not supported; always throws. */
    @Override
    @Nullable
    default Predicate getOn() {
        throw notSupportedOperation();
    }

    /** Not supported; always throws. */
    @Override
    @NonNull
    default Attribute<? super OwnerType, ?> getAttribute() {
        throw notSupportedOperation();
    }

    /** @return Always {@code null}: association paths expose no parent {@code From}. */
    @Override
    @Nullable
    default From<?, OwnerType> getParent() {
        return null;
    }

    /** Not supported; always throws. */
    @Override
    @NonNull
    default JoinType getJoinType() {
        throw notSupportedOperation();
    }

    /** Not supported; always throws. */
    @Override
    @NonNull
    default Bindable<AssociatedEntityType> getModel() {
        throw notSupportedOperation();
    }
}
|
package no.item.enonic.builders;
import com.enonic.cms.api.client.Client;
import com.enonic.cms.api.client.ClientException;
import com.enonic.cms.api.client.model.GetContentByCategoryParams;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import org.jdom.JDOMException;
import org.jdom.output.DOMOutputter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
/**
 * Fluent builder around the Enonic CMS client's get-content-by-category call.
 * Configure via the chained setters, then call {@link #get()} to execute the
 * query and receive the result as a W3C DOM {@link Document}.
 *
 * FIX: the error log in {@link #get()} previously said "content by section"
 * although this builder queries by category.
 */
public class ContentByCategoryQueryBuilder {

    private final Logger logger = LoggerFactory.getLogger(ContentByCategoryQueryBuilder.class);

    private final Client client;
    private final GetContentByCategoryParams params;
    private final DOMOutputter domOutputter;

    private ContentByCategoryQueryBuilder(Client client, int[] categoryKeys) {
        this.client = client;
        this.params = new GetContentByCategoryParams();
        params.categoryKeys = categoryKeys;
        this.domOutputter = new DOMOutputter();
    }

    /** Entry point: builds a query over the given category keys. */
    public static ContentByCategoryQueryBuilder of(Client client, int[] categoryKeys) {
        return new ContentByCategoryQueryBuilder(client, categoryKeys);
    }

    /** Sets the free-text/content query expression. */
    public ContentByCategoryQueryBuilder query(String query) {
        params.query = query;
        return this;
    }

    /** Sets the ordering expression. */
    public ContentByCategoryQueryBuilder orderBy(String orderBy) {
        params.orderBy = orderBy;
        return this;
    }

    /** Sets how many category levels to descend. */
    public ContentByCategoryQueryBuilder levels(int levels) {
        params.levels = levels;
        return this;
    }

    /** Sets how many levels of child content to include. */
    public ContentByCategoryQueryBuilder childrenLevel(int levels) {
        params.childrenLevel = levels;
        return this;
    }

    /** Sets how many levels of parent content to include. */
    public ContentByCategoryQueryBuilder parentLevel(int levels) {
        params.parentLevel = levels;
        return this;
    }

    /** Sets the result offset (for paging). */
    public ContentByCategoryQueryBuilder from(int index) {
        params.index = index;
        return this;
    }

    /** Sets the maximum number of results (for paging). */
    public ContentByCategoryQueryBuilder size(int count) {
        params.count = count;
        return this;
    }

    /** Includes the content data block in the result. */
    public ContentByCategoryQueryBuilder includeData() {
        params.includeData = true;
        return this;
    }

    /** Includes user rights information in the result. */
    public ContentByCategoryQueryBuilder includeUserRights() {
        params.includeUserRights = true;
        return this;
    }

    /** Includes version information in the result. */
    public ContentByCategoryQueryBuilder includeVersionsInfo() {
        params.includeVersionsInfo = true;
        return this;
    }

    /** Includes offline (unpublished) content in the result. */
    public ContentByCategoryQueryBuilder includeOfflineContent() {
        params.includeOfflineContent = true;
        return this;
    }

    /**
     * Executes the query.
     *
     * @return the query result converted from JDOM to a W3C DOM document
     * @throws ClientException if the CMS client call fails (logged and rethrown)
     */
    public Document get() {
        try {
            Preconditions.checkNotNull(params.categoryKeys, "Content By Category expects category keys");
            return domOutputter.output(client.getContentByCategory(params));
        } catch (ClientException e) {
            logger.error("Can't get content by category [{}]", params.categoryKeys, e);
            throw e;
        } catch (JDOMException e) {
            // NOTE(review): Throwables.propagate is deprecated in newer Guava;
            // kept here to avoid changing the thrown exception type.
            throw Throwables.propagate(e);
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.algebra;
import java.util.ArrayList ;
import java.util.HashMap ;
import java.util.List ;
import java.util.Map ;
import org.apache.jena.graph.Node ;
import org.apache.jena.graph.Triple ;
import org.apache.jena.sparql.algebra.op.OpBGP ;
import org.apache.jena.sparql.algebra.op.OpPropFunc ;
import org.apache.jena.sparql.algebra.op.OpSequence ;
import org.apache.jena.sparql.algebra.op.OpTable ;
import org.apache.jena.sparql.core.BasicPattern ;
import org.apache.jena.sparql.expr.Expr ;
import org.apache.jena.sparql.expr.ExprList ;
import org.apache.jena.sparql.pfunction.PropFuncArg ;
import org.apache.jena.sparql.pfunction.PropertyFunctionRegistry ;
import org.apache.jena.sparql.util.Context ;
import org.apache.jena.sparql.util.ExprUtils ;
import org.apache.jena.sparql.util.graph.GNode ;
import org.apache.jena.sparql.util.graph.GraphList ;
/**
 * Rewrites a BGP (basic graph pattern) so that triples whose predicate is a
 * registered property function ("magic property") are replaced by
 * {@link OpPropFunc} invocations, stitched together with the remaining
 * plain-triple BGPs in an {@link OpSequence}.
 */
public class PropertyFunctionGenerator
{
    /**
     * Entry point: returns the input unchanged when the pattern is empty or
     * contains no property functions, otherwise the compiled op structure.
     */
    public static Op buildPropertyFunctions(PropertyFunctionRegistry registry, OpBGP opBGP, Context context)
    {
        if ( opBGP.getPattern().isEmpty() )
            return opBGP ;
        return compilePattern(registry, opBGP.getPattern(), context) ;
    }

    private static Op compilePattern(PropertyFunctionRegistry registry, BasicPattern pattern, Context context)
    {
        // Split into triples and property functions.
        // 1/ Find property functions.
        //    Property functions may involve other triples (for list arguments)
        //    (but leave the property function triple in-place as a marker)
        // 2/ Find arguments for property functions
        //    (but leave the property function triple in-place as a marker)
        // 3/ For remaining triples, put into basic graph patterns,
        //    and string together the procedure calls and BGPs.

        List<Triple> propertyFunctionTriples = new ArrayList<>() ;    // Property functions seen
        BasicPattern triples = new BasicPattern(pattern) ;  // A copy of all triples (later, it is mutated)

        // Find the triples invoking property functions, and those not.
        findPropertyFunctions(context, pattern, registry, propertyFunctionTriples) ;

        if ( propertyFunctionTriples.size() == 0 )
            //No property functions.
            return new OpBGP(pattern) ;

        Map<Triple, PropertyFunctionInstance> pfInvocations = new HashMap<>() ;  // Map triple => property function instance

        // Removes triples of list arguments.  This mutates 'triples'
        findPropertyFunctionArgs(context, triples, propertyFunctionTriples, pfInvocations) ;

        // Now make the OpSequence structure.
        Op op = makeStages(triples, pfInvocations) ;
        return op ;
    }

    private static void findPropertyFunctions(Context context,
                                              BasicPattern pattern,
                                              PropertyFunctionRegistry registry,
                                              List<Triple> propertyFunctionTriples)
    {
        // Step 1 : find property functions (if any); collect triples.
        // Not list arg triples at this point.
        for ( Triple t : pattern )
        {
            if ( isMagicProperty(registry, t) )
                propertyFunctionTriples.add(t) ;
        }
    }

    private static void findPropertyFunctionArgs(Context context,
                                                 BasicPattern triples,
                                                 List<Triple> propertyFunctionTriples,
                                                 Map<Triple, PropertyFunctionInstance> pfInvocations)
    {
        // Step 2 : for each property function, remove associated triples in list arguments;
        // Leave the propertyFunction triple itself.
        for ( Triple pf : propertyFunctionTriples )
        {
            PropertyFunctionInstance pfi = magicProperty( context, pf, triples );
            pfInvocations.put( pf, pfi );
        }
    }

    /** One property function call site: its predicate plus subject/object argument lists. */
    private static class PropertyFunctionInstance
    {
        Node predicate ;
        PropFuncArg subjArgs ;
        PropFuncArg objArgs ;

        PropertyFunctionInstance(PropFuncArg sArgs, Node predicate, PropFuncArg oArgs)
        {
            this.subjArgs = sArgs ;
            this.predicate = predicate ;
            this.objArgs = oArgs ;
        }

        // Subject args then object args, each node converted to an expression.
        ExprList argList()
        {
            ExprList exprList = new ExprList() ;
            argList(exprList, subjArgs) ;
            argList(exprList, objArgs) ;
            return exprList ;
        }

        PropFuncArg getSubjectArgList()     { return subjArgs ; }
        PropFuncArg getObjectArgList()      { return objArgs ; }

        private static void argList(ExprList exprList, PropFuncArg pfArg)
        {
            if ( pfArg.isNode() )
            {
                Node n = pfArg.getArg() ;
                Expr expr = ExprUtils.nodeToExpr(n) ;
                exprList.add(expr) ;
                return ;
            }

            for ( Node n : pfArg.getArgList() )
            {
                Expr expr = ExprUtils.nodeToExpr(n) ;
                exprList.add(expr) ;
            }
        }
    }

    private static Op makeStages(BasicPattern triples, Map<Triple, PropertyFunctionInstance> pfInvocations)
    {
        // Step 3 : Make the operation expression.
        //   For each property function, insert the implementation
        //   For each block of non-property function triples, make a BGP.
        Op op = null;
        BasicPattern pattern = null ;
        for ( Triple t : triples )
        {
            if ( pfInvocations.containsKey(t) )
            {
                // Flush any accumulated plain triples as a BGP, then chain the
                // property function op over it.
                op = flush(pattern, op) ;
                pattern = null ;
                PropertyFunctionInstance pfi = pfInvocations.get(t) ;
                OpPropFunc opPF =  new OpPropFunc(t.getPredicate(), pfi.getSubjectArgList(), pfi.getObjectArgList(), op) ;
                op = opPF ;
                continue ;
            }

            // Regular triples - make sure there is a basic pattern in progress.
            if ( pattern == null )
                pattern = new BasicPattern() ;
            pattern.add(t) ;
        }
        op = flush(pattern, op) ;
        return op ;
    }

    // Appends the pending BGP (if any) to the op chain; a null chain with no
    // pattern becomes the unit table.
    private static Op flush(BasicPattern pattern, Op op)
    {
        if ( pattern == null || pattern.isEmpty() )
        {
            if ( op == null )
                return OpTable.unit() ;
            return op ;
        }
        OpBGP opBGP = new OpBGP(pattern) ;
        return OpSequence.create(op, opBGP) ;
    }

    private static boolean isMagicProperty(PropertyFunctionRegistry registry, Triple pfTriple)
    {
        if ( ! pfTriple.getPredicate().isURI() )
            return false ;

        if ( registry.manages(pfTriple.getPredicate().getURI()) )
            return true ;
        return false ;
    }

    // Remove all triples associated with this magic property.
    // Make an instance record.
    private static PropertyFunctionInstance magicProperty(Context context,
                                                          Triple pfTriple,
                                                          BasicPattern triples)
    {
        List<Triple> listTriples = new ArrayList<>() ;

        GNode sGNode = new GNode(triples, pfTriple.getSubject()) ;
        GNode oGNode = new GNode(triples, pfTriple.getObject()) ;
        List<Node> sList = null ;
        List<Node> oList = null ;

        if ( GraphList.isListNode(sGNode) )
        {
            sList = GraphList.members(sGNode) ;
            GraphList.allTriples(sGNode, listTriples) ;
        }
        if ( GraphList.isListNode(oGNode) )
        {
            oList = GraphList.members(oGNode) ;
            GraphList.allTriples(oGNode, listTriples) ;
        }

        PropFuncArg subjArgs = new PropFuncArg(sList, pfTriple.getSubject()) ;
        PropFuncArg objArgs =  new PropFuncArg(oList, pfTriple.getObject()) ;

        // Confuses single arg with a list of one.
        PropertyFunctionInstance pfi = new PropertyFunctionInstance(subjArgs, pfTriple.getPredicate(), objArgs) ;

        triples.getList().removeAll(listTriples) ;
        return pfi ;
    }
}
|
import * as React from 'react';
import styled from 'react-emotion';
import { mixins } from '../styles';
// Props for IconButton: the SVG url to render plus optional extra CSS.
interface IconButtonProps extends React.HTMLProps<HTMLInputElement> {
  svg: string;      // url of the icon image
  styles?: string;  // extra CSS appended to the styled component
};

// Shared CSS for a 24x24 element showing the given SVG centered as a background.
const iconMixin = (svg: string) => `
  width: 24px;
  height: 24px;
  background: url(${svg});
  background-repeat: no-repeat;
  background-position: center center;
`;

// Non-interactive inline icon.
export const Icon = styled('div')`
  ${(props: { svg: string }): string => iconMixin(props.svg)}
  display: inline-block;
`;

// Clickable icon rendered as an <input type="button"> with hover feedback.
const IconInput = styled('input')`
  ${mixins.resetInput}
  ${(props: IconButtonProps): string => iconMixin(props.svg)}
  cursor: pointer;
  border-radius: 3px;
  &:hover {
    background-color: rgba(0, 0, 0, 0.2);
  }
  ${(props: IconButtonProps): string | undefined => props.styles}
`;

// Button component wrapping IconInput; forwards onClick, svg and styles.
export class IconButton extends React.PureComponent<IconButtonProps> {
  public render() {
    return (
      <IconInput type="button" onClick={this.props.onClick} svg={this.props.svg} styles={this.props.styles} />
    );
  }
}
|
<filename>src/globus-client-java/src/main/java/org/globus/GlobusRestInterface.java<gh_stars>0
/*
* Copyright 2014 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.globus;
import java.util.Map;
/**
* @author pruyne
*
*/
/**
 * Minimal REST client abstraction for Globus services: maps a service
 * ({@link RequestType}) plus a path to an HTTP operation and deserializes the
 * response into the requested class.
 */
public interface GlobusRestInterface
{
    /** A Globus service endpoint; {@code typeName} is its host name / key. */
    public enum RequestType {
        @Deprecated
        NEXUS("nexus"),
        transfer("transfer.api.globus.org"),
        groups("nexus.api.globus.org"),
        auth("auth.globus.org"),
        atmosphere("atmosphere"),
        publish("publish"),
        search("search.api.globus.org"),
        identifiers("identifiers.globus.org"),
        xsede("xsede");

        private String typeName;

        public String getTypeName()
        {
            return typeName;
        }

        private RequestType(String typeName) {
            this.typeName = typeName;
        }

        /** Reverse lookup by {@code typeName}; returns {@code null} when unknown. */
        public static RequestType forTypeName(String typeName) {
            RequestType[] values = RequestType.values();
            for (RequestType requestType : values) {
                if (requestType.typeName.equals(typeName)) {
                    return requestType;
                }
            }
            return null;
        }
    }

    /** Supported HTTP verbs. */
    public enum RestMethod {
        GET,
        PUT,
        POST,
        DELETE
    }

    /** GET without query parameters; fails unless the HTTP status equals {@code desiredHttpResponse}. */
    public <T> T doGet(RequestType requestType, String path, int desiredHttpResponse,
                       Class<T> responseClass) throws GlobusClientException;

    /** Generic operation: verb, service, path, query params and optional request body. */
    public <T> T doRestOp(RestMethod method, RequestType requestType, String path,
                          int desiredHttpResponse, Map<String, Object> params,
                          Object requestEntity, Class<T> responseClass)
        throws GlobusClientException;

    /** POST with query parameters and a request body. */
    public <T> T doPost(RequestType requestType, String path, int desiredHttpResponse,
                        Map<String, Object> params, Object requestObj, Class<T> responseClass)
        throws GlobusClientException;

    /** PUT with query parameters and a request body. */
    public <T> T doPut(RequestType requestType, String path, int desiredHttpResponse,
                       Map<String, Object> params, Object requestObj, Class<T> responseClass)
        throws GlobusClientException;

    /** GET with query parameters. */
    public <T> T doGet(RequestType requestType, String path, int desiredHttpResponse,
                       Map<String, Object> params, Class<T> responseClass)
        throws GlobusClientException;

    /** DELETE without query parameters. */
    public <T> T doDelete(RequestType requestType, String path, int desiredHttpResponse,
                          Class<T> responseClass) throws GlobusClientException;

    /** @return the base URL used for requests to the given service */
    public String getRootUrlForRequestType(RequestType requestType) throws GlobusClientException;

    /** @return the OAuth bearer token used for requests to the given service */
    public String getBearerTokenForRequestType(RequestType requestType);
}
|
#!/bin/bash
set -eo pipefail

# Integration smoke test for a Monica image: start a throwaway MySQL 5.7
# container, link the image under test to it, wait for the Laravel database
# migrations to finish, then verify a couple of artisan commands succeed.

dir="$(dirname "$(readlink -f "$BASH_SOURCE")")"
image="$1"
mysqlImage='mysql:5.7'

# ensure the mysqlImage is ready and available
if ! docker image inspect "$mysqlImage" &> /dev/null; then
	docker pull "$mysqlImage" > /dev/null
fi

# Create an instance of the container-under-test
mysqlCid="$(docker run -d \
	-e MYSQL_RANDOM_ROOT_PASSWORD=true \
	-e MYSQL_DATABASE=monica \
	-e MYSQL_USER=homestead \
	-e MYSQL_PASSWORD=secret \
	"$mysqlImage")"
trap "docker rm -vf $mysqlCid > /dev/null" EXIT

cid="$(docker run -d \
	--link "$mysqlCid":mysql \
	-e DB_HOST=mysql \
	"$image")"
# NOTE: this trap REPLACES the one above (bash keeps a single EXIT trap), so
# it must clean up both containers, not just $cid.
trap "docker rm -vf $cid $mysqlCid > /dev/null" EXIT

# Run an artisan command inside the container under test.
_artisan() {
	docker exec "$cid" php artisan "$@"
}

# returns success when all database migrations are finished
_migrate_done() {
	local status
	status="$(_artisan migrate:status)"
	# Done when at least one migration has run (" Yes ") and none are
	# still pending (" No ") in the migrate:status table.
	if grep -q ' Yes ' <<<"$status" && ! grep -q ' No ' <<<"$status"; then
		return 0
	fi
	return 1
}

# check artisan command for specific output; print and error when not found
_artisan_test() {
	local match="$1"; shift
	output="$(_artisan "$@")"
	if ! grep -iq "$match" <<<"$output"; then
		echo "Match: '$match' not found in: $output"
		return 1
	fi
}

# Give some time to install
. "$dir/../../retry.sh" --tries 30 '_migrate_done'

# Check if installation is complete
_artisan monica:getversion > /dev/null
. "$dir/../../retry.sh" --tries 5 -- _artisan_test 'No scheduled commands are ready to run.' schedule:run
/**
 * Eloquent model representing a project.
 */
class Project extends Model
{
    /**
     * The resources attached to this project: a many-to-many relation
     * through the "project_resource" pivot table, keyed by project_id
     * and resource_id.
     *
     * @return \Illuminate\Database\Eloquent\Relations\BelongsToMany
     */
    public function resources()
    {
        return $this->belongsToMany('App\Resource', 'project_resource', 'project_id', 'resource_id');
    }
}
/*---------------------------------------------------------------------------
 * Copyright (c) 2016, <NAME>, All Rights Reserved
 * SPDX-License-Identifier: LicenseRef-PBL
 *
 * This file and the related binary are licensed under the
 * Permissive Binary License, Version 1.0 (the "License");
 * you may not use these files except in compliance with the License.
 *
 * You may obtain a copy of the License here:
 * LICENSE-permissive-binary-license-1.0.txt and at
 * https://www.mbed.com/licenses/PBL-1.0
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Component : Wireless LAN driver
 * File : cb_types.h
 *
 * Description : Common definitions for a GCC compatible compiler.
 *-------------------------------------------------------------------------*/

/**
 * @file cb_types.h Defines type required for the entire driver.
 * The defines in this file will have to be adapted for the platform.
 * @ingroup platform
 */

#ifndef _CB_PLATFORM_BASIC_TYPES_H_
#define _CB_PLATFORM_BASIC_TYPES_H_

#include <stdint.h>
#include <stdbool.h>

/*===========================================================================
 * TYPES
 *=========================================================================*/

/*===========================================================================
 * COMMON SYSTEM DEFINES
 *=========================================================================*/

/* Fixed-width scalar aliases over <stdint.h>/<stdbool.h> so driver code is
 * insulated from platform-specific base types. */
typedef int8_t cb_int8;
typedef int16_t cb_int16;
typedef int32_t cb_int32;
typedef int64_t cb_int64;

typedef uint8_t cb_uint8;
typedef uint16_t cb_uint16;
typedef uint32_t cb_uint32;
typedef uint64_t cb_uint64;

typedef bool cb_boolean;
typedef char cb_char;
typedef int cb_int;

/**
 * Used when declaring an empty array that does not take up space in a struct.
 * Example: struct { cb_uint8 payload[cb_EMPTY_ARRAY]; }
 * In some compilers this is empty i.e. payload[]. While in some it requires a zero.
 * I.e. payload[0];
 * Use this define to get it working for your system.
 */
#define cb_EMPTY_ARRAY (0)

/*===========================================================================
 * DEFINES
 *=========================================================================*/

/**
 * Used in function definitions to declare an input parameter unused to avoid warnings.
 * Token-pastes the name to UNUSED_<x>; on GCC/Clang/ARMCC it also adds
 * __attribute__((unused)) so the compiler suppresses the warning.
 */
#if defined(__GNUC__) || defined(__clang__) || defined(__CC_ARM)
#define cb_UNUSED(x) UNUSED_ ## x __attribute__((unused))
#else
#define cb_UNUSED(x) UNUSED_ ## x
#endif

/**
 * Define cb_ASSERT to the wanted assert handler.
 */
/*
#define cb_ASSERT(exp) do { if (!(exp)) { \
W_PRINT("ASSERT %s:%d\n", __FILE__, __LINE__); \
while(1); \
} } while(0)
*/
/* The actual cb_ASSERT definition is provided by cb_assert.h. */
#include "cb_assert.h"

/**@{*/
/**
 * Packed struct defines.
 * - cb_PACKED_STRUCT_ATTR_PRE is used before the typedef'ed struct declaration.
 * - cb_PACKED_STRUCT_ATTR_INLINE_PRE is after the typedef but before the struct declaration.
 * - cb_PACKED_STRUCT_ATTR_INLINE_POST is used after the struct declaration but before the typedef'ed name.
 * - cb_PACKED_STRUCT_ATTR_POST is used after the entire struct declaration.
 *
 * example:
 * cb_PACKED_STRUCT_ATTR_PRE
 * typedef cb_PACKED_STRUCT_ATTR_INLINE_PRE struct myPackedStruct {
 * int a;
 * int b;
 * } cb_PACKED_STRUCT_ATTR_INLINE_POST myPackedStruct
 * cb_PACKED_STRUCT_ATTR_POST
 *
 */
#define cb_PACKED_STRUCT_ATTR_PRE

/* IAR uses a prefix keyword (__packed); GCC-style compilers use a postfix
 * attribute instead, so the two inline positions are mutually exclusive. */
#if defined(__ICCARM__)
#define cb_PACKED_STRUCT_ATTR_INLINE_PRE __packed
#else
#define cb_PACKED_STRUCT_ATTR_INLINE_PRE
#endif
#if defined(__ICCARM__)
#define cb_PACKED_STRUCT_ATTR_INLINE_POST __packed
#else
#define cb_PACKED_STRUCT_ATTR_INLINE_POST __attribute__ ((__packed__))
#endif
#define cb_PACKED_STRUCT_ATTR_POST
/**@}*/

#endif /* _CB_PLATFORM_BASIC_TYPES_H_ */
# Fraction of model predictions that agree with the ground-truth labels.
def calculate_accuracy(predictions, ground_truth):
    """Return matches / len(predictions); 0 when predictions is empty.

    Pairs are compared positionally; zip truncates to the shorter input,
    while the denominator is always len(predictions).
    """
    if not predictions:
        return 0
    matches = sum(pred == truth for pred, truth in zip(predictions, ground_truth))
    return matches / len(predictions)
# Persistence stub: announces the value instead of writing to a real database.
def store_accuracy_in_database(accuracy):
    """Record `accuracy`; currently just prints a confirmation message."""
    message = f"Accuracy {accuracy} stored in the database"
    print(message)
# Demo driver: score the example predictions and record the result.
# NOTE(review): the original built PYTHON_CMD from CODE_CMD, TMP_LOCATION and
# TRUTH_LOCATION, none of which are defined anywhere in this file, so the
# script crashed with NameError before scoring anything. Read them from the
# environment instead, with empty defaults so the script also runs standalone.
import os

CODE_CMD = os.environ.get("CODE_CMD", "")
TMP_LOCATION = os.environ.get("TMP_LOCATION", "")
TRUTH_LOCATION = os.environ.get("TRUTH_LOCATION", "")
# Construct Python command (built but not executed here, as in the original).
PYTHON_CMD = f"{CODE_CMD} {TMP_LOCATION} {TRUTH_LOCATION}"

predictions = [1, 0, 1, 1, 0]   # Example model predictions
ground_truth = [1, 1, 1, 0, 0]  # Example ground truth values

accuracy = calculate_accuracy(predictions, ground_truth)  # 3/5 correct -> 0.6
store_accuracy_in_database(accuracy)
<reponame>abhisheklalwani/DeepLabv3FineTuning
import os
import shutil

# Source split of the Small Obstacle dataset and the destination root for the
# DeepLabv3 fine-tuning test set.
test_data_path = "D:/small_obs_dataset/Small_Obstacle_Dataset/test/stadium_3/"
target_path = "C:/Users/Abhishek/Desktop/Work/UMass Fall 2021/682/DeepLabV3FineTuningLatest/DeepLabv3FineTuning/SmallObjectDataset/Test_Images/"

# Subsample every other labelled frame (even indices in directory order) and
# copy the image into Images/ and its mask into Masks/.
for index, name in enumerate(os.listdir(test_data_path + 'labels')):
    if index % 2 != 0:
        continue
    shutil.copy(test_data_path + 'image/' + name, target_path + 'Images')
    shutil.copy(test_data_path + 'labels/' + name, target_path + 'Masks/' + name)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.