text
stringlengths 1
1.05M
|
|---|
def sum_even_numbers(num_list):
    """Return the sum of all even integers in num_list.

    Entries that are not ints are ignored. If iteration over num_list
    fails (e.g. it is not iterable), the string "Error occurred" is
    returned, matching the original contract.
    """
    try:
        return sum(n for n in num_list if isinstance(n, int) and n % 2 == 0)
    except Exception:
        return "Error occurred"
|
/**
 * Calculate the final price of each product after its category discount.
 *
 * Discounts: electronics 10%, clothing 20%, books 15%; any other
 * category keeps the base price unchanged.
 *
 * @param array $products Products, each with 'name', 'base_price', 'category'.
 * @return array Entries of the form ['name' => ..., 'final_price' => ...].
 */
function calculateProductPrices($products) {
    // Discount rate per category; categories absent from the map get none.
    $discountRates = [
        'electronics' => 0.10,
        'clothing'    => 0.20,
        'books'       => 0.15,
    ];

    $result = [];
    foreach ($products as $item) {
        $rate = isset($discountRates[$item['category']]) ? $discountRates[$item['category']] : 0.0;
        $result[] = [
            'name'        => $item['name'],
            'final_price' => $item['base_price'] * (1 - $rate),
        ];
    }
    return $result;
}
|
<reponame>isomorfeus/isomorfeus-project
require 'spec_helper'
# Exercises LucidTranslation::Mixin directly in the server runtime and via
# server-side rendering. `on_server` runs its block inside the server process
# and returns the block's result to the spec.
RSpec.describe 'LucidTranslation::Mixin' do
  context 'on server' do
    # Including the mixin must register it in the class ancestry.
    it 'can mixin' do
      result = on_server do
        class TestClass
          include LucidTranslation::Mixin
        end
        TestClass.ancestors
      end
      expect(result).to include(LucidTranslation::Mixin)
    end

    # Locale/domain expectations ('de', 'app') come from the test app's
    # Isomorfeus configuration.
    it 'has available locales' do
      result = on_server do
        Isomorfeus.available_locales
      end
      expect(result).to eq(['de'])
    end

    it 'has locale' do
      result = on_server do
        Isomorfeus.locale
      end
      expect(result).to eq('de')
    end

    it 'has domain' do
      result = on_server do
        Isomorfeus.i18n_domain
      end
      expect(result).to eq('app')
    end

    # `extend` exposes the gettext-style _() helper at class level.
    it 'can translate on class level' do
      result = on_server do
        class TestClass
          extend LucidTranslation::Mixin
        end
        TestClass._('simple')
      end
      expect(result).to eq('einfach')
    end

    # `include` exposes _() on instances.
    it 'can translate on instance level' do
      result = on_server do
        class TestClass
          include LucidTranslation::Mixin
        end
        TestClass.new._('simple')
      end
      expect(result).to eq('einfach')
    end
  end

  context 'Server Side Rendering' do
    before do
      @page = visit('/ssr')
    end

    it 'renders on the server' do
      expect(@page.inner_text).to include('Rendered!')
    end

    # The SSR page must contain the translated string, proving translation
    # works during render.
    it 'translates' do
      expect(@page.inner_text).to include('einfach')
    end
  end
end
|
<gh_stars>0
'use strict';
// DB abstraction
// For postgreSQL use sequalize, as it returns Promises
const redis = require('redis');
const REDIS_URI = process.env.URI || require('../config').uri;
const client = redis.createClient(REDIS_URI);
// DB facade around the shared Redis client. getDriverInfo currently returns
// stub data; both methods resolve immediately.
class DB {
  constructor() {
    // Share the module-level client so every consumer reuses one connection.
    this.client = client;
  }

  /**
   * Placeholder data fetch; always resolves to true.
   * @returns {Promise<boolean>}
   */
  getData() {
    return Promise.resolve(true);
  }

  /**
   * Return driver information.
   * NOTE(review): returns hard-coded stub data and ignores both arguments —
   * presumably a placeholder until the real Redis lookup is implemented.
   * (A leftover debug `console.log('infooo')` was removed.)
   * @param {string} name driver name (currently unused)
   * @param {*} ru extra flag — meaning unclear from this file; TODO confirm
   * @returns {Promise<Object>} driver profile: name, stats, bio, photo path
   */
  getDriverInfo(name, ru) {
    return Promise.resolve({
      name: 'Vettel',
      stats: {
        championships: 4,
        wins: 40,
        poles: 40,
        fastlaps: 40,
        seasons: 9,
      },
      bio: 'some big text',
      photo: '/home/vk/Sebastian_Vettel_2011_Sebastian_Vettel_in_Yokohama_Infiniti_event.jpg',
    });
  }
}

// Export a shared singleton instance.
module.exports = new DB();
|
package com.partyrgame.socketservice.service.impl;
import java.util.List;
import com.partyrgame.blackhandservice.model.BlackHand;
import com.partyrgame.chatservice.model.ChatMessage;
import com.partyrgame.roomservice.model.Room;
import com.partyrgame.socketservice.service.MessageService;
import com.partyrgame.socketservice.util.WebsocketConstants;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.event.EventListener;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.socket.messaging.SessionConnectedEvent;
import org.springframework.web.socket.messaging.SessionDisconnectEvent;
import lombok.extern.slf4j.Slf4j;
/**
 * Publishes chat, lobby, Black Hand game and WebRTC-offer messages to STOMP
 * destinations via Spring's SimpMessagingTemplate, and logs WebSocket
 * session lifecycle events.
 */
@Slf4j
@Service
public class MessageServiceImpl implements MessageService {
  private final SimpMessagingTemplate template;

  /**
   * MessageServiceImpl.
   *
   * @param template messaging template used for every send.
   */
  @Autowired
  MessageServiceImpl(SimpMessagingTemplate template) {
    this.template = template;
  }

  /**
   * sendChatMessage: publishes a chat message to the channel's chat topic.
   */
  public void sendChatMessage(ChatMessage chatMessage, String channel) {
    String destination = WebsocketConstants.CHAT_SUBSCRIBE + "/" + convertChannelName(channel);
    this.template.convertAndSend(destination, chatMessage);
  }

  /**
   * sendRoomMessage: sends a list of rooms to the client.
   */
  public void sendRoomMessage(List<Room> rooms, String channel) {
    String destination = WebsocketConstants.LOBBY_SUBSCRIBE + "/" + convertChannelName(channel);
    log.info("{}", rooms);
    this.template.convertAndSend(destination, rooms);
  }

  /**
   * sendBlackHandMessage: sends a BlackHand object to the current game.
   */
  public void sendBlackHandMessage(BlackHand blackHand, String channel) {
    String destination = WebsocketConstants.BLACK_HAND_SUBSCRIBE + "/" + convertChannelName(channel);
    this.template.convertAndSend(destination, blackHand);
  }

  /**
   * sendOfferMessage: sends an offer payload to the game's broker queue.
   * Note: unlike the other senders, the raw channel name is used here
   * (no convertChannelName) — the queue name must match what the client
   * subscribed with.
   */
  public void sendOfferMessage(String offer, String channel) {
    String queue = WebsocketConstants.BLACK_HAND_BROKER + "/" + channel;
    // Parameterized logging for consistency with the other methods.
    log.info("sending offer to {}", queue);
    this.template.convertAndSend(queue, offer);
  }

  /**
   * handleWebSocketConnectListener: logs newly opened WebSocket sessions.
   */
  @EventListener
  public void handleWebSocketConnectListener(SessionConnectedEvent event) {
    log.info("received a new web socket connection. time: {}; user: {}; message: {}; source: {}", event.getTimestamp(),
        event.getUser(), event.getMessage(), event.getSource());
  }

  /**
   * handleWebSocketDisconnectListener: logs closed WebSocket sessions.
   */
  @EventListener
  public void handleWebSocketDisconnectListener(SessionDisconnectEvent event) {
    // Fixed copy-paste bug: this previously logged "received a new web
    // socket connection" for disconnect events as well.
    log.info("web socket connection closed. time: {}; user: {}; message: {}; source: {}", event.getTimestamp(),
        event.getUser(), event.getMessage(), event.getSource());
  }

  /**
   * convertChannelName: removes special characters, converts spaces to dashes,
   * and converts to lowercase.
   *
   * @param channel input channel which may contain spaces and special characters.
   * @return lowercase alphanumeric version of a room name (channel) separated by
   *         dashes.
   */
  private String convertChannelName(String channel) {
    channel = channel.replaceAll("[^a-zA-Z0-9\\s\\-]", "");
    channel = channel.replaceAll(" ", "-").toLowerCase();
    log.info("altered channel: {}", channel);
    return channel;
  }
}
|
<gh_stars>1-10
export {default as favoritesFilterSelector} from './favoritesFilterSelector';
export {default as flagsSelector} from './flagsSelector';
export {default as isAboutRouteSelector} from './isAboutRouteSelector';
export {default as locationSelector} from './locationSelector';
export {default as themeSelector} from './themeSelector';
|
package cyclops.reactor.container.transformer;
import cyclops.container.foldable.AbstractConvertableSequenceTest;
import cyclops.container.immutable.impl.ConvertableSequence;
import cyclops.monads.AnyMs;
import cyclops.monads.Witness.list;
import cyclops.reactor.stream.FluxReactiveSeq;
/**
 * Runs the shared ConvertableSequence contract tests against sequences built
 * by lifting a Reactor Flux into the list monad transformer via AnyMs.liftM.
 */
public class StreamTSeqConvertableSequenceTest extends AbstractConvertableSequenceTest {

    /** Builds a ConvertableSequence over the given elements. */
    @Override
    public <T> ConvertableSequence<T> of(T... elements) {
        return AnyMs.liftM(FluxReactiveSeq.of(elements),
                           list.INSTANCE)
                    .to();
    }

    /** Builds an empty ConvertableSequence. */
    @Override
    public <T> ConvertableSequence<T> empty() {
        return AnyMs.liftM(FluxReactiveSeq.<T>empty(),
                           list.INSTANCE)
                    .to();
    }
}
|
/******************************************************************/
/******** Socket connection handling                    ***********/
/******************************************************************/
// Stub with a no-op `on` so handlers can be registered before the real
// socket.io connection exists.
var socket = { on: function(){} };
var url = "https://motion-share.herokuapp.com"; // URL of the websocket server

// Connect: create a fresh socket.io connection, or reconnect an existing one.
var connect = function() {
    //alert("connect");
    if ( !socket.connected ) socket = io.connect(url);
    else socket.connect();
}

// Disconnect from the websocket server.
var disconnect = function(){
    //alert("disconnect");
    socket.disconnect();
}
/******************************************************************/
/******** 送信処理 ***********/
/******************************************************************/
// contentID:0  Contact — send handler.
// Reads the saved contact from localStorage, strips the password, applies
// the share settings, Base64-encodes the payload and emits it to the server.
function sendContact(socketID){
    // Only proceed when a contact is stored in localStorage.
    if(!(localStorage.contact===void 0)){
        // Temporary variable holding the parsed JSON for easier handling.
        var befferContact=JSON.parse(localStorage.contact);
        // Use "unknown" when the name is empty.
        if(befferContact.Name=="") befferContact.Name="unknown";
        // The stored user record includes the password, so rebuild a
        // 4-field payload that excludes it.
        // JSON form (unused alternative):
        /*
        var sendingContact={
        "Name":befferContact.Name,
        "Id":befferContact.Id,
        "Phone":befferContact.Phone,
        "Mail":befferContact.Mail
        };
        */
        // Array form
        var sendingContact=[];
        sendingContact[0]=befferContact.Name;
        sendingContact[1]=localStorage.getItem("userId");
        sendingContact[2]=befferContact.Phone;
        sendingContact[3]=befferContact.Mail;
        // NOTE(review): `01`/`00` are octal literals while `10` is decimal
        // ten; if shareSetting is a two-bit flag set, these were probably
        // meant as binary masks (0b01 / 0b10) — confirm intended semantics.
        if((shareSetting&01)==00){
            sendingContact[2]="0000000000";
        }
        if((shareSetting&10)==00){
            sendingContact[3]="unknown";
        }
        sendingContact=sendingContact.toString();
        // Base64-encode (UTF-8 safe via encodeURIComponent/unescape)
        sendingContact=btoa(unescape(encodeURIComponent(sendingContact)));
        socket.emit("send real data to server", [ 0 , socketID , sendingContact ]);
        disconnect();
        Materialize.toast('Contact Data go to server', 2000,'blue');
        modeChange();
    }
}
// contentID:1  Schedule — send handler.
function sendSchedule(socketID){
    // Only proceed when schedules exist in localStorage.
    if(!(localStorage.schedule===void 0)){
        // NOTE(review): scheduleJson is assigned without var/let, creating
        // an implicit global — other functions in this file read it, but
        // confirm this is intentional.
        scheduleJson=JSON.parse(localStorage.schedule);
        var index='0';
        if(!(sessionStorage.scheduleIndex===void 0)){
            index=sessionStorage.scheduleIndex;
        }
        // Send the schedule the user selected.
        // JSON form (unused alternative):
        /*
        var sendingSche=scheduleJson[index];
        sendingSche=JSON.stringify(sendingSche);
        */
        // Array form: normalize the date separators to '/'.
        var dates=scheduleJson[index].date.replace(/T|-|:/g,"/");
        var notes=scheduleJson[index].note;
        var sendingSche=[dates,notes];
        sendingSche=sendingSche.toString();
        // NOTE(review): toString() never yields null, so the else branch
        // below is unreachable — confirm what emptiness check was intended.
        if(sendingSche!=null){
            // Base64-encode before sending; to send raw, comment the btoa
            // line back out.
            sendingSche=btoa(unescape(encodeURIComponent(sendingSche)));
            socket.emit("send real data to server", [ 1 , socketID , sendingSche ]);
            //socket.emit("html5_test", sendingSche);
            disconnect();
            Materialize.toast('Schedule go to Server', 2000,'blue');
            modeChange();
        }else{
            disconnect();
            Materialize.toast('There are not sharable schedule', 2000,'red');
            modeChange();
        }
    }
}
// contentID:2  Photo — send handler: emits the Base64 image data stored
// under 'imageData', then disconnects and returns to the previous mode.
function sendPhotoData(socketID){
    var data = localStorage.getItem('imageData');
    socket.emit("send real data to server", [ 2 , socketID , data ]);
    disconnect();
    Materialize.toast('Photo go to Server', 2000,'blue');
    modeChange();
}
/******************************************************************/
/******** 受信処理 ***********/
/******************************************************************/
// contentID:0  Contact — receive handler.
// Decodes the payload, registers the relation server-side, and saves the
// contact into the device address book (Cordova contacts plugin).
function receiveContact(rcvCtt){
    // Base64-decode (UTF-8 safe)
    rcvCtt=decodeURIComponent(escape(atob(rcvCtt)));
    // JSON form (unused alternative):
    /*
    var contact=JSON.parse(rcvCtt);
    var name=contact["Name"];
    var phone=contact["Phone"];
    var mail=contact["Mail"];
    */
    // Array form: [name, userId, phone, mail]
    var rcvCttString=rcvCtt.split(",");
    var name=rcvCttString[0];
    var phone=rcvCttString[2];
    var mail=rcvCttString[3];
    Materialize.toast("Received contact of "+ name,2000,'blue');
    // Send our own userID together with the sender's userID to the server.
    socket.emit("create relation",[localStorage.getItem("userId"),rcvCttString[1]]);
    var newContact=navigator.contacts.create({"displayName":name});
    var phoneNumbers=[];
    phoneNumbers[0] = new ContactField('mobile', phone, true); // preferred number
    var emails=[];
    // NOTE(review): the email field type is 'mobile' here — possibly meant
    // 'home'/'work'; confirm against the contacts plugin usage.
    emails[0]=new ContactField('mobile',mail,true);
    newContact.phoneNumbers=phoneNumbers;
    newContact.emails=emails;
    newContact.save();
    disconnect();
    receiverMode();
}
// contentID:1  Schedule — receive handler.
// Decodes the payload, stores the schedule, and navigates to the list view.
function receiveSchedule(rcvMsg){
    Materialize.toast("Received schedule",2000,'blue');
    // Base64-decode (UTF-8 safe)
    rcvMsg=decodeURIComponent(escape(atob(rcvMsg)));
    // JSON form (unused alternative):
    /*
    var sche=JSON.parse(rcvMsg);
    var datetime=sche["date"];
    var note =sche["note"];
    */
    // Array form: [date, note] where the date uses '/' separators.
    var rcvMsgString=rcvMsg.split(",");
    var datetime=rcvMsgString[0];
    var note=rcvMsgString[1];
    var datetimes=datetime.split("/");
    // Rebuild an ISO-like "YYYY-MM-DDTHH:MM" string.
    datetime=datetimes[0]+"-"+datetimes[1]+"-"+datetimes[2]+"T"+datetimes[3]+":"+datetimes[4];
    // Pick the first unused integer key of the schedule list as the new index.
    // NOTE(review): scheIndex and scheduleJson are implicit globals defined
    // elsewhere in the app — confirm they are initialized before this runs.
    for(var i=0;i<=Object.keys(scheduleJson).length;i++){
        if(!(i in scheduleJson)){
            scheIndex=i;
            break;
        }
    }
    // Persist the received schedule as JSON and refresh the display.
    scheduleToJson(datetime,note);
    scheduleAuto(scheIndex,datetime,note);
    scheduleShow();
    // The home screen shows the nearest schedule again.
    sessionStorage.scheduleIndex='0';
    // Automatically navigate to the schedule screen (remove if undesired).
    $("#view").load('scheduleList.html',function(){
        scheduleFanc.initialize();
    });
    disconnect();
    receiverMode();
}
// contentID:2  Photo — receive handler: stores the Base64 image, shows it
// in the UI, then disconnects and saves the photo.
function receivePhotoData(imageData){
    localStorage.setItem('imageData', imageData);
    Materialize.toast('Received Photo', 2000,'blue');
    var data = localStorage.getItem('imageData');
    $('.card-image').removeClass('loadingWidth');
    $('#camera_pic').attr('src', 'data:image/jpeg;charset=utf-8;base64,' + data);
    //saveBase64PhotoData(data);
    disconnect();
    receiverMode();
    savePhoto();
}
|
<reponame>danhagen/NonlinearControl
from pendulum_eqns.physiology.muscle_params_BIC_TRI import *
from pendulum_eqns.state_equations import *
from scipy.integrate import cumtrapz
import matplotlib.pyplot as plt
from danpy.sb import dsb
# Constant-velocity elbow trajectory: angle sweeps from pi/6 to 2*pi/3 at
# Omega rad/s, sampled at 1 kHz.
Theta_i = np.pi/6
Theta_f = 2*np.pi/3
Omega = 1
T_end = (Theta_f-Theta_i)/Omega
N = int(T_end*1000+1)
Time = np.linspace(0,T_end,N)
# State trajectory X: row 0 = joint angle, row 1 = (constant) angular velocity.
X = np.zeros((2,len(Time)))
X[0,:] = Theta_i + Omega*Time
X[1,:] = Omega*np.ones(np.shape(Time))
# Optimal tendon length for muscle 1 — assumed normalized; TODO confirm units.
lTo1 = 0.5
def error_func_1(X,T,T_i,Time):
    """Error term for muscle 1 combining the tendon log-term between tensions
    T and T_i with the time-integrated MTU velocity along trajectory X.

    Relies on α1, kT, cT, F_MAX1 and v_MTU1 brought in by the star imports
    above. Assumes X rows are [angle, angular velocity] — TODO confirm.
    """
    assert np.shape(X)[0]>=2, "X must be an array of shape (M,N) where M>=2."
    error = (lTo1*kT/np.cos(α1))*np.log((np.exp(T/(F_MAX1*cT*kT))-1)/(np.exp(T_i/(F_MAX1*cT*kT))-1)) + ((np.cos(α1)-1)/np.cos(α1))*np.trapz([v_MTU1(X[:,i]) for i in range(np.shape(X)[1])],Time)
    return(error)
def muscle_length_1(X,T,T_i,lm_i,Time):
    """Muscle length for muscle 1: initial length lm_i plus the integrated
    MTU velocity, minus the tendon-stretch log-term between tensions T and T_i.

    Uses α1, kT, cT, F_MAX1 and v_MTU1 from the star imports above.
    """
    assert np.shape(X)[0]>=2, "X must be an array of shape (M,N) where M>=2."
    lm = (1/np.cos(α1))*np.trapz([v_MTU1(X[:,i]) for i in range(np.shape(X)[1])],Time) + lm_i - (lTo1*kT/np.cos(α1))*np.log((np.exp(T/(F_MAX1*cT*kT))-1)/(np.exp(T_i/(F_MAX1*cT*kT))-1))
    return(lm)
def return_required_tension(Omega,T_i):
    """Closed-form tendon tension over time for a constant-velocity elbow
    movement from pi/6 to 2*pi/3 at angular velocity Omega.

    Parameters
    ----------
    Omega : float
        Constant angular velocity (rad/s); sets the movement duration.
    T_i : float
        Initial tendon tension (integration constant).

    Returns
    -------
    (Tension, Time) : tuple of np.ndarray
        Tension has one fewer sample than Time because cumtrapz shrinks the
        array by one; callers plot against Time[:-1].
    """
    Theta_i = np.pi/6
    Theta_f = 2*np.pi/3
    T_end = (Theta_f-Theta_i)/Omega
    # Fixed sample count so sweeps over Omega return equal-length arrays.
    # N = int(T_end*1000+1)
    N = 1001
    Time = np.linspace(0,T_end,N)
    X = np.zeros((2,len(Time)))
    X[0,:] = Theta_i + Omega*Time
    X[1,:] = Omega*np.ones(np.shape(Time))
    # Closed-form solution of the exponential tendon model. (Removed dead
    # code: a zero-array preallocation that was immediately overwritten and
    # an unused status bar that was constructed but never updated.)
    Tension = (F_MAX1*cT*kT)*np.log(np.exp((1/(lTo1*kT))*cumtrapz([v_MTU1(X[:,i]) for i in range(len(Time))],Time))*(np.exp(T_i/(F_MAX1*cT*kT))-1) + 1)
    return(Tension,Time)
def return_required_activation(T):
    """Map tendon tension T to the activation required to produce it, using
    the passive force F_PE1_1 evaluated at optimal length lo1 as the offset.

    NOTE(review): the state vector passed to F_PE1_1 fixes the muscle at
    optimal length with zero velocity — confirm this matches the model's
    intended operating point.
    """
    u = (T*np.cos(α1) - F_MAX1*np.cos(α1)**2*F_PE1_1([0,0,0,0,lo1,0,0,0]))/(F_MAX1*np.cos(α1)**2)
    return(u)
# --- Sweep 1: fixed initial tension (100), varying angular velocity ---
U1 = []
T1 = []
Time1 = []
Omega1 = np.arange(0.01,0.51,0.01)
T_i1 = 100*np.ones(np.shape(Omega1))
statusbar = dsb(0,len(Omega1),title="Fixed T_i, Sweeping Omega")
for i in range(len(Omega1)):
    T_temp,Time_temp = return_required_tension(Omega1[i],T_i1[i])
    T1.append(T_temp)
    Time1.append(Time_temp)
    U1.append(return_required_activation(T_temp))
    statusbar.update(i)
plt.figure()
ax1 = plt.gca()
ax1.set_title(r"Fixed $T_{i}$, Sweeping $\omega$" + "\n Tension vs. Time")
ax1.set_xlabel("Time (s)")
ax1.set_ylabel("Tension (T)")
# Tension arrays are one sample shorter than Time (cumtrapz), hence [:-1].
for i in range(len(Omega1)):
    ax1.plot(Time1[i][:-1],T1[i])
plt.figure()
ax2 = plt.gca()
ax2.set_title(r"Fixed $T_{i}$, Sweeping $\omega$" + "\n Activation vs. Time")
ax2.set_xlabel("Time (s)")
ax2.set_ylabel("Activation")
for i in range(len(Omega1)):
    ax2.plot(Time1[i][:-1],U1[i])
# --- Sweep 2: fixed angular velocity (0.01), varying initial tension ---
U2 = []
T2 = []
Time2 = []
Omega2 = 0.01*np.ones(np.shape(Omega1))
T_i2 = np.linspace(10,F_MAX1,len(Omega1))
statusbar = dsb(0,len(T_i2),title="Fixed Omega, Sweeping T_i")
for i in range(len(T_i2)):
    T_temp,Time_temp = return_required_tension(Omega2[i],T_i2[i])
    T2.append(T_temp)
    Time2.append(Time_temp)
    U2.append(return_required_activation(T_temp))
    statusbar.update(i)
plt.figure()
ax3 = plt.gca()
ax3.set_title(r"Fixed $\omega$, Sweeping $T_{i}$" + "\n Tension vs. Time")
ax3.set_xlabel("Time (s)")
ax3.set_ylabel("Tension (T)")
for i in range(len(Omega2)):
    ax3.plot(Time2[i][:-1],T2[i])
plt.figure()
ax4 = plt.gca()
# NOTE(review): this title says "Tension vs. Time" but the y-axis below is
# Activation — looks like a copy-paste slip; confirm intended title.
ax4.set_title(r"Fixed $\omega$, Sweeping $T_{i}$" + "\n Tension vs. Time")
ax4.set_xlabel("Time (s)")
ax4.set_ylabel("Activation")
for i in range(len(Omega2)):
    ax4.plot(Time2[i][:-1],U2[i])
plt.show()
T_array = np.linspace(
    F_MAX1*np.cos(α1)*F_PE1_1([0,0,0,0,lo1,0,0,0])+0.0001,
    F_MAX1*np.cos(α1)*(1+F_PE1_1([0,0,0,0,lo1,0,0,0])),
    1001
)
# NOTE(review): T_i is not defined at module scope (only inside the helper
# functions), so this expression raises NameError when reached — confirm
# which initial tension was intended here.
plt.plot(
    T_array,
    [np.log((np.exp(T/(F_MAX1*cT*kT))-1)/(np.exp(T_i/(F_MAX1*cT*kT))-1)) for T in T_array]
)
# NOTE(review): X is the module-level trajectory (N samples from the top of
# the file) while Time1[0] has 1001 samples — confirm the ranges match.
plt.plot(Time1[0][:-1],np.exp((1/(lTo1*kT))*cumtrapz([v_MTU1(X[:,i]) for i in range(len(Time1[0]))],Time1[0])))
plt.show()
|
<reponame>magma/fbc-js-core
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow
* @format
*/
import type {Options} from 'sequelize';
const fs = require('fs');

// TODO: Pull from shared config
// MySQL connection parameters, each overridable via environment variables.
const MYSQL_HOST = process.env.MYSQL_HOST || '127.0.0.1';
const MYSQL_PORT = parseInt(process.env.MYSQL_PORT || '3306');
const MYSQL_USER = process.env.MYSQL_USER || 'root';
const MYSQL_PASS = process.env.MYSQL_PASS || '';
const MYSQL_DB = process.env.MYSQL_DB || 'cxl';
const MYSQL_DIALECT = process.env.MYSQL_DIALECT || 'mysql';
const logger = require('@fbcnms/logging').getLogger(module);

// TLS material for the DB connection: if any of CA_FILE/KEY_FILE/CERT_FILE
// is set and readable, SSL is enabled. Unreadable files are logged and the
// variable keeps the (string) env path as a fallback.
let ssl_required = false;
let CAcert = process.env.CA_FILE;
let Ckey = process.env.KEY_FILE;
let Ccert = process.env.CERT_FILE;
let dialectOptions = {};
if (process.env.CA_FILE) {
  try {
    CAcert = fs.readFileSync(process.env.CA_FILE);
    ssl_required = true;
  } catch (e) {
    console.warn('cannot read ca cert file', e);
  }
}
if (process.env.KEY_FILE) {
  try {
    Ckey = fs.readFileSync(process.env.KEY_FILE);
    ssl_required = true;
  } catch (e) {
    console.warn('cannot read key file', e);
  }
}
if (process.env.CERT_FILE) {
  try {
    Ccert = fs.readFileSync(process.env.CERT_FILE);
    ssl_required = true;
  } catch (e) {
    console.warn('cannot read cert file', e);
  }
}
if (ssl_required) {
  dialectOptions = {
    ssl: {
      ca: CAcert,
      key: Ckey,
      cert: Ccert,
    },
  };
}
const config: {[string]: Options} = {
test: {
username: '',
password: '',
database: 'db',
dialect: 'sqlite',
logging: false,
},
development: {
username: MYSQL_USER,
password: <PASSWORD>,
database: MYSQL_DB,
host: MYSQL_HOST,
port: MYSQL_PORT,
dialect: MYSQL_DIALECT,
ssl: ssl_required,
dialectOptions,
logging: (msg: string) => logger.debug(msg),
},
production: {
username: MYSQL_USER,
password: <PASSWORD>,
database: MYSQL_DB,
host: MYSQL_HOST,
port: MYSQL_PORT,
dialect: MYSQL_DIALECT,
ssl: ssl_required,
dialectOptions,
logging: (msg: string) => logger.debug(msg),
},
};
export default config;
|
import torch
import json
import sys
import ttach as tta
from albumentations.augmentations.geometric.resize import Resize
from sklearn.model_selection import train_test_split
from torch.utils.data import DataLoader
from tqdm import tqdm
import missed_planes.engine as engine
import missed_planes.metrics as metrics
from missed_planes.dataset import PlanesDataset
def setup_data_loader(config_path):
    """Build train/validation DataLoaders plus TTA transforms from a JSON config.

    The config file must provide: data_path, validation_split, random_seed,
    batch_size and num_workers.

    Returns:
        (train_loader, val_loader, tta_transforms)
    """
    # Load the configuration from the JSON file
    with open(config_path, "r") as f:
        config = json.load(f)
    # Load the dataset and split into training and validation sets.
    # NOTE(review): train_test_split on a Dataset requires it to support
    # len() and indexing — confirm PlanesDataset does.
    dataset = PlanesDataset(config['data_path'], transform=Resize(height=256, width=256))
    train_set, val_set = train_test_split(dataset, test_size=config['validation_split'], random_state=config['random_seed'])
    # Create custom data loaders for training and validation sets
    train_loader = DataLoader(train_set, batch_size=config['batch_size'], shuffle=True, num_workers=config['num_workers'])
    val_loader = DataLoader(val_set, batch_size=config['batch_size'], shuffle=False, num_workers=config['num_workers'])
    # Implement test-time augmentation using the tta library
    tta_transforms = tta.Compose([
        tta.HorizontalFlip(),
        tta.VerticalFlip(),
        tta.Rotate90(angles=[0, 90, 180, 270]),
    ])
    return train_loader, val_loader, tta_transforms

# Usage example — runs at import time and expects config.json to exist.
train_loader, val_loader, tta_transforms = setup_data_loader("config.json")
|
<reponame>izikaj/sunrise
# frozen_string_literal: true
require 'sunrise/config/base'
require 'sunrise/config/has_fields'
module Sunrise
  module Config
    # Form configuration section: combines field- and group-definition
    # behaviour for Sunrise admin forms.
    class Form < Base
      include Sunrise::Config::HasFields
      include Sunrise::Config::HasGroups

      # List of permissible attributes (:all = permit everything).
      # NOTE(review): "permited" is a misspelling of "permitted", but the
      # option name is part of the public DSL — renaming would break callers.
      register_instance_option :permited_attributes do
        :all
      end
    end
  end
end
|
#!/usr/bin/env bash
# NVMe-oF nvme-cli integration test: exports two malloc bdevs through an SPDK
# NVMe-oF target and exercises stock nvme-cli (and, if present, the
# spdk/nvme-cli fork) against them.
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../../..)
source $rootdir/test/common/autotest_common.sh
source $rootdir/test/nvmf/common.sh
# The spdk/nvme-cli fork is expected under DEPENDENCY_DIR for the fork test.
if [ -z "${DEPENDENCY_DIR}" ]; then
    echo DEPENDENCY_DIR not defined!
    exit 1
fi
spdk_nvme_cli="${DEPENDENCY_DIR}/nvme-cli"
MALLOC_BDEV_SIZE=64
MALLOC_BLOCK_SIZE=512
rpc_py="$rootdir/scripts/rpc.py"
timing_enter nvme_cli
nvmftestinit
nvmfappstart "-m 0xF"
# Build the target: transport, two malloc namespaces, subsystem, listener.
$rpc_py nvmf_create_transport -t $TEST_TRANSPORT -u 8192
$rpc_py construct_malloc_bdev $MALLOC_BDEV_SIZE $MALLOC_BLOCK_SIZE -b Malloc0
$rpc_py construct_malloc_bdev $MALLOC_BDEV_SIZE $MALLOC_BLOCK_SIZE -b Malloc1
$rpc_py nvmf_subsystem_create nqn.2016-06.io.spdk:cnode1 -a -s SPDK00000000000001 -d SPDK_Controller1
$rpc_py nvmf_subsystem_add_ns nqn.2016-06.io.spdk:cnode1 Malloc0
$rpc_py nvmf_subsystem_add_ns nqn.2016-06.io.spdk:cnode1 Malloc1
$rpc_py nvmf_subsystem_add_listener nqn.2016-06.io.spdk:cnode1 -t $TEST_TRANSPORT -a $NVMF_FIRST_TARGET_IP -s $NVMF_PORT
nvme connect -t $TEST_TRANSPORT -n "nqn.2016-06.io.spdk:cnode1" -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
waitforblk "nvme0n1"
waitforblk "nvme0n2"
nvme list
# Verify the advertised model string on every attached controller.
for ctrl in /dev/nvme?; do
    nvme id-ctrl $ctrl
    nvme smart-log $ctrl
    nvme_model=$(nvme id-ctrl $ctrl | grep -w mn | sed 's/^.*: //' | sed 's/ *$//')
    if [ "$nvme_model" != "SPDK_Controller1" ]; then
        echo "Wrong model number for controller" $nvme_model
        exit 1
    fi
done
for ns in /dev/nvme?n*; do
    nvme id-ns $ns
done
nvme disconnect -n "nqn.2016-06.io.spdk:cnode1"
if [ -d $spdk_nvme_cli ]; then
    # Test spdk/nvme-cli NVMe-oF commands: discover, connect and disconnect
    cd $spdk_nvme_cli
    sed -i 's/shm_id=.*/shm_id=-1/g' spdk.conf
    ./nvme discover -t $TEST_TRANSPORT -a $NVMF_FIRST_TARGET_IP -s "$NVMF_PORT"
    nvme_num_before_connection=$(nvme list |grep "/dev/nvme*"|awk '{print $1}'|wc -l)
    ./nvme connect -t $TEST_TRANSPORT -n "nqn.2016-06.io.spdk:cnode1" -a "$NVMF_FIRST_TARGET_IP" -s "$NVMF_PORT"
    sleep 1
    nvme_num=$(nvme list |grep "/dev/nvme*"|awk '{print $1}'|wc -l)
    ./nvme disconnect -n "nqn.2016-06.io.spdk:cnode1"
    # The fork must have attached at least one new controller while connected.
    if [ $nvme_num -le $nvme_num_before_connection ]; then
        echo "spdk/nvme-cli connect target devices failed"
        exit 1
    fi
fi
$rpc_py delete_nvmf_subsystem nqn.2016-06.io.spdk:cnode1
trap - SIGINT SIGTERM EXIT
nvmfcleanup
nvmftestfini
report_test_completion "nvmf_spdk_nvme_cli"
timing_exit nvme_cli
|
const axios = require('axios');
const url = 'json data url';
/**
 * Fetch JSON from the given URL, log it, and return it.
 * Previously the data was only logged and never returned, making the
 * function's result unusable; it now resolves to the parsed body.
 * On request failure the error is logged and undefined is returned.
 * @param {string} url endpoint expected to serve JSON
 * @returns {Promise<*>} parsed response body, or undefined on error
 */
const parseJSON = async url => {
  try {
    const response = await axios.get(url);
    const data = response.data;
    console.log(data);
    return data;
  } catch (error) {
    console.error(error);
  }
};
parseJSON(url);
|
#!./test/libs/bats/bin/bats
DOTFILES_REPO=$HOME/Dotfiles
load 'libs/bats-support/load'
load 'libs/bats-assert/load'
load 'test_helper'
wads='./wads'
# Each test drives the `wads` dotfiles manager and cleans up its fixtures.
@test "Should symlink file from home directory to ~/Dotfiles" {
    touch $HOME/.testrc
    run $wads add .testrc
    assert_success
    assert [ -e $DOTFILES_REPO/testrc ]
    rm -rf $HOME/.testrc
    rm -rf $DOTFILES_REPO/testrc
}

@test "Should remove file from dotfiles repo" {
    run $wads rm testrc
    assert_success
    assert [ ! -e $DOTFILES_REPO/testrc ]
    rm -rf $HOME/.testrc
    rm -rf $DOTFILES_REPO/testrc
}

@test "Should symlink file from dotfiles repo to home" {
    touch $DOTFILES_REPO/testrc
    run $wads install testrc
    assert_success
    assert [ -L $HOME/.testrc ]
    rm -rf $HOME/.testrc
    rm -rf $DOTFILES_REPO/testrc
}

# NOTE(review): `run` overwrites $status each time, so each assert_success
# below only checks the most recent command — earlier failures would go
# unnoticed. The file-existence asserts partially compensate.
@test "subcommands should work with multiple files" {
    touch $HOME/.testrc
    touch $HOME/.testrc1
    run $wads add .testrc
    run $wads add .testrc1
    assert_success
    assert [ -e $DOTFILES_REPO/testrc ]
    assert [ -e $DOTFILES_REPO/testrc1 ]
    run $wads rm testrc
    run $wads rm testrc1
    assert_success
    assert [ ! -e $DOTFILES_REPO/testrc ]
    assert [ ! -e $DOTFILES_REPO/testrc1 ]
    rm -rf $HOME/.testrc
    rm -rf $DOTFILES_REPO/testrc
    rm -rf $HOME/.testrc1
    rm -rf $DOTFILES_REPO/testrc1
    touch $DOTFILES_REPO/testrc
    touch $DOTFILES_REPO/testrc1
    run $wads install testrc
    run $wads install testrc1
    assert_success
    assert [ -L $HOME/.testrc ]
    assert [ -L $HOME/.testrc1 ]
    rm -rf $HOME/.testrc
    rm -rf $DOTFILES_REPO/testrc
    rm -rf $HOME/.testrc1
    rm -rf $DOTFILES_REPO/testrc1
}
|
<gh_stars>0
package com.example.assets.model;
import org.litepal.crud.DataSupport;
import java.util.Date;
/**
 * Created by Administrator on 2017/3/14.
 * Savings plan record, persisted through LitePal's DataSupport.
 */
public class Plan extends DataSupport {
    private int id;          // row id (managed by LitePal)
    private String aim;      // goal of the plan
    private double money;    // planned savings amount
    private Date endTime;    // deadline
    // NOTE(review): "beihzu" looks like a misspelling of "beizhu" (remark),
    // but LitePal derives column names from field names, so renaming would
    // change the table schema; kept as-is.
    private String beihzu;   // remark

    // Accessors for id were missing; added so callers can read/set the key
    // without reflection. All pre-existing accessors are unchanged.
    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getAim() {
        return aim;
    }

    public void setAim(String aim) {
        this.aim = aim;
    }

    public double getMoney() {
        return money;
    }

    public void setMoney(double money) {
        this.money = money;
    }

    public Date getEndTime() {
        return endTime;
    }

    public void setEndTime(Date endTime) {
        this.endTime = endTime;
    }

    public String getBeihzu() {
        return beihzu;
    }

    public void setBeihzu(String beihzu) {
        this.beihzu = beihzu;
    }
}
|
<filename>LineCharts/LineChartHeader.h
//
// LineChartHeader.h
// UVLOOK
//
// Created by Hepburn on 2020/3/10.
// Copyright © 2020 Hepburn. All rights reserved.
//
#ifndef LineChartHeader_h
#define LineChartHeader_h

// Line rendering style.
typedef NS_ENUM(NSInteger, LineType) {
    LineType_Straight, // polyline (straight segments)
    LineType_Curve     // smoothed curve
};

// Anchor corner for the chart's mark label.
// Converted from a bare `typedef enum` to NS_ENUM for consistency with
// LineType above and stronger type checking; member values are unchanged.
typedef NS_ENUM(NSInteger, LineChartMarkAlign) {
    LineChartMarkAlign_TopLeft,
    LineChartMarkAlign_TopRight,
    LineChartMarkAlign_BottomLeft,
    LineChartMarkAlign_BottomRight
};

#endif /* LineChartHeader_h */
|
# Write your solution here!
class NumberStats:
    """Track a running sum and count of numbers and report statistics."""

    def __init__(self):
        # Running total of everything added (original attribute name
        # `numbers` kept for compatibility).
        self.numbers = 0
        # How many numbers have been added so far.
        self.count = 0
        # Last computed mean; refreshed by average().
        self.avg = 0

    def add_number(self, number: int):
        """Fold one number into the running total and count."""
        self.numbers += number
        self.count += 1

    def count_numbers(self):
        """Return how many numbers have been added."""
        return self.count

    def get_sum(self):
        """Return the running total of all added numbers."""
        return self.numbers

    def average(self):
        """Return the mean of the added numbers, or 0 when none were added."""
        self.avg = self.numbers / self.count if self.count != 0 else 0
        return self.avg
# Interactive driver: read integers until -1 (sentinel, not counted), then
# print summary statistics plus separate even/odd sums.
stats = NumberStats()
sum_even = 0
sum_odd = 0
while True:
    ipt_number = int(input("Please type in integer numbers:"))
    if ipt_number != -1:
        stats.add_number(ipt_number)
        if ipt_number%2 == 0:
            sum_even += ipt_number
        else:
            sum_odd += ipt_number
    else:
        break
#print("Numbers added:", stats.count_numbers())
print("Sum of numbers:", stats.get_sum())
print("Mean of numbers:", stats.average())
print("Sum of even numbers:", sum_even)
print("Sum of odd numbers:", sum_odd)
|
#!/usr/bin/env bash
# Configure and install a Release build via CMake.
# Abort on the first failing command: previously a failed configure step
# still cascaded into the sudo install step.
set -e

mkdir -p build
cd build
cmake -DCMAKE_BUILD_TYPE=Release ..
sudo cmake --build . --target install
|
#!/usr/bin/env bash
# Copy all relevant files into a given directory
# Usage: ./scripts/package.sh "target_directory"

# Require the target directory argument instead of silently using "".
if [ -z "$1" ]; then
    echo "Usage: $0 <target_directory>" >&2
    exit 1
fi

# Create the application directory
APP_DIR=$1
mkdir -p "$APP_DIR"

# Copy built files (errors tolerated: GUI-only or CLI-only builds)
cp "build/gui/Grabber" "$APP_DIR" 2> /dev/null
cp "build/cli/Grabber-cli" "$APP_DIR" 2> /dev/null

# The languages directory must exist before copying into it; previously it
# was never created, so the *.qm copy always failed silently.
mkdir -p "$APP_DIR/languages"
cp build/languages/*.qm "$APP_DIR/languages/" 2> /dev/null

# Copy other required files to the release directory
rsync -ar --exclude="node_modules" --exclude="CMakeLists.txt" --exclude="package*.json" --exclude="*.config.js" --exclude="tsconfig.json" --exclude="tslint.json" --exclude="**/*.ts" --exclude="**/resources" --exclude="Sankaku" --exclude="Tumblr" src/sites "$APP_DIR"
cp -r src/dist/common/* "$APP_DIR"
touch "$APP_DIR/settings.ini"
|
#!/bin/bash
# ----------------------------------------------------------------------------
#
# Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
#
# WSO2 Inc. licenses this file to you under the Apache License,
# Version 2.0 (the "License"); you may not use this file except
# in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# ----------------------------------------------------------------------------
# Tears down the local BOSH-lite environment: removes the MySQL container,
# optionally (--force) deletes the VirtualBox BOSH director, and removes the
# host route to the BOSH-lite network.
# check if Docker has been installed
if [ ! -x "$(command -v docker)" ]; then
    echo -e "---> Please install Docker."
    exit 1
fi
# check if Bosh CLI has been installed
if [ ! -x "$(command -v bosh)" ]; then
    echo -e "---> Please install Bosh CLI v2."
    exit 1
fi
# move to deployment folder
cd deployment
# kill and remove the running MySQL Docker container
echo -e "---> Killing MySQL Docker container..."
docker rm -f mysql-5.7 && docker ps -a
# if forced, delete the existing BOSH environment
if [ "$1" == "--force" ]; then
    echo -e "---> Deleting existing environment..."
    bosh delete-env bosh-deployment/bosh.yml \
        --state vbox/state.json \
        -o bosh-deployment/virtualbox/cpi.yml \
        -o bosh-deployment/virtualbox/outbound-network.yml \
        -o bosh-deployment/bosh-lite.yml \
        -o bosh-deployment/bosh-lite-runc.yml \
        -o bosh-deployment/jumpbox-user.yml \
        --vars-store vbox/creds.yml \
        -v director_name="Bosh Lite Director" \
        -v internal_ip=192.168.50.6 \
        -v internal_gw=192.168.50.1 \
        -v internal_cidr=192.168.50.0/24 \
        -v outbound_network_name=NatNetwork
fi
# remove the added route
# NOTE(review): this `route delete -net ... gw ...` form varies between
# Linux distributions and macOS — confirm it matches the target platform.
sudo route delete -net 10.244.0.0/16 gw 192.168.50.6
|
<reponame>robchambers/hypothetical
import { Injectable } from '@angular/core';
import * as hypothetical from './hypothetical';
import * as _ from 'lodash';
/**
* Store input/output data corresonding to a single baseline and associated hypotheticals.
*
* Functionality should eventually include:
* * Save/load for future.
*/
@Injectable()
export class DataModelService {
  baseline: hypothetical.Baseline = new hypothetical.Baseline();
  baselineHypothetical: hypothetical.Hypothetical;
  hypotheticals: Array<hypothetical.Hypothetical> = [];

  constructor() {
    // The baseline itself is wrapped in a delta-free Hypothetical so it can
    // be simulated and listed alongside the real hypotheticals.
    this.baselineHypothetical = new hypothetical.Hypothetical(
      "Baseline",
      this.baseline
    );
    // for testing
    this.hypotheticals.push(
      new hypothetical.Hypothetical(
        "Earn 20k More",
        this.baseline,
        [{propertyId: 'Income', modifier: '+', amount: 20000, enabled: true}]
      )
    );
    this.hypotheticals.push(
      new hypothetical.Hypothetical(
        "Earn 20k Less",
        this.baseline,
        [{propertyId: 'Income', modifier: '-', amount: 20000, enabled: true}]
      )
    );
    this.simulateHypotheticals();
  }

  /**
   * Rename an expense, refusing duplicate names, and rewrite the propertyId
   * of every delta that referenced the old name.
   */
  expenseNameChange(expense: hypothetical.iExpense, newName: string) {
    let oldName = expense.name;
    for ( let e of this.baseline.expenses ) {
      if ( e.name === newName ) {
        console.log(`Name ${newName} already exists.`);
        return
      }
    }
    // Adjust name in any deltas that reference this expense.
    for ( let h of this.hypotheticals ) {
      for ( let d of h.deltas ) {
        if ( d.propertyId === ("Expense: " + oldName) ) {
          d.propertyId = "Expense: " + newName;
        }
      }
    }
    expense.name = newName;
  }

  /**
   * Remove an expense from the baseline together with every delta that
   * references it.
   */
  deleteExpense(expense: hypothetical.iExpense) {
    const propertyId = "Expense: " + expense.name;
    // Bug fix: the original called _.pull on h.deltas while iterating it
    // with for..of, which skips the element following each removal;
    // _.remove performs the in-place predicate removal safely.
    for ( let h of this.hypotheticals ) {
      _.remove(h.deltas, d => d.propertyId === propertyId);
    }
    // And delete the expense.
    _.pull(this.baseline.expenses, expense)
  }

  /** The baseline wrapper followed by all user hypotheticals. */
  allHypotheticals() :Array<hypothetical.Hypothetical> {
    return [this.baselineHypothetical].concat(this.hypotheticals);
  }

  /** Re-run the simulation for the baseline and every hypothetical. */
  simulateHypotheticals() {
    for ( let h of this.allHypotheticals() ) {
      h.simulateHypothetical();
    }
  }
}
|
// Holds the nearby stars once the API call completes.
let stars = [];

fetch('https://api.solarsystemscope.com/stars')
  .then(response => response.json())
  .then(allStars => {
    // Keep only stars with distance below 1000.
    stars = allStars.filter(star => star.distance < 1000);
    console.log(stars);
  });
|
<reponame>pcnate/redapp<filename>src/app/config.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders, HttpErrorResponse } from '@angular/common/http';
import { catchError, retry } from 'rxjs/operators';
import { Observable, throwError } from 'rxjs';
// Shared HTTP options: all API traffic sends/expects JSON.
const httpOptions = {
  headers: new HttpHeaders({
    'Content-Type': 'application/json',
  }),
}

/**
 * Thin wrapper around HttpClient for the red-ampp backend API, providing
 * generic get/post helpers plus typed convenience calls.
 */
@Injectable({
  providedIn: 'root'
})
export class ConfigService {
  // Every request is made relative to this API prefix.
  baseUrl: string = '/red-ampp/api/';

  constructor(
    private http: HttpClient,
  ) { }

  /**
   * Normalize transport and backend failures into a user-facing message.
   * Client/network failures arrive as ErrorEvent; anything else carries the
   * backend's HTTP status and body.
   */
  private handleError( error: HttpErrorResponse ) {
    if( error.error instanceof ErrorEvent ) {
      console.error( 'an error occurred', error.error.message );
    } else {
      console.error(
        `Backend returned code ${error.status}, ` +
        `body was ${error.error}`
      );
    }
    return throwError( 'Something bad happened: please try again later' );
  }

  // Fetch the route list from the backend.
  getRoutes() {
    return this.get( 'getRoutes' );
  }

  // Generic GET against the API; errors are funneled through handleError.
  get( action: string ) {
    return this.http.get( this.baseUrl + action, httpOptions )
      .pipe(
        catchError( this.handleError )
      )
  }

  // Generic POST against the API.
  // NOTE(review): `redirect` is untyped and appears to be the request body —
  // confirm the intended name/shape.
  post( action: string, redirect ) {
    return this.http.post( this.baseUrl + action, redirect, httpOptions )
      .pipe(
        catchError( this.handleError )
      );
  }
}
|
source $SRCDIR/libtest.sh
# Test DEVS= directive. Returns 0 on success and 1 on failure.
test_devs() {
# Uses vg_exists/cleanup helpers and $TEST_DEVS/$LOGS/$CSSBIN from libtest.sh.
local devs=$TEST_DEVS
local test_status=1
local testname=`basename "$0"`
local vg_name="css-test-foo"
# Error out if any pre-existing volume group vg named css-test-foo
if vg_exists "$vg_name"; then
echo "ERROR: $testname: Volume group $vg_name already exists." >> $LOGS
return $test_status
fi
# Create config file
# (heredoc body is written verbatim to the config file)
cat << EOF > /etc/sysconfig/docker-storage-setup
DEVS="$devs"
VG=$vg_name
EOF
# Run container-storage-setup
$CSSBIN >> $LOGS 2>&1
# Test failed.
if [ $? -ne 0 ]; then
echo "ERROR: $testname: $CSSBIN failed." >> $LOGS
cleanup $vg_name "$devs"
return $test_status
fi
# Make sure volume group $VG got created
if vg_exists "$vg_name"; then
test_status=0
else
echo "ERROR: $testname: $CSSBIN failed. $vg_name was not created." >> $LOGS
fi
# Always tear down the VG and devices, pass or fail.
cleanup $vg_name "$devs"
return $test_status
}
test_devs
|
/**
*
*/
package net.abi.abisEngine.rendering.asset;
import net.abi.abisEngine.util.Expendable;
/**
* @author abinash
*
*/
public interface AssetI extends Expendable {
	/**
	 * Increments the asset's reference count by one.
	 */
	public void incRef();

	/**
	 * Increments the asset's reference count by one and returns the new count.
	 *
	 * @return The reference count after incrementing.
	 */
	public int incAndGetRef();

	/**
	 * Decrements the asset's reference count by one.
	 */
	public void decRef();

	/**
	 * Decrements the asset's reference count by one and returns the new count.
	 *
	 * @return The reference count after decrementing.
	 */
	public int decAndGetRef();

	/**
	 * Returns the integer representation of the references.
	 *
	 * @return The count of references.
	 */
	public int getRefs();
}
|
def remove_duplicates(lst):
    """Return a new list with duplicates removed, keeping first-seen order.

    Items must be hashable, matching the original set-based behaviour.
    """
    # dict preserves insertion order, so its keys give the first
    # occurrence of each item in order.
    return list(dict.fromkeys(lst))
if __name__ == "__main__":
    # Demo: duplicate 'a' and 'c' entries are dropped, order preserved.
    sample = ['a', 'b', 'c', 'd', 'a', 'c']
    print(remove_duplicates(sample))
|
/**
 * A binary search tree node holding an int value and links to its subtrees.
 */
class BinaryTree {
    int data;
    BinaryTree left;
    BinaryTree right;

    BinaryTree(int data) {
        this.data = data;
    }

    /** Builds a binary search tree by inserting each array element in order. */
    static BinaryTree arrayToBST(int[] arr) {
        BinaryTree root = null;
        for (int value : arr) {
            root = insert(root, value);
        }
        return root;
    }

    /**
     * Inserts a value into the BST rooted at node and returns the
     * (possibly newly created) root of that subtree. Values smaller than
     * the node go left; everything else (including duplicates) goes right.
     */
    static BinaryTree insert(BinaryTree node, int value) {
        if (node == null) {
            return new BinaryTree(value);
        }
        if (value < node.data) {
            node.left = insert(node.left, value);
        } else {
            node.right = insert(node.right, value);
        }
        return node;
    }

    public static void main(String[] args) {
        int[] values = {5, 2, 8, 1, 4};
        BinaryTree root = arrayToBST(values);
    }
}
|
from random import randrange

# Simple dice-betting game: the player bets on a number from 1 to 6; two
# dice are rolled. Matching both dice pays 10x the bet, matching one pays
# 2x; otherwise the bet is lost.
bolso = 100
resultado = 0
resposta = "s"
while resposta == "s":
    numero_apostado = int(input("Escolha um número entre 1 e 6 para você apostar: "))
    valor_aposta = float(input("Qual o valor da aposta? "))
    bolso -= valor_aposta
    # Bug fix: randrange's upper bound is exclusive, so randrange(1, 6)
    # could never roll a 6. A six-sided die needs randrange(1, 7).
    dado1 = randrange(1, 7)
    dado2 = randrange(1, 7)
    print("Sorteados os dados {} e {}.".format(dado1, dado2))
    if (dado1 == numero_apostado) and (dado2 == numero_apostado):
        resultado = valor_aposta * 10
        bolso += resultado
        print("Você ganhou {} e agora está com {}.".format(resultado, bolso))
    elif (dado1 == numero_apostado) or (dado2 == numero_apostado):
        resultado = valor_aposta * 2
        bolso += resultado
        print("Você ganhou {} e agora está com {}.".format(resultado, bolso))
    else:
        print("Você errou. Agora tem {} no bolso.".format(bolso))
    # Bug fix: .lower() was applied to the prompt string instead of the
    # player's answer, so answering "S" would end the game.
    resposta = input("Deseja jogar outra vez? ").lower()
print("Fim de jogo.")
|
#!/bin/bash -e
# Installs the demo's TLS certificate: unpacks the Let's Encrypt archive
# from the drop directory and copies the chain and key into the demo's
# cert directory under the names the demo expects (cert.pem / key.pem).
DEMO_DROP=$HOME/drop
DEMO_HOME=$HOME/guacamole-demo
CERT_DIR=$DEMO_HOME/cert
mkdir -p $CERT_DIR
cd $DEMO_DROP
# -p preserves permissions on the extracted key material.
tar -zxpf guacamole.soulwing.org.tar.gz
cp guacamole.soulwing.org/fullchain1.pem $CERT_DIR/cert.pem
cp guacamole.soulwing.org/privkey1.pem $CERT_DIR/key.pem
|
<reponame>munenelewis/whatsapp-v-email
import firebase from 'firebase'
// Firebase project configuration for the whatsapp clone.
// NOTE(review): apiKey is a '<KEY>' placeholder — presumably scrubbed before
// publishing; it must be restored for the app to connect.
const firebaseConfig = {
  apiKey: '<KEY>',
  authDomain: 'whatsapp1-49293.firebaseapp.com',
  projectId: 'whatsapp1-49293',
  storageBucket: 'whatsapp1-49293.appspot.com',
  messagingSenderId: '341210929144',
  appId: '1:341210929144:web:459b17deb7bfa040d1280d',
  measurementId: 'G-N5FRN77JWE',
}
// Reuse an already-initialized app (e.g. across hot reloads) instead of
// calling initializeApp twice, which firebase treats as an error.
const app = !firebase.apps.length
  ? firebase.initializeApp(firebaseConfig)
  : firebase.app()
const db = app.firestore()
const auth = app.auth()
// Google sign-in provider used by the auth flow.
const provider = new firebase.auth.GoogleAuthProvider()
export { db, auth, provider }
|
#!/usr/bin/env sh
# SPDX-License-Identifier: MIT
# Debug mode: write the S3 config, then idle forever so the container
# stays up for inspection.
debug () {
create_s3_config
while true
do
echo "Press [CTRL+C] to stop.."
sleep 120
done
}
# Renders the SeaweedFS S3 identity/permissions config from the
# S3_ACCESSKEY / S3_SECRETKEY / S3_BUCKETNAME environment variables and
# writes it to the storage user's home directory.
create_s3_config() {
s3_config=$(cat <<-JSON
{
"identities": [
{
"name": "pds",
"credentials": [
{
"accessKey": "${S3_ACCESSKEY}",
"secretKey": "${S3_SECRETKEY}"
}
],
"actions": [
"Read:${S3_BUCKETNAME}",
"Write:${S3_BUCKETNAME}",
"List:${S3_BUCKETNAME}"
]
}
]
}
JSON
)
echo "$s3_config" > "/home/$STORAGE_USER/s3_config.json"
}
# Server mode: write the S3 config, start SeaweedFS in the background,
# create the bucket once it is up, then block on the server process so
# the container keeps running.
server () {
create_s3_config
echo "Start SeaweedFS"
weed server -dir=/storage -master.volumeSizeLimitMB=1024 -master.volumePreallocate=false -s3 -s3.port=9000 -s3.config="/home/$STORAGE_USER/s3_config.json" &
weed_server_process_id=$!
init
wait $weed_server_process_id
}
# One-time initialisation after the server starts.
# NOTE(review): the fixed 25s sleep assumes the server is ready by then —
# a readiness poll would be more robust; confirm before changing.
init() {
sleep 25
# Create bucket
echo "Create bucket"
echo "s3.bucket.create -name $S3_BUCKETNAME" | weed shell
}
# Entry point: run the full server unless we're asked to idle for debugging.
if [ "$OBJECT_STORAGE_START_MODE" = "server" ]
then
server
else
debug
fi
|
<filename>pages/home-page/SpeakersSection.js
import React from "react";
import PageSection from "components/PageSection/index";
import Speakers from "components/Speakers/index";
// Speaker cards rendered in the speakers section. Each item holds the
// speaker's photo, name, talk abstract (desc), talk title (lecture) and
// social links (font-awesome icon name + url).
const items = [
  {
    image: "/static/image/people/pedram.jpg",
    name: "<NAME>",
    desc:
      "full-stack Javascript web developer and a full-time Software Engineering student.developer at Ezerus.com, and we make enterprise Javascript applications with React, powered by lovely GraphQL, Apollo and Prisma.",
    lecture:
      "What is Graphql and how to implement a scalable GraphQL project with React & Apollo?",
    socials: [
      {
        url: "https://www.linkedin.com/in/pedrammarandi/",
        icon: "fa-linkedin",
      },
      {
        url: "https://github.com/PedramMarandi",
        icon: "fa-github",
      },
    ],
  },
  {
    image: "/static/image/people/ana.jpg",
    name: "<NAME>",
    desc:
      "having +4 years of experience in backend development utilizing various technologies and concepts such as Nodejs, mongodb, Elasticsearch, Microservices Architecture, Docker and etc.",
    lecture: "An Introduction to Real-Time programming in Javascript",
    socials: [
      {
        url: "https://www.linkedin.com/in/ana-lava-4466348a/",
        icon: "fa-linkedin",
      },
    ],
  },
  {
    image: "/static/image/people/sajjad.jpg",
    name: "<NAME>",
    desc:
      "How React Native works, What issues it has and how they will be addressed in the React Native new architecture.",
    lecture: "React Native Architecture",
    socials: [
      {
        url: "https://www.linkedin.com/in/smbeiragh/",
        icon: "fa-linkedin",
      },
      {
        url: "https://twitter.com/smbeiragh",
        icon: "fa-twitter",
      },
      {
        url: "https://github.com/smbeiragh",
        icon: "fa-github",
      },
    ],
  },
  {
    image: "/static/image/people/jeremy.jpg",
    name: "<NAME>",
    desc:
      "The talk will be about where Innovation in technology comes from, based on my own research and experience, and how your choice of technologies in a project can stop innovation",
    // Typo fix: "Sop" -> "Stop" (matches the talk description above).
    lecture: "How Your Technology Choices Stop Innovation",
    socials: [
      {
        url: "https://www.linkedin.com/in/jeremywebstercc/",
        icon: "fa-linkedin",
      },
    ],
  },
  {
    image: "/static/image/people/amirabbas.jpg",
    name: "<NAME>",
    desc:
      "why SVG is such an important part of building websites. From why SVG is useful and how to get your hands on it all the way to implementing it as a system and fancy stuff like animating it.",
    lecture: "Everything You Need To Know About SVG",
    socials: [
      {
        url: "https://www.linkedin.com/in/abdolali/",
        icon: "fa-linkedin",
      },
      {
        url: "https://twitter.com/amir_abbas",
        icon: "fa-twitter",
      },
    ],
  },
  {
    image: "/static/image/people/mojtaba.jpg",
    name: "<NAME>",
    desc:
      "An overview on how React works and what features are added to it in version 16.x",
    lecture: "React 16 Overview",
    socials: [
      {
        url: "https://www.linkedin.com/in/mojtabast/",
        icon: "fa-linkedin",
      },
      {
        url: "http://www.twitter.com/mojtabast_fa",
        icon: "fa-twitter",
      },
    ],
  },
  {
    image: "/static/image/people/milad.jpg",
    name: "<NAME>",
    desc:
      "Developers looking for good companies and companies looking for good developers. We have issues regarding this matter, milad wants to go deep and talk about encounter between knowledge and companies.",
    lecture: "hiring issues in Front-End Community",
    socials: [
      {
        url: "http://www.linkedin.com/in/miladheydari",
        icon: "fa-linkedin",
      },
      {
        url: "http://www.instagram.com/iammilaad",
        icon: "fa-twitter",
      },
      {
        url: "https://github.com/miladbruce",
        icon: "fa-github",
      },
    ],
  },
  {
    image: "/static/image/people/ehsun.jpg",
    name: "<NAME>",
    desc:
      "Advanced Efficiency, Reusability of React Components,Performance,Modularity and the market job, ... and everything is Javascript.",
    lecture: "Why are we using React.js in our project?",
    socials: [
      {
        url: "https://www.linkedin.com/in/ehsanmohammadi/",
        icon: "fa-linkedin",
      },
    ],
  },
  {
    image: "/static/image/people/hamed.jpg",
    name: "<NAME>",
    desc:
      "Manipulating the DOM is an intensive operation in the browser world, react uses VDOM concept to do that with its own diffing algorithm to improve performance which we aim to go through it to understand what is under the hood.",
    lecture: "How Diffing Algorithm Works In React",
    socials: [
      {
        url: "http://linkedin.com/in/theham3d",
        icon: "fa-linkedin",
      },
      {
        url: "https://twitter.com/theham3d",
        icon: "fa-twitter",
      },
      {
        url: "https://github.com/theham3d",
        icon: "fa-github",
      },
    ],
  },
  {
    image: "/static/image/people/samaneh.jpg",
    name: "<NAME>",
    desc:
      "how to make Dynamic interactive user interface based on components and how to use it for building complex interfaces.",
    lecture: "Styled-Component Architecture",
    socials: [
      {
        url: "https://www.linkedin.com/in/samaneh-1994-13-mohammadi/",
        icon: "fa-linkedin",
      },
    ],
  },
  {
    image: "/static/image/people/mamal.jpg",
    name: "<NAME>",
    desc:
      'Hooks are the new/hot feature of React 16.8. The talk is going to cover everything you need to know about Hooks and provide some answers to the questions like "What are Hooks?", "Why we should use them?", and "How to use them?"',
    lecture: "useHooks!",
    socials: [
      {
        url: "https://www.linkedin.com/in/mamal72/",
        icon: "fa-linkedin",
      },
      {
        url: "https://github.com/mamal72",
        icon: "fa-github",
      },
      {
        url: "https://twitter.com/mamal72",
        icon: "fa-twitter",
      },
    ],
  },
  {
    image: "/static/image/people/negar.jpg",
    name: "<NAME>",
    desc:
      "Does CORS error haunts you in every project? Are you tired of asking backend team to handle it? This talk is going to give you all the power to handle this error in development.",
    lecture: "How to deal with CORS error in Webpack?",
    socials: [
      {
        url: "https://www.linkedin.com/in/negarjamalifard",
        icon: "fa-linkedin",
      },
      {
        url: "https://github.com/negarjf",
        icon: "fa-github",
      },
      {
        url: "https://twitter.com/NegarJamalifard",
        icon: "fa-twitter",
      },
      {
        url: "https://medium.com/@negarjf",
        icon: "fa-medium",
      },
    ],
  },
  {
    image: "/static/image/people/ahmad.jpg",
    name: "<NAME>",
    desc:
      "Understanding the communication between JS & Native, Error Tracking, Codepush, VirtualizedList...",
    lecture: "A Cup of React-Native",
    socials: [
      {
        url: "https://www.linkedin.com/in/ahmadina/",
        icon: "fa-linkedin",
      },
      {
        url: "https://twitter.com/iahmadina",
        icon: "fa-twitter",
      },
    ],
  },
  {
    image: "/static/image/people/hasan.jpg",
    name: "<NAME>",
    desc:
      "render on the server rather than in the browser. talk about Cons of Rendering React on the Server, Architecture, Performance and challenges.",
    lecture: "React Server-Side Rendering",
    socials: [
      {
        url: "https://www.linkedin.com/in/mohammad-hassan-gilak-9248268a/",
        icon: "fa-linkedin",
      },
      {
        url: "https://twitter.com/hasangilak",
        icon: "fa-twitter",
      },
      {
        url: "https://github.com/hasangilak",
        icon: "fa-github",
      },
    ],
  },
  {
    image: "/static/image/people/ehsun-h.jpg",
    name: "<NAME>",
    desc:
      "What do you do if you need an enterprise Front-end application? How to develop each part of the application completely in an independent team with their structure, libraries, and framework as a separate application? And how these applications will communicate with each other? These questions will be answered by Micro-Frontend concept.",
    lecture: "Micro-Frontend",
    socials: [
      {
        url: "http://www.linkedin.com/in/ehsan-hosseini-a92a676b",
        icon: "fa-linkedin",
      },
      {
        url: "https://github.com/zerob4wl",
        icon: "fa-github",
      },
    ],
  },
];
const SpeakersSection = () => {
return (
<PageSection title="speakers.title" size={5} bg={"#fff"} id="speaker">
<Speakers items={items} type={"team"} />
</PageSection>
);
};
export default SpeakersSection;
|
package zuul.gameState.maps;
import zuul.GameText;
import zuul.gameState.Item;
import zuul.gameState.Room;
import zuul.gameState.characters.Character;
import zuul.gameState.characters.Player;
import java.util.Arrays;
/**
* World of Zuul standard {@link Map}.
* <p>
* This map has five {@link Room Rooms} (outside, theatre, pub, lab, and office) and one {@link Player} (player1)
* <p>
* Character "player1" starts in the room "outside".
* <p>
* Room "outside" contains a single item "notebook".
*
* @author <NAME>
*/
public class ZuulMap extends Map {
    /**
     * Constructor
     */
    public ZuulMap() {
        super("World of Zuul!");
        // Rooms must exist before characters, since the player starts in
        // the default starting room set by createRooms().
        createRooms();
        createCharacters();
    }
    /**
     * Creates a single {@link Player} with name "player1" in the {@link Room} {@code defaultStartingRoom}.
     */
    @Override
    protected void createCharacters() {
        Player player1 = new Player("player1", getDefaultStartingRoom().get());
        setPlayer(player1);
        Arrays.stream(new Character[] {player1}).forEach(this::addCharacter); //Add all characters to list
    }
    /**
     * Creates the {@link Room Rooms} outside, theatre, pub, lab, and office and
     * instantiates exits to create the following map:
     *
     * <table>
     *     <tr> <td>pub</td> <td>--</td> <td>outside</td> <td>--</td> <td>theatre</td> </tr>
     *     <tr> <td></td> <td></td> <td>|</td> </tr>
     *     <tr> <td></td> <td></td> <td>lab</td> <td>--</td> <td>office</td> </tr>
     * </table>
     *
     * Adds an item "notebook" to outside.
     * <p>
     * Sets {@code defaultStartingRoom} to be outside.
     */
    @Override
    protected void createRooms() {
        Room outside, theatre, pub, lab, office;
        //Instantiate room names and descriptions
        // (all display strings come from the localized GameText bundle)
        outside = new Room(GameText.getString("outsideName"), GameText.getString("outside"));
        theatre = new Room(GameText.getString("theatreName"), GameText.getString("theatre"));
        pub = new Room(GameText.getString("pubName"), GameText.getString("pub"));
        lab = new Room(GameText.getString("labName"), GameText.getString("lab"));
        office = new Room(GameText.getString("officeName"), GameText.getString("office"));
        // initialise room exits and items
        //outside
        outside.setExits(new String[]{GameText.getString("east"), GameText.getString("south"), GameText.getString("west")},
                new Room[]{theatre, lab, pub});
        outside.getInventory().addItem(new Item(GameText.getString("notebook"), 2));
        outside.getInventory().addItem(new Item(GameText.getString("whip"), 2));
        //theatre
        theatre.setExits(new String[]{GameText.getString("west")}, new Room[]{outside});
        //pub
        pub.setExits(new String[]{GameText.getString("east")}, new Room[]{outside});
        //lab
        lab.setExits(new String[]{GameText.getString("north"), GameText.getString("east")},
                new Room[]{outside, office});
        //office
        office.setExits(new String[]{GameText.getString("west")}, new Room[]{lab});
        //Start game outside
        setDefaultStartingRoom(outside);
        //Add all rooms to this map
        Arrays.stream(new Room[]{outside, theatre, pub, lab, office}).forEach(this::addRoom);
    }
    /**
     * Builds the localized multi-line welcome banner shown at game start,
     * including the localized help command word.
     */
    @Override
    public String getWelcome() {
        String welcomeString = "\n" +
                GameText.getString("welcome_ln1") + "\n" +
                GameText.getString("welcome_ln2") + "\n" +
                (GameText.getString("welcome_ln3",
                        new Object[] {GameText.getString("CommandWordsBundle", "help")})) + "\n" +
                "\n";
        return welcomeString;
    }
}
|
#!/bin/bash
# Starts an ontology node in solo mode with consensus enabled, feeding '1'
# on stdin (presumably an interactive prompt answer — confirm).
echo '1' | ./ontology --enable-shard-rpc --config solo-config.json --enable-consensus --disable-broadcast-net-tx --disable-tx-pool-pre-exec
# NOTE(review): $! holds the PID of the last *background* job, but the
# command above runs in the foreground, so this likely writes an empty or
# stale PID. The node probably needs to be started with '&' for this to work.
echo $! > pid
|
package com.atjl.retry.mapper.gen;
import com.atjl.retry.domain.gen.TsProcessLog;
import com.atjl.retry.domain.gen.TsProcessLogExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
/**
 * MyBatis mapper for the ts_process_log table. The *ByExample methods are
 * MBG-generated criteria queries; the two *Batch* methods at the bottom are
 * hand-added bulk operations.
 */
public interface TsProcessLogMapper {
    int countByExample(TsProcessLogExample example);

    int deleteByExample(TsProcessLogExample example);

    int deleteByPrimaryKey(Long dataProcessId);

    int insert(TsProcessLog record);

    // Inserts only the non-null fields of the record.
    int insertSelective(TsProcessLog record);

    // Variant that also fetches BLOB/CLOB columns.
    List<TsProcessLog> selectByExampleWithBLOBs(TsProcessLogExample example);

    List<TsProcessLog> selectByExample(TsProcessLogExample example);

    TsProcessLog selectByPrimaryKey(Long dataProcessId);

    int updateByExampleSelective(@Param("record") TsProcessLog record, @Param("example") TsProcessLogExample example);

    int updateByExampleWithBLOBs(@Param("record") TsProcessLog record, @Param("example") TsProcessLogExample example);

    int updateByExample(@Param("record") TsProcessLog record, @Param("example") TsProcessLogExample example);

    int updateByPrimaryKeySelective(TsProcessLog record);

    int updateByPrimaryKeyWithBLOBs(TsProcessLog record);

    int updateByPrimaryKey(TsProcessLog record);

    // Bulk insert of non-null fields for each record.
    int insertBatchSelective(List<TsProcessLog> records);

    // Bulk selective update keyed on each record's primary key.
    int updateBatchByPrimaryKeySelective(List<TsProcessLog> records);
}
|
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.dataanalyser.ga.action.charts;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.amplecode.quick.StatementManager;
import org.apache.struts2.ServletActionContext;
import org.apache.velocity.tools.generic.ListTool;
import org.hisp.dhis.dataanalyser.util.DashBoardService;
import org.hisp.dhis.dataanalyser.util.DataElementChartResult;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.i18n.I18nFormat;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.organisationunit.OrganisationUnitGroup;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupService;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import com.opensymphony.xwork2.Action;
import com.opensymphony.xwork2.ActionContext;
/**
* @author <NAME>
*
* @version GenerateDrillDownPeriodToOrgChildChartDataElementResultAction.java Dec 30, 2010 1:07:30 PM
*/
public class GenerateDrillDownPeriodToOrgChildChartDataElementResultAction implements Action
{
    // categoryLB values that request drill-down data for child org units.
    private final String CHILDREN = "children";
    private final String SELECTED = "random";
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------
    private StatementManager statementManager;
    public void setStatementManager( StatementManager statementManager )
    {
        this.statementManager = statementManager;
    }
    private DashBoardService dashBoardService;
    public void setDashBoardService( DashBoardService dashBoardService )
    {
        this.dashBoardService = dashBoardService;
    }
    private DataElementService dataElementService;
    public void setDataElementService( DataElementService dataElementService )
    {
        this.dataElementService = dataElementService;
    }
    private DataElementCategoryService dataElementCategoryService;
    public void setDataElementCategoryService( DataElementCategoryService dataElementCategoryService )
    {
        this.dataElementCategoryService = dataElementCategoryService;
    }
    private OrganisationUnitService organisationUnitService;
    public OrganisationUnitService getOrganisationUnitService()
    {
        return organisationUnitService;
    }
    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }
    private OrganisationUnitGroupService organisationUnitGroupService;
    public void setOrganisationUnitGroupService( OrganisationUnitGroupService organisationUnitGroupService )
    {
        this.organisationUnitGroupService = organisationUnitGroupService;
    }
    private I18nFormat format;
    public void setFormat( I18nFormat format )
    {
        this.format = format;
    }
    // -------------------------------------------------------------------------
    // Input/output
    // -------------------------------------------------------------------------
    // Packed ':'-separated request string, parsed in execute(); see there for
    // the field layout.
    private String selDrillDownData;
    public void setSelDrillDownData( String selDrillDownData )
    {
        this.selDrillDownData = selDrillDownData;
    }
    private String selectedButton;
    public String getSelectedButton()
    {
        return selectedButton;
    }
    public void setSelectedButton( String selectedButton )
    {
        this.selectedButton = selectedButton;
    }
    private String categoryLB;
    public void setCategoryLB( String categoryLB )
    {
        this.categoryLB = categoryLB;
    }
    public String getCategoryLB()
    {
        return categoryLB;
    }
    // Packed drill-down strings produced for each child org unit; consumed by
    // the result view.
    private List<String> selectedDrillDownData;
    public List<String> getSelectedDrillDownData()
    {
        return selectedDrillDownData;
    }
    private HttpSession session;
    public HttpSession getSession()
    {
        return session;
    }
    ListTool listTool;
    public ListTool getListTool()
    {
        return listTool;
    }
    private DataElementChartResult dataElementChartResult;
    public DataElementChartResult getDataElementChartResult()
    {
        return dataElementChartResult;
    }
    private List<DataElementCategoryOptionCombo> selectedOptionComboList;
    private OrganisationUnit selectedOrgUnit;
    private OrganisationUnitGroup selectedOrgUnitGroup;
    private DataElement dataElement;
    private List<Date> selStartPeriodList;
    private List<Date> selEndPeriodList;
    public String[] values;
    public String[] startDateArray;
    public String[] endDateArray;
    public String[] priodNameArray;
    private List<String> periodNames;
    private String drillDownPeriodStartDate;
    private String drillDownPeriodEndDate;
    private String drillDownPeriodNames;
    // -------------------------------------------------------------------------
    // Action implements
    // -------------------------------------------------------------------------
    /**
     * Parses the packed drill-down request, builds per-child drill-down
     * strings, generates the chart data, and stashes the results in the HTTP
     * session for the chart view.
     *
     * selDrillDownData layout (':'-separated):
     * orgUnitId : orgUnitGroupId : dataElementId : optionComboId : periodType
     * : startDates(';') : endDates(';') : periodNames(';') : deSelection : aggDataCB
     */
    public String execute()throws Exception
    {
        statementManager.initialise();
        selectedDrillDownData = new ArrayList<String>();
        selectedOptionComboList = new ArrayList<DataElementCategoryOptionCombo>();
        listTool = new ListTool();
        // Unpack the request string (see layout above).
        values = selDrillDownData.split( ":" );
        int orgunit =Integer.parseInt( values[0] );
        int orgUnitGroup = Integer.parseInt( values[1]);
        // Group id 0 means "no org unit group filter".
        if ( orgUnitGroup != 0 )
        {
            selectedOrgUnitGroup = organisationUnitGroupService.getOrganisationUnitGroup( orgUnitGroup );
        }
        selectedOrgUnit = organisationUnitService.getOrganisationUnit( orgunit );
        int dataElementid = Integer.parseInt( values[2] );
        int optionComboid = Integer.parseInt( values[3] );
        dataElement = dataElementService.getDataElement( dataElementid );
        DataElementCategoryOptionCombo categoryCombo = dataElementCategoryService.getDataElementCategoryOptionCombo( optionComboid );
        List<DataElement> dataElementList = new ArrayList<DataElement>();
        dataElementList.add( dataElement );
        selectedOptionComboList.add( categoryCombo );
        String periodTypeLB = values[4];
        selStartPeriodList = new ArrayList<Date>();
        selEndPeriodList = new ArrayList<Date>();
        periodNames = new ArrayList<String>();
        drillDownPeriodStartDate = values[5];
        drillDownPeriodEndDate = values[6];
        drillDownPeriodNames = values[7];
        // Period start dates, ';'-separated, parsed via the i18n format.
        startDateArray = values[5].split( ";" );
        for ( int i = 0 ; i < startDateArray.length ; i++ )
        {
            String startD = startDateArray[i];
            selStartPeriodList.add( format.parseDate( startD ) );
        }
        // Period end dates, same encoding.
        endDateArray = values[6].split( ";" );
        for ( int i = 0 ; i < endDateArray.length ; i++ )
        {
            String startD = endDateArray[i];
            selEndPeriodList.add( format.parseDate( startD ) );
        }
        // Display names matching the period ranges above.
        priodNameArray = values[7].split( ";" );
        for ( int i = 0 ; i < priodNameArray.length ; i++ )
        {
            String periodName = priodNameArray[i];
            periodNames.add( periodName );
        }
        String deSelection = values[8];
        String aggDataCB = values[9];
        //System.out.println( selStartPeriodList + ":" + selEndPeriodList + ":" +periodTypeLB + ":" + dataElementList+ ":" + deSelection + ":" + selectedOptionComboList + ":" + selectedOrgUnit + ":" + aggDataCB );
        // System.out.println( "Chart Generation Start Time is for drillDown: \t" + new Date() );
        // Build packed drill-down strings for each direct child of the
        // selected org unit (no group filter).
        if( orgUnitGroup == 0 && ( categoryLB.equalsIgnoreCase( CHILDREN ) || ( categoryLB.equalsIgnoreCase( SELECTED )) ))
        {
            List<OrganisationUnit> childOrgUnitList = new ArrayList<OrganisationUnit>();
            childOrgUnitList = new ArrayList<OrganisationUnit>( selectedOrgUnit.getChildren());
            for( OrganisationUnit orgChild : childOrgUnitList )
            {
                String drillDownData = orgChild.getId() + ":" + "0" + ":" + dataElement.getId() + ":"+ categoryCombo.getId() + ":" + periodTypeLB + ":" + drillDownPeriodStartDate + ":" + drillDownPeriodEndDate + ":" + drillDownPeriodNames + ":" + deSelection + ":" + aggDataCB;
                System.out.println(drillDownData);
                selectedDrillDownData.add( drillDownData );
            }
        }
        // Same, but restricted to descendants that are members of the
        // selected org unit group.
        if ( orgUnitGroup != 0 && ( categoryLB.equalsIgnoreCase( CHILDREN ) || ( categoryLB.equalsIgnoreCase( SELECTED )) ))
        {
            List<OrganisationUnit> selectedOUGroupMemberList = new ArrayList<OrganisationUnit>( selectedOrgUnitGroup.getMembers() );
            List<OrganisationUnit> childOrgUnitList = new ArrayList<OrganisationUnit>();
            childOrgUnitList = new ArrayList<OrganisationUnit>( organisationUnitService.getOrganisationUnitWithChildren( selectedOrgUnit.getId() ) );
            selectedOUGroupMemberList.retainAll( childOrgUnitList );
            for( OrganisationUnit orgChild : selectedOUGroupMemberList )
            {
                String drillDownData = orgChild.getId() + ":" + selectedOrgUnitGroup.getId() + ":" + dataElement.getId() + ":"+ categoryCombo.getId() + ":" + periodTypeLB + ":" + drillDownPeriodStartDate + ":" + drillDownPeriodEndDate + ":" + drillDownPeriodNames + ":" + deSelection + ":" + aggDataCB;
                System.out.println(drillDownData);
                selectedDrillDownData.add( drillDownData );
            }
        }
        // Generate the actual chart data, group-member-wise or children-wise.
        if ( orgUnitGroup != 0 )
        {
            dataElementChartResult = dashBoardService.generateDataElementChartDataWithGroupMemberWise( selStartPeriodList, selEndPeriodList, periodTypeLB, dataElementList, deSelection, selectedOptionComboList, selectedOrgUnit, selectedOrgUnitGroup ,aggDataCB );
        }
        else
        {
            dataElementChartResult = dashBoardService.generateDataElementChartDataWithChildrenWise( selStartPeriodList, selEndPeriodList, periodTypeLB, dataElementList, deSelection, selectedOptionComboList, selectedOrgUnit, aggDataCB );
        }
        // Expose chart data to the view via the HTTP session.
        ActionContext ctx = ActionContext.getContext();
        HttpServletRequest req = (HttpServletRequest) ctx.get( ServletActionContext.HTTP_REQUEST );
        session = req.getSession();
        session.setAttribute( "data1", dataElementChartResult.getData() );
        session.setAttribute( "series1", dataElementChartResult.getSeries() );
        session.setAttribute( "categories1", dataElementChartResult.getCategories() );
        session.setAttribute( "chartTitle", dataElementChartResult.getChartTitle() );
        session.setAttribute( "xAxisTitle", dataElementChartResult.getXAxis_Title() );
        session.setAttribute( "yAxisTitle", dataElementChartResult.getYAxis_Title() );
        System.out.println( "Chart Generation End Time is drillDown : \t" + new Date() );
        statementManager.destroy();
        return SUCCESS;
    }
}
|
<gh_stars>0
import { Component, OnDestroy, OnInit, NgZone } from '@angular/core';
import { ContentService } from "../../services/content.service";
import { NavigationItem } from "../../classes/navigationItem";
@Component({
selector: 'nav-menu',
templateUrl: './navmenu.component.html',
styleUrls: ['../../scss/master.scss']
})
export class NavMenuComponent implements OnInit {
navigationItems: NavigationItem[];
navClass: string;
constructor(private contentService: ContentService, zone: NgZone) {
window.onscroll = () => {
zone.run(() => {
if (window.pageYOffset > 100) {
this.navClass = "navbar-shrink"
} else {
this.navClass = "";
}
});
}
}
ngOnInit() {
this.contentService.getNavigationItems().then(navItems => this.navigationItems = navItems);
}
}
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# Generated-file guard: this script was produced on Windows (note the ';'
# separators and C:/ paths below), so it deliberately exits here until the
# PATH and LD_LIBRARY_PATH values are fixed for this machine.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit
# NOTE(review): the values below use Windows-style ';' separators, which
# must become ':' on UNIX before the guard above is removed.
if [ -z "$PATH" ]; then
PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin
else
PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
LD_LIBRARY_PATH=
else
LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
HD_PWD='C:/prj/gate_level/gate_level.runs/impl_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
# Runs one implementation step via the ISE wrapper, appending all output to
# the log and aborting the whole script if the step fails.
EAStep()
{
$ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
if [ $? -ne 0 ]
then
exit
fi
}
# pre-commands:
/bin/touch .init_design.begin.rst
EAStep vivado -log top_KR1533IE5.vdi -applog -m64 -product Vivado -messageDb vivado.pb -mode batch -source top_KR1533IE5.tcl -notrace
|
<gh_stars>1-10
export const AdminPanel = () => {
return <div>AdminPanel</div>;
};
|
#!/usr/bin/env bash
# Counts regular files and directories (recursively) under the directory
# given as $1, then returns to the starting directory.
here=$(pwd)
cd "$1"
# Bug fix: GNU find allows omitting the start path, but POSIX/BSD find
# does not — pass '.' explicitly so the script is portable.
num_file=$(find . -type f | wc -l) #Find file types and count
num_dir=$(find . -type d | wc -l)  #Find directory types and count (includes '.')
echo "There were" "$num_dir" "directories."
echo "There were" "$num_file" "regular files."
cd "$here"
|
package net.haesleinhuepf.imagej.zoo.data;
import fiji.util.gui.GenericDialogPlus;
import ij.ImageJ;
import ij.Prefs;
import ij.plugin.PlugIn;
/**
 * ImageJ plugin that prompts for a ClearControl data set folder and dataset
 * name, then opens and displays the data set.
 */
public class ClearControlDataSetOpener implements PlugIn {
    // Remembered across invocations within a session; path defaults to
    // ImageJ's configured default directory.
    private static String path = Prefs.getDefaultDirectory();
    private static String datasetName = "C0opticsprefused";

    /**
     * PlugIn entry point: shows the folder/dataset dialog and opens the
     * chosen data set, or returns silently if the user cancels.
     */
    @Override
    public void run(String arg) {
        GenericDialogPlus gd = new GenericDialogPlus("Open ClearControl data set");
        gd.addDirectoryField("Folder", path);
        gd.addStringField("Dataset", datasetName);
        gd.showDialog();
        if (gd.wasCanceled()) {
            return;
        }
        path = gd.getNextString();
        datasetName = gd.getNextString();
        open(path, datasetName).show();
    }

    /** Opens a ClearControl data set rooted at the given folder. */
    public static ClearControlDataSet open(String path, String datasetName) {
        return new ClearControlDataSet(path, datasetName);
    }

    // Manual test harness with a hard-coded local data set path.
    public static void main(String[] args) {
        new ImageJ();
        String dataSetRootFolder = "C:/structure/data/2018-05-23-16-18-13-89-Florence_multisample/";
        String dataSetName = "opticsprefused";
        ClearControlDataSet ccds = open(dataSetRootFolder, dataSetName);
        ccds.show();
    }
}
|
<gh_stars>0
# frozen_string_literal: true
require "sidekiq/web"
Rails.application.routes.draw do
  # Engine mounts: oEmbed support and the IIIF image server.
  mount Blacklight::Oembed::Engine, at: "oembed"
  mount Riiif::Engine => "/images", as: "riiif"

  root to: "spotlight/exhibits#index"

  mount Spotlight::Engine, at: "starlight"
  mount Blacklight::Engine => "/"
  # Dynamic robots.txt
  get "robots.:format" => "robots#index"

  # https://github.com/mperham/sidekiq/wiki/Monitoring#devise
  # Only signed-in superadmins may see the Sidekiq dashboard.
  authenticate :user, lambda { |u| u.superadmin? } do
    mount Sidekiq::Web => "/sidekiq"
  end

  # root to: "catalog#index" # replaced by spotlight root path

  # Blacklight search routes for the catalog.
  concern :searchable, Blacklight::Routes::Searchable.new

  resource :catalog, only: [:index], as: "catalog", path: "/catalog", controller: "catalog" do
    concerns :searchable
  end

  # Authentication: plain database auth or omniauth, selected via env var.
  if ENV["AUTH_METHOD"] == "database"
    devise_for :users
  else
    devise_for :users, controllers: { omniauth_callbacks: "users/omniauth_callbacks" }
    devise_scope :user do
      get "/users/sign_in", to: "users/sessions#new", as: :new_user_session
      get "/users/sign_out", to: "users/sessions#destroy", as: :destroy_user_session
    end
  end

  concern :exportable, Blacklight::Routes::Exportable.new

  resources :solr_documents, only: [:show], path: "/catalog", controller: "catalog" do
    concerns :exportable
  end

  resources :bookmarks do
    concerns :exportable

    collection do
      delete "clear"
    end
  end

  # Preview outgoing mail in the browser when configured to do so.
  mount LetterOpenerWeb::Engine, at: "/letter_opener" if ENV.fetch("DELIVERY_METHOD", "").eql? "letter_opener_web"
  # For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
end
|
<filename>artifacts/maven-classpath-munger/munger/src/main/java/org/apache/maven/classpath/munger/AbstractMunger.java
/*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.maven.classpath.munger;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import org.apache.maven.classpath.munger.logging.AbstractLoggingBean;
import org.apache.maven.classpath.munger.logging.Log;
import org.apache.maven.classpath.munger.parse.DependeciesLoader;
import org.apache.maven.classpath.munger.parse.log.LogDependenciesLoader;
import org.apache.maven.classpath.munger.parse.pom.PomDependenciesLoader;
import org.apache.maven.classpath.munger.resolve.DependencyResolver;
import org.apache.maven.classpath.munger.resolve.maven.MavenDependencyResolver;
import org.apache.maven.classpath.munger.util.properties.AggregateNamedPropertySource;
import org.apache.maven.classpath.munger.util.properties.NamedPropertySource;
import org.apache.maven.classpath.munger.util.properties.PropertiesUtil;
import org.apache.maven.classpath.munger.util.properties.PropertySource;
import org.apache.maven.classpath.munger.validation.ArtifactValidator;
import org.apache.maven.classpath.munger.validation.maven.MavenArtifactValidator;
/**
* @author <NAME>.
* @since Jan 1, 2014 12:27:05 PM
*/
public abstract class AbstractMunger extends AbstractLoggingBean {
protected AbstractMunger() {
this(null);
}
protected AbstractMunger(Log log) {
super(log);
}
public void processDependencies(URL dependenciesDefinitionsLocation,
URL signaturesDataLocation,
NamedPropertySource propsSource) throws Exception {
NamedPropertySource processProps=propsSource;
DependeciesLoader loader=resolveDependenciesLoader(dependenciesDefinitionsLocation, processProps);
loader.load(dependenciesDefinitionsLocation);
Collection<String> extraNames=loader.getDefinedPropertiesNames();
if ((extraNames != null) && (extraNames.size() > 0)) {
processProps = new AggregateNamedPropertySource(loader.getProperties(), processProps);
}
processProps = resolveLoadingProperties(processProps);
List<? extends Dependency> dependencies=resolveDependencies(processProps, loader.getDependencies());
if (dependencies.isEmpty()) {
logger.info("No dependencies to resolve");
return;
}
List<? extends Repository> repos=resolveRepositories(processProps, loader.getRepositories());
// TODO use another property to choose resolver - e.g., Gradle, Ivy
DependencyResolver resolver=new MavenDependencyResolver(logger);
Map<Dependency,URL> urls=resolver.resolveDependencies(processProps, repos, dependencies);
validateDependencies(urls, signaturesDataLocation);
// TODO add (configurable) support for signed JAR(s) validation
addClasspathURLs(urls.values());
}
protected void validateDependencies(Map<Dependency,URL> urlsMap, URL signaturesDataLocation) throws IOException, SecurityException {
// TODO use another property to choose validator - e.g., Gradle, Ivy
ArtifactValidator validator=new MavenArtifactValidator(logger, signaturesDataLocation);
SecurityException se=null;
for (Map.Entry<Dependency,URL> dp : urlsMap.entrySet()) {
Dependency d=dp.getKey();
URL artifactData=dp.getValue();
try {
validator.validate(d, artifactData);
} catch(Exception e) {
logger.error("Failed (" + e.getClass().getSimpleName() + ")"
+ " to validate dependency="
+ " at location=" + artifactData.toExternalForm()
+ ": " + e.getMessage());
/*
* NOTE: we throw runtime and I/O exception(s) immediately but
* delay security ones in order to list all the invalid issues
* so they can be addressed at once instead of piecemeal
*/
if (e instanceof RuntimeException) {
throw (RuntimeException) e;
} else if (e instanceof IOException) {
throw (IOException) e;
} else if (e instanceof SecurityException) {
se = (SecurityException) e;
} else {
throw new RuntimeException(e);
}
}
}
if (se != null) {
throw se;
}
}
protected abstract void addClasspathURLs(Collection<? extends URL> urls) throws Exception;
protected DependeciesLoader resolveDependenciesLoader(URL dependenciesDefinitionsLocation, PropertySource processProps) {
String dependencyDataLocation=dependenciesDefinitionsLocation.toExternalForm();
// TODO use another property to decide format
int sepPos=dependencyDataLocation.lastIndexOf('.');
if ((sepPos <= 0) || (sepPos >= (dependencyDataLocation.length() - 1))) {
throw new IllegalArgumentException("No file location: " + dependencyDataLocation);
}
String suffix=dependencyDataLocation.substring(sepPos + 1);
if ("xml".equalsIgnoreCase(suffix)) {
return new PomDependenciesLoader(logger);
} else if ("log".equalsIgnoreCase(suffix)) {
return new LogDependenciesLoader(logger);
} else {
throw new NoSuchElementException("No loader found for " + dependencyDataLocation);
}
}
protected NamedPropertySource resolveLoadingProperties(NamedPropertySource processProps) {
Map<String,String> fmtValues=PropertiesUtil.resolvePropertiesValues(processProps);
if (fmtValues.size() <= 0) {
return processProps;
}
if (logger.isDebugEnabled()) {
for (Map.Entry<String,String> fe : fmtValues.entrySet()) {
String name=fe.getKey();
logger.debug("contextInitialized(" + name + ")"
+ " formatted property value: old=" + processProps.getProperty(name)
+ ",new=" + fe.getValue());
}
}
// if have any formatted properties put them first so we won't have to re-format them
return new AggregateNamedPropertySource(PropertiesUtil.asPropertySource(fmtValues), processProps);
}
protected List<? extends Dependency> resolveDependencies(PropertySource processProps, List<? extends Dependency> deps) {
if ((deps == null) || deps.isEmpty()) {
return Collections.emptyList();
}
for (Dependency d : deps) {
String orgVersion=d.getVersion(), fmtVersion=PropertiesUtil.format(orgVersion, processProps);
if (orgVersion == fmtVersion) {
continue;
}
if (logger.isDebugEnabled()) {
logger.debug("resolveDependencies(" + d.getGroupId() + ":" + d.getArtifactId() + ")"
+ " " + orgVersion + " => " + fmtVersion);
}
d.setVersion(fmtVersion);
}
return deps;
}
protected List<? extends Repository> resolveRepositories(PropertySource processProps, List<? extends Repository> repos) {
if ((repos == null) || repos.isEmpty()) {
return Collections.emptyList();
}
for (Repository r : repos) {
String orgUrl=r.getUrl(), fmtUrl=PropertiesUtil.format(orgUrl, processProps);
if (orgUrl == fmtUrl) {
continue;
}
if (logger.isDebugEnabled()) {
logger.debug("resolveRepositories(" + r.getId() + ") " + orgUrl + " => " + fmtUrl);
}
r.setUrl(fmtUrl);
}
return repos;
}
}
|
import { State } from "@aicacia/state";
import { RecordOf } from "immutable";
import { useState, useEffect, useRef } from "react";
import { shallowEqual } from "shallow-equal-object";
/**
 * Builds a React hook bound to the given `State` store.
 *
 * The returned hook maps the store's current record to component props and
 * triggers a re-render only when the mapped props change (shallow equality),
 * even if the store emits change events more often.
 */
export function createHook<T>(state: State<T>) {
  return function useMapStateToProps<TProps>(
    mapStateToProps: (state: RecordOf<T>) => TProps
  ) {
    // Initialize props lazily from the store's current value.
    const [props, setProps] = useState(() =>
        mapStateToProps(state.getCurrent())
      ),
      lastProps = useRef<TProps>();

    useEffect(() => {
      function onChange(state: RecordOf<T>) {
        const nextProps = mapStateToProps(state);

        // Re-render only when the mapped props actually changed.
        if (!shallowEqual(lastProps.current, nextProps)) {
          lastProps.current = nextProps;
          setProps(nextProps);
        }
      }

      state.on("change", onChange);

      return () => {
        state.off("change", onChange);
      };
      // NOTE(review): no dependency array is passed, so this effect
      // unsubscribes and re-subscribes on every render. That may be
      // intentional (it keeps the latest mapStateToProps closure), but
      // confirm whether `[state, mapStateToProps]` was the intended deps.
    });

    return props;
  };
}
|
#!/bin/bash
# Runs the lirasm test suite: executes each tests/*.in file and compares the
# output with the matching .out file, then runs randomized tests and
# platform-specific (64-bit / ARM) variants.

set -eu

LIRASM=$1

TESTS_DIR=`dirname "$0"`/tests

# runtest <infile | "--random N"> [extra lirasm options]
# NOTE: $infile and $options are deliberately left unquoted in the lirasm
# invocation so that a "--random 1000000" pseudo-infile word-splits into real
# command-line arguments. Do not add quotes there.
function runtest {
    local infile=$1
    local options=${2-}

    # Catch a request for the random tests.
    if [[ $infile == --random* ]]
    then
        local outfile=$TESTS_DIR/random.out
    else
        local outfile=`echo $infile | sed 's/\.in/\.out/'`
    fi
    if [[ ! -e "$outfile" ]]
    then
        echo "$0: error: no out file $outfile"
        exit 1
    fi

    # `tr -d '\r'` normalizes Windows line endings before the comparison.
    if $LIRASM $options --execute $infile | tr -d '\r' > testoutput.txt && cmp -s testoutput.txt $outfile
    then
        echo "TEST-PASS | lirasm | lirasm $options --execute $infile"
    else
        echo "TEST-UNEXPECTED-FAIL | lirasm | lirasm $options --execute $infile"
        echo "expected output"
        cat $outfile
        echo "actual output"
        cat testoutput.txt
    fi
}

# Tests common to all supported back-ends.
for infile in "$TESTS_DIR"/*.in
do
    runtest $infile
done
runtest "--random 1000000"
runtest "--random 1000000 --optimize"

# ---- Platform-specific tests and configurations. ----

# 64-bit platforms
if [[ $($LIRASM --word-size) == 64 ]]
then
    for infile in "$TESTS_DIR"/64-bit/*.in
    do
        runtest $infile
    done
fi

# ARM
if [[ $(uname -m) == arm* ]]
then
    for infile in "$TESTS_DIR"/*.in
    do
        # Run standard tests, but set code generation for older architectures.
        # It may also be beneficial to test ARMv6 and ARMv7 with --novfp, but such
        # a platform seems so unlikely that it probably isn't worthwhile. It's also
        # unlikely that it's worth testing ARMv5 with VFP.
        runtest $infile "--arch 6"
        runtest $infile "--arch 5 --novfp"
    done

    # Run specific soft-float tests, but only for ARMv5 without VFP.
    # NOTE: It looks like MIPS ought to be able to run these tests, but I can't
    # test this and _not_ running them seems like the safest option.
    for infile in "$TESTS_DIR"/softfloat/*.in
    do
        runtest $infile "--arch 5 --novfp"
    done

    # Run reduced random tests for these targets. (The default ARMv7 target
    # still runs the long tests.)
    runtest "--random 10000 --arch 6"
    runtest "--random 10000 --arch 5 --novfp"
    runtest "--random 10000 --optimize --arch 6"
    runtest "--random 10000 --optimize --arch 5 --novfp"
fi

rm testoutput.txt
|
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
#

sed -i 's/192.168.1.1/192.168.8.111/g' package/base-files/files/bin/config_generate # customize the default LAN IP
sed -i 's@.*CYXluq4wUazHjmCDBCqXF*@#&@g' package/lean/default-settings/files/zzz-default-settings # comment out the system's default password hash

# Replace the bundled argon theme with the latest 18.06 branch from upstream
rm -rf package/lean/luci-theme-argon && git clone https://github.com/jerrykuku/luci-theme-argon package/luci-theme-argon -b 18.06

# Replace https-dns-proxy.init to fix DNS forwarding to 127.0.0.1#5053 /
# 127.0.0.1#5054 when building LEDE-based firmware with passwall included
curl -fsSL https://raw.githubusercontent.com/Lienol/openwrt-packages/19.07/net/https-dns-proxy/files/https-dns-proxy.init > feeds/packages/net/https-dns-proxy/files/https-dns-proxy.init
|
<?php
// Compute and print the arithmetic mean of three fixed integers.
$x = 5;
$y = 8;
$z = 3;
$values = [$x, $y, $z];
$result = array_sum($values) / count($values);
echo "Average of ".$x.", ".$y.", and ".$z." is ".$result;
?>
|
require("make-promises-safe")
require("dotenv").config()
const fs = require("fs")
const { getSites } = require("../common/getSites")
const feedReader = require("feed-reader")
// Fetches each member site's RSS/Atom feed, extracts its latest entry, and
// rebuilds tmp/webring-site-data/feed.json (newest first) plus an OPML export.
;(async () => {
  const feedDb = JSON.parse(
    fs.readFileSync("tmp/webring-site-data/feed.json", "utf8")
  )
  const sites = getSites()
  const toKeep = new Set()    // sites that still declare a feed
  const toReplace = new Set() // sites whose latest entry was successfully re-fetched
  const newItems = []
  for (const site of sites) {
    try {
      if (site.feed) {
        toKeep.add(site.id)
        const feed = await feedReader.read(site.feed)
        const unknownHostnames = new Set()
        const entries = feed.entries
          // Drop entries dated in the future.
          .filter((entry) => Date.parse(entry.published) < Date.now())
          .filter((entry) => {
            const url = new URL(entry.link, site.url)
            const siteUrl = new URL(site.url)
            // Known-broken feeds whose entry links point at another host;
            // rewrite those links onto the site's own hostname.
            const fixHostnames = [
              // chrisza.me
              "gatsby-starter-blog-demo.netlify.app",
              // wp.curve.in.th
              "chameleontk.github.io",
            ]
            if (fixHostnames.includes(url.hostname)) {
              url.hostname = siteUrl.hostname
            }
            // Keep only entries hosted on (a subdomain of) the site itself.
            if (url.hostname.includes(siteUrl.hostname.replace(/^www\./, ""))) {
              return true
            } else {
              unknownHostnames.add(url.hostname)
              return false
            }
          })
        if (unknownHostnames.size > 0) {
          console.warn(
            `[${site.id}] Feed points to unknown hostnames: ${Array.from(
              unknownHostnames
            ).join(", ")}`
          )
        }
        if (entries.length > 0) {
          // Sort by publish date and take the newest entry.
          const dateOf = (entry) => new Date(entry.published).toISOString()
          entries.sort((a, b) => dateOf(a).localeCompare(dateOf(b)))
          entries.reverse()
          const title = entries[0].title.replace(/\s+/g, " ").trim()
          if (title) {
            const item = {
              site: site.id,
              title,
              url: entries[0].link,
              published: dateOf(entries[0]),
            }
            newItems.push(item)
            toReplace.add(site.id)
            console.log(
              `[${site.id}] Latest: ${item.published.slice(0, 10)} ${title}`
            )
          } else {
            console.warn(`[${site.id}] Feed’s latest entry has no title`)
          }
        } else {
          console.warn(`[${site.id}] Feed has no entries`)
        }
      } else {
        console.warn(`[${site.id}] Has no feed`)
      }
    } catch (e) {
      // A broken feed must not abort the whole run; keep the old entry.
      console.error(`[${site.id}] Unable to process feed`, e)
    }
  }
  // Merge: keep old items for sites we could not refresh, add the new ones,
  // newest first.
  const nextFeedDb = [
    ...feedDb.filter(
      (item) => toKeep.has(item.site) && !toReplace.has(item.site)
    ),
    ...newItems,
  ].sort((a, b) => b.published.localeCompare(a.published))
  fs.writeFileSync(
    "tmp/webring-site-data/feed.json",
    JSON.stringify(nextFeedDb, null, 2)
  )
  // Also export every feed as an OPML subscription list.
  fs.writeFileSync(
    "tmp/webring-site-data/feed.opml",
    [
      `<?xml version="1.0" encoding="UTF-8"?>`,
      `<opml version="1.1">`,
      `  <head>`,
      `    <title>วงแหวนเว็บ.ไทย</title>`,
      `  </head>`,
      `  <body>`,
      `    <outline title="วงแหวนเว็บ.ไทย" text="วงแหวนเว็บ.ไทย">`,
      ...sites
        .filter((s) => s.feed)
        .flatMap((site) => {
          return [
            `      <outline text="${site.id}" title="${site.id}" type="rss" xmlUrl="${site.feed}" htmlUrl="${site.url}"/>`,
          ]
        }),
      `    </outline>`,
      `  </body>`,
      `</opml>`,
    ].join("\n")
  )
  // Add (or update) the commit message
  try {
    // "wx" fails if the file already exists; that case is handled below.
    fs.writeFileSync("tmp/webring-site-data-commit-message", "Update feed", {
      flag: "wx",
    })
  } catch (error) {
    const original = fs.readFileSync(
      "tmp/webring-site-data-commit-message",
      "utf8"
    )
    if (!original.includes("Update feed")) {
      fs.writeFileSync(
        "tmp/webring-site-data-commit-message",
        `${original}, Update feed`
      )
    }
  }
})()
|
def longest_consecutive_zeros(s: str) -> int:
    """Return the length of the longest run of consecutive '0' characters in s.

    Returns 0 for an empty string or a string with no zeros.
    """
    best = 0
    run = 0
    for ch in s:
        # Extend the current run of zeros, or reset it on any other character.
        run = run + 1 if ch == '0' else 0
        if run > best:
            best = run
    return best
|
'use strict'
// GraphQL type definition for an Article.
const Article = `
type Article {
  topicId: String,
  text: String,
  author: Member
}
`

exports.schema = [Article]

exports.resolvers = {
  Article: {
    // Expose the nested topic's id as the flat "topicId" field.
    topicId (article) { return article.topic.id },
  },
}
|
/* eslint-disable no-restricted-globals */
// Self-destructing service worker: deployed to remove a previously installed
// worker. On install it activates immediately; on activate it unregisters
// itself and reloads every controlled page so the pages detach from it.
self.addEventListener('install', () => {
  self.skipWaiting()
})

self.addEventListener('activate', () => {
  self.registration
    .unregister()
    .then(() => {
      return self.clients.matchAll()
    })
    .then(clients => {
      // Reload each controlled page in place so it is no longer controlled.
      clients.forEach(client => client.navigate(client.url))
    })
})
|
util/test.sh
artifact/test.sh
model/test.sh
|
package aufgabe10_8;
/**
 * AST node for a {@code return} statement wrapping a single expression.
 */
public class Return extends Statement {

    /** The expression whose value this return statement yields. */
    private Expression expr;

    /**
     * Creates a return statement for the given expression.
     *
     * @param expr the returned expression
     */
    public Return(Expression expr) {
        super();
        this.expr = expr;
    }

    /** @return the wrapped expression */
    public Expression getExpression() {
        return expr;
    }

    /** Dispatches to the visitor's {@code visit(Return)} overload. */
    @Override
    public void accept(Visitor visitor) {
        visitor.visit(this);
    }
}
|
<filename>modules/sink/reporter/api.go
package reporter
import (
"net/http"
"github.com/blushft/strana/modules/sink/reporter/entity"
"github.com/gofiber/fiber"
"github.com/gofiber/websocket"
)
// routes mounts all reporter endpoints under the /reporter group.
func (mod *reporter) routes(rtr fiber.Router) {
	api := rtr.Group("/reporter")

	mod.liveRoutes(api)
	mod.reportRoutes(api)
}
// liveRoutes mounts the realtime endpoints under /live: a websocket event
// stream and an HTTP rates endpoint.
func (mod *reporter) liveRoutes(rtr fiber.Router) {
	grp := rtr.Group("/live")

	grp.Get(
		"/events",
		func(c *fiber.Ctx) {
			// Only pass genuine websocket upgrade requests on to the
			// websocket handler; plain HTTP requests stop here.
			if websocket.IsWebSocketUpgrade(c) {
				c.Next()
			}
		},
		websocket.New(mod.live.handleLive),
	)

	grp.Get("/rates", mod.live.handleRates)
}
// reportRoutes mounts the report query endpoints. Handlers that take query
// parameters are chained behind getParams, which parses and stores them.
func (mod *reporter) reportRoutes(rtr fiber.Router) {
	rtr.Get("/events", getParams, mod.handleGetEvents)
	rtr.Get("/events/count", mod.handleGetEventsCount)
	rtr.Get("/events/actions", getParams, mod.handleGetEventsActions)
	rtr.Get("/events/actions/top", getParams, mod.handleGetTopActions)
}
// getParams parses the request's query string into an entity.QueryParams and
// stores it under the "params" local for downstream handlers. On a parse
// failure it responds 400 and aborts the handler chain.
func getParams(c *fiber.Ctx) {
	var params entity.QueryParams
	// QueryParser needs a pointer so it can populate the struct in place;
	// passing the value left params empty/unparsed.
	if err := c.QueryParser(&params); err != nil {
		c.Status(http.StatusBadRequest).Send(err)
		return
	}

	c.Locals("params", params)
	c.Next()
}
// handleGetEvents writes the events matching the parsed query parameters as
// a JSON array. Responds 500 on a reporter or serialization failure.
func (mod *reporter) handleGetEvents(c *fiber.Ctx) {
	qp := c.Locals("params").(entity.QueryParams)

	events, err := mod.evtReporter.Events(qp)
	if err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}

	if err := c.JSON(events); err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}
}
// handleGetEventsCount writes {"count": n} with the total number of events.
// Responds 500 on a reporter or serialization failure.
func (mod *reporter) handleGetEventsCount(c *fiber.Ctx) {
	count, err := mod.evtReporter.EventsCount()
	if err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		// Fix: without this return the handler fell through and also sent a
		// bogus zero count after the error response.
		return
	}

	res := map[string]int{"count": count}
	if err := c.JSON(res); err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
	}
}
// handleGetEventsActions writes the events joined with their actions,
// filtered by the parsed query parameters, as JSON.
func (mod *reporter) handleGetEventsActions(c *fiber.Ctx) {
	qp := c.Locals("params").(entity.QueryParams)

	withActions, err := mod.evtReporter.EventsWithAction(qp)
	if err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}

	if err := c.JSON(withActions); err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}
}
// handleGetTopActions writes the top action categories matching the parsed
// query parameters as JSON.
func (mod *reporter) handleGetTopActions(c *fiber.Ctx) {
	qp := c.Locals("params").(entity.QueryParams)

	topCategories, err := mod.evtReporter.TopActionCatgories(qp)
	if err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}

	if err := c.JSON(topCategories); err != nil {
		c.Status(http.StatusInternalServerError).Send(err)
		return
	}
}
|
import nltk
import pandas as pd  # unused here; kept because other code may rely on this module importing it
from nltk.sentiment.vader import SentimentIntensityAnalyzer

# Create the sentiment analyzer (VADER needs no training data).
analyzer = SentimentIntensityAnalyzer()


def _features(text):
    """Bag-of-words feature dict, the input format NaiveBayesClassifier expects."""
    return {word.lower(): True for word in text.split()}


# Create the classifier.
# Fix: train() requires (feature-dict, label) pairs; the original passed raw
# strings, which raises AttributeError inside NLTK.
classifier = nltk.NaiveBayesClassifier.train([
    (_features("Today was great!"), "positive"),
    (_features("I am feeling sad."), "negative"),
    (_features("I'm not sure what to think."), "neutral"),
])


def predict_sentiment(text):
    """Return 'positive', 'neutral' or 'negative' for the given text.

    Uses VADER's compound polarity score to decide the label.
    Fix: the original called classifier.classify(text, label), but classify()
    takes a single featureset argument — the call raised TypeError.
    """
    # Get the sentiment of the text.
    sentiment = analyzer.polarity_scores(text)
    # Classify the sentiment by the sign of the compound score.
    if sentiment["compound"] > 0:
        return "positive"
    elif sentiment["compound"] == 0:
        return "neutral"
    else:
        return "negative"


# Get the sentiment of the tweet
sentiment = predict_sentiment("Today was great!")
print(sentiment)  # prints "positive"
|
<gh_stars>10-100
/*
* Copyright [2018] [<NAME>]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jacpfx.vxms.k8s.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.jacpfx.vxms.k8s.api.CustomClientConfig;
import org.jacpfx.vxms.k8s.api.DefaultCustomClientConfig;
/** The Kubernetes discovery annotation */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface K8SDiscovery {
    /**
     * The user to access the master API
     *
     * @return the name of the user to access the master API
     */
    String user() default "";

    /**
     * The password to access the master API
     *
     * @return The API password
     */
    String password() default "";

    /**
     * The API token
     *
     * @return the API token
     */
    String api_token() default "";

    /**
     * The Kubernetes master URL
     *
     * @return the master url
     */
    String master_url() default "https://kubernetes.default.svc";

    /**
     * The namespace where to do the discovery
     *
     * @return the namespace where to discover
     */
    String namespace() default "default";

    /**
     * Returns a custom Kubernetes Client Configuration handler. If you define this, all other
     * properties will be ignored
     *
     * @return the customer kubernetes client configuration handler
     */
    Class<? extends CustomClientConfig> customClientConfiguration() default
            DefaultCustomClientConfig.class;
}
|
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
module OCI
  module OperatorAccessControl::Models
    # Every lifecycle state an operator access request can be in.
    # Generated enum — keep in sync with the Operator Access Control service API.
    ACCESS_REQUEST_LIFECYCLE_STATES_ENUM = [
      ACCESS_REQUEST_LIFECYCLE_STATES_CREATED = 'CREATED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_APPROVALWAITING = 'APPROVALWAITING'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_PREAPPROVED = 'PREAPPROVED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_APPROVED = 'APPROVED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_REJECTED = 'REJECTED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_DEPLOYED = 'DEPLOYED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_DEPLOYFAILED = 'DEPLOYFAILED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_UNDEPLOYED = 'UNDEPLOYED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_UNDEPLOYFAILED = 'UNDEPLOYFAILED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_CLOSEFAILED = 'CLOSEFAILED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_REVOKEFAILED = 'REVOKEFAILED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_EXPIRYFAILED = 'EXPIRYFAILED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_REVOKING = 'REVOKING'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_REVOKED = 'REVOKED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_EXTENDING = 'EXTENDING'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_EXTENDED = 'EXTENDED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_EXTENSIONREJECTED = 'EXTENSIONREJECTED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_COMPLETING = 'COMPLETING'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_COMPLETED = 'COMPLETED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_EXPIRED = 'EXPIRED'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_APPROVEDFORFUTURE = 'APPROVEDFORFUTURE'.freeze,
      ACCESS_REQUEST_LIFECYCLE_STATES_INREVIEW = 'INREVIEW'.freeze
    ].freeze
  end
end
|
import Hue from "/hue/hue.js";
import HueService from "/hue/hue.service.js";
const priv = Symbol("private");
// Represents a single Hue light. Each property setter pushes a state change
// to the bridge via HueService and then refreshes the cached Hue state.
export default class HueLight extends Object {
  constructor() {
    super();
    this.init(-1); // -1 = no light assigned yet
  }

  // Bridge-facing light properties: on/off, brightness, hue, saturation,
  // CIE xy color, color temperature and effect.
  set on(newValue) {
    this._setValue("on", newValue);
  }

  set bri(newValue) {
    this._setValue("bri", newValue);
  }

  set hue(newValue) {
    this._setValue("hue", newValue);
  }

  set sat(newValue) {
    this._setValue("sat", newValue);
  }

  set xy(newValue) {
    this._setValue("xy", newValue);
  }

  set ct(newValue) {
    this._setValue("ct", newValue);
  }

  set effect(newValue) {
    console.log("effect");
    this._setValue("effect", newValue);
  }

  // Binds this wrapper to a bridge light id and seals the instance so no
  // further own properties can be added.
  init(lightID) {
    this[priv] = this[priv] ?? {};
    this[priv].lightID = lightID;
    Object.seal(this);
  }

  // Updates the local state mirror and pushes the single-field change to the
  // bridge. Only "on" may be set while the light is off.
  // NOTE(review): `this.state` is read and written here but is never declared
  // in this class, and Object.seal() in init() prevents adding it afterwards —
  // confirm it is provided by a subclass/prototype, otherwise these setters
  // will throw.
  _setValue(property, newValue) {
    if (this.state.on || property === "on") {
      this.state[property] = newValue;

      const state = {};
      state[property] = newValue
      this._setState(state);
    }
  }

  // PUTs the partial state to the bridge, then refreshes the global Hue cache.
  async _setState(state) {
    await HueService.query("PUT", ["lights", this[priv].lightID, "state"], JSON.stringify(state));
    Hue.update();
  }
}
|
# Clean up unneeded packages.
yum -y clean all

# Solve network interface problems after cloning this image:
# delete the generated persistent-net rules, then create a DIRECTORY with the
# same name so udev cannot regenerate the file on the next boot.
rm /etc/udev/rules.d/70-persistent-net.rules
mkdir /etc/udev/rules.d/70-persistent-net.rules
rm /lib/udev/rules.d/75-persistent-net-generator.rules
rm -rf /dev/.udev/
# Drop the baked-in MAC address so the clone's NIC config applies cleanly.
sed -i "/^HWADDR/d" /etc/sysconfig/network-scripts/ifcfg-eth0
|
# Back up the previous binary, rebuild from scratch using all CPU cores at the
# lowest scheduling priority, then show a KDE dialog when the build finishes.
cp anura anura~
make clean && time nice -n 19 make "-j$(nproc)"
kdialog --msgbox "make finished"
|
/**
* Orthanc - A Lightweight, RESTful DICOM Store
* Copyright (C) 2012-2016 <NAME>, Medical Physics
* Department, University Hospital of Liege, Belgium
* Copyright (C) 2017-2020 <NAME>., Belgium
*
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
**/
#include "../PrecompiledHeaders.h"
#include "FilesystemHttpHandler.h"
#include "../OrthancException.h"
#include "../SystemToolbox.h"
#include "FilesystemHttpSender.h"
#include <boost/filesystem.hpp>
namespace Orthanc
{
  // Private implementation: the URI prefix this handler serves and the
  // filesystem directory it maps to.
  struct FilesystemHttpHandler::PImpl
  {
    UriComponents baseUri_;
    boost::filesystem::path root_;
  };


  // Sends a minimal HTML page listing the subdirectories and regular files of
  // directory "p", with links built relative to the request URI.
  // (The "headers" argument is currently unused.)
  static void OutputDirectoryContent(HttpOutput& output,
                                     const IHttpHandler::Arguments& headers,
                                     const UriComponents& uri,
                                     const boost::filesystem::path& p)
  {
    namespace fs = boost::filesystem;

    std::string s;
    s += "<html>";
    s += "  <body>";
    s += "    <h1>Subdirectories</h1>";
    s += "    <ul>";

    if (uri.size() > 0)
    {
      // Parent-directory link (only when not at the handler's root).
      std::string h = Toolbox::FlattenUri(uri) + "/..";
      s += "<li><a href=\"" + h + "\">..</a></li>";
    }

    fs::directory_iterator end;
    for (fs::directory_iterator it(p) ; it != end; ++it)
    {
      // Boost.Filesystem v3 returns a path from filename(); v2 a string.
#if BOOST_HAS_FILESYSTEM_V3 == 1
      std::string f = it->path().filename().string();
#else
      std::string f = it->path().filename();
#endif

      std::string h = Toolbox::FlattenUri(uri) + "/" + f;
      if (fs::is_directory(it->status()))
        s += "<li><a href=\"" + h + "\">" + f + "</a></li>";
    }

    s += "    </ul>";
    s += "    <h1>Files</h1>";
    s += "    <ul>";

    // Second pass over the same directory, this time for regular files.
    for (fs::directory_iterator it(p) ; it != end; ++it)
    {
#if BOOST_HAS_FILESYSTEM_V3 == 1
      std::string f = it->path().filename().string();
#else
      std::string f = it->path().filename();
#endif

      std::string h = Toolbox::FlattenUri(uri) + "/" + f;
      if (SystemToolbox::IsRegularFile(it->path().string()))
      {
        s += "<li><a href=\"" + h + "\">" + f + "</a></li>";
      }
    }

    s += "    </ul>";
    s += "  </body>";
    s += "</html>";

    output.SetContentType(MimeType_Html);
    output.Answer(s);
  }


  // Maps the URI prefix "baseUri" onto the filesystem directory "root".
  // Throws if "root" does not exist or is not a directory. Directory listing
  // is disabled by default.
  FilesystemHttpHandler::FilesystemHttpHandler(const std::string& baseUri,
                                               const std::string& root) : pimpl_(new PImpl)
  {
    Toolbox::SplitUriComponents(pimpl_->baseUri_, baseUri);
    pimpl_->root_ = root;
    listDirectoryContent_ = false;

    namespace fs = boost::filesystem;
    if (!fs::exists(pimpl_->root_) ||
        !fs::is_directory(pimpl_->root_))
    {
      throw OrthancException(ErrorCode_DirectoryExpected);
    }
  }


  // Serves GET requests below the configured base URI: regular files are
  // streamed with an autodetected MIME type; directories are listed when
  // listDirectoryContent_ is enabled; everything else yields 404.
  // Returns false when the URI is outside this handler's prefix.
  bool FilesystemHttpHandler::Handle(
    HttpOutput& output,
    RequestOrigin /*origin*/,
    const char* /*remoteIp*/,
    const char* /*username*/,
    HttpMethod method,
    const UriComponents& uri,
    const Arguments& headers,
    const GetArguments& arguments,
    const void* /*bodyData*/,
    size_t /*bodySize*/)
  {
    if (!Toolbox::IsChildUri(pimpl_->baseUri_, uri))
    {
      // This URI is not served by this handler
      return false;
    }

    if (method != HttpMethod_Get)
    {
      output.SendMethodNotAllowed("GET");
      return true;
    }

    // Translate the URI suffix (past the base URI) into a filesystem path.
    namespace fs = boost::filesystem;

    fs::path p = pimpl_->root_;
    for (size_t i = pimpl_->baseUri_.size(); i < uri.size(); i++)
    {
      p /= uri[i];
    }

    if (SystemToolbox::IsRegularFile(p.string()))
    {
      FilesystemHttpSender sender(p);
      sender.SetContentType(SystemToolbox::AutodetectMimeType(p.string()));
      output.Answer(sender);   // TODO COMPRESSION
    }
    else if (listDirectoryContent_ &&
             fs::exists(p) &&
             fs::is_directory(p))
    {
      OutputDirectoryContent(output, headers, uri, p);
    }
    else
    {
      output.SendStatus(HttpStatus_404_NotFound);
    }

    return true;
  }
}
|
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.cats.version.client;
import java.io.InputStream;
import java.util.Timer;
import java.util.TimerTask;
import com.cats.version.utils.Utils;
/**
 * Reads an InputStream to completion on a background thread and, after a
 * fixed timeout, delivers whatever has been read so far (possibly null) to a
 * callback.
 *
 * @author xblia
 *         2015-08-20
 */
public class IWaitingStreamWithTimeout
{
    // Result read from the stream; null until the reader thread finishes.
    // NOTE(review): written by the reader thread and read by the Timer thread
    // without synchronization — consider declaring this volatile; confirm the
    // intended visibility semantics before changing.
    private String result;
    private InputStream inputStream;

    public IWaitingStreamWithTimeout(InputStream inputStream)
    {
        super();
        this.result = null;
        this.inputStream = inputStream;
    }

    /** Returns the result read so far (null if the read has not completed). */
    public String getResult()
    {
        return result;
    }

    /**
     * Starts a background thread that drains the stream, and schedules the
     * callback to fire once after {@code timeoutMillsecond} ms.
     * NOTE(review): the callback fires after the timeout regardless of
     * whether the read has finished, so it may receive null — confirm callers
     * expect that.
     */
    public void registResultHandlerAndStart(final IStartupSubProcCallBack callBack, long timeoutMillsecond)
    {
        new Thread()
        {
            @Override
            public void run()
            {
                result = Utils.getResultFromStream(inputStream);
            }
        }.start();

        final Timer timer = new Timer();
        timer.schedule(new TimerTask()
        {
            @Override
            public void run()
            {
                timer.cancel();
                callBack.onSubProcResult(result);
            }
        }, timeoutMillsecond);
    }
}
|
#!/usr/bin/env sh
#
# This file invokes cmake and generates the build system for Gcc.
#
# Usage: gen-buildsys-gcc.sh <CMakeLists dir> <GccMajor> <GccMinor> <arch> <script dir>
#        [build flavor] [coverage] [ninja] [cmakeargs]

# Require the five mandatory positional arguments.
if [ $# -lt 5 ]
then
  echo "Usage..."
  echo "gen-buildsys-gcc.sh <path to top level CMakeLists.txt> <GccMajorVersion> <GccMinorVersion> <Architecture> <ScriptDirectory> [build flavor] [coverage] [ninja] [cmakeargs]"
  echo "Specify the path to the top level CMake file - <ProjectK>/src/NDP"
  echo "Specify the Gcc version to use, split into major and minor version"
  echo "Specify the target architecture."
  echo "Specify the script directory."
  echo "Optionally specify the build configuration (flavor.) Defaults to DEBUG."
  echo "Optionally specify 'coverage' to enable code coverage build."
  echo "Target ninja instead of make. ninja must be on the PATH."
  echo "Pass additional arguments to CMake call."
  exit 1
fi

# Locate gcc
gcc_prefix=""
if [ "$CROSSCOMPILE" = "1" ]; then
  # Locate gcc
  # When cross-compiling, tools are named with the target triple prefix
  # (e.g. arm-linux-gnueabihf-gcc).
  if [ -n "$TOOLCHAIN" ]; then
    gcc_prefix="$TOOLCHAIN-"
  fi
fi

# Set up the environment to be used for building with gcc.
# Probe the common version-suffix naming schemes in order:
# gcc-X.Y, gccXY, gcc-XY, then plain gcc.
if command -v "${gcc_prefix}gcc-$2.$3" > /dev/null
then
  desired_gcc_version="-$2.$3"
elif command -v "${gcc_prefix}gcc$2$3" > /dev/null
then
  desired_gcc_version="$2$3"
elif command -v "${gcc_prefix}gcc-$2$3" > /dev/null
then
  desired_gcc_version="-$2$3"
elif command -v "${gcc_prefix}gcc" > /dev/null
then
  desired_gcc_version=
else
  echo "Unable to find ${gcc_prefix}gcc Compiler"
  exit 1
fi

# Allow explicit compiler overrides via CLR_CC / CLR_CXX.
if [ -z "$CLR_CC" ]; then
  CC="$(command -v "${gcc_prefix}gcc$desired_gcc_version")"
else
  CC="$CLR_CC"
fi
if [ -z "$CLR_CXX" ]; then
  CXX="$(command -v "${gcc_prefix}g++$desired_gcc_version")"
else
  CXX="$CLR_CXX"
fi
export CC CXX

build_arch="$4"
script_dir="$5"

# Defaults for the optional arguments.
buildtype=DEBUG
code_coverage=OFF
generator="Unix Makefiles"
__UnprocessedCMakeArgs=""

# Arguments after the fifth are build options; anything unrecognized is
# accumulated (space-joined) and forwarded to CMake.
ITER=-1
for i in "$@"; do
  ITER=$((ITER + 1))
  if [ $ITER -lt 6 ]; then continue; fi
  upperI="$(echo "$i" | awk '{print toupper($0)}')"
  case $upperI in
  # Possible build types are DEBUG, CHECKED, RELEASE, RELWITHDEBINFO, MINSIZEREL.
  DEBUG | CHECKED | RELEASE | RELWITHDEBINFO | MINSIZEREL)
    buildtype=$upperI
    ;;
  COVERAGE)
    echo "Code coverage is turned on for this build."
    code_coverage=ON
    ;;
  NINJA)
    generator=Ninja
    ;;
  *)
    __UnprocessedCMakeArgs="${__UnprocessedCMakeArgs}${__UnprocessedCMakeArgs:+ }$i"
  esac
done

OS=$(uname)

# locate_gcc_exec <tool>: print the full path of the prefixed/versioned gcc
# <tool> (e.g. ar, nm, objcopy). A CLR_<TOOL> environment variable, when set,
# wins outright. Exits non-zero when the tool cannot be found.
locate_gcc_exec() {
  ENV_KNOB="CLR_$(echo "$1" | tr '[:lower:]' '[:upper:]')"
  if env | grep -q "^$ENV_KNOB="; then
    eval "echo \"\$$ENV_KNOB\""
    return
  fi
  if command -v "$gcc_prefix$1$desired_gcc_version" > /dev/null 2>&1
  then
    command -v "$gcc_prefix$1$desired_gcc_version"
  elif command -v "$gcc_prefix$1" > /dev/null 2>&1
  then
    command -v "$gcc_prefix$1"
  else
    exit 1
  fi
}

# Resolve the binutils-style tools CMake needs.
# NOTE(review): this probes a tool literally named "link" (${gcc_prefix}link) —
# confirm that is the intended linker binary name for this toolchain.
if ! gcc_link="$(locate_gcc_exec link)"; then { echo "Unable to locate link"; exit 1; } fi
if ! gcc_ar="$(locate_gcc_exec ar)"; then { echo "Unable to locate gcc-ar"; exit 1; } fi
if ! gcc_nm="$(locate_gcc_exec nm)"; then { echo "Unable to locate gcc-nm"; exit 1; } fi
if [ "$OS" = "Linux" ] || [ "$OS" = "FreeBSD" ] || [ "$OS" = "OpenBSD" ] || [ "$OS" = "NetBSD" ] || [ "$OS" = "SunOS" ]; then
  if ! gcc_objdump="$(locate_gcc_exec objdump)"; then { echo "Unable to locate gcc-objdump"; exit 1; } fi
fi
if ! gcc_objcopy="$(locate_gcc_exec objcopy)"; then { echo "Unable to locate gcc-objcopy"; exit 1; } fi
if ! gcc_ranlib="$(locate_gcc_exec ranlib)"; then { echo "Unable to locate gcc-ranlib"; exit 1; } fi

# Optional LLDB paths for the debugger plugin build.
cmake_extra_defines=
if [ -n "$LLDB_LIB_DIR" ]; then
  cmake_extra_defines="$cmake_extra_defines -DWITH_LLDB_LIBS=$LLDB_LIB_DIR"
fi
if [ -n "$LLDB_INCLUDE_DIR" ]; then
  cmake_extra_defines="$cmake_extra_defines -DWITH_LLDB_INCLUDES=$LLDB_INCLUDE_DIR"
fi

# Cross builds need a sysroot plus the toolchain/tryrun CMake helpers.
if [ "$CROSSCOMPILE" = "1" ]; then
  if [ -z "$ROOTFS_DIR" ]; then
    echo "ROOTFS_DIR not set for crosscompile"
    exit 1
  fi
  if [ -z "$CONFIG_DIR" ]; then
    CONFIG_DIR="$1/cross"
  fi
  export TARGET_BUILD_ARCH=$build_arch
  cmake_extra_defines="$cmake_extra_defines -C $CONFIG_DIR/tryrun.cmake"
  cmake_extra_defines="$cmake_extra_defines -DCMAKE_TOOLCHAIN_FILE=$CONFIG_DIR/toolchain.cmake"
  cmake_extra_defines="$cmake_extra_defines --sysroot=$ROOTFS_DIR"
  cmake_extra_defines="$cmake_extra_defines -DCLR_UNIX_CROSS_BUILD=1"
fi

# Expose the Linux distro ID (from os-release, possibly inside the sysroot)
# to CMake.
if [ "$OS" = "Linux" ]; then
  linux_id_file="/etc/os-release"
  if [ -n "$CROSSCOMPILE" ]; then
    linux_id_file="$ROOTFS_DIR/$linux_id_file"
  fi
  if [ -e "$linux_id_file" ]; then
    . "$linux_id_file"
    cmake_extra_defines="$cmake_extra_defines -DCLR_CMAKE_LINUX_ID=$ID"
  fi
fi

# armel targets the soft-float ABI.
if [ "$build_arch" = "armel" ]; then
  cmake_extra_defines="$cmake_extra_defines -DARM_SOFTFP=1"
fi

__currentScriptDir="$script_dir"

# Prefer cmake3 (RHEL/CentOS naming) over cmake when both exist.
cmake_command=$(command -v cmake3 || command -v cmake)

# Include CMAKE_USER_MAKE_RULES_OVERRIDE as uninitialized since it will hold its value in the CMake cache otherwise can cause issues when branch switching
# NOTE(review): "$__UnprocessedCMakeArgs" is quoted, so multiple extra CMake
# arguments are delivered as one word — confirm whether word-splitting is
# intended here (POSIX sh offers no arrays).
$cmake_command \
  -G "$generator" \
  "-DCMAKE_AR=$gcc_ar" \
  "-DCMAKE_LINKER=$gcc_link" \
  "-DCMAKE_NM=$gcc_nm" \
  "-DCMAKE_RANLIB=$gcc_ranlib" \
  "-DCMAKE_OBJCOPY=$gcc_objcopy" \
  "-DCMAKE_OBJDUMP=$gcc_objdump" \
  "-DCMAKE_BUILD_TYPE=$buildtype" \
  "-DCLR_CMAKE_ENABLE_CODE_COVERAGE=$code_coverage" \
  "-DCLR_CMAKE_COMPILER=GNU" \
  "-DCMAKE_USER_MAKE_RULES_OVERRIDE=" \
  "-DCMAKE_INSTALL_PREFIX=$__CMakeBinDir" \
  $cmake_extra_defines \
  "$__UnprocessedCMakeArgs" \
  "$1"
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
"""Public entry point for the Oracle check package."""
from .oracle import Oracle, OracleConfigError
from .__about__ import __version__

# Names re-exported by `from <package> import *`.
# Quoting is normalized to single quotes for consistency (the original mixed
# "__version__" with single-quoted entries).
__all__ = [
    '__version__',
    'Oracle',
    'OracleConfigError',
]
|
# Create the Kubernetes namespace named by the first argument and report
# whether the kubectl call succeeded. The namespace name is quoted so it is
# not subject to word splitting or globbing.
kubectl create ns "$1"
# Capture kubectl's exit status immediately, before any other command runs.
status=$?
if [ $status -eq 0 ];
then
echo command succeeded
else
echo command failed
fi
echo status is $status
|
# Native build dependencies for Numberjack's compiled bindings.
sudo apt-get install -y libxml2-dev
sudo apt-get install -y swig
# Numberjack (constraint-programming solver) plus plotting libraries.
sudo pip install Numberjack matplotlib bokeh
|
#!/bin/bash
# Render the frontend ingress manifest: substitute current environment
# variables into the .in template to produce the deployable YAML.
envsubst < exercises/frontend/ingress.yaml.in > exercises/frontend/ingress.yaml
|
// Central asset registry: import each image once and re-export it under a
// stable name, so consumers write `import { Logo } from '.../assets'`
// instead of hard-coding file paths.
import Logo from './logo.png';
import WatchVector from './watch_vector.jpeg';
import Background from './bg1.png';

export { Logo, WatchVector, Background };
|
#!/bin/bash
set -e
set -x

# Script to run http://goreleaser.com
# Removed from `build` stanza
#    binary: $module

# First argument: the module (subdirectory) to release; remaining arguments
# are forwarded verbatim to goreleaser.
module=$1
shift

# The following assumes git tags formatted like
# "api/v1.2.3" and splits on the slash.
# Goreleaser doesn't know what to do with this
# tag format, and fails when creating an archive
# with a / in the name.
fullTag=$(git describe)
export tModule=${fullTag%/*}
export tSemver=${fullTag#*/}
echo "tModule=$tModule"
echo "tSemver=$tSemver"

if [ "$module" != "$tModule" ]; then
  # Tag and argument sanity check
  echo "Unexpected mismatch: moduleFromArg=$module, moduleFromTag=$tModule"
  echo "Either the module arg to this script is wrong, or the git tag is wrong."
  exit 1
fi

# Quoted to survive any unusual characters in the module path.
cd "$module"

# Generate a throwaway goreleaser config for this module/version.
configFile=$(mktemp)
cat <<EOF >"$configFile"
project_name: $module
env:
- CGO_ENABLED=0
- GO111MODULE=on
checksum:
  name_template: 'checksums.txt'
changelog:
  sort: asc
  filters:
    exclude:
    - '^docs:'
    - '^test:'
    - Merge pull request
    - Merge branch
release:
  github:
    owner: kubernetes-sigs
    name: kustomize
  draft: true
builds:
- ldflags: >
    -s
    -X sigs.k8s.io/kustomize/api/provenance.version={{.Version}}
    -X sigs.k8s.io/kustomize/api/provenance.gitCommit={{.Commit}}
    -X sigs.k8s.io/kustomize/api/provenance.buildDate={{.Date}}
  goos:
  - linux
  - darwin
  - windows
  goarch:
  - amd64
archives:
- name_template: "${module}_${tSemver}_{{ .Os }}_{{ .Arch }}"
EOF

cat "$configFile"

# "$@" (quoted) preserves each forwarded argument as its own word; the
# original unquoted $@ would re-split arguments containing spaces.
/bin/goreleaser release --config="$configFile" --rm-dist --skip-validate "$@"
|
#!/usr/bin/env bash
# Launch distributed (8-GPU, single-node) PyTorch DDP training of the
# hh3d_rcnn_car config on the KITTI models directory.
export CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7
NGPUS=8

CFG_DIR=cfgs/kitti_models
CFG_NAME=hh3d_rcnn_car

# One worker process per GPU; 8 dataloader workers per process.
python -m torch.distributed.launch --nproc_per_node=${NGPUS} train.py --launcher pytorch --cfg_file $CFG_DIR/$CFG_NAME.yaml --workers 8
|
module MultiSessionStore
  # Controller mixin: propagates the :subsession_id request parameter into
  # every generated URL, so parallel browser sessions keep their identity
  # across navigation.
  module DefaultUrlOptions
    # Returns URL options including subsession_id when the current request
    # carries one. Merges on top of any ancestor's default_url_options; if
    # no ancestor defines it (NoMethodError from super), returns just ours.
    def default_url_options
      options = params[:subsession_id] ? {subsession_id: params[:subsession_id]} : {}
      begin
        super.merge options
      rescue NoMethodError
        options
      end
    end
  end
end
|
/*
* Copyright © 2019 <NAME>.
*/
package filters
import (
"errors"
"github.com/hedzr/voxr-api/api/v10"
"github.com/hedzr/voxr-api/models"
"github.com/hedzr/voxr-common/dc"
"github.com/hedzr/voxr-common/tool"
"github.com/hedzr/voxr-lite/misc/impl/dao"
"github.com/hedzr/voxr-lite/misc/impl/mq"
"github.com/sirupsen/logrus"
"github.com/streadway/amqp"
"plugin"
"strings"
"time"
)
type (
	// holder owns the loaded filter plugins and the channels that drive the
	// background run() looper.
	holder struct {
		preFilters     map[string]Plugin      // filters applied before delivery, keyed by filter name
		postFilters    map[string]Plugin      // filters applied after delivery, keyed by filter name
		exited         bool                   // NOTE(review): initialized to true in newHolder — confirm who flips it
		exitCh         chan bool              // internal shutdown request channel
		externalExitCh chan bool              // external shutdown signal (nil until wired up)
		appAdded       chan *models.Filter    // notification: a filter was added
		appRemoved     chan *models.Filter    // notification: a filter was removed
		appUpdated     chan *models.Filter    // notification: a filter was updated
	}

	// Config describes a filter plugin's static metadata as declared by the
	// plugin itself.
	Config struct {
		Name        string
		Icon        string
		Version     string
		Author      string
		Copyright   string
		CaredEvents string
		Permissions string
		Tags        string //
		Keyword     string //
		Mode        int    // 0: Pre-filter, 1: Post filter
		Website     string // url
		Logo        string // url
		Info        string // short detail about this filter
		Cover       string // url for cover image
		HelpPage    string // url
		TermsPage   string // url
		Privacy     string // url
	}
)
// holderCore is the package-level singleton managed by Start/Stop.
var holderCore *holder

// Start creates the filter holder and kicks off its loader goroutine
// (which loads plugins, starts the run looper and subscribes to MQ events).
func Start() {
	holderCore = newHolder()
	go holderCore.loader()
}

// Stop requests shutdown of the run looper.
// NOTE(review): newHolder initializes exited to true and nothing in this file
// sets it to false, so this send appears to be skipped — confirm the intended
// shutdown protocol.
func Stop() {
	if !holderCore.exited {
		holderCore.exitCh <- true
	}
}
// newHolder constructs an empty filter holder with buffered channels ready
// for the loader and run looper.
//
// The composite literal uses explicit field names: the original positional
// initializer silently depended on struct field order and would misassign
// values if the holder struct were ever reordered (the `go vet composites`
// class of defect). Values are identical to the original.
func newHolder() *holder {
	return &holder{
		preFilters:     make(map[string]Plugin),
		postFilters:    make(map[string]Plugin),
		exited:         true, // NOTE(review): starts true; see Stop()
		exitCh:         make(chan bool, 3),
		externalExitCh: nil,
		appAdded:       make(chan *models.Filter, 10),
		appRemoved:     make(chan *models.Filter, 10),
		appUpdated:     make(chan *models.Filter, 10),
	}
}
// loader bootstraps the filter subsystem: load persisted filters from the DB,
// start the background looper, then block handling MQ events.
func (h *holder) loader() {
	// load all filters
	h.loadFilters()
	// starting the run looper
	go h.run()
	// monitor apps add/remove global events
	h.monitorEvents()
}

// loadFilters reads every filter row ("1=1" = no WHERE restriction) and loads
// those whose callback points at a local plugin file ("file://" prefix).
// A DB failure is fatal for the process (logrus.Fatalf exits).
func (h *holder) loadFilters() {
	dx := dao.NewFilterDao()
	if ret, err := dx.ListFast("1=1"); err != nil {
		logrus.Fatalf("[filters] CAN'T load apps from DB: %v", err)
	} else {
		cnt := 0
		for _, r := range ret {
			// Skip rows missing a name or a callback target.
			if len(r.Name) == 0 || len(r.Callback) == 0 {
				continue
			}
			if strings.HasPrefix(r.Callback, "file://") {
				// r.Callback[7:] strips the "file://" scheme to get the path.
				if h.loadPlugin(r, r.Callback[7:]) {
					cnt++
				}
			}
		}
		logrus.Debugf("[filters] %v filters loaded.", cnt)
	}
}
// loadPlugin opens the shared-object file for filter r, looks up its
// "VxPlugin" symbol, calls its OnLoad hook, and registers it in the pre/post
// filter maps according to r.Mode (0: pre, 1: post, anything else: both).
// Returns true only when the plugin was registered.
func (h *holder) loadPlugin(r *models.Filter, file string) (ok bool) {
	if tool.FileExists(file) {
		if p, err := plugin.Open(file); err != nil {
			logrus.WithFields(logrus.Fields{"Err": err}).Warnf("[apps] CAN'T load filters' plugin '%v'", r.Name)
		} else {
			if sym, err := p.Lookup("VxPlugin"); err == nil {
				logrus.Debugf("[filters] 'VxPlugin' is: %v", sym)
				// NOTE(review): this `ok` shadows the named return value; the
				// function still returns true below even if the type assertion
				// fails — confirm that registering a non-VxPlug symbol is intended.
				if entry, ok := sym.(VxPlug); ok {
					_ = entry.OnLoad()
					// cfg := entry.Config()
				}
				if r.Mode == 0 {
					h.preFilters[r.Name] = NewFilterPlugin(r, p, sym)
				} else if r.Mode == 1 {
					h.postFilters[r.Name] = NewFilterPlugin(r, p, sym)
				} else {
					// Unknown mode: register as both pre- and post-filter.
					h.preFilters[r.Name] = NewFilterPlugin(r, p, sym)
					h.postFilters[r.Name] = NewFilterPlugin(r, p, sym)
				}
				return true
			} else {
				// NOTE(review): the format string has two verbs but three args;
				// errors.New("x") is surplus and prints as %!(EXTRA ...). Removing
				// it also requires dropping the then-unused `errors` import.
				logrus.Warnf("[filters] CANT load app plugin 'VxPlugin' symbol '%v': %v", r.Name, err, errors.New("x"))
			}
		}
	}
	return
}
// eventsHandlerForFilters is the MQ delivery callback registered by
// monitorEvents. The entire dispatch body is currently commented out, so the
// handler is a no-op; the commented code routed EvMsgIncoming deliveries to
// pre-filters and EvMsgRead deliveries to post-filters.
func (h *holder) eventsHandlerForFilters(d amqp.Delivery) {
	// key := d.RoutingKey
	// if strings.HasPrefix(key, "fx.im.ev.") {
	// 	key = key[9:]
	// }
	// keyInt := util.GlobalEventNameToInt(key)
	// ge := v10.GlobalEvents(keyInt)
	// logrus.Debugf(" [x][filters] %v (%v), ge: %v", d.RoutingKey, keyInt, ge)
	//
	// if ge == v10.GlobalEvents_EvMsgIncoming {
	// 	for _, v := range h.preFilters {
	// 		// logrus.Debugf("v: %v", v)
	// 		entry, ok := v.PluginMainEntry().(VxPlug)
	// 		if ok && v.IsCared(ge) {
	// 			// logrus.Debugf("ge hit: %v | plugin: %v", ge, v.Model.Name)
	// 			go func() {
	// 				logrus.Debugf("run plugin: %v, entry=%v", v.Model().Name, entry)
	// 				if _, err := entry.OnEvent(v, &Args{ge, d.Timestamp, d.ConsumerTag, d.Body}); err != nil {
	// 					logrus.Warnf("[x][filters] invoke app '%v' return failed: %v", v.Model().Name, err)
	// 				}
	// 			}()
	// 		}
	// 	}
	// } else if ge == v10.GlobalEvents_EvMsgRead {
	// 	for _, v := range h.postFilters {
	// 		// logrus.Debugf("v: %v", v)
	// 		entry, ok := v.PluginMainEntry().(VxPlug)
	// 		if ok && v.IsCared(ge) {
	// 			// logrus.Debugf("ge hit: %v | plugin: %v", ge, v.Model.Name)
	// 			go func() {
	// 				logrus.Debugf("run plugin: %v, entry=%v", v.Model().Name, entry)
	// 				if _, err := entry.OnEvent(v, &Args{ge, d.Timestamp, d.ConsumerTag, d.Body}); err != nil {
	// 					logrus.Warnf("[x][filters] invoke app '%v' return failed: %v", v.Model().Name, err)
	// 				}
	// 			}()
	// 		}
	// 	}
	// }
}
// CallPre runs msg through every cared pre-filter on the package-level
// holder. See (*holder).CallPre for semantics.
func CallPre(ge v10.GlobalEvents, msg *models.Msg) (ret *models.Msg, err error) {
	return holderCore.CallPre(ge, msg)
}

// CallPost runs msg through every cared post-filter on the package-level
// holder. See (*holder).CallPost for semantics.
func CallPost(ge v10.GlobalEvents, msg *models.Msg) (ret *models.Msg, err error) {
	return holderCore.CallPost(ge, msg)
}
// CallPre invokes every pre-filter that cares about event ge, passing each a
// deep copy of msg. Always returns the ORIGINAL msg.
// NOTE(review): the msgCopy value each filter returns is never folded back
// into ret, so pre-filter transformations are discarded — confirm whether
// that is intentional.
func (h *holder) CallPre(ge v10.GlobalEvents, msg *models.Msg) (ret *models.Msg, err error) {
	for _, v := range h.preFilters {
		entry, ok := v.PluginMainEntry().(VxPlug)
		if ok && v.IsCared(ge) {
			logrus.Debugf("run pre-filter: %v", v.Model().Name)
			// Give the filter its own copy so it cannot mutate the caller's msg.
			msgCopy := new(models.Msg)
			_ = dc.StandardCopier.Copy(msgCopy, msg)
			ret = msg
			if msgCopy, err = entry.OnCall(v, &Args{ge, time.Now(), "", msgCopy}); err != nil {
				logrus.Warnf("[x][filters] invoke app '%v' return failed: %v", v.Model().Name, err)
			}
		}
	}
	ret = msg
	return
}

// CallPost invokes every post-filter that cares about event ge, passing the
// msg itself (no copy). Intermediate OnCall results land in ret but the final
// statement always resets ret to the original msg; err holds the last
// filter's error, if any.
func (h *holder) CallPost(ge v10.GlobalEvents, msg *models.Msg) (ret *models.Msg, err error) {
	for _, v := range h.postFilters {
		entry, ok := v.PluginMainEntry().(VxPlug)
		if ok && v.IsCared(ge) {
			logrus.Debugf("run post-filter: %v", v.Model().Name)
			if ret, err = entry.OnCall(v, &Args{ge, time.Now(), "", msg}); err != nil {
				logrus.Warnf("[x][filters] invoke app '%v' return failed: %v", v.Model().Name, err)
			}
		}
	}
	ret = msg
	return
}
// monitorEvents subscribes eventsHandlerForFilters to the filters MQ queue;
// blocks inside mq.HandleEvent for the lifetime of the subscription.
func (h *holder) monitorEvents() {
	mq.HandleEvent("filters.mgr", mq.DEFAULT_QUEUE_FOR_FILTERS, mq.DEFAULT_CAST, h.eventsHandlerForFilters)
}

// run is the holder's event loop: reacts to shutdown requests, a 20s
// heartbeat ticker, and filter add/remove/update notifications. Returns only
// when externalExitCh delivers true.
func (h *holder) run() {
	ticker := time.NewTicker(20 * time.Second)
	defer func() {
		ticker.Stop()
		logrus.Debug("--- filters mgr run() stopped.")
	}()
	for {
		select {
		case e := <-h.exitCh:
			// NOTE(review): receiving on exitCh calls Stop(), which itself may
			// send on exitCh again, and this case never returns — combined with
			// `exited` starting true, confirm the intended shutdown path.
			if e {
				Stop()
			}
		case e := <-h.externalExitCh:
			if e {
				return
			}
		case tm := <-ticker.C:
			logrus.Debugf("--- filter run() looper: %v", tm)
		case c := <-h.appAdded:
			logrus.Debugf("--- filter added: %v", c)
		case c := <-h.appRemoved:
			logrus.Debugf("--- filter removed: %v", c)
		case c := <-h.appUpdated:
			logrus.Debugf("--- filter updated: %v", c)
		}
	}
}
|
#!/bin/bash
# Dev helper: serve the current directory over HTTP on port 1111.
python3 -m http.server 1111
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.13.0
// source: executor.proto
package proto
import (
proto "github.com/golang/protobuf/proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type CommandStatus int32
const (
CommandStatus_UNKNOWN CommandStatus = 0
CommandStatus_IN_QUEUE CommandStatus = 1
CommandStatus_IN_PROGRESS CommandStatus = 2
CommandStatus_COMPLETE CommandStatus = 3
)
// Enum value maps for CommandStatus.
var (
CommandStatus_name = map[int32]string{
0: "UNKNOWN",
1: "IN_QUEUE",
2: "IN_PROGRESS",
3: "COMPLETE",
}
CommandStatus_value = map[string]int32{
"UNKNOWN": 0,
"IN_QUEUE": 1,
"IN_PROGRESS": 2,
"COMPLETE": 3,
}
)
func (x CommandStatus) Enum() *CommandStatus {
p := new(CommandStatus)
*p = x
return p
}
func (x CommandStatus) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (CommandStatus) Descriptor() protoreflect.EnumDescriptor {
return file_executor_proto_enumTypes[0].Descriptor()
}
func (CommandStatus) Type() protoreflect.EnumType {
return &file_executor_proto_enumTypes[0]
}
func (x CommandStatus) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use CommandStatus.Descriptor instead.
func (CommandStatus) EnumDescriptor() ([]byte, []int) {
return file_executor_proto_rawDescGZIP(), []int{0}
}
type Command struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Binary string `protobuf:"bytes,1,opt,name=binary,proto3" json:"binary,omitempty"`
Parameters []string `protobuf:"bytes,2,rep,name=parameters,proto3" json:"parameters,omitempty"`
DeleteOnComplete bool `protobuf:"varint,3,opt,name=delete_on_complete,json=deleteOnComplete,proto3" json:"delete_on_complete,omitempty"`
}
func (x *Command) Reset() {
*x = Command{}
if protoimpl.UnsafeEnabled {
mi := &file_executor_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Command) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Command) ProtoMessage() {}
func (x *Command) ProtoReflect() protoreflect.Message {
mi := &file_executor_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Command.ProtoReflect.Descriptor instead.
func (*Command) Descriptor() ([]byte, []int) {
return file_executor_proto_rawDescGZIP(), []int{0}
}
func (x *Command) GetBinary() string {
if x != nil {
return x.Binary
}
return ""
}
func (x *Command) GetParameters() []string {
if x != nil {
return x.Parameters
}
return nil
}
func (x *Command) GetDeleteOnComplete() bool {
if x != nil {
return x.DeleteOnComplete
}
return false
}
type ExecuteRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Command *Command `protobuf:"bytes,1,opt,name=command,proto3" json:"command,omitempty"`
ReadyForDeletion bool `protobuf:"varint,2,opt,name=ready_for_deletion,json=readyForDeletion,proto3" json:"ready_for_deletion,omitempty"`
Key string `protobuf:"bytes,3,opt,name=key,proto3" json:"key,omitempty"`
}
func (x *ExecuteRequest) Reset() {
*x = ExecuteRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_executor_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ExecuteRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ExecuteRequest) ProtoMessage() {}
func (x *ExecuteRequest) ProtoReflect() protoreflect.Message {
mi := &file_executor_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ExecuteRequest.ProtoReflect.Descriptor instead.
func (*ExecuteRequest) Descriptor() ([]byte, []int) {
return file_executor_proto_rawDescGZIP(), []int{1}
}
func (x *ExecuteRequest) GetCommand() *Command {
if x != nil {
return x.Command
}
return nil
}
func (x *ExecuteRequest) GetReadyForDeletion() bool {
if x != nil {
return x.ReadyForDeletion
}
return false
}
func (x *ExecuteRequest) GetKey() string {
if x != nil {
return x.Key
}
return ""
}
type ExecuteResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
TimeTakenInMillis int64 `protobuf:"varint,1,opt,name=time_taken_in_millis,json=timeTakenInMillis,proto3" json:"time_taken_in_millis,omitempty"`
CommandOutput string `protobuf:"bytes,2,opt,name=command_output,json=commandOutput,proto3" json:"command_output,omitempty"`
Status CommandStatus `protobuf:"varint,3,opt,name=status,proto3,enum=executor.CommandStatus" json:"status,omitempty"`
ExitCode int32 `protobuf:"varint,4,opt,name=exit_code,json=exitCode,proto3" json:"exit_code,omitempty"`
}
func (x *ExecuteResponse) Reset() {
*x = ExecuteResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_executor_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ExecuteResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ExecuteResponse) ProtoMessage() {}
func (x *ExecuteResponse) ProtoReflect() protoreflect.Message {
mi := &file_executor_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ExecuteResponse.ProtoReflect.Descriptor instead.
func (*ExecuteResponse) Descriptor() ([]byte, []int) {
return file_executor_proto_rawDescGZIP(), []int{2}
}
func (x *ExecuteResponse) GetTimeTakenInMillis() int64 {
if x != nil {
return x.TimeTakenInMillis
}
return 0
}
func (x *ExecuteResponse) GetCommandOutput() string {
if x != nil {
return x.CommandOutput
}
return ""
}
func (x *ExecuteResponse) GetStatus() CommandStatus {
if x != nil {
return x.Status
}
return CommandStatus_UNKNOWN
}
func (x *ExecuteResponse) GetExitCode() int32 {
if x != nil {
return x.ExitCode
}
return 0
}
var File_executor_proto protoreflect.FileDescriptor
var file_executor_proto_rawDesc = []byte{
0x0a, 0x0e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x08, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x22, 0x6f, 0x0a, 0x07, 0x43, 0x6f,
0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x12, 0x1e, 0x0a,
0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
0x09, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x2c, 0x0a,
0x12, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x5f, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6c,
0x65, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x64, 0x65, 0x6c, 0x65, 0x74,
0x65, 0x4f, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x22, 0x7d, 0x0a, 0x0e, 0x45,
0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a,
0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11,
0x2e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e,
0x64, 0x52, 0x07, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x12, 0x2c, 0x0a, 0x12, 0x72, 0x65,
0x61, 0x64, 0x79, 0x5f, 0x66, 0x6f, 0x72, 0x5f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e,
0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x72, 0x65, 0x61, 0x64, 0x79, 0x46, 0x6f, 0x72,
0x44, 0x65, 0x6c, 0x65, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18,
0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x22, 0xb7, 0x01, 0x0a, 0x0f, 0x45,
0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2f,
0x0a, 0x14, 0x74, 0x69, 0x6d, 0x65, 0x5f, 0x74, 0x61, 0x6b, 0x65, 0x6e, 0x5f, 0x69, 0x6e, 0x5f,
0x6d, 0x69, 0x6c, 0x6c, 0x69, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x11, 0x74, 0x69,
0x6d, 0x65, 0x54, 0x61, 0x6b, 0x65, 0x6e, 0x49, 0x6e, 0x4d, 0x69, 0x6c, 0x6c, 0x69, 0x73, 0x12,
0x25, 0x0a, 0x0e, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75,
0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64,
0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73,
0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f,
0x72, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52,
0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x1b, 0x0a, 0x09, 0x65, 0x78, 0x69, 0x74, 0x5f,
0x63, 0x6f, 0x64, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x65, 0x78, 0x69, 0x74,
0x43, 0x6f, 0x64, 0x65, 0x2a, 0x49, 0x0a, 0x0d, 0x43, 0x6f, 0x6d, 0x6d, 0x61, 0x6e, 0x64, 0x53,
0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e,
0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x49, 0x4e, 0x5f, 0x51, 0x55, 0x45, 0x55, 0x45, 0x10, 0x01,
0x12, 0x0f, 0x0a, 0x0b, 0x49, 0x4e, 0x5f, 0x50, 0x52, 0x4f, 0x47, 0x52, 0x45, 0x53, 0x53, 0x10,
0x02, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x03, 0x32,
0x9a, 0x01, 0x0a, 0x0f, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x53, 0x65, 0x72, 0x76,
0x69, 0x63, 0x65, 0x12, 0x40, 0x0a, 0x07, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x18,
0x2e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74,
0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x75,
0x74, 0x6f, 0x72, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f,
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x45, 0x0a, 0x0c, 0x51, 0x75, 0x65, 0x75, 0x65, 0x45, 0x78,
0x65, 0x63, 0x75, 0x74, 0x65, 0x12, 0x18, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72,
0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x19, 0x2e, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75,
0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x28, 0x5a, 0x26,
0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x62, 0x72, 0x6f, 0x74, 0x68,
0x65, 0x72, 0x6c, 0x6f, 0x67, 0x69, 0x63, 0x2f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x6f, 0x72,
0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_executor_proto_rawDescOnce sync.Once
file_executor_proto_rawDescData = file_executor_proto_rawDesc
)
func file_executor_proto_rawDescGZIP() []byte {
file_executor_proto_rawDescOnce.Do(func() {
file_executor_proto_rawDescData = protoimpl.X.CompressGZIP(file_executor_proto_rawDescData)
})
return file_executor_proto_rawDescData
}
var file_executor_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_executor_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_executor_proto_goTypes = []interface{}{
(CommandStatus)(0), // 0: executor.CommandStatus
(*Command)(nil), // 1: executor.Command
(*ExecuteRequest)(nil), // 2: executor.ExecuteRequest
(*ExecuteResponse)(nil), // 3: executor.ExecuteResponse
}
var file_executor_proto_depIdxs = []int32{
1, // 0: executor.ExecuteRequest.command:type_name -> executor.Command
0, // 1: executor.ExecuteResponse.status:type_name -> executor.CommandStatus
2, // 2: executor.ExecutorService.Execute:input_type -> executor.ExecuteRequest
2, // 3: executor.ExecutorService.QueueExecute:input_type -> executor.ExecuteRequest
3, // 4: executor.ExecutorService.Execute:output_type -> executor.ExecuteResponse
3, // 5: executor.ExecutorService.QueueExecute:output_type -> executor.ExecuteResponse
4, // [4:6] is the sub-list for method output_type
2, // [2:4] is the sub-list for method input_type
2, // [2:2] is the sub-list for extension type_name
2, // [2:2] is the sub-list for extension extendee
0, // [0:2] is the sub-list for field type_name
}
func init() { file_executor_proto_init() }
func file_executor_proto_init() {
if File_executor_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_executor_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Command); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_executor_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExecuteRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_executor_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExecuteResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_executor_proto_rawDesc,
NumEnums: 1,
NumMessages: 3,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_executor_proto_goTypes,
DependencyIndexes: file_executor_proto_depIdxs,
EnumInfos: file_executor_proto_enumTypes,
MessageInfos: file_executor_proto_msgTypes,
}.Build()
File_executor_proto = out.File
file_executor_proto_rawDesc = nil
file_executor_proto_goTypes = nil
file_executor_proto_depIdxs = nil
}
|
# Evaluate the 0+1024+512/7 checkpoint on WikiText-103 validation text with a
# 1536-token tokenizer config, applying the "keep only function words in the
# first third/sixth" augmentation and scoring the penultimate sixth of each
# example; batch size 1, incomplete final batch dropped.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/0+1024+512/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/0+1024+512/7-512+512+512-FW-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_function_words_first_third_sixth --eval_function penultimate_sixth_eval
|
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import {
PopoverHeader,
PopoverBody,
PopoverList,
PopoverFooter,
AvatarImg
} from './styles';
import Filter from './Filter';
// Prop contract for <FilterableList>.
const propTypes = {
  // Entries shown in the list; `selectedBy` drives the check state
  // ('all' / 'none' as toggled by clicks).
  items: PropTypes.arrayOf(
    PropTypes.shape({
      _id: PropTypes.string.isRequired,
      title: PropTypes.string,
      iconClass: PropTypes.string,
      iconColor: PropTypes.string,
      selectedBy: PropTypes.string
    })
  ).isRequired,
  // Optional footer links.
  // NOTE(review): the render code reads `link.href`, which this shape does
  // not declare (it declares `link`) — confirm the intended field name.
  links: PropTypes.arrayOf(
    PropTypes.shape({
      title: PropTypes.string.isRequired,
      link: PropTypes.element,
      onClick: PropTypes.func
    })
  ),
  // When true, the item's selectedBy value is used as its CSS class.
  showCheckmark: PropTypes.bool,
  selectable: PropTypes.bool,
  className: PropTypes.string,
  // hooks
  onClick: PropTypes.func, // called with (items, id) after each toggle
  onExit: PropTypes.func // called with the final items on unmount
};
class FilterableList extends Component {
constructor(props) {
super(props);
this.state = {
key: '',
items: props.items
};
this.filterItems = this.filterItems.bind(this);
this.toggleItem = this.toggleItem.bind(this);
}
componentWillUnmount() {
// onExit hook
const { onExit } = this.props;
if (onExit) onExit(this.state.items);
}
componentWillReceiveProps(nextProps) {
this.setState({
items: nextProps.items
});
}
filterItems(e) {
this.setState({ key: e.target.value });
}
toggleItem(id) {
const items = this.state.items;
const item = items.find(i => i._id === id);
items[items.indexOf(item)].selectedBy =
item.selectedBy === 'all' ? 'none' : 'all';
this.setState({ items });
// onClick hook
const { onClick } = this.props;
if (onClick) onClick(items, id);
}
renderItems() {
const { showCheckmark = true } = this.props;
const { items, key } = this.state;
return items.map(item => {
// filter items by key
if (key && item.title.toLowerCase().indexOf(key) < 0) {
return false;
}
return (
<li
key={item._id}
className={showCheckmark ? item.selectedBy : ''}
onClick={() => {
this.toggleItem(item._id);
}}
>
{item.iconClass ? (
<i
className={`icon ${item.iconClass}`}
style={{ color: item.iconColor }}
/>
) : null}{' '}
{item.avatar ? <AvatarImg src={item.avatar} /> : null}
{item.title || '[undefined]'}
</li>
);
});
}
render() {
return (
<div className={this.props.className}>
<PopoverHeader>
<Filter onChange={this.filterItems} />
</PopoverHeader>
<PopoverBody>
<PopoverList selectable={this.props.selectable}>
{this.renderItems()}
</PopoverList>
</PopoverBody>
{this.props.links && (
<PopoverFooter>
<PopoverList>
{this.props.links.map(link => (
<li key={link.href}>
<a onClick={link.onClick} href={link.href}>
{link.title}
</a>
</li>
))}
</PopoverList>
</PopoverFooter>
)}
</div>
);
}
}
FilterableList.propTypes = propTypes;
export default FilterableList;
|
#!/usr/bin/env bash
# Publish packages from the tip of master: bump versions with lerna,
# push tags, and publish to npm. Exits early (successfully) when not at
# the head of master so stale CI runs never publish.
set -e

git remote set-url origin https://${GH_TOKEN}@github.com/newsuk/times-components.git > /dev/null 2>&1
git checkout master

TIP_COMMIT=$(git rev-parse HEAD)
echo $(printf "CircleCI commit: %s, Head commit: %s" $CIRCLE_SHA1 $TIP_COMMIT)

# make sure we only publish if we are at the head of master
if [[ $TIP_COMMIT != $CIRCLE_SHA1 ]]
then
  echo "Not on the tip of master!"
  exit 0
fi

# set npm credentials
echo "Setting up npm"
echo "//registry.npmjs.org/:_authToken=${NEWS_TOOLS_NPM_TOKEN}" > ~/.npmrc

# Configure the committing identity once; git config is repo-local and
# persists for the later push (the previous revision set it twice).
git config user.name "times-tools"
git config user.email "tools@news.co.uk"

# bump versions, create change logs, create tags, publish to npm
PR_MSG=$(git log --pretty=format:"%h" -1)
MESSAGE=$(printf "chore: Publish %s [ci skip]" $PR_MSG)
echo "$MESSAGE"
npx lerna publish --conventional-commits --yes --concurrency=1 --exact -m "$MESSAGE"

# push above changes to git
echo "Pushing to master"
git push origin master --tags --quiet > /dev/null 2>&1
|
# Regression test: the pipeline must fail on the first run, then succeed
# (no failure text) after overriding the failing 'hello' stage.
source ../testsupport.sh

run

grep -q "It failed" test.out || err "Failed to find expected text 'It Failed' in output"

bpipe override hello > test.out

run

grep -q "It failed" test.out && err "Found unexpected text 'It Failed' in output"

# Ensure the script exits 0: the grep && err line above leaves a non-zero
# status when grep finds nothing (the success case).
true
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.grasea.grandroid.actions;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
/**
*
* @author Rovers
*/
/**
 * An {@link Action} that shows an {@link AlertDialog} with optional
 * positive/negative buttons, each of which triggers another Action when
 * clicked. Configure via {@link #setData} (and optionally
 * {@link #setCancelable}/{@link #setCancelAction}) before executing.
 */
public class AlertAction extends ContextAction {

    // Whether the dialog may be dismissed (back button / outside touch).
    protected boolean cancelable;
    /**
     * Dialog title text.
     */
    protected String title;
    /**
     * Dialog message body text.
     */
    protected String msg;
    /**
     * Action executed when the positive button is clicked; button omitted when null.
     */
    protected Action actPositive;
    /**
     * Action executed when the negative button is clicked; button omitted when null.
     */
    protected Action actNegative;
    // Action executed when the dialog is cancelled (only when cancelable).
    protected Action actCancel;

    /**
     * @param context    Android context used to build the dialog
     * @param actionName display name of this action
     */
    public AlertAction(Context context, String actionName) {
        super(context, actionName);
    }

    /**
     * @param context Android context used to build the dialog
     */
    public AlertAction(Context context) {
        super(context);
    }

    /**
     * Configures the dialog. Null arguments leave the corresponding field
     * unchanged, so this can be called repeatedly to update parts of the setup.
     *
     * @param title       dialog title, or null to keep the current one
     * @param msg         dialog message, or null to keep the current one
     * @param actPositive positive-button action, or null to keep the current one
     * @param actNegative negative-button action, or null to keep the current one
     * @return this, for chaining
     */
    public AlertAction setData(String title, String msg, Action actPositive, Action actNegative) {
        if (title != null) {
            this.title = title;
        }
        if (msg != null) {
            this.msg = msg;
        }
        if (actPositive != null) {
            this.actPositive = actPositive;
        }
        if (actNegative != null) {
            this.actNegative = actNegative;
        }
        // NOTE(review): setData always re-enables cancelable; call
        // setCancelable(false) AFTER setData if a non-cancelable dialog is wanted.
        cancelable = true;
        return this;
    }

    // Sets whether the dialog can be cancelled; returns this for chaining.
    public AlertAction setCancelable(boolean cancelable) {
        this.cancelable = cancelable;
        return this;
    }

    // Sets the action run on cancel (effective only when cancelable is true).
    public AlertAction setCancelAction(Action actCancel) {
        this.actCancel = actCancel;
        return this;
    }

    /**
     * Builds and shows the dialog from the configured fields. Buttons are only
     * added for non-null actions; the cancel listener is only attached when the
     * dialog is cancelable and a cancel action is set.
     *
     * @param context context the dialog is shown in
     * @return always true (the dialog was shown)
     */
    @Override
    public boolean execute(Context context) {
        AlertDialog.Builder builder = new AlertDialog.Builder(context).setTitle(title).setMessage(msg);
        if (actPositive != null) {
            builder.setPositiveButton(actPositive.getActionName(), new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface arg0, int arg1) {
                    actPositive.execute();
                }
            });
        }
        if (actNegative != null) {
            builder.setNegativeButton(actNegative.getActionName(), new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface arg0, int arg1) {
                    actNegative.execute();
                }
            });
        }
        builder.setCancelable(cancelable);
        if (cancelable && actCancel != null) {
            builder.setOnCancelListener(new DialogInterface.OnCancelListener() {
                public void onCancel(DialogInterface dialog) {
                    // Pass the dialog through so the cancel action can inspect it.
                    actCancel.setArgs(dialog).execute();
                }
            });
        }
        builder.show();
        return true;
    }
}
|
One of the simplest algorithms for sorting an array of integers in ascending order is Bubble Sort. Bubble Sort iterates through the array, comparing each pair of adjacent elements and swapping them when they are out of order. Passes over the array are repeated until no swaps are needed, at which point every element is in its correct position and the array is sorted.
|
# Installs the utility-emissions chaincode package on one of the Fabric peer
# nodes. $1 is forwarded to the peer-connection helpers; $2 selects node 1 or 2.
CHANNEL_NAME="utilityemissionchannel"
# CC_NAME="emissionscontract"
LOG_FILE_NAME=chaincode${2}_log.txt
# Default to node/chaincode "one"; switch to "two" when $2 == 2.
CC_SUBDIR="one"
NODE_SUBDIR="node-one"
CC_NN=${2}
if [ $CC_NN -eq 2 ]; then
CC_SUBDIR="two"
NODE_SUBDIR="node-two"
fi
export FABRIC_CFG_PATH=$PWD/fabric-config/
export PATH=${PWD}/bin:$PATH
# import utils
. scripts/envVar.sh true
fcn_call=$1
shift
parsePeerConnectionParameters $@
res=$?
verifyResult $res "Invoke transaction failed on channel '$CHANNEL_NAME' due to uneven number of peer and org parameters "
# NOTE(review): this set -x / set +x pair traces nothing — it looks like the
# command that used to sit between them was removed; confirm and clean up.
set -x
set +x
# NOTE(review): log.txt is not written by this script (peer output goes to
# $LOG_FILE_NAME below) — confirm this cat is still needed.
cat log.txt
#verifyResult $res "Invoke execution on $PEERS failed "
echo "===================== Query chaincode on $PEERS on channel '$CHANNEL_NAME' ===================== "
echo
# Package the external-chaincode connection info and metadata.
cd ./chaincode/${CC_SUBDIR}
# tar connection.json and metadata.json
tar cfz code.tar.gz connection.json
tar cfz utilityemissions-chaincode.tgz code.tar.gz metadata.json
cd ../..
# Install the package and extract the package identifier from the peer output.
./bin/peer lifecycle chaincode install chaincode/${CC_SUBDIR}/utilityemissions-chaincode.tgz >&$LOG_FILE_NAME
export CHAINCODE_CCID=`cat ${LOG_FILE_NAME} | grep "Chaincode code package identifier:" | awk '{split($0,a,"Chaincode code package identifier:"); print a[2]}'`
# Record the freshly installed chaincode id in the node's docker-compose file.
sed -i -e "s!CHAINCODE_CCID=.*!CHAINCODE_CCID=${CHAINCODE_CCID}!g" docker/nodes/${NODE_SUBDIR}/docker-compose-chaincode.yaml
### Examples
# sudo bash ./scripts/installChaincode.sh 1 2
|
#!/bin/bash
# Builds the appledoc API documentation for ObjcScopedGuard and packages the
# resulting docset as a zip under ./deployment.

LAUNCH_DIR=$PWD

# Locate the appledoc binary; fall back to the conventional install path.
APPLEDOC_EXE=$(which appledoc)
if [ -z "$APPLEDOC_EXE" ]; then
APPLEDOC_EXE=/usr/local/bin/appledoc
fi

PROJECT_ROOT=$PWD
DEPLOYMENT_DIR=${PROJECT_ROOT}/deployment
SDK_LIBRARIES_ROOT=${PROJECT_ROOT}/ObjcScopedGuard/ObjcScopedGuard

# Start from a clean deployment directory.
if [ -d "$DEPLOYMENT_DIR" ]; then
rm -rf "$DEPLOYMENT_DIR"
fi
mkdir -p "$DEPLOYMENT_DIR"
cd "$DEPLOYMENT_DIR"

which appledoc
${APPLEDOC_EXE} \
--project-name "ObjcScopedGuard" \
--project-company "dodikk" \
--company-id org.dodikk \
--no-repeat-first-par \
--output . \
"$SDK_LIBRARIES_ROOT" \
| tee appledoc-log.txt

# appledoc is expected to write docset-installed.txt; pull the installed
# docset path from its "Path:" line.
# NOTE(review): the awk assumes the path contains no spaces — confirm.
DOCUMENTATION_PATH=$( cat docset-installed.txt | grep Path: | awk 'BEGIN { FS = " " } ; { print $2 }' )
echo DOCUMENTATION_PATH - $DOCUMENTATION_PATH

# Copy the docset next to the log and zip it for distribution.
cp -R "${DOCUMENTATION_PATH}" .
find . -name "*.docset" -exec zip -r ObjcScopedGuard-doc.zip {} \; -print

cd "$LAUNCH_DIR"
|
import React, { useState, useEffect } from "react";
import { useSelector } from "react-redux";
import { getPosts } from "../api/posts";
import PostList from "../components/PostList";
import WelcomeJumbotron from "../components/WelcomeJumbotron";
function Home() {
const isAuthenticated = useSelector((state) => state.auth.isAuthenticated);
const [posts, setPosts] = useState([]);
useEffect(() => {
getPosts("http://localhost:8080/v1/posts").then((data) => setPosts(data));
}, [setPosts]);
return (
<div>
{!isAuthenticated ? <WelcomeJumbotron /> : ""} <br />{" "}
<PostList posts={posts} />
</div>
);
}
export default Home;
|
package ExerciciosExtras;
import java.util.Scanner;
public class ExercicioTresComplemento {
    /**
     * Console hangman-style game: the first player types the hidden word and
     * the second player has five attempts to guess it. One body part of the
     * hangman is "drawn" for each consecutive miss.
     */
    public static void main(String[] args) {
        Scanner scanner = new Scanner(System.in);
        System.out.println("Digite a palavra oculta: ");
        String palavraOculta = scanner.next();
        int tentativas = 5; // total number of allowed guesses
        for (int contador = 0; contador < tentativas; contador++) {
            System.out.println("Digite a resposta:");
            String resposta = scanner.next();
            if (palavraOculta.equals(resposta)) {
                System.out.println("Parabéns!");
                System.out.println("Você acertou na tentativa: " + (contador + 1));
                break; // correct guess ends the game
            } else {
                // Print how many attempts remain after this failed guess.
                System.out.println("Você errou! Tem ainda " + (tentativas - (contador + 1)));
                // Reveal one more body part per miss (head, torso, arm, leg, foot).
                switch (contador) {
                    case 0:
                        System.out.println("xx Cabeça xx");
                        break;
                    case 1:
                        System.out.println("xx Tronco xx");
                        break;
                    case 2:
                        System.out.println("xx Braço xx");
                        break;
                    case 3:
                        System.out.println("xx Perna xx");
                        break;
                    case 4:
                        System.out.println("xx Pé xx");
                        break;
                }
                // All attempts used: the player is "hanged".
                if (contador + 1 == tentativas) {
                    System.out.println("Você foi enforcado!");
                }
            }
        }
        scanner.close();
    }
}
|
#include "precompiled.h"
#pragma hdrstop
#include "AnimationBlendPoseNode.h"
// Blends the poses of two child pose nodes into a single output pose.
// The blend weight is read from the state-machine variable identified by
// blendParameterVariableId; overriddenBone selects the root of the subtree
// that the second node controls (see fillOverrideMask).
AnimationBlendPoseNode::AnimationBlendPoseNode(std::shared_ptr<AnimationPoseNode> firstNode,
  std::shared_ptr<AnimationPoseNode> secondNode,
  SkeletalAnimationVariableId blendParameterVariableId,
  SkeletalAnimationBlendPoseType blendType,
  uint8_t overriddenBone)
  : m_firstNode(firstNode),
    m_secondNode(secondNode),
    m_blendType(blendType),
    m_blendParameterVariableId(blendParameterVariableId),
    m_blendedPose(AnimationPose(m_firstNode->getCurrentPose()))
{
  // Both children must animate the same skeleton for blending to make sense.
  SW_ASSERT(firstNode->getCurrentPose().getSkeleton() == secondNode->getCurrentPose().getSkeleton());

  uint8_t bonesCount = m_blendedPose.getBonesCount();
  m_overrideMask.resize(bonesCount);
  fillOverrideMask(overriddenBone);
}
AnimationBlendPoseNode::~AnimationBlendPoseNode() = default;
const AnimationPose& AnimationBlendPoseNode::getCurrentPose() const
{
return m_blendedPose;
}
// Advances both child nodes by delta seconds and recomputes the blended pose
// according to the configured blend type. The node is considered finished
// only once both children report Finished.
void AnimationBlendPoseNode::increaseCurrentTime(float delta,
  const AnimationStatesMachineVariables& variablesSet)
{
  // NOTE(review): variablesSet is marked unused here but is forwarded and
  // read below — this ARG_UNUSED looks stale.
  ARG_UNUSED(variablesSet);

  m_firstNode->increaseCurrentTime(delta, variablesSet);
  m_secondNode->increaseCurrentTime(delta, variablesSet);

  switch (m_blendType) {
    case SkeletalAnimationBlendPoseType::Linear:
      linearBlendPoses(variablesSet);
      break;

    case SkeletalAnimationBlendPoseType::Override:
      overriddenBlendPoses(variablesSet);
      break;

    case SkeletalAnimationBlendPoseType::Additive:
      additiveBlendPoses(variablesSet);
      break;

    default:
      break;
  }

  AnimationPoseNodeState firstClipState = m_firstNode->getState();
  AnimationPoseNodeState secondClipState = m_secondNode->getState();

  // The blend node finishes when both of its children have finished.
  if (firstClipState == AnimationPoseNodeState::Finished &&
    secondClipState == AnimationPoseNodeState::Finished) {
    m_state = AnimationPoseNodeState::Finished;
  }
}
void AnimationBlendPoseNode::setBlendParameterVariableId(SkeletalAnimationVariableId variableId)
{
m_blendParameterVariableId = variableId;
}
SkeletalAnimationVariableId AnimationBlendPoseNode::getBlendParameterVariableId() const
{
return m_blendParameterVariableId;
}
// Marks overriddenBoneId and all of its descendants in m_overrideMask.
// A bone belongs to the overridden subtree if the overridden bone (or any
// already-marked bone) appears among its ancestors.
void AnimationBlendPoseNode::fillOverrideMask(uint8_t overriddenBoneId)
{
  // Reset the mask before marking.
  for (size_t i = 0; i < m_overrideMask.size(); i++) {
    m_overrideMask[i] = false;
  }

  m_overrideMask[overriddenBoneId] = true;

  // Start from bone 1 (bone 0 has no parent chain to walk); for each bone,
  // climb its ancestor chain and mark the bone if a marked ancestor is found.
  for (size_t boneIndex = 1; boneIndex < m_overrideMask.size(); boneIndex++) {
    uint8_t parentId = m_blendedPose.getSkeleton()->getBoneParentId(uint8_t(boneIndex));

    while (parentId != Bone::ROOT_BONE_PARENT_ID) {
      if (m_overrideMask[parentId]) {
        m_overrideMask[boneIndex] = true;
      }

      parentId = m_blendedPose.getSkeleton()->getBoneParentId(parentId);
    }
  }
}
// Interpolates between the two child poses, using the blend-parameter
// variable as the interpolation factor and m_overrideMask to limit which
// bones are affected. The result is written into m_blendedPose.
void AnimationBlendPoseNode::linearBlendPoses(const AnimationStatesMachineVariables& variablesSet)
{
  AnimationPose::interpolate(m_firstNode->getCurrentPose(), m_secondNode->getCurrentPose(),
    variablesSet.getVariableValue(m_blendParameterVariableId), m_overrideMask, m_blendedPose);
}
// Copies the second node's local pose for every bone marked in the override
// mask, and the first node's local pose for all other bones.
void AnimationBlendPoseNode::overriddenBlendPoses(const AnimationStatesMachineVariables& variablesSet)
{
  ARG_UNUSED(variablesSet);

  const AnimationPose& firstClipPose = m_firstNode->getCurrentPose();
  const AnimationPose& secondClipPose = m_secondNode->getCurrentPose();

  for (uint8_t boneIndex = 0; boneIndex < m_overrideMask.size(); boneIndex++) {
    if (m_overrideMask[boneIndex]) {
      m_blendedPose.setBoneLocalPose(boneIndex, secondClipPose.getBoneLocalPose(boneIndex));
    }
    else {
      m_blendedPose.setBoneLocalPose(boneIndex, firstClipPose.getBoneLocalPose(boneIndex));
    }
  }
}
// Applies the second node's pose additively on top of the first node's pose,
// scaled by the blend-parameter variable. Note the override mask is NOT
// consulted here: every bone is blended.
void AnimationBlendPoseNode::additiveBlendPoses(const AnimationStatesMachineVariables& variablesSet)
{
  // NOTE(review): variablesSet is read just below — this ARG_UNUSED is stale.
  ARG_UNUSED(variablesSet);

  float blendFactor = variablesSet.getVariableValue(m_blendParameterVariableId);

  const AnimationPose& mainClipPose = m_firstNode->getCurrentPose();
  const AnimationPose& additiveClipPose = m_secondNode->getCurrentPose();

  for (uint8_t boneIndex = 0; boneIndex < m_overrideMask.size(); boneIndex++) {
    const BonePose& mainBonePose = mainClipPose.getBoneLocalPose(boneIndex);
    const BonePose& additiveBonePose = additiveClipPose.getBoneLocalPose(boneIndex);

    // Blend between the main pose and (additive composed onto main).
    m_blendedPose.setBoneLocalPose(boneIndex, BonePose::interpolate(mainBonePose, additiveBonePose * mainBonePose,
      blendFactor));
  }
}
AnimationPoseNodeState AnimationBlendPoseNode::getState() const
{
return m_state;
}
// Transitions this node and both children to the Active state.
// NOTE(review): the assertion uses || across the two children, so it only
// requires ONE of them to be in a startable state — confirm whether && was
// intended.
void AnimationBlendPoseNode::startAnimation()
{
  AnimationPoseNodeState firstClipState = m_firstNode->getState();
  AnimationPoseNodeState secondClipState = m_secondNode->getState();

  SW_ASSERT(firstClipState == AnimationPoseNodeState::NotStarted ||
    firstClipState == AnimationPoseNodeState::Paused ||
    secondClipState == AnimationPoseNodeState::NotStarted ||
    secondClipState == AnimationPoseNodeState::Paused);

  m_state = AnimationPoseNodeState::Active;

  m_firstNode->startAnimation();
  m_secondNode->startAnimation();
}
void AnimationBlendPoseNode::pauseAnimation()
{
m_state = AnimationPoseNodeState::Paused;
}
// Returns this node and both children to the NotStarted state.
void AnimationBlendPoseNode::resetAnimation()
{
  m_state = AnimationPoseNodeState::NotStarted;

  m_firstNode->resetAnimation();
  m_secondNode->resetAnimation();
}
void AnimationBlendPoseNode::setFinalAction(AnimationPoseNodeFinalAction action)
{
m_finalAction = action;
m_firstNode->setFinalAction(action);
m_secondNode->setFinalAction(action);
}
[[nodiscard]] AnimationPoseNodeFinalAction AnimationBlendPoseNode::getFinalAction() const
{
return m_finalAction;
}
|
package com.boria.borialearndemo.DayOneForAIDL;
import android.app.Activity;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import com.boria.borialearndemo.IImoocAIDL;
import com.boria.borialearndemo.R;
/***********************************************************************************************
* 类名称:
* 类描述: 用于探究多进程aidl通信
* 创建人: 包勇 2019/3/20.
* 创建时间: 2019/3/20.
* 创建备注:
* 创建版本:
* 修改人:
* 修改时间:
* 修改备注:
*
************************************************************************************************/
public class AidlActivity extends Activity implements View.OnClickListener {

    private EditText num1;
    private EditText num2;
    private Button button;
    private TextView text;

    /** Remote calculator interface; null until the service connects (or after it disconnects). */
    private IImoocAIDL iImoocAIDL;

    private ServiceConnection conn = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder service) {
            iImoocAIDL = IImoocAIDL.Stub.asInterface(service);
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            iImoocAIDL = null;
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_aidl_dayone);
        bindService();
        initView();
    }

    /** Looks up the input fields and button, and registers the click listener. */
    private void initView() {
        num1 = (EditText) findViewById(R.id.num1);
        num2 = (EditText) findViewById(R.id.num2);
        button = (Button) findViewById(R.id.button);
        text = (TextView) findViewById(R.id.text);
        button.setOnClickListener(this);
    }

    /**
     * Sends the two entered numbers to the remote service and displays the sum.
     * Fix: guards against the service not being bound yet (iImoocAIDL == null)
     * and against non-numeric input, both of which previously crashed the
     * activity with NullPointerException / NumberFormatException.
     */
    @Override
    public void onClick(View v) {
        if (iImoocAIDL == null) {
            // Binding has not completed (or the connection was lost).
            text.setText("Service not connected yet");
            return;
        }
        int num11;
        int num22;
        try {
            num11 = Integer.parseInt(num1.getText().toString());
            num22 = Integer.parseInt(num2.getText().toString());
        } catch (NumberFormatException e) {
            text.setText("Please enter two valid integers");
            return;
        }
        try {
            int res = iImoocAIDL.add(num11, num22);
            text.setText(num11 + "+" + num22 + "=" + res);
        } catch (RemoteException e) {
            e.printStackTrace();
        }
    }

    /** Binds to the remote AIDL service by explicit component name. */
    private void bindService() {
        Intent intent = new Intent();
        intent.setAction("com.boria.borialearndemo.DayOneForAIDL.AidlService");
        intent.setComponent(new ComponentName("com.boria.borialearndemo", "com.boria.borialearndemo.DayOneForAIDL.AidlService"));
        bindService(intent, conn, Context.BIND_AUTO_CREATE);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        unbindService(conn);
    }
}
|
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
VERSION=001
JOB_NAME="seg_scannet_${VERSION}"
EVAL_DIR="/tmp/tf3d_experiment/${JOB_NAME}"
CKPT_DIR="${EVAL_DIR}/model/"
NUM_STEPS_PER_EPOCH=100
LOG_FREQ=100
# Data
DATASET_NAME='scannet_scene'
EVAL_SPLIT='val'
DATASET_PATH="/usr/local/google/home/${USER}/Developer/scannet_data/" # REPLACE
# Gin config
IMPORT_MODULE='tf3d.gin_imports'
EVAL_GIN_CONFIG="tf3d/semantic_segmentation/configs/scannet_scene_eval.gin"
PARAMS="get_tf_data_dataset.dataset_name = '${DATASET_NAME}'
get_tf_data_dataset.dataset_dir = '${DATASET_PATH}'
get_tf_data_dataset.dataset_format = 'tfrecord'
get_tf_data_dataset.split_name = '${EVAL_SPLIT}'
"
echo "EVAL_DIR at ${EVAL_DIR}..."
python -m tf3d.eval \
--params="${PARAMS}" \
--import_module="${IMPORT_MODULE}" \
--config_file="${EVAL_GIN_CONFIG}" \
--eval_dir="${EVAL_DIR}" \
--ckpt_dir="${CKPT_DIR}" \
--run_functions_eagerly=false \
--num_steps_per_epoch="${NUM_STEPS_PER_EPOCH}" \
--num_steps_per_log="${LOG_FREQ}" \
--alsologtostderr
|
<reponame>dnmvisser/pyFF
"""
An abstraction layer for metadata fetchers. Supports both syncronous and asyncronous fetchers with cache.
"""
from .logs import get_log
import os
import requests
from .constants import config
from datetime import datetime
from collections import deque
from .parse import parse_resource
from .exceptions import ResourceException
from .utils import url_get, non_blocking_lock, hex_digest, img_to_data, Watchable
from copy import deepcopy
from threading import Lock, Condition
from .fetch import make_fetcher
requests.packages.urllib3.disable_warnings()
log = get_log(__name__)
class URLHandler(object):
def __init__(self, *args, **kwargs):
log.debug("create urlhandler {} {}".format(args, kwargs))
self.pending = {}
self.name = kwargs.pop('name', None)
self.content_handler = kwargs.pop('content_handler', None)
self._setup()
def _setup(self):
self.done = Condition()
self.lock = Lock()
self.fetcher = make_fetcher(name=self.name, content_handler=self.content_handler)
self.fetcher.add_watcher(self)
def __getstate__(self):
return dict(name=self.name)
def __setstate__(self, state):
self.__dict__.update(state)
self._setup()
def is_done(self):
return self.count == 0
def thing_to_url(self, t):
return t
@property
def count(self):
return len(self.pending)
def schedule(self, things):
try:
self.lock.acquire()
self.i_schedule(things)
finally:
self.lock.release()
def i_schedule(self, things):
for t in things:
self.pending[self.thing_to_url(t)] = t
self.fetcher.schedule(self.thing_to_url(t))
def i_handle(self, t, url=None, response=None, exception=None, last_fetched=None):
raise NotImplementedError()
def __call__(self, watched=None, url=None, response=None, exception=None, last_fetched=None):
if url in self.pending:
t = self.pending[url]
with self.lock:
log.debug("RESPONSE url={}, exception={} @ {}".format(url, exception, self.count))
self.i_handle(t, url=url, response=response, exception=exception, last_fetched=last_fetched)
del self.pending[url]
if self.is_done():
try:
self.done.acquire()
self.done.notify()
finally:
self.done.release()
class IconHandler(URLHandler):
    """URL handler that fetches icons, converts them to ``data:`` URIs and
    records the result (or the fetch error) in an icon store."""

    def __init__(self, *args, **kwargs):
        # Fix: the parent call was super().__init__(self, *args, **kwargs),
        # which passed self twice (super() already binds it) and leaked the
        # handler object into URLHandler's *args. icon_store is popped first
        # so a missing kwarg fails fast, before the fetcher is created.
        self.icon_store = kwargs.pop('icon_store')
        kwargs['content_handler'] = IconHandler._convert_image_response
        super().__init__(*args, **kwargs)

    @staticmethod
    def _convert_image_response(response):
        """Convert an HTTP image response into a ``data:`` URI string."""
        return img_to_data(response.content, response.headers.get('Content-Type'))

    def i_handle(self, t, url=None, response=None, exception=None, last_fetched=None):
        """Store the fetched icon; on failure store the exception instead.

        Any error raised by the store itself is logged and swallowed so one
        bad icon cannot abort the whole fetch batch.
        """
        try:
            if exception is None:
                self.icon_store.update(url, response)
            else:
                self.icon_store.update(url, None, info=dict(exception=exception))
        except BaseException as ex:
            log.warn(ex)
class ResourceHandler(URLHandler):
    """URL handler that parses fetched metadata resources and schedules any
    child resources discovered during parsing."""

    def __init__(self, *args, **kwargs):
        # Fix: was super().__init__(self, *args, **kwargs) — super() already
        # binds self, so the explicit self leaked into URLHandler's *args.
        super().__init__(*args, **kwargs)

    def thing_to_url(self, t):
        """Scheduled 'things' are Resource objects; their URL is the fetch key."""
        return t.url

    def i_handle(self, t, url=None, response=None, exception=None, last_fetched=None):
        """Parse the response for resource ``t`` and schedule its children.

        Fetch or parse failures are recorded on the resource's info dict
        instead of being propagated, so one bad resource cannot abort the
        whole refresh.
        """
        try:
            if exception is not None:
                t.info['Exception'] = exception
            else:
                children = t.parse(lambda u: response)
                self.i_schedule(children)
        except BaseException as ex:
            log.warn(ex)
            t.info['Exception'] = ex
class Resource(Watchable):
def __init__(self, url=None, **kwargs):
super().__init__()
self.url = url
self.opts = kwargs
self.t = None
self.type = "text/plain"
self.etag = None
self.expire_time = None
self.never_expires = False
self.last_seen = None
self.last_parser = None
self._infos = deque(maxlen=config.info_buffer_size)
self.children = deque()
self._setup()
def _setup(self):
self.opts.setdefault('cleanup', [])
self.opts.setdefault('via', [])
self.opts.setdefault('fail_on_error', False)
self.opts.setdefault('verify', None)
self.opts.setdefault('filter_invalid', True)
self.opts.setdefault('validate', True)
if self.url is not None:
if "://" not in self.url:
pth = os.path.abspath(self.url)
if os.path.isdir(pth):
self.url = "dir://{}".format(pth)
elif os.path.isfile(pth) or os.path.isabs(self.url):
self.url = "file://{}".format(pth)
if self.url.startswith('file://') or self.url.startswith('dir://'):
self.never_expires = True
self.lock = Lock()
def __getstate__(self):
raise ValueError("this object should not be pickled")
def __setstate__(self, state):
raise ValueError("this object should not be unpickled")
@property
def post(self):
return self.opts['via']
def add_via(self, callback):
self.opts['via'].append(callback)
@property
def cleanup(self):
return self.opts['cleanup']
def __str__(self):
return "Resource {} expires at {} using ".format(self.url if self.url is not None else "(root)", self.expire_time) + \
",".join(["{}={}".format(k, v) for k, v in list(self.opts.items())])
def reload(self, fail_on_error=False):
with non_blocking_lock(self.lock):
if fail_on_error:
for r in self.walk():
r.parse(url_get)
else:
rp = ResourceHandler(name="Metadata")
rp.schedule(self.children)
try:
rp.done.acquire()
rp.done.wait()
finally:
rp.done.release()
rp.fetcher.stop()
rp.fetcher.join()
self.notify()
def __len__(self):
return len(self.children)
def __iter__(self):
return self.walk()
def __eq__(self, other):
return self.url == other.url
def __contains__(self, item):
return item in self.children
def walk(self):
if self.url is not None:
yield self
for c in self.children:
for cn in c.walk():
yield cn
def is_expired(self):
if self.never_expires:
return False
now = datetime.now()
return self.expire_time is not None and self.expire_time < now
def is_valid(self):
return not self.is_expired() and self.last_seen is not None and self.last_parser is not None
def add_info(self, info):
self._infos.append(info)
def _replace(self, r):
for i in range(0, len(self.children)):
if self.children[i].url == r.url:
self.children[i] = r
return
raise ValueError("Resource {} not present - use add_child".format(r.url))
def add_child(self, url, **kwargs):
opts = deepcopy(self.opts)
if 'as' in opts:
del opts['as']
opts.update(kwargs)
r = Resource(url, **opts)
if r in self.children:
self._replace(r)
else:
self.children.append(r)
return r
@property
def name(self):
if 'as' in self.opts:
return self.opts['as']
else:
return self.url
@property
def info(self):
if self._infos is None or not self._infos:
return dict()
else:
return self._infos[-1]
def parse(self, getter):
info = dict()
info['Resource'] = self.url
self.add_info(info)
data = None
log.debug("getting {}".format(self.url))
r = getter(self.url)
info['HTTP Response Headers'] = r.headers
log.debug("got status_code={:d}, encoding={} from_cache={} from {}".
format(r.status_code, r.encoding, getattr(r, "from_cache", False), self.url))
info['Status Code'] = str(r.status_code)
info['Reason'] = r.reason
if r.ok:
data = r.text
else:
raise ResourceException("Got status={:d} while getting {}".format(r.status_code, self.url))
parse_info = parse_resource(self, data)
if parse_info is not None and isinstance(parse_info, dict):
info.update(parse_info)
if self.t is not None:
self.last_seen = datetime.now()
if self.post and isinstance(self.post, list):
for cb in self.post:
if self.t is not None:
self.t = cb(self.t, **self.opts)
if self.is_expired():
info['Expired'] = True
raise ResourceException("Resource at {} expired on {}".format(self.url, self.expire_time))
else:
info['Expired'] = False
for (eid, error) in list(info['Validation Errors'].items()):
log.error(error)
self.etag = r.headers.get('ETag', None) or hex_digest(r.text, 'sha256')
return self.children
|
def insertion_sort(lst):
    """Sort ``lst`` in place in ascending order using insertion sort."""
    for index in range(1, len(lst)):
        current = lst[index]
        position = index
        # Shift larger elements one slot to the right until the insertion
        # point for `current` is found.
        while position > 0 and lst[position - 1] > current:
            lst[position] = lst[position - 1]
            position -= 1
        lst[position] = current


lst = [8, 5, 6, 4, 7]
insertion_sort(lst)
print("Sorted Array: ", lst)
|
/**
 * Draw a shape onto a 2-D character canvas (array of rows), mutating it in
 * place and returning it.
 *
 * Supported shapeTypes:
 *  - "rectangle": filled axis-aligned rectangle spanned by the two corners, marked "R".
 *  - "circle": filled disc centred at (x1, y1) with radius equal to the
 *    distance to (x2, y2), marked "C".
 *  - "line": Bresenham line from (x1, y1) to (x2, y2), marked "L".
 *
 * Out-of-bounds coordinates or an unknown shapeType leave the canvas untouched.
 * Fix: rectangle corners are now normalised with min/max, so a swapped corner
 * pair draws the same rectangle instead of silently drawing nothing.
 */
function drawShape(canvas, shapeType, x1, y1, x2, y2) {
    // Reject any endpoint outside the canvas.
    if (x1 < 0 || y1 < 0 || x2 < 0 || y2 < 0 || x1 >= canvas.length || y1 >= canvas[0].length || x2 >= canvas.length || y2 >= canvas[0].length) {
        return canvas; // Invalid coordinates, return original canvas
    }
    switch (shapeType) {
        case "rectangle": {
            // Normalise so the fill loops run regardless of corner order.
            const rowStart = Math.min(x1, x2);
            const rowEnd = Math.max(x1, x2);
            const colStart = Math.min(y1, y2);
            const colEnd = Math.max(y1, y2);
            for (let i = rowStart; i <= rowEnd; i++) {
                for (let j = colStart; j <= colEnd; j++) {
                    canvas[i][j] = "R";
                }
            }
            break;
        }
        case "circle": {
            const centerX = x1;
            const centerY = y1;
            // Radius is the Euclidean distance between the two given points.
            const radius = Math.sqrt(Math.pow(x2 - x1, 2) + Math.pow(y2 - y1, 2));
            // Scan the whole canvas and mark every cell inside the disc.
            for (let i = 0; i < canvas.length; i++) {
                for (let j = 0; j < canvas[0].length; j++) {
                    if (Math.pow(i - centerX, 2) + Math.pow(j - centerY, 2) <= Math.pow(radius, 2)) {
                        canvas[i][j] = "C";
                    }
                }
            }
            break;
        }
        case "line": {
            // Bresenham's line algorithm between the two endpoints.
            const dx = Math.abs(x2 - x1);
            const dy = Math.abs(y2 - y1);
            const sx = (x1 < x2) ? 1 : -1; // step direction along x
            const sy = (y1 < y2) ? 1 : -1; // step direction along y
            let err = dx - dy;             // accumulated error term
            let x = x1;
            let y = y1;
            while (true) {
                canvas[x][y] = "L";
                if (x === x2 && y === y2) {
                    break;
                }
                const e2 = 2 * err;
                if (e2 > -dy) {
                    err -= dy;
                    x += sx;
                }
                if (e2 < dx) {
                    err += dx;
                    y += sy;
                }
            }
            break;
        }
        default:
            return canvas; // Invalid shape type, return original canvas
    }
    return canvas;
}
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React from 'react';
import { useEuiI18n } from '../i18n';
import { EuiTablePagination } from '../table/table_pagination';
import { EuiDataGridPaginationRendererProps } from './data_grid_types';
export const EuiDataGridPaginationRenderer = ({
pageIndex,
pageSize,
pageSizeOptions,
onChangePage,
onChangeItemsPerPage,
rowCount,
controls,
'aria-label': ariaLabel,
}: EuiDataGridPaginationRendererProps) => {
const detailedPaginationLabel = useEuiI18n(
'euiDataGridPagination.detailedPaginationLabel',
'Pagination for preceding grid: {label}',
{ label: ariaLabel ?? '' }
);
const paginationLabel = useEuiI18n(
'euiDataGridPagination.paginationLabel',
'Pagination for preceding grid'
);
const pageCount = Math.ceil(rowCount / pageSize);
const minSizeOption =
pageSizeOptions && [...pageSizeOptions].sort((a, b) => a - b)[0];
if (rowCount < (minSizeOption || pageSize)) {
/**
* Do not render the pagination when:
* 1. Rows count is less than min pagination option (rows per page)
* 2. Rows count is less than pageSize (the case when there are no pageSizeOptions provided)
*/
return null;
}
// hide select rows per page if pageSizeOptions is undefined or an empty array
const hidePerPageOptions = !pageSizeOptions || pageSizeOptions.length === 0;
return (
<div className="euiDataGrid__pagination">
<EuiTablePagination
aria-controls={controls}
activePage={pageIndex}
hidePerPageOptions={hidePerPageOptions}
itemsPerPage={pageSize}
itemsPerPageOptions={pageSizeOptions}
pageCount={pageCount}
onChangePage={onChangePage}
onChangeItemsPerPage={onChangeItemsPerPage}
aria-label={ariaLabel ? detailedPaginationLabel : paginationLabel}
/>
</div>
);
};
|
import json
def save_user_settings(username, settings):
    """Persist a user's settings to ``<username>.json`` in the working directory.

    NOTE(review): ``username`` is used directly as a file name; callers must
    ensure it cannot contain path separators (e.g. ``../``) — confirm upstream.
    """
    # 'w' truncates/creates exactly like the previous 'w+' (the handle is
    # never read back); an explicit encoding keeps the file portable.
    with open(username + '.json', 'w', encoding='utf-8') as f:
        json.dump(settings, f)
def read_user_settings(username):
    """Load and return a user's settings from ``<username>.json``.

    Raises FileNotFoundError if the user has no saved settings, and
    json.JSONDecodeError if the file contents are not valid JSON.
    """
    with open(username + '.json', 'r', encoding='utf-8') as f:
        settings = json.load(f)
    return settings
|
#! /bin/bash
#
# Installation script for MVNC
#
# See CK LICENSE for licensing details.
# See CK COPYRIGHT for copyright details.
#
# Developer(s):
#   - Grigori Fursin, 2017;
#
# PACKAGE_DIR
# INSTALL_DIR

echo "**************************************************************"
echo "Executing make install ..."

# Quote the path (it may contain spaces) and abort if the directory is
# missing, instead of running make install in whatever directory we were in.
cd "${INSTALL_DIR}/${PACKAGE_SUB_DIR}"
if [ "${?}" != "0" ] ; then
  echo "Error: can't change to ${INSTALL_DIR}/${PACKAGE_SUB_DIR}!"
  exit 1
fi

make install
if [ "${?}" != "0" ] ; then
  # Fix: the message previously said "cmake failed" although the failing
  # command is make install.
  echo "Error: make install failed!"
  exit 1
fi

# NOTE(review): 'return' only works when this script is sourced by the CK
# package machinery — confirm it is never executed directly.
return 0
|
<reponame>tengxing/ObjectToJsonPressureTest<filename>src/main/java/cn/yjxxclub/ObjectToJsonPressureTest/entity/Book.java<gh_stars>0
package cn.yjxxclub.ObjectToJsonPressureTest.entity;
import java.io.Serializable;
import java.util.Date;
/**
 * Author: 遇见小星
 * Email: <EMAIL>
 * Date: 17-6-28
 * Time: 09:53
 * Describe: Book entity used as the serialization payload for the
 *           object-to-JSON pressure test.
 */
public class Book implements Serializable {

    private Integer id;
    private String name;
    private Double price;
    private boolean publish;
    private Date createDate;

    /** Human-readable representation of all fields (used for logging). */
    @Override
    public String toString() {
        return "Book{" +
                "id=" + id +
                ", name='" + name + '\'' +
                ", price=" + price +
                ", publish=" + publish +
                ", createDate=" + createDate +
                '}';
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Double getPrice() {
        return price;
    }

    public void setPrice(Double price) {
        this.price = price;
    }

    public boolean isPublish() {
        return publish;
    }

    public void setPublish(boolean publish) {
        this.publish = publish;
    }

    public Date getCreateDate() {
        return createDate;
    }

    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
}
|
<gh_stars>0
"""Set up bmt-lite package."""
import json
from pathlib import Path
import re
from setuptools import setup
import sys
stash = sys.path.pop(0) # avoid trying to import the local bmt
from bmt import Toolkit
sys.path = [stash] + sys.path # restore the path
import httpx
FILEPATH = Path(__file__).parent
DATAPATH = Path("bmt/_data")
DATAPATH.mkdir(exist_ok=True) # create data dir
(DATAPATH / "__init__.py").touch(exist_ok=True) # make data path a module
response = httpx.get("https://api.github.com/repos/biolink/biolink-model/releases")
releases = response.json()
versions = [
release["tag_name"]
for release in releases
]
def build(version: str):
"""Build BMT data."""
if version in versions:
BMT = Toolkit(
schema=f"https://raw.githubusercontent.com/biolink/biolink-model/{version}/biolink-model.yaml",
)
elif "v" + version in versions:
BMT = Toolkit(
schema=f"https://raw.githubusercontent.com/biolink/biolink-model/v{version}/biolink-model.yaml",
)
elif version.removeprefix("v") in versions:
BMT = Toolkit(
schema=f"https://raw.githubusercontent.com/biolink/biolink-model/{version[1:]}/biolink-model.yaml",
)
# get_all_classes()
classes = BMT.get_all_classes()
with open(DATAPATH / "all_classes.json", "w") as stream:
json.dump(classes, stream)
# get_all_slots()
slots = BMT.get_all_slots()
with open(DATAPATH / "all_slots.json", "w") as stream:
json.dump(slots, stream)
# get_all_types()
types = BMT.get_all_types()
with open(DATAPATH / "all_types.json", "w") as stream:
json.dump(types, stream)
# get_all_elements()
elements = classes + slots + types
with open(DATAPATH / "all_elements.json", "w") as stream:
json.dump(elements, stream)
# get_ancestors()
basic_ancestors = {
element: BMT.get_ancestors(
element,
reflexive=False,
mixin=False,
)
for element in elements
}
alias_ancestors = {
element: [
alias
for ancestor in basics
if (el := BMT.get_element(ancestor)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, basics in basic_ancestors.items()
}
mixin_ancestors = {
element: [
ancestor
for ancestor in BMT.get_ancestors(
element,
reflexive=False,
mixin=True,
)
if ancestor not in basic_ancestors[element]
]
for element in elements
}
alias_mixin_ancestors = {
element: [
alias
for ancestor in mixins
if (el := BMT.get_element(ancestor)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, mixins in mixin_ancestors.items()
}
with open(DATAPATH / "basic_ancestors.json", "w") as stream:
json.dump(basic_ancestors, stream)
with open(DATAPATH / "alias_ancestors.json", "w") as stream:
json.dump(alias_ancestors, stream)
with open(DATAPATH / "mixin_ancestors.json", "w") as stream:
json.dump(mixin_ancestors, stream)
with open(DATAPATH / "alias_mixin_ancestors.json", "w") as stream:
json.dump(alias_mixin_ancestors, stream)
# get_descendants()
basic_descendants = {
element: BMT.get_descendants(
element,
reflexive=False,
mixin=False,
)
for element in elements
}
alias_descendants = {
element: [
alias
for descendant in basics
if (el := BMT.get_element(descendant)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, basics in basic_descendants.items()
}
mixin_descendants = {
element: [
descendant
for descendant in BMT.get_descendants(
element,
reflexive=False,
mixin=True,
)
if descendant not in basic_descendants[element]
]
for element in elements
}
alias_mixin_descendants = {
element: [
alias
for descendant in mixins
if (el := BMT.get_element(descendant)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, mixins in mixin_descendants.items()
}
with open(DATAPATH / "basic_descendants.json", "w") as stream:
json.dump(basic_descendants, stream)
with open(DATAPATH / "alias_descendants.json", "w") as stream:
json.dump(alias_descendants, stream)
with open(DATAPATH / "mixin_descendants.json", "w") as stream:
json.dump(mixin_descendants, stream)
with open(DATAPATH / "alias_mixin_descendants.json", "w") as stream:
json.dump(alias_mixin_descendants, stream)
# get_children()
basic_children = {
element: BMT.get_children(
element,
mixin=False,
)
for element in elements
}
alias_children = {
element: [
alias
for _child in basics
if (el := BMT.get_element(_child)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, basics in basic_children.items()
}
mixin_children = {
element: [
child
for child in BMT.get_children(
element,
mixin=True,
)
if child not in basic_children[element]
]
for element in elements
}
alias_mixin_children = {
element: [
alias
for _child in mixins
if (el := BMT.get_element(_child)) is not None and (aliases := el.aliases) is not None
for alias in aliases
]
for element, mixins in mixin_children.items()
}
with open(DATAPATH / "basic_children.json", "w") as stream:
json.dump(basic_children, stream)
with open(DATAPATH / "alias_children.json", "w") as stream:
json.dump(alias_children, stream)
with open(DATAPATH / "mixin_children.json", "w") as stream:
json.dump(mixin_children, stream)
with open(DATAPATH / "alias_mixin_children.json", "w") as stream:
json.dump(alias_mixin_children, stream)
# get_parent()
parent = {
element: BMT.get_parent(element)
for element in elements
}
with open(DATAPATH / "parent.json", "w") as stream:
json.dump(parent, stream)
# get_element()
element = dict(
**{
class_: {
"id_prefixes": el.id_prefixes,
"mixins": el.mixins,
}
for class_ in classes
if (el := BMT.get_element(class_)) is not None
},
**{
slot: {
"symmetric": el.symmetric,
"inverse": el.inverse,
"annotations": {
tag: annotation.value.lower() == "true"
for tag, annotation in el.annotations.items()
},
"slot_uri": el.slot_uri,
"range": el.range,
}
for slot in slots
if (el := BMT.get_element(slot)) is not None
}
)
with open(DATAPATH / "element.json", "w") as stream:
json.dump(element, stream)
with open("README.md", "r") as stream:
long_description = stream.read()
try:
idx = next(
idx for idx, arg in enumerate(sys.argv)
if (match := re.fullmatch(r"--v\d+\.\d+\.\d+", arg)) is not None
)
except StopIteration:
print("ERROR: Specify a biolink-model version using the '--vX.Y.Z' argument")
exit()
version = sys.argv.pop(idx)[3:]
build(version)
setup(
name=f"bmt-lite-{version}",
version="2.2.0",
author="<NAME>",
author_email="<EMAIL>",
url="https://github.com/patrickkwang/bmt-lite",
description="A zero-dependency near-clone of common bmt capabilities",
long_description=long_description,
long_description_content_type="text/markdown",
packages=["bmt", "bmt._data"],
package_data={"bmt._data": ["*.json"]},
include_package_data=True,
install_requires=[],
zip_safe=False,
license="MIT",
python_requires=">=3.7",
)
|
package com.uber;
import javax.annotation.Nullable;
public class Super {

    /**
     * Stub: always trips {@code Assertions.UNREACHABLE()}; the {@code null}
     * return is only reached if assertion checking is disabled.
     * NOTE(review): several referenced types (IntSet, T, UnimplementedError,
     * Assertions) are not imported in this file — confirm they resolve in
     * the real build.
     */
    @Nullable
    public IntSet getPredNodeNumbers(T node) throws UnimplementedError {
        Assertions.UNREACHABLE();
        return null;
    }

    /** Stub implementation: always returns {@code null}. */
    @Nullable
    OrdinalSet<Statement> computeResult(
        Statement s,
        Map<PointerKey, MutableIntSet> pointerKeyMod,
        BitVectorSolver<? extends ISSABasicBlock> solver,
        OrdinalSetMapping<Statement> domain,
        CGNode node,
        ExtendedHeapModel h,
        PointerAnalysis<T> pa,
        Map<CGNode, OrdinalSet<PointerKey>> mod,
        ExplodedControlFlowGraph cfg,
        Map<Integer, NormalStatement> ssaInstructionIndex2Statement) {
        return null;
    }
}
|
from cached_property import cached_property
from nudgebot.thirdparty.base import EndpointScope
from nudgebot.thirdparty.irc.base import IRCendpoint
from nudgebot.thirdparty.irc.server import Server
class Channel(EndpointScope):
    """Endpoint scope representing a single IRC channel on a server."""

    Endpoint = IRCendpoint()  # the third-party endpoint this scope belongs to
    Parents = [Server]        # a channel is always scoped under a server
    primary_keys = ['server', 'channel']  # keys that uniquely identify a channel

    def __init__(self, server: Server, name: str):
        # server: the Server scope this channel lives on.
        # name: the channel name as used by the IRC server.
        self._server = server
        self._name = name

    @property
    def server(self):
        """The Server scope this channel belongs to."""
        return self._server

    @property
    def name(self):
        """The channel name."""
        return self._name

    @classmethod
    def init_by_keys(cls, **query):
        """Build a Channel from a primary-key query ('server' and 'channel')."""
        return cls(Server.init_by_keys(**query), query.get('channel'))

    @cached_property
    def query(self) -> dict:
        """Primary-key query dict that identifies this channel."""
        return {'server': self._server.url, 'channel': self._name}

    @cached_property
    def parent(self):
        """The parent scope wrapper (a Server scope around our server)."""
        return self.Parents[0](self._server)
|
/// Estimates the remaining time for `total_size` units being transferred by
/// connections with the given `speeds`.
///
/// Fixes: the original panicked on an empty `speeds` vector (division by
/// zero) and on unsigned underflow whenever the already-transferred estimate
/// exceeded `total_size`; both cases now return 0.
///
/// NOTE(review): the transferred-so-far estimate multiplies the combined
/// speed by the number of connections — confirm this matches the intended
/// model; the formula itself is preserved unchanged.
fn calculate_remaining_time(total_size: u32, speeds: &Vec<u32>) -> u32 {
    let total_speed: u32 = speeds.iter().sum();
    if total_speed == 0 {
        // No active transfer capacity: nothing meaningful to compute.
        return 0;
    }
    // saturating_sub avoids a debug-mode panic when the estimate exceeds total_size.
    let remaining_size = total_size.saturating_sub(total_speed * speeds.len() as u32);
    remaining_size / total_speed
}
|
package elasta.pipeline.converter;
import elasta.core.promise.intfs.Promise;
/**
 * A converter whose result is produced asynchronously: instead of returning
 * the converted value directly, {@link #convert} returns a {@link Promise}
 * that resolves to it.
 *
 * Created by Jango on 2016-11-20.
 */
public interface ConverterAsync<T, R> extends Converter<T, Promise<R>> {

    /**
     * Converts the given value asynchronously.
     *
     * @param t the value to convert
     * @return a promise resolving to the converted result
     */
    @Override
    Promise<R> convert(T t);
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.