text stringlengths 1 1.05M |
|---|
# Mesos master configuration via flag-style MESOS_* environment variables.
# ZooKeeper ensemble coordinating the three-master HA cluster.
export MESOS_zk=zk://mesos-master-1.example.com:2181,mesos-master-2.example.com:2181,mesos-master-3.example.com:2181/mesos
# Quorum of 2 is the majority of a 3-master ensemble.
export MESOS_quorum=2
export MESOS_work_dir=/var/lib/mesos
export MESOS_log_dir=/var/log/mesos
|
// Minimal movie-catalog HTTP API built on Express.
const express = require('express');

const PORT = 8000;
const app = express();

// In-memory catalog; a single seed entry for now.
const movies = [
  {
    name: 'Iron Man',
    genre: 'Action',
    rating: 8,
  },
];

// GET /movies -> the full catalog as JSON.
app.get('/movies', (_req, res) => {
  res.json(movies);
});

app.listen(PORT, () => {
  console.log(`Server is running on port ${PORT}`);
});
#!/bin/bash
TASK=8
SHOT=1
LANG=es
MODEL=ctrl_xuniter
MODEL_CONFIG=ctrl_xuniter_base
TASKS_CONFIG=iglue_fewshot_tasks_boxes36.dtu
TRTASK=RetrievalxFlickrCO${LANG}_${SHOT}
TEXT_TR=/home/projects/ku_00062/data/xFlickrCO/annotations/${LANG}/train_${SHOT}.jsonl
FEAT_TR=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-few_boxes36.lmdb
TEXT_TE=/home/projects/ku_00062/data/flickr30k/annotations_machine-translate/valid-${LANG}_gmt.jsonl
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xflickrco/${MODEL}/RetrievalFlickr30k_${MODEL_CONFIG}/pytorch_model_best.bin
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate
cd ../../../../../../volta
for lr in 1e-4 5e-5 1e-5; do
OUTPUT_DIR=/home/projects/ku_00062/checkpoints/iglue/few_shot.mt/xflickrco/${TRTASK}/${MODEL}/${lr}
LOGGING_DIR=/home/projects/ku_00062/logs/iglue/few_shot.mt/xflickrco/${TRTASK}/${lr}/${MODEL_CONFIG}
python train_task.py \
--bert_model /home/projects/ku_00062/huggingface/xlm-roberta-base --config_file config/${MODEL_CONFIG}.json \
--from_pretrained ${PRETRAINED} \
--tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --num_epoch 20 \
--train_split train_${LANG}_${SHOT} --train_annotations_jsonpath $TEXT_TR --train_features_lmdbpath $FEAT_TR \
--val_split dev_${LANG}_gmt --val_annotations_jsonpath $TEXT_TE \
--lr $lr --batch_size 1 --gradient_accumulation_steps 1 --num_workers 0 --save_every_num_epochs 5 --eval_batch_size 16 --max_val_batches 130 \
--adam_epsilon 1e-6 --adam_betas 0.9 0.999 --adam_correct_bias --weight_decay 0.0001 --warmup_proportion 0.1 --clip_grad_norm 1.0 \
--output_dir ${OUTPUT_DIR} \
--logdir ${LOGGING_DIR} \
&> ${here}/train.${lr}.log
done
deactivate
|
const config = require('@bedrockio/config');
const { promises: fs } = require('fs');
const path = require('path');
const os = require('os');
const crypto = require('crypto');
const { logger } = require('@bedrockio/instrumentation');
const mime = require('mime-types');
/**
 * Copies an uploaded file into the OS temp directory under its content hash.
 *
 * @param {Object} file - Upload descriptor (expects `path` and `name`).
 * @param {string} hash - Hex digest used as the destination file name.
 * @returns {Promise<string>} Absolute path of the stored copy.
 */
async function uploadLocal(file, hash) {
  const destinationPath = path.join(os.tmpdir(), hash);
  await fs.copyFile(file.path, destinationPath);
  logger.debug('Uploading locally %s -> %s', file.name, destinationPath);
  // Bug fix: return the stored copy's path, not the transient upload path,
  // so the caller's rawUrl points at a file that survives request cleanup.
  return destinationPath;
}
/**
 * Uploads a file to the configured Google Cloud Storage bucket and makes the
 * object publicly readable.
 *
 * @param {Object} file - Upload descriptor (expects `path` and `name`).
 * @param {string} hash - Hex digest used as the object name (extension appended).
 * @returns {Promise<string>} Public media link of the uploaded object.
 */
async function uploadGcs(file, hash) {
  // Lazy require so deployments using local storage don't need the GCS package.
  const { Storage } = require('@google-cloud/storage');
  const storage = new Storage();
  const bucketName = config.get('UPLOADS_GCS_BUCKET');
  const bucket = storage.bucket(bucketName);
  const extension = path.extname(file.name).toLowerCase();
  const options = {
    destination: `${hash}${extension}`,
  };
  await bucket.upload(file.path, options);
  logger.info('Uploading gcs %s -> gs://%s/%s', file.name, bucketName, options.destination);
  const uploadedGcsFile = bucket.file(options.destination);
  await uploadedGcsFile.makePublic();
  // getMetadata() resolves to [metadata, apiResponse]; mediaLink is the
  // public download URL of the object.
  const metaData = await uploadedGcsFile.getMetadata();
  return metaData[0].mediaLink;
}
/**
 * Persists an uploaded file (GCS or local tmp dir, per UPLOADS_STORE) and
 * returns a metadata record describing where it landed.
 *
 * @param {Object} uploadedFile - Upload descriptor (`path`, optional `name`/`type`).
 * @returns {Promise<Object>} { mimeType, filename, hash, rawUrl, storageType, thumbnailUrl }
 */
async function storeUploadedFile(uploadedFile) {
  if (!uploadedFile.name) {
    // Fall back to the temp file's basename (mutation is caller-visible,
    // matching the original contract).
    uploadedFile.name = path.basename(uploadedFile.path);
  }
  const hash = crypto.randomBytes(32).toString('hex');
  const record = {
    mimeType: uploadedFile.type || mime.lookup(uploadedFile.name),
    filename: uploadedFile.name,
    hash,
  };
  const useGcs = config.get('UPLOADS_STORE') === 'gcs';
  record.rawUrl = useGcs
    ? await uploadGcs(uploadedFile, hash)
    : await uploadLocal(uploadedFile, hash);
  record.storageType = useGcs ? 'gcs' : 'local';
  // No thumbnailing yet — reuse the raw URL.
  record.thumbnailUrl = record.rawUrl;
  return record;
}

module.exports = { storeUploadedFile };
|
# Android userdata/crypto-footer recovery helper — runs as root from a
# recovery or engineering shell.
# NOTE(review): this brute-forces a hardware-backed credential and rewrites
# partition data; only use on devices you are authorized to examine.
#mount /system
#rm -rf /vendor
#ln -s /system/vendor /vendor
#qseecomd&
cd /data/local/tmp
# Re-enable charging so the device survives a long brute-force run.
echo 1 > /sys/class/power_supply/battery/charging_enabled
/sbin/charger &
# Try candidate passwords from wordlist.txt against the hardware keystore.
/sbin/bruteforce hw < wordlist.txt
BLKDEV=/dev/block/bootdevice/by-name/userdata
BLKDEVSSD=/dev/block/bootdevice/by-name/ssd
# The crypto footer occupies the last 16 KiB of userdata; compute its offset
# in 512-byte sectors.
BLKDEV_SIZE=$(blockdev --getsize64 $BLKDEV)
OFFSET=$(expr $BLKDEV_SIZE - 16384)
OFFSET=$(expr $OFFSET / 512)
# Bug fix: OFFSET is a position in the OUTPUT device, so it must be seek=
# (skip output sectors), not skip= (which skipped past the end of the small
# footer.bin input and wrote nothing).
dd if=/data/local/tmp/footer.bin count=32 seek=$OFFSET bs=512 of=$BLKDEV
dd if=/data/local/tmp/ssd of=$BLKDEVSSD
sync
|
<reponame>ningxiaojiang/guns-wq<gh_stars>0
package com.stylefeng.guns.modular.manage.controller;
import javax.annotation.Resource;
import javax.validation.Valid;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.stylefeng.guns.common.controller.BaseController;
import com.stylefeng.guns.common.persistence.dao.ProductTypeMapper;
import com.stylefeng.guns.common.persistence.model.ProductType;
import com.stylefeng.guns.modular.manage.dao.ProductTypeDao;
/**
 * Controller for product-type management: view routing plus JSON CRUD endpoints.
 *
 * @author nxj
 * @Date 2018-01-30 09:02:35
 */
@SuppressWarnings("static-access")
@Controller
@RequestMapping("/productType")
public class ProductTypeController extends BaseController {

    // Prefix of the view templates rendered by this controller.
    private String PREFIX = "/manage/productType/";

    @Resource
    private ProductTypeDao typeDao;

    @Resource
    private ProductTypeMapper typeMapper;

    /**
     * Renders the product-type management index page.
     */
    @RequestMapping("")
    public String index() {
        return PREFIX + "productType.html";
    }

    /**
     * Renders the "add product type" page.
     */
    @RequestMapping("/productType_add")
    public String productTypeAdd() {
        return PREFIX + "productType_add.html";
    }

    /**
     * Renders the "edit product type" page for the given id.
     */
    @RequestMapping("/productType_update/{productTypeId}")
    public String productTypeUpdate(@PathVariable Integer productTypeId, Model model) {
        model.addAttribute("productType", typeMapper.selectById(productTypeId));
        return PREFIX + "productType_edit.html";
    }

    /**
     * Lists all product types as JSON.
     * NOTE(review): the `condition` parameter is accepted but never used —
     * confirm whether filtering was intended here.
     */
    @RequestMapping(value = "/list")
    @ResponseBody
    public Object list(String condition) {
        return typeDao.selectAll();
    }

    /**
     * Creates a product type.
     * NOTE(review): delflag is forced to 1 on insert — confirm which value
     * means "not deleted" in this schema.
     */
    @RequestMapping(value = "/add")
    @ResponseBody
    public Object add(@Valid ProductType pro, BindingResult result) {
        pro.setDelflag(1);
        typeMapper.insert(pro);
        return super.SUCCESS_TIP;
    }

    /**
     * Deletes a product type by id.
     */
    @RequestMapping(value = "/delete")
    @ResponseBody
    public Object delete(Integer productTypeId) {
        typeDao.delete(productTypeId);
        return SUCCESS_TIP;
    }

    /**
     * Updates a product type (delflag forced to 1, same as add()).
     */
    @RequestMapping(value = "/update")
    @ResponseBody
    public Object update(@Valid ProductType pro, BindingResult result) {
        pro.setDelflag(1);
        typeMapper.updateById(pro);
        return super.SUCCESS_TIP;
    }

    /**
     * Detail endpoint — currently unimplemented; always returns null.
     */
    @RequestMapping(value = "/detail")
    @ResponseBody
    public Object detail() {
        return null;
    }
}
|
// Registers the d14na fork of vue-js-toggle-button as a global Vue plugin,
// making the <toggle-button> component available in all components.
import Vue from 'vue'
import ToggleButton from '@d14na/vue-js-toggle-button'

Vue.use(ToggleButton)
|
#coding: utf-8
require 'gmail' # for more info -> http://dcparker.github.com/ruby-gmail/
require 'readline' # for email confirmation
require 'date'

# Ensure a clean Gmail logout when the user interrupts with Ctrl-C.
Signal.trap(:INT){
  puts "logout Gmail ..."
  @gmail.logout if defined? @gmail
  puts "loged out!"
  exit
}
# Verifies that an attachment exists before we try to send it.
# Prints a confirmation and returns when the file is present; otherwise
# prints a hint and aborts the whole script.
#
# Bug fix: the interpolations were broken literals ("#(unknown)"); they now
# actually interpolate the filename. Also fixes "doesn not" -> "does not".
def check_file(filename)
  if File.exist?(filename)
    puts "checked the given file '#{filename}' exists."
  else
    puts "it seems '#{filename}' does not exist."
    puts "check if the file really exists on the given path."
    exit
  end
end
# initialize variables
yyyymm = Date.today.strftime("%Y%m")
mmdd = Date.today.strftime("%m%d")
last_yyyymm = (Date.today - 28).strftime("%Y%m")
is_last_month_report_included = false
USERNAME = "<EMAIL>"
PASSWORD = "<PASSWORD>"
to_addr = "<EMAIL>"
cc_addr = "<EMAIL>" # comment out if no co-creators
subject_text = "MAIL TITLE"
body_text = open("./body.txt").read()
path_to_file1 = "./sample.csv"
check_file path_to_file1
#path_to_file2 = "./sample2.csv"
#check_file path_to_file2
if Time.now.day <= 7
path_to_file3 = "./sample3.csv"
check_file path_to_file3
#path_to_file4 = "./sample4.csv"
#check_file path_to_file4
is_last_month_report_included = true
end
# login, confirm, then send/cancel and logout
@gmail = Gmail.new(USERNAME, PASSWORD)
puts "Please confirm..."
puts " [from] #{USERNAME}"
puts " [to] #{to_addr}"
puts " [cc] #{cc_addr}" if defined? cc_addr
puts " [subject] #{subject_text}"
puts " [body]"
puts body_text
puts " [file] #{path_to_file1}" if defined? path_to_file1
puts " [file] #{path_to_file2}" if defined? path_to_file2
if is_last_month_report_included
puts " [file] #{path_to_file3}" if defined? path_to_file3
puts " [file] #{path_to_file4}" if defined? path_to_file4
end
puts ""
while true
input = Readline.readline("can I send this?[Y/n] > ")
if input == 'Y' or input == 'y'
# send email and quit
puts "start sending email ..."
@gmail.deliver do
to to_addr
cc cc_addr if defined? cc_addr
subject subject_text
body body_text
add_file path_to_file1 if defined? path_to_file1
add_file path_to_file2 if defined? path_to_file2
if is_last_month_report_included
add_file path_to_file3 if defined? path_to_file3
add_file path_to_file4 if defined? path_to_file4
end
end
puts "just sent email!"
puts "check your Gmail 'sent box' for double check :-)"
@gmail.logout
break
elsif input == 'N' or input == 'n'
# stop sending email and quit
@gmail.logout
puts 'stopped sending email and logged out.'
break
else
# do nothing
end
end
|
<reponame>zaidmukaddam/linkto<filename>packages/gamut/src/icons/VerifiedBadge.tsx
import * as React from "react";
import { CustomSVGProps } from "@/types";

/**
 * "Verified" badge icon (circular seal with a check mark).
 *
 * @param size - Rendered width/height in pixels (default 18).
 */
export default function VerifiedBadge({
  size = 18,
  ...otherProps
}: CustomSVGProps) {
  // Accessibility fix: role="img" (instead of role="presentation") so the
  // aria-label is exposed to assistive technology; "presentation" strips the
  // element's semantics and makes the label unreachable.
  return (
    <svg
      width={size}
      height={size}
      xmlns="http://www.w3.org/2000/svg"
      viewBox="0 0 20 20"
      fill="currentColor"
      role="img"
      aria-label="verified badge"
      {...otherProps}
    >
      <path
        fillRule="evenodd"
        d="M6.267 3.455a3.066 3.066 0 001.745-.723 3.066 3.066 0 013.976 0 3.066 3.066 0 001.745.723 3.066 3.066 0 012.812 2.812c.051.643.304 1.254.723 1.745a3.066 3.066 0 010 3.976 3.066 3.066 0 00-.723 1.745 3.066 3.066 0 01-2.812 2.812 3.066 3.066 0 00-1.745.723 3.066 3.066 0 01-3.976 0 3.066 3.066 0 00-1.745-.723 3.066 3.066 0 01-2.812-2.812 3.066 3.066 0 00-.723-1.745 3.066 3.066 0 010-3.976 3.066 3.066 0 00.723-1.745 3.066 3.066 0 012.812-2.812zm7.44 5.252a1 1 0 00-1.414-1.414L9 10.586 7.707 9.293a1 1 0 00-1.414 1.414l2 2a1 1 0 001.414 0l4-4z"
        clipRule="evenodd"
      />
    </svg>
  );
}
|
# Prints invocation help with two concrete examples (cluster and local paths).
print_usage()
{
echo "Usage: $0 <app_path> <exp_path> <conda_path>"
echo "e.g: $0 /Scratch/ng98/CL/avalanche_nuwan_fork/exp_scripts/train_pool.py /Scratch/ng98/CL/results/ /Scratch/ng98/CL/conda"
echo "e.g: $0 ~/Desktop/avalanche_nuwan_fork/exp_scripts/train_pool.py ~/Desktop/CL/results/ /Users/ng98/miniconda3/envs/avalanche-dev-env"
}
cuda='0'
clean_dir="TRUE"
clean_dir="FALSE"
dataset=(LED_a RotatedMNIST RotatedCIFAR10 CORe50)
dataset=(CORe50 RotatedMNIST RotatedCIFAR10)
#dataset=(CORe50)
strategy=(LwF EWC GDumb ER TrainPool)
strategy=(TrainPool)
mini_batch_size='16'
tp_pool_type='6CNN'
tp_number_of_nns_to_train='6'
tp_predict_methods_array=('ONE_CLASS' 'ONE_CLASS_end' 'MAJORITY_VOTE' 'RANDOM' 'NAIVE_BAYES' 'NAIVE_BAYES_end' 'TASK_ID_KNOWN')
tp_predict_methods_array=('ONE_CLASS_end')
#tp_reset_tp='reset'
tp_reset_tp='no_reset'
#tp_use_one_class_probas='no_use_probas'
tp_use_one_class_probas='use_probas'
#tp_use_weights_from_task_detectors='no_use_weights'
tp_use_weights_from_task_detectors='use_weights'
tp_auto_detect_tasks='no_detect'
#tp_auto_detect_tasks='detect'
# DO_NOT_NOT_TRAIN_TASK_PREDICTOR_AT_THE_END, WITH_ACCUMULATED_INSTANCES, WITH_ACCUMULATED_LEARNED_FEATURES, WITH_ACCUMULATED_STATIC_FEATURES
train_task_predictor_at_the_end_default='DO_NOT_NOT_TRAIN_TASK_PREDICTOR_AT_THE_END'
train_task_predictor_at_the_end='WITH_ACCUMULATED_LEARNED_FEATURES'
model='SimpleCNN'
model='CNN4'
optimizer='Adam'
l_rate='0.0005'
app_path="/Users/ng98/Desktop/avalanche_test/train_pool.py"
base_dir="/Users/ng98/Desktop/avalanche_test/exp"
if [ $# -lt 3 ]; then
print_usage
exit 1
fi
app_path=$1
base_dir=$2
echo "Activating conda environment $3"
eval "$(conda shell.bash hook)"
conda init bash
conda activate "$3"
conda env list
log_dir="${base_dir}/logs"
log_file_name=''
if [ -d "${log_dir}" ]; then
if [ "${clean_dir}" == "TRUE" ]; then
echo "Remove directory ${log_dir}"
rm -rf ${log_dir}
echo "Create log directory structure"
mkdir -p "${log_dir}/"{tb_data,txt_logs,exp_logs,csv_data}
fi
else
echo "Create log directory structure"
mkdir -p "${log_dir}/"{tb_data,txt_logs,exp_logs,csv_data}
fi
for (( j=0; j<${#dataset[@]}; j++ ))
do
for (( i=0; i<${#strategy[@]}; i++ ))
do
if [ "${strategy[$i]}" = "TrainPool" ] ; then
tp_predict_methods=("${tp_predict_methods_array[@]}")
else
tp_predict_methods=('OTHER')
fi
for (( k=0; k<${#tp_predict_methods[@]}; k++ ))
do
command_args="--base_dir $base_dir --dataset ${dataset[$j]} --strategy ${strategy[$i]} --minibatch_size ${mini_batch_size} --cuda ${cuda}"
log_file_name="${dataset[$j]}_${strategy[$i]}_mb_${mini_batch_size}"
case ${strategy[$i]} in
LwF)
command_args="${command_args} --module ${model} --optimizer ${optimizer} --lr ${l_rate} --hs 1024"
log_file_name="${log_file_name}_${model}"
;;
EWC)
command_args="${command_args} --module ${model} --optimizer ${optimizer} --lr ${l_rate} --hs 1024"
log_file_name="${log_file_name}_${model}"
;;
GDumb)
command_args="${command_args} --module ${model} --optimizer ${optimizer} --lr ${l_rate} --hs 1024 --mem_buff_size 1000"
log_file_name="${log_file_name}_${model}_b1000"
;;
ER)
command_args="${command_args} --module ${model} --optimizer ${optimizer} --lr ${l_rate} --hs 1024 --mem_buff_size 1000"
log_file_name="${log_file_name}_${model}_b1000"
;;
TrainPool)
tp_predict_method=${tp_predict_methods[$k]}
tp_p_method="${tp_predict_method}"
tp_train_task_p_at_end_type="${train_task_predictor_at_the_end_default}"
case ${tp_predict_method} in
ONE_CLASS)
;;
ONE_CLASS_end)
tp_train_task_p_at_end_type="${train_task_predictor_at_the_end}"
tp_p_method='ONE_CLASS'
;;
NAIVE_BAYES)
;;
NAIVE_BAYES_end)
tp_train_task_p_at_end_type="${train_task_predictor_at_the_end}"
tp_p_method='NAIVE_BAYES'
;;
*)
;;
esac
if [ "${tp_reset_tp}" == "reset" ]; then
tp_reset_tp_cmd='--reset_training_pool'
else
tp_reset_tp_cmd='--no-reset_training_pool'
fi
if [ "${tp_use_one_class_probas}" == "use_probas" ]; then
tp_use_one_class_probas_cmd='--use_one_class_probas'
else
tp_use_one_class_probas_cmd='--no-use_one_class_probas'
fi
if [ "${tp_use_weights_from_task_detectors}" == "use_weights" ]; then
tp_use_weights_from_task_detectors_cmd='--use_weights_from_task_detectors'
else
tp_use_weights_from_task_detectors_cmd='--no-use_weights_from_task_detectors'
fi
if [ "${tp_auto_detect_tasks}" == "detect" ]; then
tp_auto_detect_tasks_cmd='--auto_detect_tasks'
else
tp_auto_detect_tasks_cmd='--no-auto_detect_tasks'
fi
command_args="${command_args} --module MultiMLP --pool_type ${tp_pool_type} --number_of_mpls_to_train ${tp_number_of_nns_to_train} --skip_back_prop_threshold 0.0 --task_detector_type ${tp_p_method} ${tp_reset_tp_cmd} ${tp_use_one_class_probas_cmd} ${tp_use_weights_from_task_detectors_cmd} ${tp_auto_detect_tasks_cmd} --train_task_predictor_at_the_end ${tp_train_task_p_at_end_type}"
log_file_name="${log_file_name}_TP_${tp_pool_type}_${tp_number_of_nns_to_train}_${tp_predict_method}_${tp_reset_tp}_${tp_use_one_class_probas}_${tp_use_weights_from_task_detectors}_${tp_auto_detect_tasks}_${tp_train_task_p_at_end_type}"
;;
*)
command_args=""
log_file_name=""
;;
esac
command_args="${command_args} --log_file_name ${log_file_name}"
if [ -n "$command_args" ] ; then
full_log_file="${log_dir}/exp_logs/$log_file_name"
echo "python $app_path $command_args &>${full_log_file}"
echo "Log file: $full_log_file"
time python $app_path $command_args &>$full_log_file &
fi
if [ -z $! ]; then
task_failed=1
else
PID=$!
# echo -e "PID=$PID : $exp_cmd \n"
echo -e "PID=$PID : \n"
sleep 5
# Poll the experiment log until the completion marker appears, bailing out
# early if the child process dies.
# Bug fix: "esle" was a syntax error ("else" misspelled), which aborted the
# whole script the first time this loop ran.
while [ "$(grep -m 1 -c 'Experiment completed' "$full_log_file")" -lt 1 ];
do
  sleep 10
  if ! ps -p $PID &>/dev/null;
  then
    task_failed=1
    break
  else
    echo -ne "Waiting for exp with $PID to finish\r"
  fi
done
echo "Child processors of PID $PID----------------------"
# This is process id, parameter passed by user
ppid=$PID
if [ -z $ppid ] ; then
echo "No PID given."
fi
# Walk down the process tree from $ppid, collecting every descendant so the
# children can be killed before their parents.
# Bug fix: $SCRIPT_PID is never defined anywhere in this script, which made
# "[ $c_pid -ne $SCRIPT_PID ]" fail with a unary-operator error; default it
# to this shell's own PID ($$), which is what the guard is meant to skip.
child_process_count=1
while true
do
  FORLOOP=FALSE
  # Collect all direct children of the current ppid.
  for c_pid in `ps -ef| awk '$3 == '$ppid' { print $2 }'`
  do
    if [ "$c_pid" -ne "${SCRIPT_PID:-$$}" ] ; then
      child_pid[$child_process_count]=$c_pid
      child_process_count=$((child_process_count + 1))
      ppid=$c_pid
      FORLOOP=TRUE
    else
      echo "Skip adding PID ${SCRIPT_PID:-$$}"
    fi
  done
  if [ "$FORLOOP" = "FALSE" ] ; then
    child_process_count=$((child_process_count - 1))
    ## Kill the deepest children first, then walk back up toward the parent.
    while [ $child_process_count -ne 0 ]
    do
      echo "killing ${child_pid[$child_process_count]}"
      kill -9 "${child_pid[$child_process_count]}" >/dev/null
      child_process_count=$((child_process_count - 1))
    done
    break
  fi
done
echo "Child processors of PID $PID----------------------"
echo -e "killing PID $PID\n"
kill $PID
fi
done
done
done
|
/*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.app.web.base.login;
import javax.annotation.Resource;
import org.dbflute.optional.OptionalEntity;
import org.dbflute.optional.OptionalThing;
import org.docksidestage.app.web.signin.SigninAction;
import org.docksidestage.dbflute.cbean.MemberCB;
import org.docksidestage.dbflute.exbhv.MemberBhv;
import org.docksidestage.dbflute.exbhv.MemberLoginBhv;
import org.docksidestage.dbflute.exentity.Member;
import org.docksidestage.dbflute.exentity.MemberLogin;
import org.docksidestage.mylasta.action.HarborUserBean;
import org.docksidestage.mylasta.direction.HarborConfig;
import org.lastaflute.core.magic.async.AsyncManager;
import org.lastaflute.core.time.TimeManager;
import org.lastaflute.db.jta.stage.TransactionStage;
import org.lastaflute.web.login.PrimaryLoginManager;
import org.lastaflute.web.login.TypicalLoginAssist;
import org.lastaflute.web.login.credential.UserPasswordCredential;
import org.lastaflute.web.login.option.LoginSpecifiedOption;
/**
* @author jflute
*/
public class HarborLoginAssist extends TypicalLoginAssist<Integer, HarborUserBean, Member> // #change_it also UserBean
implements PrimaryLoginManager { // #app_customize
// ===================================================================================
// Attribute
// =========
@Resource
private TimeManager timeManager;
@Resource
private AsyncManager asyncManager;
@Resource
private TransactionStage transactionStage;
@Resource
private HarborConfig config;
@Resource
private MemberBhv memberBhv;
@Resource
private MemberLoginBhv memberLoginBhv;
// ===================================================================================
// Find User
// =========
@Override
protected void checkCredential(CredentialChecker checker) {
checker.check(UserPasswordCredential.class, credential -> {
return memberBhv.selectCount(cb -> arrangeLoginByCredential(cb, credential)) > 0;
});
}
@Override
protected void resolveCredential(CredentialResolver resolver) {
resolver.resolve(UserPasswordCredential.class, credential -> {
return memberBhv.selectEntity(cb -> arrangeLoginByCredential(cb, credential));
});
}
private void arrangeLoginByCredential(MemberCB cb, UserPasswordCredential credential) {
cb.query().arrangeLogin(credential.getUser(), encryptPassword(credential.getPassword()));
}
@Override
protected OptionalEntity<Member> doFindLoginUser(Integer userId) {
return memberBhv.selectEntity(cb -> cb.query().arrangeLoginByIdentity(userId));
}
// ===================================================================================
// Login Process
// =============
@Override
protected HarborUserBean createUserBean(Member userEntity) {
return new HarborUserBean(userEntity);
}
@Override
protected OptionalThing<String> getCookieRememberMeKey() {
return OptionalThing.of(config.getCookieRememberMeHarborKey());
}
@Override
protected Integer toTypedUserId(String userKey) {
return Integer.valueOf(userKey);
}
@Override
protected void saveLoginHistory(Member member, HarborUserBean userBean, LoginSpecifiedOption option) {
asyncManager.async(() -> {
transactionStage.requiresNew(tx -> {
insertLogin(member);
});
});
}
protected void insertLogin(Member member) {
MemberLogin login = new MemberLogin();
login.setMemberId(member.getMemberId());
login.setLoginMemberStatusCodeAsMemberStatus(member.getMemberStatusCodeAsMemberStatus());
login.setLoginDatetime(timeManager.currentDateTime());
login.setMobileLoginFlg_False(); // mobile unsupported for now
memberLoginBhv.insert(login);
}
// ===================================================================================
// Login Resource
// ==============
@Override
protected Class<HarborUserBean> getUserBeanType() {
return HarborUserBean.class;
}
@Override
protected Class<?> getLoginActionType() {
return SigninAction.class;
}
}
|
<reponame>i-a-n/eui
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import React from 'react';
import { render, mount } from 'enzyme';
import { EuiNotificationEventReadButton } from './notification_event_read_button';
describe('EuiNotificationEventReadButton', () => {
test('is rendered', () => {
const component = render(
<EuiNotificationEventReadButton
id="id"
eventName="eventName"
isRead={true}
onClick={() => {}}
/>
);
expect(component).toMatchSnapshot();
});
test('renders isRead to false', () => {
const component = render(
<EuiNotificationEventReadButton
id="id"
eventName="eventName"
isRead={false}
onClick={() => {}}
/>
);
expect(component).toMatchSnapshot();
});
test('onClick fires for buttons', () => {
const handler = jest.fn();
const component = mount(
<EuiNotificationEventReadButton
id="id"
eventName="eventName"
isRead={false}
onClick={handler}
/>
);
component.find('button').simulate('click');
expect(handler.mock.calls.length).toEqual(1);
});
});
|
#!/usr/bin/env bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -x
# environment
export AIRFLOW_HOME=${AIRFLOW_HOME:=~/airflow}
export AIRFLOW__CORE__UNIT_TEST_MODE=True
# configuration test
export AIRFLOW__TESTSECTION__TESTKEY=testvalue
# use Airflow 2.0-style imports
export AIRFLOW_USE_NEW_IMPORTS=1
# any argument received is overriding the default nose execution arguments:
nose_args=$@
#--with-doctest
# Generate the `airflow` executable if needed
which airflow > /dev/null || python setup.py develop
echo "Initializing the DB"
yes | airflow resetdb
airflow initdb
if [ "${TRAVIS}" ]; then
if [ -z "$nose_args" ]; then
nose_args="--with-coverage \
--cover-erase \
--cover-html \
--cover-package=airflow \
--cover-html-dir=airflow/www/static/coverage \
--with-ignore-docstrings \
--rednose \
--with-timer \
-v \
--logging-level=DEBUG "
fi
# For impersonation tests running on SQLite on Travis, make the database world readable so other
# users can update it
AIRFLOW_DB="/home/travis/airflow/airflow.db"
if [ -f "${AIRFLOW_DB}" ]; then
sudo chmod a+rw "${AIRFLOW_DB}"
fi
# For impersonation tests on Travis, make airflow accessible to other users via the global PATH
# (which contains /usr/local/bin)
sudo ln -s "${VIRTUAL_ENV}/bin/airflow" /usr/local/bin/
else
if [ -z "$nose_args" ]; then
nose_args="--with-coverage \
--cover-erase \
--cover-html \
--cover-package=airflow \
--cover-html-dir=airflow/www/static/coverage \
--with-ignore-docstrings \
--rednose \
--with-timer \
-s \
-v \
--logging-level=DEBUG "
fi
fi
echo "Starting the unit tests with the following nose arguments: "$nose_args
nosetests $nose_args
# To run individual tests:
# nosetests tests.core:CoreTest.test_scheduler_job
|
<gh_stars>1-10
package entity
import (
"fmt"
"time"
)
// SiteStats holds one day's aggregate traffic metrics for a site.
type SiteStats struct {
	New            bool      `db:"-" json:"-" `
	SiteID         int64     `db:"site_id" json:"-"`
	Visitors       int64     `db:"visitors"`
	Pageviews      int64     `db:"pageviews"`
	Sessions       int64     `db:"sessions"`
	BounceRate     float64   `db:"bounce_rate"`
	AvgDuration    float64   `db:"avg_duration"`
	KnownDurations int64     `db:"known_durations" json:",omitempty"`
	Date           time.Time `db:"ts" json:",omitempty"`
}

// FormattedDuration renders AvgDuration (seconds) as "m:ss".
func (s *SiteStats) FormattedDuration() string {
	// Bug fix: %02d zero-pads the seconds so 65s renders "1:05" rather
	// than the ambiguous "1:5" produced by the old "%d:%d" format.
	return fmt.Sprintf("%d:%02d", int(s.AvgDuration/60.00), (int(s.AvgDuration) % 60))
}
|
<gh_stars>0
import { Convert } from "./convert";
import { Wave } from "./wave";
export class Generate {
    /**
     * Returns every integer from `from` to `to`, inclusive, in order
     * (ascending or descending depending on which bound is larger).
     */
    static inclusive(from: number, to: number) {
        return from > to
            ? Generate.range(from, to - from - 1) // descending: negative length
            : Generate.range(from, to - from + 1) // ascending: positive length
    }

    /**
     * Returns |length| consecutive integers starting at `from`; a negative
     * `length` counts downward. Empty array when length is 0.
     */
    static range(from: number, length: number) {
        const result: number[] = [];
        if (length === 0) return [];
        if (length > 0) {
            for (let i = 0; i < length; i++) {
                result.push(i + from);
            }
        }
        else {
            for (let i = 0; i > length; i--) {
                result.push(i + from);
            }
        }
        return result;
    }

    /** Sinusoid descriptor with phase defaulting to 0 rad. */
    static complexSignal(ampl: number, freqHz: number, phaseRad: number | null = null) {
        return new Wave(ampl, freqHz, phaseRad == null ? Convert.deg2rad(0) : phaseRad);
    }

    /**
     * Sinusoid descriptor with phase defaulting to 0 rad.
     * NOTE(review): the trailing `false` presumably marks the Wave as
     * real-valued — confirm against the Wave constructor signature.
     */
    static realSignal(ampl: number, freqHz: number, phaseRad: number | null = null) {
        return new Wave(ampl, freqHz, phaseRad == null ? Convert.deg2rad(0) : phaseRad, false);
    }
}
package com.wyp.materialqqlite.qqclient.protocol.protocoldata;
import java.util.ArrayList;
/**
 * Container for the "recent contacts" list returned by the QQ protocol layer.
 */
public class RecentList {
    /** Protocol return code of the last fetch. */
    public int m_nRetCode;
    /** Recent-contact entries, kept in server order. */
    public ArrayList<RecentInfo> m_arrRecentInfo = new ArrayList<RecentInfo>();

    /** Clears all entries and resets the return code to 0. */
    public void reset() {
        m_nRetCode = 0;
        m_arrRecentInfo.clear();
    }

    /**
     * Appends an entry to the list.
     *
     * @param recentInfo entry to add; null is rejected
     * @return true when the entry was added, false for null input
     */
    public boolean addRecent(RecentInfo recentInfo) {
        if (recentInfo == null) {
            return false;
        }
        m_arrRecentInfo.add(recentInfo);
        return true;
    }

    /** @return the number of stored entries */
    public int getRecentCount() {
        return m_arrRecentInfo.size();
    }

    /**
     * @param nIndex zero-based position
     * @return the entry at nIndex, or null when the index is out of range
     */
    public RecentInfo getRecent(int nIndex) {
        boolean inRange = nIndex >= 0 && nIndex < m_arrRecentInfo.size();
        return inRange ? m_arrRecentInfo.get(nIndex) : null;
    }
}
|
/**
 * Strips the "Category<N>CustomField" prefix from matching array keys,
 * keeping only the captured field name; all other keys pass through as-is.
 */
function transformKeys($data) {
    $result = [];
    foreach ($data as $key => $value) {
        $matched = preg_match('/Category([\d]+)CustomField(?<field_name>[\w\d]+)$/', $key, $match);
        $result[$matched ? $match["field_name"] : $key] = $value;
    }
    return $result;
}
// Test
$data = [
    "Category1CustomFieldName" => "John",
    "Category2CustomFieldAge" => 25,
    "OtherKey" => "Value"
];
$result = transformKeys($data);
print_r($result);
<reponame>jiuyue8888/zbb
// import Vue from "vue/types/vue";
var MD5 = require('./md5.js');
const CryptoJS = require('crypto-js');
import { JSEncrypt } from 'jsencrypt'
import Vue from 'vue';
import router from "../router";
const vue = new Vue({
router
});
/*
* 自定义公共函数
*/
// Debug/smoke-test helper: logs a fixed marker to the console.
function tt() {
  console.log('test');
}
// 获取当前时间戳
// Returns the current time as milliseconds since the Unix epoch
// (identical to (new Date()).getTime()).
function getTimeStampTmp() {
  return Date.now();
}
/**
 * Formats a timestamp as "YYYY-MM-DD HH:mm:ss" in local time.
 *
 * @param {number|string|Date} now - Anything accepted by `new Date(...)`.
 * @returns {string} Zero-padded local date-time string.
 */
function formatDate(now) {
  const d = new Date(now);
  const pad = (n) => String(n).padStart(2, '0');
  // Bug fix: month/day/time fields are now zero-padded; the old version
  // produced non-sortable strings like "2023-1-5 9:3:7".
  return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ` +
    `${pad(d.getHours())}:${pad(d.getMinutes())}:${pad(d.getSeconds())}`;
}
// Signs a timestamp by MD5-hashing it concatenated with a fixed app salt.
// NOTE(review): "ZTXWEIXINV006" looks like a shared API secret — confirm
// whether it should live in configuration rather than source.
function getMd5(timeStampTmp) {
  var md5 = MD5.hex_md5(timeStampTmp + "ZTXWEIXINV006");
  return md5;
}
// Returns the hard-coded host address used by this client.
function getIp() {
  return '192.168.0.1';
}
/**
 * Decrypts an RSA-wrapped AES payload from the server.
 *
 * @param {string} aesKey - AES session key, RSA-encrypted.
 * @param {string} aesData - Base64 ciphertext (AES/ECB/PKCS7).
 * @returns {string} Decrypted plaintext (empty string when decryption fails).
 */
function decodegTool(aesKey, aesData) {
  let jse = new JSEncrypt();
  // Set the RSA private key ("<KEY>" is a redacted placeholder).
  jse.setPrivateKey("<KEY>")
  // Recover the AES session key.
  let decryptedKey = jse.decrypt(aesKey);
  // Strip newline characters (\n) from the base64 payload.
  aesData = aesData.replace(/\n/g, '');
  // NOTE(review): ECB mode leaks plaintext patterns — confirm whether the
  // server could move to CBC/GCM.
  let decrypt = CryptoJS.AES.decrypt(aesData, CryptoJS.enc.Utf8.parse(decryptedKey), {
    mode: CryptoJS.mode.ECB,
    padding: CryptoJS.pad.Pkcs7
  });
  console.log(decrypt);
  let decryptedStr = decrypt.toString(CryptoJS.enc.Utf8);
  return decryptedStr.toString();
}
/**
 * Checks an uploaded file against a human-readable size limit and shows an
 * error toast when it is too large.
 *
 * @param {{size: number}} file - Object exposing a byte `size` (e.g. a File).
 * @param {string} limitSize - Limit such as "500KB", "1MB" or "2GB".
 * @returns {boolean} true when the file fits within the limit.
 */
function limitFileSize (file, limitSize) {
  // Cleanup: removed the unused `var that = this;` alias.
  var arr = ["KB", "MB", "GB"]
  var limit = limitSize.toUpperCase();
  var limitNum = 0;
  for (var i = 0; i < arr.length; i++) {
    var leval = limit.indexOf(arr[i]);
    if (leval > -1) {
      // e.g. "2MB" -> 2 * 1024^2 bytes. Radix 10 added to parseInt.
      limitNum = parseInt(limit.substr(0, leval), 10) * Math.pow(1024, (i + 1))
      break
    }
  }
  if (file.size > limitNum) {
    console.log("文件偏大")
    // NOTE(review): the toast text hard-codes "1MB" regardless of
    // limitSize — confirm whether it should reflect the actual limit.
    vue.$message({
      showClose: true,
      message: '图片最大支持上传1MB',
      type: 'error'
    })
    return false;
  }
  return true
}
// Base64-encodes a string after URI-encoding it, keeping multi-byte
// characters safe for btoa (which only accepts Latin-1 input).
function encode(str) {
  return btoa(encodeURI(str));
}
export default {
tt: tt,
getTimeStampTmp: getTimeStampTmp,
getMd5: getMd5,
getIp: getIp,
decodegTool: decodegTool,
limitFileSize:limitFileSize,
encode:encode,
formatDate:formatDate
}
|
def find_closest_two_sum_pairs(arr):
    """Return the pairs (arr[i], arr[j]) of the sorted array minimizing
    |arr[i] + arr[j] - (arr[i+1] + arr[j-1])|.

    Sorts ``arr`` in place (caller-visible side effect, as the original did).

    NOTE(review): for any adjacent pair (j == i + 1) the score is always 0,
    so the minimum is always 0 and the result contains at least every
    consecutive pair of the sorted array; the expected-output comment at the
    call site does not match this behavior — confirm the intended semantics.
    """
    arr.sort()
    n = len(arr)
    best = float('inf')
    best_pairs = []
    for i in range(n):
        for j in range(i + 1, n):
            score = abs(arr[i] + arr[j] - (arr[i + 1] + arr[j - 1]))
            if score < best:
                best = score
                best_pairs = [(arr[i], arr[j])]
            elif score == best:
                best_pairs.append((arr[i], arr[j]))
    return best_pairs
# Test
arr = [-2, 4, 7, 8, 10]
pairs = find_closest_two_sum_pairs(arr)
# NOTE(review): the previous comment claimed this prints [(7, 8)], but the
# function actually prints [(-2, 4), (4, 7), (7, 8), (8, 10)] — every
# adjacent pair of the sorted array ties at diff 0. Confirm intended output.
print(pairs)
#!/bin/sh
# Runs cli.lua under a given Lua version ($1) against a given cc checkout
# ($2), pausing for a keypress so the operator can inspect each combination.
try() {
luaenv local $1
cd cc
git checkout $2
cd ..
echo lua = $1
echo cc = $2
lua -v
unset key
read -rsp $'Press any key to continue...\n' -n1 key
lua cli.lua .
}
# Matrix 1: every cc revision against both LuaJIT builds.
for lv in luajit-2.0.4 luajit-2.1.0-beta1; do
for cv in master 1.77 1.74 1.74pr17 1.74pr16 1.74pr14 1.74pr13 1.73 1.64 1.6 1.58 1.5 1.41; do
try $lv $cv
done
done
# Matrix 2: PUC-Rio Lua versions against recent cc revisions only.
for lv in 5.2.1 5.2.3 5.3.2; do
for cv in master 1.77 1.74; do
try $lv $cv
done
done
|
import { container } from 'tsyringe';
import ICustomerRepository from '@modules/customer/Repositories/ICustomerRepository';
import CustomerRepository from '@modules/customer/infra/typeorm/repositories/CustomerRepository';
import IUsersRepository from '@modules/users/Repositories/IUsersRepository';
import UsersRepository from '@modules/users/infra/typeorm/repositories/users';
import IUpdateUsersRepository from '@modules/users/Repositories/IUpdateUsersRepository';
import UsersUpdateRepository from '@modules/users/infra/typeorm/repositories/usersUpdate';
import IUsersTokensRepository from '@modules/users/Repositories/IUserTokensRepository';
import UserTokensRepository from '@modules/users/infra/typeorm/repositories/UserTokensRepository';
import ICreateGroupRepository from '@modules/groups/repositories/groups';
import GroupRepository from '@modules/groups/infra/typeorm/repositories/Group';
import IOrderRepository from '@modules/customer/Repositories/IOrderRepository';
import OrderRepository from '@modules/customer/infra/typeorm/repositories/OrderRepository';
import IRestaurantRepository from '@modules/customer/Repositories/IRestaurantRepository';
import RestaurantRepository from '@modules/customer/infra/typeorm/repositories/RestaurantRepository';
import '@modules/users/providers';
import './providers';
// Register each repository implementation against its string token as a
// singleton, so tsyringe resolves one shared instance per token everywhere.
container.registerSingleton<IOrderRepository>('OrderRepository', OrderRepository);
container.registerSingleton<IRestaurantRepository>('RestaurantRepository', RestaurantRepository);
container.registerSingleton<IUsersRepository>('UsersRepository', UsersRepository);
container.registerSingleton<IUpdateUsersRepository>('UsersUpdateRepository', UsersUpdateRepository);
container.registerSingleton<IUsersTokensRepository>('UserTokensRepository', UserTokensRepository);
container.registerSingleton<ICustomerRepository>('CustomerRepository', CustomerRepository);
|
#include <DTBFile.hpp>

namespace HighELF {

// Load a Device Tree Blob from `filename`.
// Stub: only records that DTB files are big-endian; no header or structure
// parsing is implemented yet, and `filename` is currently unused.
void DTBFile::Load(std::string filename) {
    fileEndianness = Endianness::Big;
    // TODO everything lol
}

} // namespace HighELF
|
#!/bin/bash
# NOTE(review): shebang changed from /bin/sh — this script relies on bash
# features throughout (the `function` keyword, the RSYNC_PROTECT_TMP_FILES
# array, `[[ ]]` tests, and `set -o pipefail`), which break under a strictly
# POSIX /bin/sh such as dash.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}

trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework to embed into the app bundle.
install_framework()
{
  # Resolve the framework source: built product path, basename within the
  # built products dir, or a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  # NOTE(review): if none of the branches match, `source` remains unset and
  # `set -u` aborts at its first use below — TODO confirm this is intended.

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the copied binary: framework layout first, bare dylib fallback,
  # following a symlink if the destination binary is one.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle readable on disk.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary
    # was processed (copy the stripped dSYM out), 0 means no matching archs.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. Honors CODE_SIGNING_REQUIRED /
# CODE_SIGNING_ALLOWED; no-op when signing is disabled or no identity is set.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # In parallel mode the command is backgrounded with '&'; the top-level
    # script `wait`s for all signing jobs before exiting.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: path to a fat binary. Removes every architecture slice not present in
# $ARCHS. Communicates via STRIP_BINARY_RETVAL: 1 = processed, 0 = no
# overlapping architectures (binary left untouched, warning emitted).
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the vendored Reusable framework for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Reusable-iOS/Reusable.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Reusable-iOS/Reusable.framework"
fi
# In parallel-sign mode codesign jobs were backgrounded; wait for them all.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
package com.github.robindevilliers.welcometohell.steps;
import org.openqa.selenium.WebDriver;
import com.github.robindevilliers.cascade.annotations.Demands;
import com.github.robindevilliers.cascade.annotations.Narrative;
import com.github.robindevilliers.cascade.annotations.Step;
import com.github.robindevilliers.cascade.annotations.Then;
import static org.junit.Assert.assertEquals;
@Step({MentallyIll.class, YouCanGo.class, EnterHowIsHell.class, DoneTime.class})
@Narrative("Present escaped conclusion.")
public class Escaped {

    /** Browser title expected on the escape-conclusion page. */
    private static final String EXPECTED_TITLE = "Welcome to Hell | Escape";

    @Demands
    private WebDriver webDriver;

    /** Verifies the browser landed on the escape-conclusion page. */
    @Then
    public void then() {
        String actualTitle = webDriver.getTitle();
        assertEquals(EXPECTED_TITLE, actualTitle);
    }
}
|
#!/bin/bash
# Sync the local master branch with the upstream repository.
# FIX: abort on the first failing command so we never merge after a failed
# remote-add, fetch, or checkout.
set -e

# Add the upstream remote once, if it is not already configured.
if ! git remote -v | grep -q 'upstream'; then
  # echo 'not found upstream'
  git remote add upstream https://github.com/philipwalton/solved-by-flexbox.git
fi

git fetch upstream master
git checkout master
git merge upstream/master
|
def rgb_to_hex(r, g, b):
    """Convert an RGB color to its "#RRGGBB" hex string (uppercase digits).

    Args:
        r, g, b: Integer color components, each in the range 0-255.

    Returns:
        A 7-character string of the form "#RRGGBB".

    Raises:
        ValueError: If any component falls outside 0-255. (Previously an
            out-of-range value like 300 silently produced a malformed,
            longer-than-two-digit field such as "12C".)
    """
    for name, value in (("r", r), ("g", g), ("b", b)):
        if not 0 <= value <= 255:
            raise ValueError(f"{name} must be in 0..255, got {value}")
    # Each component formats to exactly two uppercase hex digits.
    return f"#{r:02X}{g:02X}{b:02X}"
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Run from the script's own directory so relative log/fifo/work paths resolve.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
# Fresh log directory plus the stderr sink used by every pipeline below.
mkdir -p log
rm -R -f log/*
touch log/stderror.err

# Background monitor keyed to this script's PID; killed in exit_handler.
ktools_monitor.sh $$ & pid0=$!
# Trap handler: stop the monitor, report success/failure, then kill every
# leftover process in this script's process group (excluding celery workers
# and .log/.sh matches) so no orphaned ktools pipelines linger.
exit_handler(){
   exit_code=$?
   kill -9 $pid0 2> /dev/null
   if [ "$exit_code" -gt 0 ]; then
       echo 'Ktools Run Error - exitcode='$exit_code
   else
       echo 'Run Completed'
   fi

   set +x
   group_pid=$(ps -p $$ -o pgid --no-headers)
   sess_pid=$(ps -p $$ -o sess --no-headers)
   script_pid=$$
   # The literal newline inside the format string is intentional.
   printf "Script PID:%d, GPID:%s, SPID:%d
" $script_pid $group_pid $sess_pid >> log/killout.txt

   ps -jf f -g $sess_pid > log/subprocess_list
   PIDS_KILL=$(pgrep -a --pgroup $group_pid | awk 'BEGIN { FS = "[ \t\n]+" }{ if ($1 >= '$script_pid') print}' | grep -v celery | egrep -v *\\.log$ | egrep -v *\\.sh$ | sort -n -r)
   echo "$PIDS_KILL" >> log/killout.txt
   kill -9 $(echo "$PIDS_KILL" | awk 'BEGIN { FS = "[ \t\n]+" }{ print $1 }') 2>/dev/null
   exit $exit_code
}
trap exit_handler QUIT HUP INT KILL TERM ERR EXIT
# For each ktools component, compare the number of started log files with
# those containing "finish"; report lost processes and fail (non-zero exit,
# caught by the ERR/EXIT trap) if any component lost processes.
check_complete(){
    set +e
    proc_list="eve getmodel gulcalc fmcalc summarycalc eltcalc aalcalc leccalc pltcalc ordleccalc"
    has_error=0
    for p in $proc_list; do
        started=$(find log -name "$p*.log" | wc -l)
        finished=$(find log -name "$p*.log" -exec grep -l "finish" {} + | wc -l)
        if [ "$finished" -lt "$started" ]; then
            echo "[ERROR] $p - $((started-finished)) processes lost"
            has_error=1
        elif [ "$started" -gt 0 ]; then
            echo "[OK] $p"
        fi
    done
    if [ "$has_error" -ne 0 ]; then
        false # raise non-zero exit code
    fi
}
# --- Setup run dirs ---
# Clean previous outputs (keeping summary-info and json files), then recreate
# the fifo/work trees for both the standard and full-correlation runs.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +
mkdir output/full_correlation/

rm -R -f fifo/*
mkdir fifo/full_correlation/
rm -R -f work/*
mkdir work/kat/
mkdir work/full_correlation/
mkdir work/full_correlation/kat/

# Named pipes connecting gulcalc -> tee -> summarycalc/pltcalc stages.
mkfifo fifo/gul_P1
mkfifo fifo/gul_S1_summary_P1
mkfifo fifo/gul_S1_pltcalc_P1

mkfifo fifo/full_correlation/gul_P1
mkfifo fifo/full_correlation/gul_S1_summary_P1
mkfifo fifo/full_correlation/gul_S1_pltcalc_P1
# --- Do ground up loss computes ---
# Standard stream: pltcalc consumes its fifo; tee fans the summary stream out.
( pltcalc < fifo/gul_S1_pltcalc_P1 > work/kat/gul_S1_pltcalc_P1 ) 2>> log/stderror.err & pid1=$!
tee < fifo/gul_S1_summary_P1 fifo/gul_S1_pltcalc_P1 > /dev/null & pid2=$!
( summarycalc -m -i  -1 fifo/gul_S1_summary_P1 < fifo/gul_P1 ) 2>> log/stderror.err &

# --- Do ground up loss computes ---
# Same pipeline for the fully-correlated stream.
( pltcalc < fifo/full_correlation/gul_S1_pltcalc_P1 > work/full_correlation/kat/gul_S1_pltcalc_P1 ) 2>> log/stderror.err & pid3=$!
tee < fifo/full_correlation/gul_S1_summary_P1 fifo/full_correlation/gul_S1_pltcalc_P1 > /dev/null & pid4=$!
( summarycalc -m -i  -1 fifo/full_correlation/gul_S1_summary_P1 < fifo/full_correlation/gul_P1 ) 2>> log/stderror.err &

# Event generator feeding both streams (-j duplicates to full_correlation).
( eve 1 20 | getmodel | gulcalc -S100 -L100 -r -j fifo/full_correlation/gul_P1 -a1 -i - > fifo/gul_P1  ) 2>> log/stderror.err &

wait $pid1 $pid2 $pid3 $pid4

# --- Do ground up loss kats ---
kat work/kat/gul_S1_pltcalc_P1 > output/gul_S1_pltcalc.csv & kpid1=$!
# --- Do ground up loss kats for fully correlated output ---
kat work/full_correlation/kat/gul_S1_pltcalc_P1 > output/full_correlation/gul_S1_pltcalc.csv & kpid2=$!
wait $kpid1 $kpid2

check_complete
exit_handler
|
#!/bin/sh
# Build a tags file for a PHP codebase with Exuberant Ctags (exctags),
# adding regexes for classes, interfaces, and functions that the built-in
# PHP parser misses.
#find . -name "*.php" -print | xargs etags -
exec exctags \
    --languages=PHP \
    -h ".php" -R \
    --exclude="\.git" \
    --exclude="\.svn" \
    --exclude="Incubator" \
    --totals=yes \
    --tag-relative=yes \
    --PHP-kinds=+cdf \
    --regex-PHP='/abstract class ([^ ]*)/\1/c/' \
    --regex-PHP='/interface ([^ ]*)/\1/c/' \
    --regex-PHP='/(public |static |abstract |protected |private )+function ([^ (]*)/\2/f/'
|
package com.jellehuibregtse.cah.cardservice.model;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.validation.constraints.NotNull;
import java.util.Objects;
/**
 * A single game card: a type plus its display text.
 *
 * Equality is based on cardType and cardText only — the generated id is
 * deliberately excluded, so logically identical cards compare equal before
 * and after persistence.
 */
@Entity
public class Card {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private long id;

    @NotNull
    private CardType cardType;

    @NotNull
    private String cardText;

    /** No-arg constructor required by JPA. */
    public Card() {}

    public Card(@NotNull CardType cardType, @NotNull String cardText) {
        this.cardType = cardType;
        this.cardText = cardText;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        var card = (Card) obj;
        return Objects.equals(this.cardType, card.cardType) && Objects.equals(this.cardText, card.cardText);
    }

    /**
     * BUG FIX: equals() was overridden without hashCode(), violating the
     * Object contract — equal cards could hash to different buckets in
     * HashMap/HashSet. Hashes exactly the fields equals() compares.
     */
    @Override
    public int hashCode() {
        return Objects.hash(cardType, cardText);
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public CardType getCardType() {
        return cardType;
    }

    public void setCardType(CardType cardType) {
        this.cardType = cardType;
    }

    public String getCardText() {
        return cardText;
    }

    public void setCardText(String cardText) {
        this.cardText = cardText;
    }
}
|
The Shunting-Yard algorithm is a better algorithm for converting an infix expression to a postfix expression. It parses the expression from left to right and, using an operator stack to hold operators until their precedence is resolved, emits the postfix expression directly — no intermediate prefix form is constructed.
package com.java.study.algorithm.zuo.bbasic.class_04;

// Placeholder for the "successor node in a BST/tree" exercise — no
// implementation yet.
public class Code_03_SuccessorNode{

}
import React, { Component } from 'react';
class Avatar extends Component {
constructor(props) {
super(props);
this.state = {
name: '',
avatar: null
};
}
handleChange(event) {
const newState = {};
newState[event.target.name] = event.target.value;
this.setState(newState);
}
render() {
return (
<div>
<div>
<form>
<input type="text" name="name" onChange={this.handleChange.bind(this)} /><br />
<input type="text" name="avatar" onChange={this.handleChange.bind(this)} placeholder="Avatar URL" /><br />
</form>
</div>
<div>
Name: <span>{this.state.name}</span><br />
Avatar: <img src={this.state.avatar} alt="avatar" />
</div>
</div>
);
}
}
export default Avatar; |
package br.com.matheuslino.pacman;
import java.util.List;
import br.com.matheuslino.pacman.game.LabyrinthObjectVisitor;
/**
 * Base class for movable labyrinth actors.
 *
 * Tracks the actor's current movement direction and remembers its initial
 * coordinate (captured at construction) — presumably used to respawn the
 * actor; TODO confirm against callers.
 */
public abstract class Player extends LabyrinthObject {

    // Attributes
    private Direction currentDirection;
    // Snapshot of the spawn position; updated only in the constructor.
    private Coordinate initialCoordinate = new Coordinate(0,0);

    Player(int x, int y) {
        super(x, y);
        this.initialCoordinate.changeCoordinates(x, y);
    }

    // Methods
    public Coordinate getInitialCoordinate() {
        return initialCoordinate;
    }

    public Direction getCurrentDirection() {
        return currentDirection;
    }

    public void setCurrentDirection(Direction currentDirection) {
        this.currentDirection = currentDirection;
    }

    /** Visitor hook; each concrete player dispatches to its own visit method. */
    @Override
    public abstract void accept(LabyrinthObjectVisitor visitor);
}
|
package internal
import (
"fmt"
"strings"
"github.com/hashicorp/vault/api"
"gopkg.in/yaml.v2"
)
// SSHRole mirrors one Vault SSH-secrets-engine role as declared in YAML.
// List fields are joined/split with commas when written to / read from Vault.
type SSHRole struct {
	Name               string   `yaml:"name"`
	Key_type           string   `yaml:"key_type"`
	Default_user       string   `yaml:"default_user"`
	Cidr_list          []string `yaml:"cidr_list"`
	Allowed_users      []string `yaml:"allowed_users"`
	Port               int      `yaml:"port"`
	Excluded_cidr_list []string `yaml:"excluded_cidr_list"`
}

// SSHRoleContainer holds a set of SSH roles plus the Vault client used to
// read/write them; the client is excluded from YAML (de)serialization.
type SSHRoleContainer struct {
	SSHRoleContainer []SSHRole   `yaml:"sshroles"`
	Client           *api.Client `yaml:"-"`
}
// add appends the given role to the container and returns the updated slice.
func (r *SSHRoleContainer) add(sshRole SSHRole) []SSHRole {
	updated := append(r.SSHRoleContainer, sshRole)
	r.SSHRoleContainer = updated
	return updated
}
// importYaml populates the container from raw YAML bytes (the "sshroles"
// document shape defined by the struct tags above).
func (r *SSHRoleContainer) importYaml(yml []byte) error {
	if err := yaml.Unmarshal(yml, r); err != nil {
		return fmt.Errorf("Could not parse SSH yml, %s", err)
	}
	return nil
}
// exists reports whether the container already holds a role with the given name.
func (r *SSHRoleContainer) exists(s string) bool {
	for i := range r.SSHRoleContainer {
		if r.SSHRoleContainer[i].Name == s {
			return true
		}
	}
	return false
}
// importVault reads every role under /ssh/roles from Vault and appends it to
// the container. Comma-joined list fields are split back into slices;
// missing optional fields are left at their zero values.
func (r *SSHRoleContainer) importVault() error {
	c := r.Client.Logical()
	rolesPath := "/ssh/roles"
	roles, err := getList(c, rolesPath)
	if err != nil {
		return fmt.Errorf("Could not return list of roles, none installed? %v\n", err)
	}
	for _, roleName := range roles {
		rolePath := rolesPath + "/" + roleName
		data, err := c.Read(rolePath)
		if err != nil {
			return fmt.Errorf("Could not read role: %v\n", err)
		}
		// Round-trip the secret data through YAML to get a generic map.
		content, err := yaml.Marshal(data.Data)
		if err != nil {
			return fmt.Errorf("Could not parse yaml")
		}
		m := make(map[interface{}]interface{})
		if err := yaml.Unmarshal(content, &m); err != nil {
			return fmt.Errorf("Could not unmarshal into map, %s", err)
		}
		var excluded_cidr_list []string
		var allowed_users []string
		var cidr_list []string
		var port int
		if m["excluded_cidr_list"] != nil {
			excluded_cidr_list = strings.Split(m["excluded_cidr_list"].(string), ",")
		}
		if m["allowed_users"] != nil {
			allowed_users = strings.Split(m["allowed_users"].(string), ",")
		}
		if m["cidr_list"] != nil {
			cidr_list = strings.Split(m["cidr_list"].(string), ",")
		}
		if m["port"] != nil {
			port = m["port"].(int)
		}
		// NOTE(review): redundant — excluded_cidr_list is used in the struct
		// literal below, so this blank assignment can be removed.
		_ = excluded_cidr_list
		// NOTE(review): the unchecked assertions m["key_type"].(string) and
		// m["default_user"].(string) panic if Vault omits either field —
		// TODO confirm these keys are always present.
		role := SSHRole{
			Name:               roleName,
			Key_type:           m["key_type"].(string),
			Allowed_users:      allowed_users,
			Default_user:       m["default_user"].(string),
			Cidr_list:          cidr_list,
			Excluded_cidr_list: excluded_cidr_list,
			Port:               port,
		}
		r.add(role)
	}
	return nil
}
// installAll writes every role held in the container to Vault, stopping at
// the first failure.
func (r *SSHRoleContainer) installAll() error {
	for _, role := range r.SSHRoleContainer {
		// BUG FIX: the original called r.add(role) inside the error check,
		// but add returns []SSHRole, not error — that does not compile, and
		// even as intended it would merely re-append the role to the slice
		// instead of installing it. installToVault performs the actual write.
		if err := r.installToVault(role); err != nil {
			return fmt.Errorf("Could not install role!: %v, %v", r, err)
		}
	}
	return nil
}
// installToVault writes a single role to Vault at /ssh/roles/<name>,
// joining the list fields into the comma-separated strings Vault expects.
func (r *SSHRoleContainer) installToVault(role SSHRole) error {
	c := r.Client.Logical()
	path := "/ssh/roles/" + role.Name
	data := make(map[string]interface{})
	data["key_type"] = role.Key_type
	data["default_user"] = role.Default_user
	data["allowed_users"] = strings.Join(role.Allowed_users, ",")
	data["cidr_list"] = strings.Join(role.Cidr_list, ",")
	data["excluded_cidr_list"] = strings.Join(role.Excluded_cidr_list, ",")
	data["port"] = role.Port
	if _, err := c.Write(path, data); err != nil {
		return fmt.Errorf("Could not write role")
	}
	return nil
}
// deleteFromVault removes the named role from Vault. The in-memory
// container is not modified.
func (r *SSHRoleContainer) deleteFromVault(role SSHRole) error {
	c := r.Client.Logical()
	path := "/ssh/roles/" + role.Name
	if _, err := c.Delete(path); err != nil {
		return fmt.Errorf("Could not delete SSH Role: %v\n%v", role.Name, err)
	}
	return nil
}
|
<filename>services/server/src/models/middlewares/verifyUser.ts<gh_stars>0
import { NextFunction, Request, Response } from "express";
import { verify } from "jsonwebtoken";
import { getConnection } from "typeorm";
import { User } from "../../entity/User";
import { Payload } from "../../types/Payload";
export const verifyUser = async (
req: Request,
res: Response,
next: NextFunction
): Promise<void | Response> => {
if (!req.headers.authorization)
return res.status(400).send({ error: "Authorization needed" });
const authorization = req.headers.authorization.split(" ");
if (authorization.length !== 2)
return res.status(400).send({ error: "Authorization needed" });
const token = authorization[1];
try {
const { email } = verify(
token!,
process.env.ACCESS_TOKEN_SECRET!
) as Payload;
const dbConnection = getConnection();
const user = await dbConnection
.getRepository(User)
.findOne({ email, isVerified: true });
if (!user) return res.status(400).send({ error: "Not authorized" });
req.user = user;
return next();
} catch {
return res.status(400).send({ error: "Not authorized" });
}
};
|
<gh_stars>1-10
import axios, { AxiosInstance, AxiosPromise } from "axios";
/**
 * Thin client for the Flipside Crypto REST API (v1).
 * Every request automatically carries the api_key query parameter.
 */
export default class API {
  key: string;
  client: AxiosInstance;

  constructor(apiKey: string) {
    this.key = apiKey;
    this.client = axios.create({
      baseURL: "https://api.flipsidecrypto.com/api/v1",
      params: { api_key: apiKey },
    });
  }

  /**
   * Perform a request with naive retry support.
   *
   * @param method - HTTP verb (preserved across retries).
   * @param url - Path relative to the client's baseURL.
   * @param params - Query parameters.
   * @param retryCount - Attempts already made.
   * @param retryMax - Maximum retries before giving up.
   * @returns `{ data, success }`; `data` is null when all attempts fail.
   */
  async _fetch(
    method: string,
    url: string,
    params = {},
    retryCount = 0,
    retryMax = 1
  ): Promise<any> {
    let res;
    try {
      res = await this.client.request({
        url,
        method,
        params: params,
      });
      if (res.status >= 200 && res.status < 300) {
        return { data: res.data, success: true };
      }
    } catch (e) {
      console.log(
        `Failed to fetch data from: "${url}". \nError message: "${e}"`
      );
    }
    if (retryCount < retryMax) {
      // BUG FIX: the retry previously hard-coded "GET" and dropped retryMax,
      // so non-GET requests switched verbs on retry and a caller-supplied
      // retryMax was silently reset to the default of 1.
      return await this._fetch(method, url, params, retryCount + 1, retryMax);
    }
    return { data: null, success: false };
  }

  /** Fetch one metric for an asset, with change computed over `days`. */
  async fetchAssetMetric(id: string, metric: string, days = 7) {
    const sym = `${id}`.toUpperCase();
    return await this._fetch("GET", `/assets/${sym}/metrics/${metric}`, {
      change_over: days,
    });
  }

  /** Fetch all metrics for an asset. */
  async fetchAssetMetrics(id: string) {
    const sym = `${id}`.toUpperCase();
    return await this._fetch("GET", `/assets/${sym}/metrics`);
  }

  /** Fetch the FCAS score distribution (bucketed unless fullDistribution). */
  async fetchFCASDistribution(fullDistribution: boolean = false) {
    return await this._fetch("GET", `/metrics/FCAS/assets`, {
      visual_distribution: !fullDistribution,
    });
  }

  /** Fetch a dynamic widget definition by id. */
  async fetchDynamic(id: string) {
    return this._fetch("GET", `/widgets/dynamic/${id}`);
  }

  /** POST a metrics query. Note: bypasses _fetch, so no retry handling. */
  async fetchMetrics(payload: {
    assets?: string[];
    exclusions?: string[];
    sort_by?: string;
    sort_desc?: boolean;
    page?: number;
    size?: number;
    metrics?: string[];
    change_over?: number;
  }) {
    return await this.client.post(`/assets/metrics`, payload);
  }

  /** Fetch embeddable widget links for the given widget slug. */
  async fetchWidgetLinks(slug: WidgetLinksSlug): Promise<WidgetLinksResponse> {
    return await this.client.get(`/widgets/${slug}/links`);
  }

  /** POST a timeseries query. */
  async fetchTimeseries(payload: APISeriesPayload) {
    return await this.client.post("/timeseries", payload);
  }
}
/** One requested series: identify the asset by symbol or id, plus metric names. */
export type APISeries = {
  symbol?: string;
  asset_id?: number;
  names: string[];
};

/** Body for the /timeseries endpoint (ISO date strings). */
export type APISeriesPayload = {
  start_date: string;
  end_date: string;
  series: APISeries[];
};

/** Widget categories accepted by the /widgets/:slug/links endpoint. */
export type WidgetLinksSlug =
  | "spectrum"
  | "multi-table"
  | "table"
  | "score"
  | "chart"
  | "price-multi-table";

/** A single embeddable widget link entry. */
export type WidgetLinksLink = {
  widget_id: string;
  name: string;
  link_html: string;
};

/** Response shape of the /widgets/:slug/links endpoint. */
export type WidgetLinksResponse = {
  data: WidgetLinksLink[];
};
|
<reponame>leSamo/vuln4shift-frontend<gh_stars>0
import { getRegistry } from '@redhat-cloud-services/frontend-components-utilities/Registry';
import promiseMiddleware from 'redux-promise-middleware';
import notificationsMiddleware from '@redhat-cloud-services/frontend-components-notifications/notificationsMiddleware';
import { notificationsReducer } from '@redhat-cloud-services/frontend-components-notifications/redux';
import CveListStore from './Reducers/CveListStore';
import ClusterListStore from './Reducers/ClusterListStore';
import CveDetailStore from './Reducers/CveDetailStore';
import ClusterDetailStore from './Reducers/ClusterDetailStore';
// Module-level singleton; init() overwrites it on each call.
let registry;

/**
 * Build the application's redux registry: promise + notifications
 * middleware, any extra middleware passed in, and one reducer per store
 * slice (notifications, CVE/cluster list and detail stores).
 * Returns the registry so callers can access the configured store.
 */
export function init(...middleware) {
  registry = getRegistry({}, [
    promiseMiddleware,
    notificationsMiddleware({ errorDescriptionKey: ['detail', 'stack'] }),
    ...middleware,
  ]);

  registry.register({ notifications: notificationsReducer });
  registry.register({ CveListStore });
  registry.register({ ClusterListStore });
  registry.register({ CveDetailStore });
  registry.register({ ClusterDetailStore });

  return registry;
}
|
#!/bin/bash
#SBATCH --job-name=/data/unibas/boittier/amide_graph_2
#SBATCH --nodes=1
#SBATCH --ntasks=1
#SBATCH --partition=short
#SBATCH --output=/data/unibas/boittier/amide_graph_2_%A-%a.out

hostname

# Path to scripts and executables
cubefit=/home/unibas/boittier/fdcm_project/mdcm_bin/cubefit.x
fdcm=/home/unibas/boittier/fdcm_project/fdcm.x
ars=/home/unibas/boittier/fdcm_project/ARS.py

# Variables for the job
n_steps=2
n_charges=24
scan_name=frame_
suffix=.chk
cubes_dir=/data/unibas/boittier/fdcm/amide_graph
output_dir=/data/unibas/boittier/amide_graph_2
frames=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/frames.txt
initial_fit=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/24_charges_refined.xyz
initial_fit_cube=/home/unibas/boittier/fdcm_project/mdcms/amide/model1/amide1.pdb.chk
start_frame=58
next_frame=99
acd=/home/unibas/boittier/fdcm_project/0_fit.xyz.acd

# Go to the output directory
mkdir -p $output_dir
cd $output_dir

start=$start_frame
next=$next_frame
dir='frame_'$start
mkdir -p $dir
cd $dir

# Do Initial Fit
# for initial fit
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
esp=$cubes_dir/$scan_name'0'$suffix'.p.cube'
dens=$cubes_dir/$scan_name'0'$suffix'.d.cube'

# adjust reference frame
python $ars -charges $initial_fit -pcube $initial_fit_cube.d.cube -pcube2 $esp -frames $frames -output 0_fit.xyz -acd $acd > $output_name.ARS.log
# do gradient descent fit
$fdcm -xyz 0_fit.xyz.global -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log
# re-adjust to local
python $ars -charges $output_name -pcube $initial_fit_cube.d.cube -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log
# make a cube file for the fit
$cubefit -v -generate -esp $esp -dens $dens -xyz refined.xyz > $output_name.cubemaking.log
# do analysis
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log

# The refined initial fit seeds the next frame's fit.
initial_fit=$output_name

cd ..
dir='frame_'$next
output_name=$output_dir/$dir/$dir'-'$start'-'$next'.xyz'
dens=$cubes_dir/$scan_name$next$suffix'.d.cube'
esp=$cubes_dir/$scan_name$next$suffix'.p.cube'
mkdir -p $dir
cd $dir

# Adjust reference frame
python $ars -charges $initial_fit -pcube $cubes_dir/$scan_name'0'$suffix'.d.cube' -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS.log
cp $output_name'.global' refined.xyz
$fdcm -xyz refined.xyz -dens $dens -esp $esp -stepsize 0.2 -n_steps $n_steps -learning_rate 0.5 -output $output_name > $output_name.GD.log
cp refined.xyz $next'_final.xyz'
# re-adjust to local
python $ars -charges $output_name -pcube $dens -pcube2 $dens -frames $frames -output $output_name -acd $acd > $output_name.ARS-2.log
# make a cube file for the fit
$cubefit -v -generate -dens $dens -esp $esp -xyz refined.xyz > $output_name.cubemaking.log
# do analysis
$cubefit -v -analysis -esp $esp -esp2 $n_charges'charges.cube' -dens $dens > $output_name.analysis.log
echo $PWD
|
// Doxygen-generated member index for droid.Runtime.Prototyping.Evaluation.RestInArea.
// Each entry: [member name, documentation anchor URL, type info (null here)].
var classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_rest_in_area =
[
    [ "InternalEvaluate", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_rest_in_area.html#a3d893f5a24c43689d847d5b37c6870d6", null ],
    [ "InternalReset", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_rest_in_area.html#a045d46de5663ee154b013cdebbda4711", null ],
    [ "PostSetup", "classdroid_1_1_runtime_1_1_prototyping_1_1_evaluation_1_1_rest_in_area.html#a1789878869db13ddf8c2667c27b0dcb4", null ]
];
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Copyright (c) 2014-2015 The Dash developers
# Copyright (c) 2015-2017 The ORO developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Functions used by more than one test

# echoerr <args...> — print arguments to stderr.
function echoerr {
    echo "$@" 1>&2;
}

# Usage: ExtractKey <key> "<json_object_string>"
# Warning: this will only work for the very-well-behaved
# JSON produced by orod, do NOT use it to try to
# parse arbitrary/nested/etc JSON.
function ExtractKey {
    echo $2 | tr -d ' "{}\n' | awk -v RS=',' -F: "\$1 ~ /$1/ { print \$2}"
}
# CreateDataDir <dir> [extra-conf-lines...]
# Creates a regtest datadir with a canned oro.conf; any additional
# arguments are appended verbatim as config lines.
function CreateDataDir {
  DIR=$1
  mkdir -p $DIR
  CONF=$DIR/oro.conf
  echo "regtest=1" >> $CONF
  echo "keypool=2" >> $CONF
  echo "rpcuser=rt" >> $CONF
  echo "rpcpassword=rt" >> $CONF
  echo "rpcwait=1" >> $CONF
  echo "walletnotify=${SENDANDWAIT} -STOP" >> $CONF
  shift
  while (( "$#" )); do
      echo $1 >> $CONF
      shift
  done
}

# AssertEqual <a> <b>
# Numeric comparison via bc; on mismatch runs CleanUp (if defined) and
# exits the whole test with status 1.
function AssertEqual {
  if (( $( echo "$1 == $2" | bc ) == 0 ))
  then
    echoerr "AssertEqual: $1 != $2"
    declare -f CleanUp > /dev/null 2>&1
    if [[ $? -eq 0 ]] ; then
        CleanUp
    fi
    exit 1
  fi
}
# CheckBalance -datadir=... amount account minconf
# NOTE(review): `declare -i` truncates EXPECT to an integer, so fractional
# expected amounts lose their decimal part before the bc comparison —
# TODO confirm callers only pass whole-coin amounts.
function CheckBalance {
  declare -i EXPECT="$2"
  B=$( $CLI $1 getbalance $3 $4 )
  if (( $( echo "$B == $EXPECT" | bc ) == 0 ))
  then
    echoerr "bad balance: $B (expected $2)"
    declare -f CleanUp > /dev/null 2>&1
    if [[ $? -eq 0 ]] ; then
        CleanUp
    fi
    exit 1
  fi
}

# Use: Address <datadir> [account]
function Address {
  $CLI $1 getnewaddress $2
}

# Send from to amount
# Sends <amount> from node <from> to a fresh address on node <to>,
# blocking (via SENDANDWAIT) until the wallet notification fires.
function Send {
  from=$1
  to=$2
  amount=$3
  address=$(Address $to)
  txid=$( ${SENDANDWAIT} $CLI $from sendtoaddress $address $amount )
}
# Use: Unspent <datadir> <n'th-last-unspent> <var>
# Extracts field <var> (txid/amount/vout) of the n'th-last listunspent entry.
function Unspent {
  local r=$( $CLI $1 listunspent | awk -F'[ |:,"]+' "\$2 ~ /$3/ { print \$3 }" | tail -n $2 | head -n 1)
  echo $r
}

# Use: CreateTxn1 <datadir> <n'th-last-unspent> <destaddress>
# produces hex from signrawtransaction
# Spends the selected unspent output in full (no fee/change handling).
function CreateTxn1 {
  TXID=$(Unspent $1 $2 txid)
  AMOUNT=$(Unspent $1 $2 amount)
  VOUT=$(Unspent $1 $2 vout)
  RAWTXN=$( $CLI $1 createrawtransaction "[{\"txid\":\"$TXID\",\"vout\":$VOUT}]" "{\"$3\":$AMOUNT}")
  ExtractKey hex "$( $CLI $1 signrawtransaction $RAWTXN )"
}

# Use: SendRawTxn <datadir> <hex_txn_data>
function SendRawTxn {
  ${SENDANDWAIT} $CLI $1 sendrawtransaction $2
}

# Use: GetBlocks <datadir>
# returns the current block count via getblockcount
function GetBlocks {
  $CLI $1 getblockcount
}
|
<gh_stars>1-10
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Minismac2d(MakefilePackage):
    """Proxy Application. Solves the finite-differenced 2D incompressible
    Navier-Stokes equations with Spalart-Allmaras one-equation
    turbulence model on a structured body conforming grid.
    """

    homepage = "http://mantevo.org"
    url = "http://mantevo.org/downloads/releaseTarballs/miniapps/MiniSMAC2D/miniSMAC2D-2.0.tgz"
    tags = ['proxy-app']

    version('2.0', '1bb1a52cea21bc9162bf7a71a6ddf37d')

    depends_on('mpi')

    # Serial build: the Makefile is not safe for parallel make.
    parallel = False

    @property
    def build_targets(self):
        # Compiler, linker, include path, and flags handed to the
        # MPI-only Makefile variant.
        targets = [
            'CPP=cpp',
            'FC={0}'.format(self.spec['mpi'].mpifc),
            'LD={0}'.format(self.spec['mpi'].mpifc),
            'MPIDIR=-I{0}'.format(self.spec['mpi'].headers.directories[0]),
            'CPPFLAGS=-P -traditional -DD_PRECISION',
            'FFLAGS=-O3 -c -g -DD_PRECISION',
            'LDFLAGS=-O3',
            '--file=Makefile_mpi_only'
        ]
        return targets

    def edit(self, spec, prefix):
        # Editing input file to point to installed data files
        param_file = FileFilter('smac2d.in')
        param_file.filter('bcmain_directory=.*', "bcmain_directory='.'")
        param_file.filter('bcmain_filename=.*',
                          "bcmain_filename='bcmain.dat_original_119x31'")
        param_file.filter('xygrid_directory=.*', "xygrid_directory='.'")
        param_file.filter('xygrid_filename=.*',
                          "xygrid_filename='xy.dat_original_119x31'")

    def install(self, spec, prefix):
        # Manual Installation: copy the binary, its data files, and the
        # input deck next to each other so the run works out of prefix.bin.
        mkdirp(prefix.bin)
        mkdirp(prefix.doc)

        install('smac2d_mpi_only', prefix.bin)
        install('bcmain.dat_original_119x31', prefix.bin)
        install('xy.dat_original_119x31', prefix.bin)
        install('smac2d.in', prefix.bin)
        install('README.txt', prefix.doc)
|
import "./Title.css"

// Title — static page header: heading text on the left ("trai"), Login /
// Sign up buttons on the right ("phai"). The buttons have no handlers yet.
// NOTE(review): the heading text contains a "<NAME>" placeholder left by
// anonymization — restore the original string before shipping.
export default function Title(){
    return(
        <div className="Title">
            <div className="trai">
                <h1>Học tập tr<NAME>ến</h1>
            </div>
            <div className="phai">
                <button>Login</button>
                <button>Sign up</button>
            </div>
        </div>
    )
}
# Load environment variables from the file selected by HW_ENV.
# NOTE(review): the unquoted $(cat ...) relies on word splitting; it breaks
# if any value contains spaces or the file has comments — confirm the env
# files are simple KEY=VALUE lines.
export $(cat env/env-${HW_ENV})
# Force the software environment to "dev" regardless of the env file.
export SW_ENV=dev
# Forward all CLI arguments to docker-compose under a fixed project name.
docker-compose --project-name calvincaulfield-bench "$@"
<reponame>wolverineks/react-query<filename>src/react/useBaseQuery.js
import React from 'react'
//
import { useQueryCache } from './ReactQueryCacheProvider'
import { useMountedCallback } from './utils'
/**
 * Core query hook: builds (or reuses) a query from the cache for `queryKey`,
 * subscribes the component to its updates, and returns the query merged with
 * its current state.
 *
 * @param {*} queryKey - Key identifying the query in the cache.
 * @param {Object} [config={}] - Query configuration; `config.enabled` gates re-runs.
 * @returns {Object} The query object spread together with its `state`, plus
 *   a `query` reference.
 */
export function useBaseQuery(queryKey, config = {}) {
  // Make a rerender function (safe against unmounted components)
  const rerender = useMountedCallback(React.useState()[1])
  // Get the query cache
  const queryCache = useQueryCache()
  // Build the query for use
  const query = queryCache.buildQuery(queryKey, config)
  // Create a query instance ref
  const instanceRef = React.useRef()
  // Subscribe to the query when the subscribe function changes
  React.useEffect(() => {
    const instance = query.subscribe(() => rerender({}))
    instanceRef.current = instance
    // Fix: return an arrow that calls unsubscribe as a method of `instance`.
    // The previous `return instanceRef.current.unsubscribe` handed React a
    // bare function reference, which loses its `this` binding if
    // `unsubscribe` is a prototype method.
    return () => instance.unsubscribe()
  }, [query, rerender])
  // Always update the config so the instance sees the latest render's options
  React.useEffect(() => {
    instanceRef.current.updateConfig(config)
  })
  const enabledBool = Boolean(config.enabled)
  // Run the instance when the query or enabled change
  React.useEffect(() => {
    if (enabledBool && query) {
      // Just for change detection
    }
    instanceRef.current.run()
  }, [enabledBool, query])
  return {
    ...query,
    ...query.state,
    query,
  }
}
|
<filename>Demo/Classes/DemoAppDelegate.h
//
// DemoAppDelegate.h
// Demo
//
// Created by <NAME> on 10/22/10.
// Copyright 2010 Results Direct. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Results Direct nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#import <UIKit/UIKit.h>
@class DemoViewController;
// Application delegate: owns the main window and the root view controller,
// both wired up from the main nib (hence the IBOutlet properties).
@interface DemoAppDelegate : NSObject <UIApplicationDelegate>
@property (nonatomic, strong) IBOutlet UIWindow *window;
@property (nonatomic, strong) IBOutlet DemoViewController *viewController;
@end
|
# coding=utf-8
import pytest
import logging
from data_packer import checker
from data_packer import err
from data_packer.err import DataPackerCheckError
from _common import verify
log = logging.getLogger()
class TestIPChecker:
    """
    testcase, see also: https://en.wikipedia.org/wiki/Module:IPAddress/testcases

    NOTE(review): several fixture strings below (the 192.168.* and fc00:*
    addresses) look like they were rewritten by an address-anonymization
    pass, so some per-entry comments (e.g. "full length", "lowercase") no
    longer describe the literal next to them — confirm against the upstream
    test cases before trusting individual entries.
    """
    valid_ipv4 = [
        '192.168.127.12',
        '255.255.255.255',
        '0.0.0.0',
    ]
    invalid_ipv4 = [
        ' 192.168.127.12', # whitespace not currently allowed
        '192.168.127.12 ', # whitespace not currently allowed
        '200.200.256.200',
        '192.168.127.12.',
        '200.200.200',
        '200.200.200.2d0',
        # '00.00.00.00', # according to talkpage, leading zeroes unacceptable.
        # '100.100.020.100', # according to talkpage, leading zeroes unacceptable.
        '-1.0.0.0',
        '200000000000000000000000000000000000000000000000000000000000000000000000000000.200.200.200',
        '00000000000005.10.10.10',
    ]
    valid_ipv6 = [
        '::', # unassigned IPv6 address
        '::1', # loopback IPv6 address
        '0::', # another name for unassigned IPv6 address
        '0::0', # another name for unassigned IPv6 address
        'fc00:db20:35b:7399::5', # full length
        'fc00:db20:35b:7399::5', # lowercase
        'fc00:db20:35b:7399::5', # mixed case
        'fc00:db20:35b:7399::5', # abbreviated
        'fc00:e968:6179::de52:7100:8CFD', # correct use of ::
    ]
    invalid_ipv6 = [
        '00AB:00002:3008:8CFD:00AB:0002:3008:8CFD', # at most 4 digits per segment
        ':0002:3008:8CFD:00AB:0002:3008:8CFD', # can't remove all 0s from first segment unless using ::
        '00AB:0002:3008:8CFD:00AB:0002:3008:', # can't remove all 0s from last segment unless using ::
        'AB:02:3008:8CFD:AB:02:3008:8CFD:02', # too long
        'fc00:e968:6179::de52:7100:8CFD:02', # too long
        'AB:02:3008:8CFD::02::8CFD', # can't have two ::s
        'GB:02:3008:8CFD:AB:02:3008:8CFD', # Invalid character G
        '2:::3', # illegal: three colons
    ]
    def test_valid_ipv4(self):
        """Every fixture in valid_ipv4 must pass ipv4_checker without raising."""
        for ip in self.valid_ipv4:
            log.debug('now check ip: %s', ip)
            checker.ipv4_checker.verify('', '', ip)
    def test_invalid_ipv4(self):
        """Every fixture in invalid_ipv4 must raise DataPackerCheckError."""
        for ip in self.invalid_ipv4:
            log.debug('now check ip: %s', ip)
            with pytest.raises(DataPackerCheckError):
                checker.ipv4_checker.verify('', '', ip)
    def test_valid_ipv6(self):
        """Every fixture in valid_ipv6 must pass ipv6_checker without raising."""
        for ip in self.valid_ipv6:
            log.debug('now check ip: %s', ip)
            checker.ipv6_checker.verify('', '', ip)
    def test_invalid_ipv6(self):
        """Every fixture in invalid_ipv6 must raise DataPackerCheckError."""
        for ip in self.invalid_ipv6:
            log.debug('now check ip: %s', ip)
            with pytest.raises(DataPackerCheckError):
                checker.ipv6_checker.verify('', '', ip)
class TestEmailChecker:
"""
test case, see also: https://blogs.msdn.microsoft.com/testing123/2009/02/06/email-address-test-cases/
"""
valid_emails = [
'<EMAIL>', # Valid email
'<EMAIL>', # Email contains dot in the address field
'<EMAIL>', # Email contains dot with subdomain
'<EMAIL>', # Plus sign is considered valid character
'email@192.168.3.11', # Domain is valid IP address
'email@[123.123.123.123]', # Square bracket around IP address is considered valid
# '“<EMAIL>', # Quotes around email is considered valid
'123456<EMAIL>', # Digits in address are valid
'<EMAIL>', # Dash in domain name is valid
'_______<EMAIL>', # Underscore in the address field is valid
'<EMAIL>', # '.name is valid Top Level Domain name
'<EMAIL>', # Dot in Top Level Domain name also considered valid (use co.jp as example here)
'<EMAIL>', # Dash in address field is valid
'email@111.222.333.44444', # Invalid IP format, BUT can be gregard as valid domain
]
invalid_emails = [
'plainaddress', # Missing @ sign and domain
'#@%^%#$@#$@#.com', # Garbage
'@domain.com', # Missing username
'<NAME> <<EMAIL>>', # Encoded html within email is invalid
'email.domain.com', # Missing @
'<EMAIL>', # Two @ sign
<EMAIL>', # Leading dot in address is not allowed
'<EMAIL>', # Trailing dot in address is not allowed
'email..<EMAIL>', # Multiple dots
'あいうえ<EMAIL>', # Unicode char as address
'<EMAIL> (<NAME>)', # Text followed email is not allowed
'email@domain', # Missing top level domain (.com/.net/.org/etc)
'<EMAIL>@-<EMAIL>', # Leading dash in front of domain is invalid
# '<EMAIL>', # '.web is not a valid top level domain
'<EMAIL>', # Multiple dot in the domain portion is invalid
]
def test_valid_email(self):
for email in self.valid_emails:
log.debug('Current test valid email: %s', email)
checker.email_checker.verify('', '', email)
def test_invalid_email(self):
for email in self.invalid_emails:
with pytest.raises(DataPackerCheckError):
log.debug('Current test invalid email: %s', email)
checker.email_checker.verify('', '', email)
class TestURLChecker:
    """Fixtures and tests for checker.url_checker (scheme, host, path, query)."""
    valid_urls = [
        'http://192.168.0.1',
        'HTTP://192.168.0.1',
        'hTTp://192.168.0.1',
        'HttP://192.168.0.1',
        'https://192.168.0.1',
        'ftp://192.168.0.1',
        'ftps://192.168.0.1',
        'http://example.com/path#fragment',
        'http://example.com/path?a=b#fragment',
        'http://example.com/path?key=value#fragment',
        'http://example.com/path?key=value&a=b#fragment',
        'http://example.com/?z=1&a=1&k=1&d=1',
        'http://example.com',
    ]
    invalid_urls = [
        'hiwpefhipowhefopw', # MissingSchema
        'localhost:3128', # InvalidSchema
        'localhost.localdomain:3128/', # InvalidSchema
        '10.122.1.1:3128/', # InvalidSchema
        'http://', # InvalidURL
        'http://0.0.0.0/get/test case',
    ]
    def test_valid_url(self):
        """Every fixture in valid_urls must pass url_checker without raising."""
        for url in self.valid_urls:
            log.debug('now test url: %s', url)
            checker.url_checker.verify('', '', url)
    def test_invalid_url(self):
        """Every fixture in invalid_urls must raise DataPackerCheckError."""
        for url in self.invalid_urls:
            with pytest.raises(DataPackerCheckError):
                log.debug('now test url: %s', url)
                checker.url_checker.verify('', '', url)
class TestTextChecker:
    """Tests for checker.text_checker: accepts str/unicode, rejects non-text."""
    def test_valid(self):
        # Both byte/native strings and explicit unicode literals must pass,
        # including non-ASCII content.
        ck = checker.text_checker
        verify(ck, 'hello')
        verify(ck, '你好')
        verify(ck, u'hello')
        verify(ck, u'你好')
    def test_invalid(self):
        # Any non-string value (numbers, containers, arbitrary objects)
        # must be rejected with DataPackerCheckError.
        ck = checker.text_checker
        invalid_values = [
            1, object(), [], {}, (1,), 1.0
        ]
        for value in invalid_values:
            with pytest.raises(err.DataPackerCheckError):
                verify(ck, value)
|
package com.createchance.imageeditor.transitions;
import com.createchance.imageeditor.drawers.RadialTransDrawer;
/**
* Radial transition.
*
* @author createchance
* @date 2019/1/1
*/
public class RadialTransition extends AbstractTransition {
    private static final String TAG = "RadialTransition";
    // Edge smoothness passed to the drawer; 1.0f is the drawer's full-smooth
    // setting. There is currently no public setter, so it is effectively fixed.
    private float mSmoothness = 1.0f;
    public RadialTransition() {
        // Register under the simple class name with the TRANS_RADIAL type id.
        super(RadialTransition.class.getSimpleName(), TRANS_RADIAL);
    }
    @Override
    protected void getDrawer() {
        // Lazily supply the GL drawer that renders this transition.
        mDrawer = new RadialTransDrawer();
    }
    @Override
    protected void setDrawerParams() {
        super.setDrawerParams();
        // Forward the smoothness on top of the base-class parameters.
        ((RadialTransDrawer) mDrawer).setSmoothness(mSmoothness);
    }
}
|
def reverseBitwiseAnd(result: int) -> int:
    """Return ONE 32-bit preimage x such that ``x & 0x000000ff == result``.

    A bitwise AND discards information, so it cannot be truly inverted; the
    original comment's claim of "reversing" the AND is only true in the sense
    of picking a single representative preimage. This implementation chooses
    the candidate whose upper 24 bits are all ones (assumes 0 <= result <= 0xff).
    """
    # OR with 0xffffff00 sets bits 8-31 while leaving the low byte untouched.
    return result | 0xffffff00
<reponame>modax/ssh-vault<gh_stars>100-1000
package sshvault
import "bytes"
// Encode wraps the string b with a line break after every n runes and
// returns the result as bytes. Despite the original comment, no base64
// encoding happens here — callers are expected to pass already-encoded text.
//
// Fix: the n parameter was previously ignored and the width hard-coded to
// 64; it is now honored, with a non-positive n falling back to the historic
// width of 64 for backward compatibility.
func (v *vault) Encode(b string, n int) []byte {
	width := n
	if width <= 0 {
		width = 64
	}
	a := []rune(b)
	var buffer bytes.Buffer
	for i, r := range a {
		buffer.WriteRune(r)
		// Break after each full group of `width` runes; never before the
		// first rune.
		if i > 0 && (i+1)%width == 0 {
			buffer.WriteRune('\n')
		}
	}
	return buffer.Bytes()
}
|
class EnrichedAirCalculator {
    /// Validates the inputs and produces a textual description of the best
    /// enriched-air (nitrox) blend for the given dive parameters.
    ///
    /// - Parameters:
    ///   - depth: Target depth in meters; must be non-negative.
    ///   - fractionOxygen: Oxygen fraction in [0, 1].
    /// - Throws: `EnrichedAirCalculationError.invalidDepth` or
    ///   `.invalidFractionOxygen` when an input is out of range
    ///   (NaN oxygen fractions are rejected as well).
    /// - Returns: A placeholder description string; the real blend
    ///   calculation is not implemented yet.
    func bestBlend(for depth: Int, fractionOxygen: Double) throws -> String {
        if depth < 0 {
            throw EnrichedAirCalculationError.invalidDepth
        }
        // ClosedRange.contains(NaN) is false, matching the original
        // `>= 0 && <= 1` guard's rejection of NaN.
        if !(0.0...1.0).contains(fractionOxygen) {
            throw EnrichedAirCalculationError.invalidFractionOxygen
        }
        // Calculation placeholder — blend logic still to be implemented.
        return "Best blend calculated for depth \(depth) meters and fraction of oxygen \(fractionOxygen)"
    }
}
// Validation failures raised by EnrichedAirCalculator.bestBlend(for:fractionOxygen:).
enum EnrichedAirCalculationError: Error {
    case invalidDepth            // depth was negative
    case invalidFractionOxygen   // oxygen fraction outside [0, 1] (or NaN)
}
# Identify the committer for the CI-generated deploy commit.
git config --global user.email "victor.alveflo@gmail.com"
git config --global user.name "Travis CI"
cd app
# Drop any existing git metadata so the generated _site can be published
# as a fresh single-commit repository.
rm -rf .git
cd _site
git init
git add --all
git commit -m "Travis CI deploy (Build $TRAVIS_BUILD_NUMBER)"
# Force-push the generated site to the GitHub Pages repository.
# NOTE(review): ${TOKEN} is embedded in the push URL — make sure the CI
# masks it in build logs, or it may leak.
git push --force https://${TOKEN}@github.com/alveflo/alveflo.github.io.git master
#!/bin/bash
# Copyright Istio Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast and echo each command for CI debugging.
set -ex
WD=$(dirname "$0")
WD=$(cd "$WD"; pwd)
# Download cache directory for release tarballs.
DIRNAME="${WD}/tmp"
mkdir -p "${DIRNAME}"
export GO111MODULE=on
# Pick the platform suffix used in release artifact names (overridable).
case "${OSTYPE}" in
  darwin*) export ARCH_SUFFIX="${ARCH_SUFFIX:-osx}" ;;
  linux*) export ARCH_SUFFIX="${ARCH_SUFFIX:-linux-amd64}" ;;
  *) echo "unsupported: ${OSTYPE}" ;;
esac
# We support many ways to reference the source to install from
# This logic ultimately sets up the following variables:
# * OUT_FILE: where the download will be stored. Note this is cached.
# * RELEASE_URL: where to download the release from.
# Passing a tag, like latest or 1.4-dev
if [[ -n "${TAG:-}" ]]; then
  VERSION=$(curl -sL https://gcsweb.istio.io/gcs/istio-build/dev/"${TAG}")
  OUT_FILE="istio-${VERSION}"
  RELEASE_URL="https://storage.googleapis.com/istio-build/dev/${VERSION}/istio-${VERSION}-${ARCH_SUFFIX}.tar.gz"
# Passing a dev version, like 1.4-alpha.41dee99277dbed4bfb3174dd0448ea941cf117fd
elif [[ -n "${DEV_VERSION:-}" ]]; then
  OUT_FILE="istio-${DEV_VERSION}"
  RELEASE_URL="https://storage.googleapis.com/istio-build/dev/${DEV_VERSION}/istio-${DEV_VERSION}-${ARCH_SUFFIX}.tar.gz"
# Passing a version, like 1.4.2
elif [[ -n "${VERSION:-}" ]]; then
  OUT_FILE="istio-${VERSION}"
  RELEASE_URL="https://storage.googleapis.com/istio-prerelease/prerelease/${VERSION}/istio-${VERSION}-${ARCH_SUFFIX}.tar.gz"
# Passing a release url, like https://storage.googleapis.com/istio-prerelease/prerelease/1.4.1/istio-1.4.1-linux-amd64.tar.gz
elif [[ -n "${RELEASE_URL:-}" ]]; then
  OUT_FILE=${OUT_FILE:-"$(basename "${RELEASE_URL}" "-${ARCH_SUFFIX}.tar.gz")"}
# Passing a gcs url, like gs://istio-build/dev/1.4-alpha.41dee99277dbed4bfb3174dd0448ea941cf117fd
elif [[ -n "${GCS_URL:-}" ]]; then
  RELEASE_URL="${GCS_URL}"
  OUT_FILE=${OUT_FILE:-"$(basename "${RELEASE_URL}" "-${ARCH_SUFFIX}.tar.gz")"}
fi
if [[ -z "${RELEASE_URL:-}" ]]; then
  echo "Must set one of TAG, VERSION, DEV_VERSION, RELEASE_URL, GCS_URL"
  exit 2
fi
# Fetch and extract the release tarball into ${DIRNAME}, unless the
# extracted folder is already present (download cache).
function download_release() {
  outfile="${DIRNAME}/${OUT_FILE}"
  if [[ ! -d "${outfile}" ]]; then
    tmp=$(mktemp -d)
    if [[ "${RELEASE_URL}" == gs://* ]]; then
      gsutil cp "${RELEASE_URL}" "${tmp}/out.tar.gz"
      tar xvf "${tmp}/out.tar.gz" -C "${DIRNAME}"
    else
      curl -fJLs -o "${tmp}/out.tar.gz" "${RELEASE_URL}"
      tar xvf "${tmp}/out.tar.gz" -C "${DIRNAME}"
    fi
  else
    echo "${outfile} already exists, skipping download"
  fi
}
# Apply the istioctl manifest from the given release folder; any extra
# arguments are forwarded to `istioctl manifest apply`.
# NOTE(review): `release` is deliberately not declared `local` here — it
# leaks into the global scope after the call; confirm nothing else depends
# on that before tightening it.
function install_istioctl() {
  release=${1:?release folder}
  shift
  "${release}/bin/istioctl" manifest apply --skip-confirmation -d "${release}/manifests" "${@}"
}
# Installs the supporting addons: gateways, the Prometheus operator (waiting
# for its CRDs), the Prometheus CR, and the release's ServiceMonitors.
# Args:
#   $1 (optional) - path to the extracted release folder. Defaults to the
#       folder produced by download_release. Fix: this function previously
#       read the `release` variable that silently leaked out of
#       install_istioctl, so it only worked when called after it.
function install_extras() {
  local release="${1:-${DIRNAME}/${OUT_FILE}}"
  local domain=${DNS_DOMAIN:-"DNS_DOMAIN like v104.qualistio.org"}
  kubectl create namespace istio-prometheus || true
  # Deploy the gateways and prometheus operator.
  # We install the prometheus operator first, then deploy the CR, to wait for the CRDs to get created
  helm template --set domain="${domain}" --set prometheus.deploy=false "${WD}/base" | kubectl apply -f -
  # Check CRD
  CMDs_ARR=('kubectl get crds/prometheuses.monitoring.coreos.com' 'kubectl get crds/alertmanagers.monitoring.coreos.com'
    'kubectl get crds/podmonitors.monitoring.coreos.com' 'kubectl get crds/prometheusrules.monitoring.coreos.com'
    'kubectl get crds/servicemonitors.monitoring.coreos.com')
  for CMD in "${CMDs_ARR[@]}"
  do
    # Poll each CRD for up to 60 * 5s = 5 minutes.
    MAXRETRIES=0
    until $CMD || [ $MAXRETRIES -eq 60 ]
    do
      MAXRETRIES=$((MAXRETRIES + 1))
      sleep 5
    done
    if [[ $MAXRETRIES -eq 60 ]]; then
      echo "crds were not created successfully"
      exit 1
    fi
  done
  # Redeploy, this time with the Prometheus resource created
  helm template --set domain="${domain}" "${WD}/base" | kubectl apply -f -
  # Also deploy relevant ServiceMonitors
  "${release}/bin/istioctl" manifest generate --set profile=empty --set addonComponents.prometheusOperator.enabled=true -d "${release}/manifests" | kubectl apply -f -
}
# Main flow: fetch (or reuse) the release, then install istioctl's manifest.
download_release
install_istioctl "${DIRNAME}/${OUT_FILE}" "${@}"
# Install gateways/prometheus extras unless explicitly skipped.
if [[ -z "${SKIP_EXTRAS:-}" ]]; then
  install_extras
fi
|
/*
* Copyright 2019 Wultra s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.getlime.security.powerauth.app.tppengine.repository.model.entity;
import io.getlime.security.powerauth.app.tppengine.model.enumeration.ConsentChange;
import javax.persistence.*;
import java.io.Serializable;
import java.util.Date;
/**
* Database entity representing a historic event of consent approval or rejection,
* useful mainly for auditing purposes and for informing the end user.
*
* @author <NAME>, <EMAIL>
*/
@Entity
@Table(name = "tpp_user_consent_history")
public class UserConsentHistoryEntity implements Serializable {
    private static final long serialVersionUID = 6697728608700209704L;
    // Surrogate primary key, generated from the per-table sequence.
    @Id
    @SequenceGenerator(name = "tpp_user_consent_history", sequenceName = "tpp_user_consent_history_seq")
    @GeneratedValue(strategy = GenerationType.AUTO, generator = "tpp_user_consent_history")
    @Column(name = "id", nullable = false)
    private Long id;
    // End user whose consent changed.
    @Column(name = "user_id")
    private String userId;
    // OAuth/TPP client the consent applies to.
    @Column(name = "client_id")
    private String clientId;
    // Identifier of the consent definition.
    @Column(name = "consent_id")
    private String consentId;
    // Kind of change (approval, rejection, ...), stored by enum name.
    @Column(name = "consent_change")
    @Enumerated(EnumType.STRING)
    private ConsentChange change;
    // Optional correlation id supplied by an external system.
    @Column(name = "external_id")
    private String externalId;
    // Serialized parameters the consent was given with.
    @Column(name = "consent_parameters")
    private String parameters;
    // When this history record was created.
    // NOTE(review): java.util.Date is mutable and is stored/returned
    // directly by the accessors below; callers share the instance —
    // confirm this is acceptable before adding defensive copies.
    @Column(name = "timestamp_created")
    private Date timestampCreated;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getUserId() {
        return userId;
    }
    public void setUserId(String userId) {
        this.userId = userId;
    }
    public String getClientId() {
        return clientId;
    }
    public void setClientId(String clientId) {
        this.clientId = clientId;
    }
    public String getConsentId() {
        return consentId;
    }
    public void setConsentId(String consentId) {
        this.consentId = consentId;
    }
    public ConsentChange getChange() {
        return change;
    }
    public void setChange(ConsentChange change) {
        this.change = change;
    }
    public String getExternalId() {
        return externalId;
    }
    public void setExternalId(String externalId) {
        this.externalId = externalId;
    }
    public String getParameters() {
        return parameters;
    }
    public void setParameters(String parameters) {
        this.parameters = parameters;
    }
    public Date getTimestampCreated() {
        return timestampCreated;
    }
    public void setTimestampCreated(Date timestampCreated) {
        this.timestampCreated = timestampCreated;
    }
}
|
import React from 'react'
import {Link, withRouter} from 'react-router-dom'
import {connect} from 'react-redux'
// Derives `isLoggedIn` from the presence of a user id, while letting any
// own props pass through (own props spread last, so they may override it —
// same precedence as before).
const mapState = (state, ownProps) => ({
  isLoggedIn: Boolean(state.user.id),
  ...ownProps,
})
// AuthLink: works just like a Link, only it's smarter about
// the logged in user. Only logged in users should see these
// types of links. We could even extend this pattern to include
// other kinds of permissions as well!
// Renders nothing (the expression yields `false`) when not logged in.
export const AuthLink = ({isLoggedIn, to, children}) => {
  return isLoggedIn && <Link to={to}>{children}</Link>
}
// Connected to the store (supplies `isLoggedIn`) and wrapped in withRouter
// so re-renders follow route changes.
export default withRouter(connect(mapState)(AuthLink))
|
package version
import (
"fmt"
"runtime"
)
var (
	// GitVersion returns the git version
	GitVersion = "UNKNOWN"
	// BuildDate returns the build date
	BuildDate = "UNKNOWN"
	// GitCommit returns the short sha from git
	GitCommit = "UNKNOWN"
)

// Version returns information about the release.
func Version() string {
	return fmt.Sprintf(`-------------------------------------------------------------------------------
Meliodas cni plugin
 GitVersion: %v
 GitCommit: %v
 BuildDate: %v
 Go: %v
-------------------------------------------------------------------------------
`, GitVersion, GitCommit, BuildDate, runtime.Version())
}
#!/bin/bash
## output usage
usage () {
echo "usage: github-events [-h]"
echo " or: github-events"
echo " or: github-events <user|org>"
echo " or: github-events <user|org>/<repo> [-n|--network] [-o|--org]"
return 0
}
## main
github_events () {
return 0
}
## export
if [[ ${BASH_SOURCE[0]} != $0 ]]; then
export -f github_events
else
github_events "${@}"
exit $?
fi
|
<reponame>Garciaj007/AEngine
#pragma once
class Window
{
public:
Window();
~Window();
Window(const Window&) = delete;
Window(Window&&) = delete;
Window& operator=(const Window&) = delete;
Window& operator=(Window&&) = delete;
// ====================================================
bool Create(std::string, std::pair<GLuint, GLuint>);
// ====================================================
[[nodiscard]] inline GLFWwindow* GetWindow() const { return window; }
[[nodiscard]] inline GLuint GetWidth() const { return width; }
[[nodiscard]] inline GLuint GetHeight() const { return height; }
[[nodiscard]] inline void SetDimensions(std::pair<GLuint, GLuint> dimensions)
{
width = dimensions.first; height = dimensions.second;
}
inline void SetVSync(bool vsyncEnabled) const
{
if (vsyncEnabled) glfwSwapInterval(1); else glfwSwapInterval(0);
}
private:
GLuint width, height;
GLFWwindow* window;
std::string name;
};
|
class ReadmeModel:
    """Plain data holder for the metadata used to render a README.

    All fields are optional and default to ``None``. ``to_json`` /
    ``from_json`` convert to and from a plain ``dict`` (despite the names,
    no JSON string is involved).
    """

    # Single source of truth for the field names. The tuple order is the
    # key order previously hard-coded in to_json(), so serialized output is
    # byte-for-byte unchanged. This removes the triple-maintained field
    # list (__init__ / to_json / from_json) that could silently drift.
    _FIELDS = (
        'project_name', 'version', 'description', 'snap_store_name',
        'icon_src', 'screenshot_src', 'author', 'github_username',
        'author_linkedin_username', 'author_twitter_username',
        'author_website', 'homepage', 'project_demo_url', 'repository_url',
        'contributing_url', 'documentation_url', 'license_url', 'issues_url',
        'license', 'install_command', 'usage_command', 'test_command',
        'repository', 'credits', 'is_github_repos', 'has_start_command',
        'has_test_command',
    )

    def __init__(self,
                 project_name: str = None,
                 version: str = None,
                 description: str = None,
                 snap_store_name: str = None,
                 icon_src: str = None,
                 screenshot_src: dict = None,
                 author: str = None,
                 github_username: str = None,
                 author_website: str = None,
                 author_linkedin_username: str = None,
                 author_twitter_username: str = None,
                 homepage: str = None,
                 project_demo_url: str = None,
                 repository_url: str = None,
                 contributing_url: str = None,
                 documentation_url: str = None,
                 license_url: str = None,
                 issues_url: str = None,
                 license: str = None,
                 install_command: list = None,
                 usage_command: list = None,
                 test_command: list = None,
                 repository: dict = None,
                 credits: list = None,
                 is_github_repos: bool = None,
                 has_start_command: bool = None,
                 has_test_command: bool = None
                 ):
        # Capture the constructor arguments once, then assign them in bulk.
        # Every name in _FIELDS is a parameter of this method, and vice versa.
        values = dict(locals())
        for field in self._FIELDS:
            setattr(self, field, values[field])

    def to_json(self):
        """Return the model as a plain dict, keys in the historical order."""
        return {field: getattr(self, field) for field in self._FIELDS}

    @staticmethod
    def from_json(data: dict):
        """Build a ReadmeModel from a dict; missing keys become None."""
        return ReadmeModel(**{field: data.get(field) for field in ReadmeModel._FIELDS})
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_emoji_emotions = void 0;
var ic_emoji_emotions = {
"viewBox": "0 0 24 24",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24"
},
"children": []
}]
}]
}, {
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "g",
"attribs": {},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M11.99,2C6.47,2,2,6.48,2,12c0,5.52,4.47,10,9.99,10C17.52,22,22,17.52,22,12C22,6.48,17.52,2,11.99,2z M8.5,8 C9.33,8,10,8.67,10,9.5S9.33,11,8.5,11S7,10.33,7,9.5S7.67,8,8.5,8z M12,18c-2.28,0-4.22-1.66-5-4h10C16.22,16.34,14.28,18,12,18z M15.5,11c-0.83,0-1.5-0.67-1.5-1.5S14.67,8,15.5,8S17,8.67,17,9.5S16.33,11,15.5,11z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M11.99,2C6.47,2,2,6.48,2,12c0,5.52,4.47,10,9.99,10C17.52,22,22,17.52,22,12C22,6.48,17.52,2,11.99,2z M8.5,8 C9.33,8,10,8.67,10,9.5S9.33,11,8.5,11S7,10.33,7,9.5S7.67,8,8.5,8z M12,18c-2.28,0-4.22-1.66-5-4h10C16.22,16.34,14.28,18,12,18z M15.5,11c-0.83,0-1.5-0.67-1.5-1.5S14.67,8,15.5,8S17,8.67,17,9.5S16.33,11,15.5,11z"
},
"children": []
}]
}]
}]
};
exports.ic_emoji_emotions = ic_emoji_emotions; |
import { interfaces } from 'inversify';
import { Component } from './component';
// Declares a component that is exposed over RPC and accessed through a
// proxy, registered under the given inversify service identifier.
export const Rpc = (id: interfaces.ServiceIdentifier<any>) => Component({ id, rpc: true, proxy: true });
|
export * from './favorite-dish.model';
export * from './dish.model';
export * from './dish-rating.model';
|
<reponame>mighteejim/manager<gh_stars>0
import React, { PropTypes } from 'react';
import SecondaryButton from './SecondaryButton';
// Secondary-styled button whose children default to the text "Cancel";
// all props are forwarded to SecondaryButton.
// NOTE(review): this file imports PropTypes from 'react' (see top of file),
// which was removed in React 16 — migrate to the 'prop-types' package when
// upgrading React.
export default function CancelButton(props) {
  return (
    <SecondaryButton {...props} >
      {props.children}
    </SecondaryButton>
  );
}
CancelButton.propTypes = {
  children: PropTypes.node,
};
CancelButton.defaultProps = {
  children: 'Cancel',
};
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the LICENSE
* file in the root directory of this source tree.
*/
#include "ProxyBase.h"
#include "mcrouter/CarbonRouterInstanceBase.h"
#include "mcrouter/config-impl.h"
#include "mcrouter/config.h"
#include "mcrouter/options.h"
namespace facebook {
namespace memcache {
namespace mcrouter {
// Thread-local flag marking whether the current thread is a proxy thread.
FOLLY_TLS bool ProxyBase::isProxyThread_{false};
// Returns the options of the router instance that owns this proxy.
const McrouterOptions& ProxyBase::getRouterOptions() const {
  return router_.opts();
}
/**
 * Translates mcrouter's fiber-related options into a
 * folly::fibers::FiberManager::Options struct. Pure function of `opts`;
 * no proxy state is consulted.
 */
folly::fibers::FiberManager::Options ProxyBase::getFiberManagerOptions(
    const McrouterOptions& opts) {
  folly::fibers::FiberManager::Options options;
  options.stackSize = opts.fibers_stack_size;
  options.recordStackEvery = opts.fibers_record_stack_size_every;
  options.maxFibersPoolSize = opts.fibers_max_pool_size;
  options.useGuardPages = opts.fibers_use_guard_pages;
  options.fibersPoolResizePeriodMs = opts.fibers_pool_resize_period_ms;
  return options;
}
void ProxyBase::FlushCallback::runLoopCallback() noexcept {
  // Always reschedule once, so the flush runs at the end of the current
  // event-loop iteration rather than immediately.
  if (!rescheduled_) {
    rescheduled_ = true;
    proxy_.eventBase().getEventBase().runInLoop(this, true /* thisIteration */);
    return;
  }
  rescheduled_ = false;
  // Take ownership of the pending list, then drain it; each callback is
  // removed from the list before it runs.
  auto cbs = std::move(flushList_);
  while (!cbs.empty()) {
    folly::EventBase::LoopCallback* callback = &cbs.front();
    cbs.pop_front();
    callback->runLoopCallback();
  }
}
} // mcrouter
} // memcache
} // facebook
|
#!/bin/sh
#
#Copyright (c) 2021, Oracle and/or its affiliates.
#
#Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# Detect which package manager is present in this image.
if type dnf > /dev/null 2>&1; then
  echo packageManager=DNF
elif type yum > /dev/null 2>&1; then
  echo packageManager=YUM
elif type microdnf > /dev/null 2>&1; then
  echo packageManager=MICRODNF
elif type apt-get > /dev/null 2>&1; then
  echo packageManager=APTGET
elif type apk > /dev/null 2>&1; then
  echo packageManager=APK
elif type zypper > /dev/null 2>&1; then
  echo packageManager=ZYPPER
else
  echo packageManager=NONE
fi
# Report the Java installation (JAVA_HOME's java if set, else the one on PATH).
if [ -n "$JAVA_HOME" ]; then
  echo javaHome="$JAVA_HOME"
  javaVersion="$("$JAVA_HOME"/bin/java -version 2>&1 | awk -F '\"' '/version/ {print $2}')"
else
  javaVersion="$(java -version 2>&1 | awk -F '\"' '/version/ {print $2}')"
fi
if [ -n "$javaVersion" ]; then
  echo javaVersion="$javaVersion"
fi
# Report the WebLogic domain home; an absent/empty directory means the image
# carries only a WDT model.
if [ -n "$DOMAIN_HOME" ]; then
  echo domainHome="$DOMAIN_HOME"
  if [ ! -d "$DOMAIN_HOME" ] || [ -z "$(ls -A "$DOMAIN_HOME")" ]; then
    echo wdtModelOnly=true
  fi
fi
if [ -n "$WDT_MODEL_HOME" ]; then
  echo wdtModelHome="$WDT_MODEL_HOME"
fi
# Report the WebLogic Deploy Tooling install, checking the default location
# when WDT_HOME is not set.
if [ -n "$WDT_HOME" ]; then
  echo wdtHome="$WDT_HOME"
  echo wdtVersion="$(sed 's/.* //' "$WDT_HOME"/weblogic-deploy/VERSION.txt )"
elif [ -f "/u01/wdt/weblogic-deploy/VERSION.txt" ]; then
  echo wdtHome="/u01/wdt"
  echo wdtVersion="$(sed 's/.* //' /u01/wdt/weblogic-deploy/VERSION.txt)"
fi
# Report the Oracle Home: WLS version, ownership, OPatch version, installed
# patches (id;description pairs) and product families.
if [ -n "$ORACLE_HOME" ]; then
  echo oracleHome="$ORACLE_HOME"
  if [ -n "$JAVA_HOME" ]; then
    echo wlsVersion="$("$JAVA_HOME"/bin/java -cp "$ORACLE_HOME"/wlserver/server/lib/weblogic.jar weblogic.version 2> /dev/null | grep -oE -m 1 '([[:digit:]\.]+)' | head -1)"
  fi
  echo oracleHomeUser="$(stat -c '%U' "$ORACLE_HOME")"
  echo oracleHomeGroup="$(stat -c '%G' "$ORACLE_HOME")"
  echo opatchVersion="$("$ORACLE_HOME"/OPatch/opatch version 2> /dev/null | grep -oE -m 1 '([[:digit:]\.]+)')"
  # The awk program pairs each patch number with its description, emitting
  # "None" when a patch has no description line; entries are ';'-separated.
  echo oraclePatches="$("$ORACLE_HOME"/OPatch/opatch lsinventory |
    awk 'BEGIN { ORS=";" }
      /^Unique Patch ID/ { print $4 }
      /^Patch description/ {
        x = substr($0, 21)
        print x
        descriptionNeeded = 0
      }
      /^Patch\s*[0-9]+/ {
        if (descriptionNeeded)
          print "None"
        print $2
        descriptionNeeded = 1
      }
      END {
        if (descriptionNeeded)
          print "None"
      }' | sed 's/;$//')"
  echo oracleInstalledProducts="$(awk -F\" '{ORS=","} /product-family/ { print $2 }' "$ORACLE_HOME"/inventory/registry.xml | sed 's/,$//')"
fi
# Report OS identity: prefer /etc/os-release, fall back to busybox banner.
if [ -f "/etc/os-release" ]; then
  grep '=' /etc/os-release | sed 's/^/__OS__/'
elif type busybox > /dev/null 2>&1; then
  echo __OS__ID="bb"
  echo __OS__NAME="$(busybox | head -1 | awk '{ print $1 }')"
  echo __OS__VERSION="$(busybox | head -1 | awk '{ print $2 }')"
fi
|
<filename>dyno-core/src/main/java/com/netflix/dyno/connectionpool/DecoratingFuture.java<gh_stars>0
package com.netflix.dyno.connectionpool;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
 * Adapts a {@code ListenableFuture<OperationResult<V>>} to a plain
 * {@code Future<V>} by unwrapping the operation result on {@code get()};
 * cancellation and completion state are delegated unchanged.
 */
public class DecoratingFuture<V> implements Future<V> {
    private final ListenableFuture<OperationResult<V>> innerFuture;
    public DecoratingFuture(final ListenableFuture<OperationResult<V>> listenableFuture) {
        innerFuture = listenableFuture;
    }
    @Override
    public boolean cancel(boolean mayInterruptIfRunning) {
        return innerFuture.cancel(mayInterruptIfRunning);
    }
    @Override
    public boolean isCancelled() {
        return innerFuture.isCancelled();
    }
    @Override
    public boolean isDone() {
        return innerFuture.isDone();
    }
    @Override
    public V get() throws InterruptedException, ExecutionException {
        // Block on the inner future, then unwrap the payload.
        OperationResult<V> opResult = innerFuture.get();
        return opResult.getResult();
    }
    @Override
    public V get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException {
        OperationResult<V> opResult = innerFuture.get(timeout, unit);
        return opResult.getResult();
    }
}
|
#!/bin/bash
# Search for a universal perturbation and build the poisoned datasets.
# All experiment variables (exp_path, config_path, ...) come from exp_setting.sh.

# Load Exp Settings
source exp_setting.sh

# Safety fix: refuse to continue with an empty exp_path — the unquoted
# `rm -rf $exp_path` below would otherwise expand to `rm -rf` of nothing
# (or, with a space-containing value, delete unintended paths).
if [ -z "$exp_path" ]; then
    echo "ERROR: exp_path is not set (check exp_setting.sh)" >&2
    exit 1
fi
echo "$exp_path"

# Search Universal Perturbation and build datasets
cd ../../../../
pwd

# Remove previous files
rm -rf "$exp_path"

python3 perturbation.py --config_path "$config_path" \
                        --exp_name "$exp_path" \
                        --version "$base_version" \
                        --train_data_type "$dataset_type" \
                        --noise_shape 10 3 32 32 \
                        --epsilon "$epsilon" \
                        --num_steps "$num_steps" \
                        --step_size "$step_size" \
                        --attack_type "$attack_type" \
                        --perturb_type "$perturb_type" \
                        --universal_train_target "$universal_train_target" \
                        --universal_stop_error "$universal_stop_error" \
                        --use_subset
|
from collections import deque

# Task store; new tasks are pushed to the front (most recent first).
tasks = deque()

def add_task(name):
    """Add a task to the front of the task list."""
    tasks.appendleft(name)

def remove_task(index):
    """Remove the task at position ``index``.

    Bug fix: the original called ``tasks.pop(index)``, but unlike
    ``list.pop``, ``deque.pop()`` accepts no argument and raises
    TypeError. ``del`` deletes by index (IndexError if out of range).
    """
    del tasks[index]

def list_tasks():
    """Print every task, one per line, prefixed with '- '."""
    for task in tasks:
        print("- {}".format(task))
import * as core from '../../core';
import generateBlurVertSource from './generateBlurVertSource';
import generateBlurFragSource from './generateBlurFragSource';
import getMaxBlurKernelSize from './getMaxBlurKernelSize';
/**
 * The BlurYFilter applies a vertical Gaussian blur to an object.
 * (The original comment said "horizontal" — that was a copy/paste
 * from BlurXFilter; this filter samples along the Y axis.)
 *
 * @class
 * @extends PIXI.Filter
 * @memberof PIXI.filters
 */
export default class BlurYFilter extends core.Filter
{
    /**
     * @param {number} strength - The strength of the blur filter.
     * @param {number} quality - The quality of the blur filter.
     * @param {number} resolution - The resolution of the blur filter.
     * @param {number} [kernelSize=5] - The kernelSize of the blur filter.Options: 5, 7, 9, 11, 13, 15.
     */
    constructor(strength, quality, resolution, kernelSize)
    {
        kernelSize = kernelSize || 5;
        // `false` selects the non-X (vertical) sampling direction in the
        // generated vertex shader.
        const vertSrc = generateBlurVertSource(kernelSize, false);
        const fragSrc = generateBlurFragSource(kernelSize);
        super(
            // vertex shader
            vertSrc,
            // fragment shader
            fragSrc
        );
        this.resolution = resolution || core.settings.RESOLUTION;
        this._quality = 0;
        // Setting quality also sets the number of passes (see the setter).
        this.quality = quality || 4;
        this.strength = strength || 8;
        // Defer kernel-size clamping until a GL context is available.
        this.firstRun = true;
    }
    /**
     * Applies the filter.
     *
     * @param {PIXI.FilterManager} filterManager - The manager.
     * @param {PIXI.RenderTarget} input - The input target.
     * @param {PIXI.RenderTarget} output - The output target.
     * @param {boolean} clear - Should the output be cleared before rendering?
     */
    apply(filterManager, input, output, clear)
    {
        // On the first apply(), clamp the kernel size to what this GL
        // context supports and regenerate the shader sources once.
        if (this.firstRun)
        {
            const gl = filterManager.renderer.gl;
            const kernelSize = getMaxBlurKernelSize(gl);
            this.vertexSrc = generateBlurVertSource(kernelSize, false);
            this.fragmentSrc = generateBlurFragSource(kernelSize);
            this.firstRun = false;
        }
        // Per-pass strength in normalized texture coordinates.
        // NOTE(review): the first expression algebraically reduces to
        // 1 / input.size.height; kept as written.
        this.uniforms.strength = (1 / output.size.height) * (output.size.height / input.size.height);
        this.uniforms.strength *= this.strength;
        this.uniforms.strength /= this.passes;
        if (this.passes === 1)
        {
            filterManager.applyFilter(this, input, output, clear);
        }
        else
        {
            // Multi-pass blur: ping-pong between the input and a temporary
            // render target, then composite the final pass into `output`.
            const renderTarget = filterManager.getRenderTarget(true);
            let flip = input;
            let flop = renderTarget;
            for (let i = 0; i < this.passes - 1; i++)
            {
                filterManager.applyFilter(this, flip, flop, true);
                const temp = flop;
                flop = flip;
                flip = temp;
            }
            filterManager.applyFilter(this, flip, output, clear);
            filterManager.returnRenderTarget(renderTarget);
        }
    }
    /**
     * The strength of the blur.
     *
     * @member {number}
     * @default 2
     */
    get blur()
    {
        return this.strength;
    }
    set blur(value) // eslint-disable-line require-jsdoc
    {
        // Grow padding with the blur radius so the effect is not clipped.
        this.padding = Math.abs(value) * 2;
        this.strength = value;
    }
    /**
     * Sets the quality of the blur by modifying the number of passes. More passes means higher
     * quality blurring but lower performance.
     *
     * @member {number}
     * @default 4
     */
    get quality()
    {
        return this._quality;
    }
    set quality(value) // eslint-disable-line require-jsdoc
    {
        this._quality = value;
        this.passes = value;
    }
}
|
package org.pantsbuild.testproject.dummies;
import org.junit.Test;
/**
 * Two trivially-passing JUnit tests whose stdout is captured and asserted by
 * JunitTestsIntegrationTest#test_junit_test_suppress_output_flag — do not
 * change the printed strings.
 */
public class PassingTest {
    @Test
    public void testPass1() {
        // used in JunitTestsIntegrationTest#test_junit_test_suppress_output_flag
        System.out.println("Hello from test1!");
    }
    @Test
    public void testPass2() {
        // used in JunitTestsIntegrationTest#test_junit_test_suppress_output_flag
        System.out.println("Hello from test2!");
    }
}
|
#!/bin/bash
# Runs the global/stream/one module configurations under cmsRun and then
# verifies that the no-concurrency configuration reports zero concurrent
# module time.
# Pass in name and status
function die { echo $1: status $2 ; exit $2; }
F1=${LOCAL_TEST_DIR}/test_global_modules_cfg.py
F2=${LOCAL_TEST_DIR}/test_stream_modules_cfg.py
F3=${LOCAL_TEST_DIR}/test_one_modules_cfg.py
(cmsRun $F1 ) || die "Failure using $F1" $?
(cmsRun $F2 ) || die "Failure using $F2" $?
(cmsRun $F3 ) || die "Failure using $F3" $?
#the last few lines of the output are the printout from the
# ConcurrentModuleTimer service detailing how much time was
# spent in 2,3 or 4 modules running simultaneously.
# Any non-zero, non-scientific-notation timing line left after the greps
# makes the diff against the empty file fail, which fails the test.
touch empty_file
(cmsRun ${LOCAL_TEST_DIR}/test_no_concurrent_module_cfg.py 2>&1) | tail -n 3 | grep -v ' 0 ' | grep -v 'e-' | diff - empty_file || die "Failure using test_no_concurrent_module_cfg.py" $?
|
package rawhttp.core.body.encoding;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.zip.GZIPInputStream;
/**
 * An OutputStream decorator that gunzips everything written to it, forwarding
 * the decompressed bytes to the wrapped stream.
 * <p>
 * Because the JDK only offers GZIP decoding as an InputStream, written bytes
 * are pushed through a pipe to a single background thread that runs a
 * {@link GZIPInputStream} over the pipe's read end.
 */
final class GZipUncompressorOutputStream extends DecodingOutputStream {
    // Read end of the pipe, consumed by the background decoder task.
    private final PipedInputStream encodedBytesReceiver;
    // Write end of the pipe, fed by write() calls on this stream.
    private final PipedOutputStream encodedBytesSink;
    // Flips to true on the first write(); guards the one-time pipe
    // connection and decoder start-up.
    private final AtomicBoolean readerRunning = new AtomicBoolean(false);
    private final ExecutorService executorService;
    private final int bufferSize;
    // Handle to the decoder task; awaited in finishDecoding().
    private Future<?> readerExecution;
    GZipUncompressorOutputStream(OutputStream out, int bufferSize) {
        super(out);
        this.bufferSize = bufferSize;
        this.encodedBytesReceiver = new PipedInputStream();
        this.encodedBytesSink = new PipedOutputStream();
        this.executorService = Executors.newSingleThreadExecutor();
    }
    @Override
    public void write(int b) throws IOException {
        // Single-byte writes delegate to the array variant.
        byte[] buffer = new byte[1];
        buffer[0] = (byte) (b & 0xFF);
        write(buffer, 0, 1);
    }
    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        // Lazily connect the pipe and start the decoder on the first write.
        if (!readerRunning.getAndSet(true)) {
            encodedBytesSink.connect(encodedBytesReceiver);
            startReader();
        }
        encodedBytesSink.write(b, off, len);
    }
    // Starts the background task that pumps gunzipped bytes into `out`
    // until the pipe reaches EOF (i.e. the sink is closed).
    private void startReader() {
        readerExecution = executorService.submit(() -> {
            int bytesRead;
            byte[] buffer = new byte[bufferSize];
            try (GZIPInputStream decoderStream = new GZIPInputStream(encodedBytesReceiver)) {
                while ((bytesRead = decoderStream.read(buffer, 0, bufferSize)) >= 0) {
                    out.write(buffer, 0, bytesRead);
                }
            } catch (IOException e) {
                // NOTE(review): decoder-thread errors are only printed, never
                // propagated back to the writer — consider surfacing them.
                e.printStackTrace();
            }
        });
    }
    @Override
    public void flush() throws IOException {
        encodedBytesSink.flush();
        super.flush();
    }
    @Override
    public void finishDecoding() throws IOException {
        // Closing the sink makes the decoder see EOF; then wait (bounded)
        // for it to drain all remaining output before shutting down.
        super.finishDecoding();
        encodedBytesSink.close();
        try {
            readerExecution.get(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            executorService.shutdownNow();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            executorService.shutdownNow();
            throw new RuntimeException(e.getCause());
        } catch (TimeoutException e) {
            executorService.shutdownNow();
            throw new RuntimeException("Timeout waiting for stream to close");
        }
        executorService.shutdown();
    }
} |
<gh_stars>0
// 1º exercício
console.log('=============== 1º exercício ===============')
// Exercise 1: resolves "Maior de 18" / rejects "Menor de 18" after a 2s delay.
function checaIdade(idade) {
  return new Promise((resolve, reject) =>
    setTimeout(
      () => (idade > 18 ? resolve("Maior de 18") : reject("Menor de 18")),
      2000
    )
  );
}
// Kick off the age check for 2; logs whichever branch settles.
checaIdade(2)
  .then((response) => console.log(response))
  .catch((error) => console.log(error));
console.log('carregando...')
// 2º exercício
// Exercise 2: fetches the repository list of a GitHub user, resolving with
// the parsed JSON array or rejecting with the HTTP status code.
function github(user) {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.onreadystatechange = () => {
      if (xhr.readyState !== 4) return; // wait for DONE
      if (xhr.status === 200) {
        resolve(JSON.parse(xhr.responseText));
      } else {
        reject(xhr.status);
      }
    };
    xhr.open('GET', `https://api.github.com/users/${user}/repos`);
    xhr.send(null);
  });
}
// Reads the username typed in "#user input" and renders that user's
// repositories into "#user ul", showing a loading row meanwhile.
function adicionar() {
  const user = document.querySelector('#user input').value;
  const repo_list = document.querySelector('#user ul');

  // Helper: append one <li> with the given text to the list.
  const appendItem = (text) => {
    const li = document.createElement('li');
    li.innerText = text;
    repo_list.appendChild(li);
  };

  repo_list.innerHTML = '';
  appendItem('carregando...');

  github(user)
    .then((response) => {
      repo_list.innerHTML = '';
      // Bug fix: `for (res of response)` created an implicit global `res`
      // (a ReferenceError in strict mode / ES modules); declare it.
      for (const res of response) {
        appendItem(res.name);
      }
    })
    .catch((error) => {
      repo_list.innerHTML = '';
      appendItem(`erro de solicitação: ${error}`);
    });
}
#!/usr/bin/env bash
# Provisions a Django REST API host: clones the repo, builds a virtualenv,
# runs migrations, and wires up supervisor + nginx. Aborts on first error.
set -e
# TODO: Set to URL of git repo.
PROJECT_GIT_URL='git@github.com:MrNtlu/DjangoBackend.git'
PROJECT_BASE_PATH='/usr/local/apps/profiles-rest-api'
echo "Installing dependencies..."
apt-get update
apt-get install -y python3-dev python3-venv sqlite python-pip supervisor nginx git
# Create project directory
mkdir -p $PROJECT_BASE_PATH
git clone $PROJECT_GIT_URL $PROJECT_BASE_PATH
# Create virtual environment
mkdir -p $PROJECT_BASE_PATH/env
python3 -m venv $PROJECT_BASE_PATH/env
# Install python packages
$PROJECT_BASE_PATH/env/bin/pip install -r $PROJECT_BASE_PATH/requirements.txt
$PROJECT_BASE_PATH/env/bin/pip install uwsgi==2.0.18
# Run migrations and collectstatic
cd $PROJECT_BASE_PATH
$PROJECT_BASE_PATH/env/bin/python manage.py migrate
$PROJECT_BASE_PATH/env/bin/python manage.py collectstatic --noinput
# Configure supervisor
cp $PROJECT_BASE_PATH/deploy/supervisor_profiles_api.conf /etc/supervisor/conf.d/profiles_api.conf
supervisorctl reread
supervisorctl update
supervisorctl restart profiles_api
# Configure nginx
cp $PROJECT_BASE_PATH/deploy/nginx_profiles_api.conf /etc/nginx/sites-available/profiles_api.conf
# Robustness fix: under `set -e`, a missing default site or an already
# existing symlink aborted the whole deploy; -f makes both idempotent.
rm -f /etc/nginx/sites-enabled/default
ln -sf /etc/nginx/sites-available/profiles_api.conf /etc/nginx/sites-enabled/profiles_api.conf
systemctl restart nginx.service
echo "DONE! :)"
|
<gh_stars>1-10
import Vue from "vue";
import VueRouter from "vue-router";
Vue.use(VueRouter);
import InitCom from '@/components/InitCom.vue'
import User from '@/components/User.vue'
import UserProfile from '@/components/UserProfile.vue'
import UserPosts from '@/components/UserPosts.vue'
// Route table: home page plus a dynamic /user/:id route with nested views.
const routes = [
  {
    path: "/",
    component: InitCom
  },
  {
    path: "/user/:id",
    name: 'user',
    // <router-link :to="{ name: 'user', params: { userId: 123 }}">User</router-link>
    // router.push({ name: 'user', params: { userId: 123 }})
    // redirect: '/b',
    // redirect: { name: 'foo' },
    component: User,
    beforeEnter: (to, from, next) => {
      // ...
      next()
    },
    children: [
      {
        // When /user/:id/profile matches,
        // UserProfile is rendered inside User's <router-view>
        path: "profile",
        component: UserProfile,
        meta: { requiresAuth: true }
      },
      {
        // When /user/:id/posts matches,
        // UserPosts is rendered inside User's <router-view>
        path: "posts",
        component: UserPosts,
      },
    ],
  },
];
const router = new VueRouter({
  mode: 'history',
  routes,
  // Note: this feature only works in browsers that support history.pushState.
  // Restore scroll: jump to the hash anchor if present, otherwise restore
  // the saved position (back/forward), otherwise scroll to the top.
  scrollBehavior (to, from, savedPosition) {
    if (to.hash) {
      return {
        selector: to.hash
      }
    } else {
      if (savedPosition) {
        return savedPosition
      } else {
        return { x: 0, y: 0 }
      }
    }
  }
})
// Example global auth guard (disabled):
// router.beforeEach((to, from, next) => {
//   var isAuthenticated = true // decide based on your actual auth state
//   if (to.matched.some(record => record.meta.requiresAuth)) {
//     if (to.name !== 'Login' && !isAuthenticated) next({ name: 'Login' })
//     else next() // always make sure to call next()
//   } else {
//     next() // always make sure to call next()
//   }
// })
export default router
#!/bin/bash
# Should be launched by cron (every nights)
# How to use : /path/to/scripts/clonescheduled.sh /path/to/v_env
# Runs the Django `clonescheduled` management command inside the given
# virtualenv, then records completion in syslog.
if [ "$1" == "" ]
then
    echo "ERROR : Virtualenv path is required"
    exit 1
else
    V_ENV_PATH=$1
fi
source "$V_ENV_PATH"/bin/activate
# Run from the project root (one level above this script's directory).
cd "$(dirname "$0")/.."
python manage.py clonescheduled --settings=mymoney.settings.production
deactivate
logger "[MYMONEY] Scheduled bank transactions cloned."
|
# Evaluates the 512+0+512-N-VB checkpoint on the WikiText-103 validation set,
# applying the shuffle-sentences / nouns-and-verbs-only augmentation and the
# last-element evaluation function.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-N-VB/model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-N-VB/512+0+512-SS-N-VB-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_remove_all_but_nouns_and_verbs_first_half_full --eval_function last_element_eval
<reponame>alinz/baker.go
package docker
import (
"encoding/json"
"fmt"
"strconv"
"github.com/alinz/baker.go"
)
// Watcher observes a Docker daemon through its unix socket and emits
// baker.Container values for containers labeled with baker.* metadata.
type Watcher struct {
	unixClient   Client
	remoteClient Client
	closed       chan struct{}
}

// Compile-time check that *Watcher satisfies baker.Watcher.
var _ baker.Watcher = (*Watcher)(nil)
// load inspects container `id` over the Docker unix socket and converts the
// response into a *Container, validating the baker.* labels along the way.
func (w *Watcher) load(id string) (*Container, error) {
	r, err := w.unixClient.Get("http://localhost/containers/" + id + "/json")
	if err != nil {
		return nil, err
	}
	defer r.Close()
	// Anonymous struct holding only the fields we need from the
	// container-inspect JSON document.
	payload := struct {
		Config struct {
			Labels struct {
				Enable      string `json:"baker.enable"`
				Network     string `json:"baker.network"`
				ServicePort string `json:"baker.service.port"`
				ServicePing string `json:"baker.service.ping"`
			} `json:"Labels"`
		} `json:"Config"`
		NetworkSettings struct {
			Networks map[string]struct {
				IPAddress string `json:"IPAddress"`
			} `json:"Networks"`
		} `json:"NetworkSettings"`
		ID string `json:"Id"`
	}{}
	err = json.NewDecoder(r).Decode(&payload)
	if err != nil {
		return nil, err
	}
	// Only containers explicitly opted in via baker.enable are loaded.
	if payload.Config.Labels.Enable != "true" {
		return nil, fmt.Errorf("label 'baker.enable' is not set to true")
	}
	// The advertised network must exist in the container's network settings.
	network, ok := payload.NetworkSettings.Networks[payload.Config.Labels.Network]
	if !ok {
		return nil, fmt.Errorf("network '%s' not exists in labels", payload.Config.Labels.Network)
	}
	port, err := strconv.ParseInt(payload.Config.Labels.ServicePort, 10, 32)
	if err != nil {
		return nil, fmt.Errorf("failed to parse port for container '%s' because %s", id, err)
	}
	// addr stays empty when the container has no IP on that network.
	addr := ""
	if network.IPAddress != "" {
		addr = fmt.Sprintf("%s:%d", network.IPAddress, port)
	}
	return &Container{
		id:        id,
		addr:      addr,
		client:    w.remoteClient,
		configURL: fmt.Sprintf("http://%s%s", addr, payload.Config.Labels.ServicePing),
	}, nil
}
// currentContainers lists the containers that are already running and pushes
// each loadable one onto `containers`; individual failures go to `errs` and
// do not stop the scan.
func (w *Watcher) currentContainers(containers chan<- baker.Container, errs chan<- error) {
	r, err := w.unixClient.Get("http://localhost/containers/json")
	if err != nil {
		errs <- err
		return
	}
	defer r.Close()
	events := []struct {
		ID    string `json:"Id"`
		State string `json:"State"`
	}{}
	err = json.NewDecoder(r).Decode(&events)
	if err != nil {
		errs <- fmt.Errorf("failed to decode events: %w", err)
		return
	}
	for _, event := range events {
		var container *Container
		// Skip anything that is not currently running.
		if event.State != "running" {
			continue
		}
		container, err := w.load(event.ID)
		if err != nil {
			errs <- fmt.Errorf("failed to load event %s: %w", event.ID, err)
			continue
		}
		// Stop emitting once the watcher has been closed.
		select {
		case <-w.closed:
			return
		default:
			containers <- container
		}
	}
}
// futureContainers streams the Docker events endpoint forever, reloading a
// container on every "start"/"die" event and pushing it onto `containers`.
// Decode and load failures are reported on `errs` and the loop continues.
func (w *Watcher) futureContainers(containers chan<- baker.Container, errs chan<- error) {
	r, err := w.unixClient.Get("http://localhost/events")
	if err != nil {
		errs <- err
		return
	}
	defer r.Close()
	decoder := json.NewDecoder(r)
	event := struct {
		ID     string `json:"id"`
		Status string `json:"status"`
	}{}
	for {
		// Reset reused fields so a partial decode cannot leak stale values.
		event.ID = ""
		event.Status = ""
		if err := decoder.Decode(&event); err != nil {
			errs <- fmt.Errorf("failed to decode json event stream: %w", err)
			continue
		}
		// Only lifecycle transitions we care about.
		if event.Status != "die" && event.Status != "start" {
			continue
		}
		container, err := w.load(event.ID)
		if err != nil {
			errs <- fmt.Errorf("failed to load event %s: %w", event.ID, err)
			continue
		}
		// Stop emitting once the watcher has been closed.
		select {
		case <-w.closed:
			return
		default:
			containers <- container
		}
	}
}
// Watch starts watching in a background goroutine and returns the channel on
// which discovered containers are delivered: first the currently-running
// containers, then live updates from the Docker event stream.
func (w *Watcher) Watch(errs chan<- error) <-chan baker.Container {
	containers := make(chan baker.Container, 10)
	w.closed = make(chan struct{}, 1)
	go func() {
		defer close(w.closed)
		w.currentContainers(containers, errs)
		w.futureContainers(containers, errs)
	}()
	return containers
}
// NewWatcher builds a Watcher that talks to the Docker daemon with `client`
// (a unix-socket client) and probes containers with a remote HTTP client.
func NewWatcher(client Client) *Watcher {
	watcher := &Watcher{
		unixClient:   client,
		remoteClient: RemoteClient(),
	}
	return watcher
}
|
# Add the Geobacter-required rustc flags unless the caller opted out by
# setting dontAddGeobacterRustFlags.
if [[ -z ${dontAddGeobacterRustFlags-} ]]; then
  export RUSTFLAGS="-Z always-encode-mir -Z always-emit-metadata ${RUSTFLAGS-}";
fi
# Fix 'failed to open: /homeless-shelter/.cargo/.package-cache' in rust 1.36.
if [[ -z ${IN_NIX_SHELL-} && -z ${CARGO_HOME-} ]]; then
  export CARGO_HOME=$TMPDIR
fi
|
#!/bin/sh
# Install libdb4.8 (Berkeley DB).
# Downloads, verifies, patches, and builds BDB 4.8 into <base-dir>/db4.
export LC_ALL=C
set -e
if [ -z "${1}" ]; then
  echo "Usage: $0 <base-dir> [<extra-bdb-configure-flag> ...]"
  echo
  echo "Must specify a single argument: the directory in which db4 will be built."
  echo "This is probably \`pwd\` if you're at the root of the luckcoin repository."
  exit 1
fi
# Resolve a directory to an absolute physical path (symlinks resolved).
expand_path() {
  echo "$(cd "${1}" && pwd -P)"
}
BDB_PREFIX="$(expand_path ${1})/db4"; shift;
BDB_VERSION='db-4.8.30.NC'
BDB_HASH='12edc0df75bf9abd7f82f821795bcee50f42cb2e5f76a6a281b85732798364ef'
BDB_URL="https://download.oracle.com/berkeley-db/${BDB_VERSION}.tar.gz"
# True if the named command is on PATH.
check_exists() {
  which "$1" >/dev/null 2>&1
}
# Verify a file against a SHA-256 hash using whichever tool is available
# (sha256sum on Linux, sha256 on BSD, shasum as a fallback).
sha256_check() {
  # Args: <sha256_hash> <filename>
  #
  if check_exists sha256sum; then
    echo "${1}  ${2}" | sha256sum -c
  elif check_exists sha256; then
    if [ "$(uname)" = "FreeBSD" ]; then
      sha256 -c "${1}" "${2}"
    else
      echo "${1}  ${2}" | sha256 -c
    fi
  else
    echo "${1}  ${2}" | shasum -a 256 -c
  fi
}
http_get() {
  # Args: <url> <filename> <sha256_hash>
  #
  # It's acceptable that we don't require SSL here because we manually verify
  # content hashes below.
  #
  if [ -f "${2}" ]; then
    echo "File ${2} already exists; not downloading again"
  elif check_exists curl; then
    curl --insecure --retry 5 "${1}" -o "${2}"
  else
    wget --no-check-certificate "${1}" -O "${2}"
  fi
  sha256_check "${3}" "${2}"
}
mkdir -p "${BDB_PREFIX}"
http_get "${BDB_URL}" "${BDB_VERSION}.tar.gz" "${BDB_HASH}"
tar -xzvf ${BDB_VERSION}.tar.gz -C "$BDB_PREFIX"
cd "${BDB_PREFIX}/${BDB_VERSION}/"
# Apply a patch necessary when building with clang and c++11 (see https://community.oracle.com/thread/3952592)
CLANG_CXX11_PATCH_URL='https://gist.githubusercontent.com/LnL7/5153b251fd525fe15de69b67e63a6075/raw/7778e9364679093a32dec2908656738e16b6bdcb/clang.patch'
CLANG_CXX11_PATCH_HASH='7a9a47b03fd5fb93a16ef42235fa9512db9b0829cfc3bdf90edd3ec1f44d637c'
http_get "${CLANG_CXX11_PATCH_URL}" clang.patch "${CLANG_CXX11_PATCH_HASH}"
patch -p2 < clang.patch
cd build_unix/
# Static, C++-enabled, PIC build installed under the chosen prefix; extra
# caller-supplied configure flags are forwarded via "${@}".
"${BDB_PREFIX}/${BDB_VERSION}/dist/configure" \
  --enable-cxx --disable-shared --disable-replication --with-pic --prefix="${BDB_PREFIX}" \
  "${@}"
make install
echo
echo "db4 build complete."
echo
echo 'When compiling luckcoind, run `./configure` in the following way:'
echo
echo "  export BDB_PREFIX='${BDB_PREFIX}'"
echo '  ./configure BDB_LIBS="-L${BDB_PREFIX}/lib -ldb_cxx-4.8" BDB_CFLAGS="-I${BDB_PREFIX}/include" ...'
// Doxygen-generated navigation data for the StringPoint struct page:
// each entry maps a member name to its HTML anchor. Do not edit by hand.
var structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_points_1_1_string_point =
[
    [ "StringPoint", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_points_1_1_string_point.html#a96a7181c78880ed81f3dca9e22b9b9d1", null ],
    [ "_Pos", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_points_1_1_string_point.html#a9880a2e3e6d7456d29fd6335a1b813a3", null ],
    [ "_Size", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_points_1_1_string_point.html#a96c040242e476fbadcb4fad4f07672c6", null ],
    [ "_Val", "structdroid_1_1_runtime_1_1_utilities_1_1_structs_1_1_points_1_1_string_point.html#aa9b46e9327cd24bc575d75e9dd815b03", null ]
];
import threading

def execute_build_tasks(tasks):
    """Run each zero-argument task in its own thread and collect the results.

    Results are appended in completion order, which may differ from the
    order of ``tasks`` (behavior unchanged from the original).
    """
    results = []

    def _run(task):
        # list.append is atomic under the GIL, so no explicit lock is needed.
        results.append(task())

    # One worker thread per task.
    workers = [threading.Thread(target=_run, args=(task,)) for task in tasks]
    for worker in workers:
        worker.start()
    # Block until every task has finished.
    for worker in workers:
        worker.join()
    return results
#!/usr/bin/env bash
# ShellSpec suite for src/get-workspaces.js: exercises the exit-status
# contract (1 = missing arg, 4 = file not found, 2 = malformed JSON,
# 3 = missing/unexpected workspaces field, 0 = success).
Describe "node src/get-workspaces.sh" get-workspaces
  It "should return empty string if insufficient argument is given"
    When run command node "./src/get-workspaces.js"
    The output should equal ''
    The status should equal 1
  End
  It "should return the error status indicating the module is unable to be found"
    When run command node "./src/get-workspaces.js" "/tmp/123/package.json"
    The output should equal ''
    The status should equal 4
  End
  It "should return the error status indicating the content is malformed"
    run_test() {
      mkdir -p "/tmp/pkg1"
      echo "12345" >"/tmp/pkg1/package.json"
      node "./src/get-workspaces.js" "/tmp/pkg1/package.json"
    }
    When call run_test
    The output should equal ''
    The status should equal 2
  End
  It "should return the workspaces in the workspaces field"
    run_test() {
      mkdir -p "/tmp/pkg2"
      echo '{"workspaces": ["packages/*"]}' >"/tmp/pkg2/package.json"
      node "./src/get-workspaces.js" "/tmp/pkg2/package.json"
    }
    When call run_test
    The output should equal 'packages/*'
    The status should equal 0
  End
  It "should return the workspaces in the workspaces.packages field"
    run_test() {
      mkdir -p "/tmp/pkg3"
      echo '{"workspaces": {"packages": ["packages/*"]}}' >"/tmp/pkg3/package.json"
      node "./src/get-workspaces.js" "/tmp/pkg3/package.json"
    }
    When call run_test
    The output should equal 'packages/*'
    The status should equal 0
  End
  It "should return empty string when the workspaces field is missing"
    run_test() {
      mkdir -p "/tmp/pkg4"
      echo '{"name": "test"}' >"/tmp/pkg4/package.json"
      node "./src/get-workspaces.js" "/tmp/pkg4/package.json"
    }
    When call run_test
    The output should equal ''
    The status should equal 3
  End
  It "should return empty string when the workspaces field's value is unexpected"
    run_test() {
      mkdir -p "/tmp/pkg5"
      echo '{"name": "test", "workspaces": "12345"}' >"/tmp/pkg5/package.json"
      node "./src/get-workspaces.js" "/tmp/pkg5/package.json"
    }
    When call run_test
    The output should equal ''
    The status should equal 3
  End
End
|
// Clamp limits applied when mutating flocking genes.
var MAX_WEIGHT = 100;
var MAX_RADIUS = 50;
// Gaussian (mean 0) mutation samplers built with d3; the standard
// deviations come from the global config object C.
var weightMutation = d3.random.normal(0, C.WEIGHT_MUTATION_CONSTANT);
var radiusMutation = d3.random.normal(0, C.RADIUS_MUTATION_CONSTANT);
var colorMutation = d3.random.normal(0, C.COLOR_MUTATION_CONSTANT);
// Clamps x into the closed interval [a, b].
function bound(x, a, b) {
    return Math.min(Math.max(x, a), b);
}
// Crossover of two gene values: ~1/3 chance each of the first parent's
// value, the second parent's value, or their average.
function geneticChoice(a, b) {
    const r = Math.random();
    if (r < 0.33) return a;
    if (r < 0.66) return b;
    return (a + b) / 2;
}
// One flocking response: separation/alignment/cohesion weights plus the
// separation radius. NOTE: "seperation" is a historical misspelling kept
// because the field names are part of the public interface.
class FlockConfig {
    public seperationWeight: number;
    public alignmentWeight: number;
    public cohesionWeight: number;
    public seperationRadius: number;
    constructor(seperationWeight, alignmentWeight, cohesionWeight, seperationRadius) {
        this.seperationWeight = seperationWeight;
        this.seperationRadius = seperationRadius;
        this.alignmentWeight = alignmentWeight;
        this.cohesionWeight = cohesionWeight;
    }
    // Returns an independent copy of this configuration.
    clone(): FlockConfig {
        return new FlockConfig(this.seperationWeight, this.alignmentWeight, this.cohesionWeight, this.seperationRadius)
    }
    // Crossover: each gene is chosen per geneticChoice (parent A, parent B,
    // or their average).
    reproduceWith(other: FlockConfig): FlockConfig {
        var seperationWeight = geneticChoice(this.seperationWeight, other.seperationWeight);
        var seperationRadius = geneticChoice(this.seperationRadius, other.seperationRadius);
        var alignmentWeight = geneticChoice(this.alignmentWeight, other.alignmentWeight);
        var cohesionWeight = geneticChoice(this.cohesionWeight, other.cohesionWeight);
        return new FlockConfig(seperationWeight, alignmentWeight, cohesionWeight, seperationRadius);
    }
    // In-place Gaussian mutation of every gene, clamped to the global limits.
    // Mutates and returns `this` (not a copy).
    mutate(): FlockConfig {
        this.seperationWeight += weightMutation();
        this.alignmentWeight += weightMutation();
        this.cohesionWeight += weightMutation();
        this.seperationWeight = bound(this.seperationWeight, -MAX_WEIGHT, MAX_WEIGHT);
        this.alignmentWeight = bound(this.alignmentWeight, -MAX_WEIGHT, MAX_WEIGHT);
        this.cohesionWeight = bound(this.cohesionWeight, -MAX_WEIGHT, MAX_WEIGHT);
        this.seperationRadius += radiusMutation();
        this.seperationRadius = bound(this.seperationRadius, 0, MAX_RADIUS);
        return this;
    }
}
// A creature's full genome: three flocking responses plus a display color.
class Genetics {
    public preyFlocking: FlockConfig; // how to flock in response to presence of prey
    public predatorFlocking: FlockConfig; // how to flock in response to presence of predators
    public closestFlocking: FlockConfig; // predator only: how to flock in response to single closest prey
    public color: number; // 0-255 display color channel
    constructor(preyFlocking, predatorFlocking, closestFlocking, color) {
        this.preyFlocking = preyFlocking;
        this.predatorFlocking = predatorFlocking;
        this.closestFlocking = closestFlocking;
        this.color = color;
    }
    // In-place mutation of all three flocking configs and the color
    // (color is rounded and clamped to 0-255). Returns `this`.
    public mutate(): Genetics {
        this.preyFlocking.mutate();
        this.predatorFlocking.mutate();
        this.closestFlocking.mutate();
        this.color = bound(Math.round(this.color + colorMutation()), 0, 255)
        return this;
    }
    // Crossover with another genome; color is averaged, and the offspring
    // is mutated before being returned.
    public reproduceWith(otherParent: Genetics): Genetics {
        var preyFlocking = this.preyFlocking.reproduceWith(otherParent.preyFlocking);
        var predatorFlocking = this.predatorFlocking.reproduceWith(otherParent.predatorFlocking);
        var closestFlocking = this.closestFlocking.reproduceWith(otherParent.closestFlocking);
        var color = (this.color + otherParent.color) / 2;
        return new Genetics(preyFlocking, predatorFlocking, closestFlocking, color).mutate();
    }
}
// Uniform random integer in [0, 255].
function randInt256() {
    return Math.trunc(Math.random() * 256);
}
// Builds a fully random genome: three random flocking configs and a
// random 0-255 color.
function randomGenetics() {
    return new Genetics(randomFlocking(), randomFlocking(), randomFlocking(), randInt256());
}
// Genome preset: prey that flocks with peers and flees predators (color 240).
function flockingPreyGenetics() {
    const prey = new FlockConfig(1, 1, 1, 10);      // response to other prey
    const predator = new FlockConfig(2, -1, -1, 50); // response to predators
    const closest = new FlockConfig(0, 0, 0, 0);     // unused for prey
    return new Genetics(prey, predator, closest, 240);
}
// Genome preset: prey that separates but neither aligns nor coheres (color 180).
function nonFlockingPreyGenetics() {
    const prey = new FlockConfig(1, 0, 0, 10);       // response to other prey
    const predator = new FlockConfig(2, -1, -1, 50); // response to predators
    const closest = new FlockConfig(0, 0, 0, 0);     // unused for prey
    return new Genetics(prey, predator, closest, 180);
}
// Genome preset: predator that chases prey and flocks with other predators (color 0).
function predatorGenetics() {
    const prey = new FlockConfig(-3, 1, 1, 500);   // pursue prey (negative separation)
    const predator = new FlockConfig(1, 1, 1, 30); // flock with other predators
    const closest = new FlockConfig(-6, 2, 2, 50); // aggressively chase the closest prey
    return new Genetics(prey, predator, closest, 0);
}
// A FlockConfig with weights uniform in [-MAX_WEIGHT, MAX_WEIGHT] and a
// radius uniform in [0, MAX_RADIUS).
function randomFlocking() {
    const randWeight = () => Math.random() * MAX_WEIGHT * 2 - MAX_WEIGHT;
    return new FlockConfig(randWeight(), randWeight(), randWeight(), Math.random() * MAX_RADIUS);
}
|
<filename>src/configuration/Model.ts
import { keypair } from "@o1labs/client-sdk";
/**
 * Static configuration consumed by the payout process.
 * NOTE(review): field meanings below are inferred from the names —
 * confirm against the code that reads this configuration.
 */
export interface PaymentConfiguration {
    commissionRate: number
    stakingPoolPublicKey: string,
    payoutMemo: string,
    payorSendTransactionFee : number,
    senderKeys: keypair,
    minimumConfirmations : number,
    minimumHeight: number,
    configuredMaximum: number,
    blockDataSource: string,
    verbose: boolean,
    payoutHash: string,
    payoutThreshold: number
}
/**
 * Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.model.volatility.surface;
import org.apache.commons.lang.Validate;
/**
 * A strike type defined as strike / forward.
 */
public class Moneyness implements StrikeType {
  private final double _value;
  /**
   * @param value the moneyness (strike / forward), not negative
   */
  public Moneyness(final double value) {
    Validate.isTrue(value >= 0, "negative moneyness");
    _value = value;
  }
  /**
   * @param strike the strike, not negative
   * @param forward the forward, strictly positive
   */
  public Moneyness(final double strike, final double forward) {
    Validate.isTrue(strike >= 0, "negative strike");
    Validate.isTrue(forward > 0, "negative or zero forward");
    _value = strike / forward;
  }
  @Override
  public double value() {
    return _value;
  }
  @Override
  public Moneyness with(double value) {
    return new Moneyness(value);
  }
  /**
   * Value-based equality (added: this is a value type, so instances with the
   * same moneyness should compare equal and be usable as map keys).
   */
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!(obj instanceof Moneyness)) {
      return false;
    }
    return Double.doubleToLongBits(_value) == Double.doubleToLongBits(((Moneyness) obj)._value);
  }
  @Override
  public int hashCode() {
    // Consistent with equals: derived from the double's bit pattern.
    final long bits = Double.doubleToLongBits(_value);
    return (int) (bits ^ (bits >>> 32));
  }
}
|
package com.ricky.project.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.ricky.framework.web.controller.BaseController;
import com.ricky.framework.web.domain.AjaxResult;
import com.ricky.project.domain.SysConfig;
import com.ricky.project.service.ISysConfigService;
/**
 * System configuration controller.
 *
 * @author ruoyi
 */
@Controller
@RequestMapping("/system/config")
public class SysConfigController extends BaseController
{
    // View-name prefix for templates under system/config.
    private String prefix = "system/config";
    @Autowired
    private ISysConfigService configService;
    /**
     * Show the system configuration page.
     */
    @GetMapping()
    public String config(ModelMap mmap)
    {
        SysConfig config = configService.selectSysConfig();
        mmap.put("config", config);
        return prefix + "/config";
    }
    /**
     * Save the system configuration.
     */
    @PostMapping("/save")
    @ResponseBody
    public AjaxResult save(SysConfig config) {
        return toAjax(configService.updateSysConfig(config));
    }
}
|
<gh_stars>0
package com.winterbe.java8.samples.stream.optional;
import com.winterbe.java8.samples.stream.optional.exceptions.PersonNotFoundException;
import java.util.ArrayList;
import java.util.List;
/**
 * Demonstrates Optional.orElseThrow: looks a person up by id and throws a
 * domain exception when no match exists.
 */
public class OptionalOrElseThrow {
    /**
     * Returns the name of the person with the given id.
     *
     * @throws PersonNotFoundException if no person with that id exists
     */
    public static String getPersonById(List<Person> persons, Long personId) throws PersonNotFoundException {
        return persons.stream()
                // Bug fix: boxed Longs must be compared with equals(), not ==;
                // == compares references and fails outside the -128..127 cache.
                .filter(person -> person.getId().equals(personId))
                .findFirst()
                .map(Person::getpersonName).orElseThrow(PersonNotFoundException::new);
    }
    public static void main(String[] args) throws PersonNotFoundException {
        // Bug fix: 'persons' was used without ever being declared, so the
        // class did not compile; declare and populate the list here.
        List<Person> persons = new ArrayList<>();
        persons.add(new Person(1L, "Ercan"));
        persons.add(new Person(2L, "Jack"));
        persons.add(new Person(3L, "David"));
        System.out.println(getPersonById(persons, 2L));
    }
    // Person Class
    public static class Person {
        private Long id;
        private String personName;
        public Person(Long id, String personName) {
            this.id = id;
            this.personName = personName;
        }
        public Long getId() {
            return id;
        }
        public void setId(Long id) {
            this.id = id;
        }
        public String getpersonName() {
            return personName;
        }
        public void setpersonName(String personName) {
            this.personName = personName;
        }
    }
}
|
<gh_stars>1-10
// Placeholder component reserving 256px of vertical space while real
// content is loading.
const LoadingBlock = () => <div style={{ height: "256px" }}></div>;
export default LoadingBlock;
|
#include <iostream>
using namespace std;

// Sums the elements of a fixed integer array and prints the total.
int main()
{
    int nums[] = {2, 5, 9, 3};
    int sum = 0;

    // Range-based for: visit each element once and accumulate it.
    for (int value : nums)
        sum += value;

    cout << "Sum = " << sum;
    return 0;
}
import java.util.List;
/**
 * UserRepository backed by a UserDataDAO.
 */
public class UserRepositoryImpl implements UserRepository {
    private UserDataDAO userDataDAO; // Assume the existence of a UserDataDAO for data access
    public UserRepositoryImpl(UserDataDAO userDataDAO) {
        this.userDataDAO = userDataDAO;
    }
    // Implement the count method to return the total count of user data for the given appUserId
    public long count(Integer appUserId) {
        return userDataDAO.getCountByUserId(appUserId);
    }
    // Implement the findPage method to return a paginated list of user data based on the appUserId, query, and rowBounds
    // Pagination is delegated to the DAO via the RowBounds offset/limit.
    public List<UserData> findPage(Integer appUserId, String query, RowBounds rowBounds) {
        return userDataDAO.findUserPage(appUserId, query, rowBounds.getOffset(), rowBounds.getLimit());
    }
}
import os
from tensorflow_asr.augmentations.augments import Augmentation
from tensorflow_asr.featurizers.speech_featurizers import read_raw_audio
from tensorflow_asr.configs.config import Config
def process_audio_data(audio_path, augmentation_type, config_path):
    """Read raw audio, apply the requested augmentation, and extract speech
    features according to the given configuration.

    Args:
        audio_path: Path to the raw audio file to read.
        augmentation_type: Augmentation spec passed straight to ``Augmentation``.
        config_path: Path to the TensorFlowASR configuration file.

    Returns:
        Speech features extracted from the *augmented* audio.
    """
    # Disable GPU usage; note this only takes effect if TensorFlow has not
    # already initialised its devices before this call.
    os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
    # Read raw audio data.
    # NOTE(review): assumes read_raw_audio returns (samples, sample_rate) —
    # confirm against the installed tensorflow_asr version.
    audio_data, sample_rate = read_raw_audio(audio_path)
    # Apply augmentation.
    augmentation = Augmentation(augmentation_type)
    augmented_audio = augmentation(audio_data, sample_rate)
    # Load configuration.
    config = Config(config_path)
    # Bug fix: extract features from the augmented audio. The original passed
    # the raw ``audio_data`` here and silently discarded the augmentation result.
    speech_features = config.featurizer(augmented_audio)
    return speech_features
#!/bin/bash
# This script will install the latest version of MongoDB on Linux
# (Ubuntu/CentOS) or macOS.

# Nothing to do if MongoDB is already installed.
if which mongod >/dev/null; then
  echo "MongoDB is already installed!"
  exit 0
fi

# OS detection
unamestr=`uname`
if [[ "$unamestr" == 'Linux' ]]; then
  # Determine which Linux distribution we are running
  DISTRO=$(lsb_release -i | cut -d ':' -f 2 | tr -s '\t' ' ' | sed 's/ //g')
  # Install required packages
  if [[ "$DISTRO" == "Ubuntu" ]]; then
    sudo apt-get update
    sudo apt-get install -y mongodb
  elif [[ "$DISTRO" == "CentOS" ]]; then
    sudo yum -y update
    # Bug fix: the yum package is "mongodb-org"; the original command had a
    # stray space and trailing dash ("mongodb- org-") and could never install.
    sudo yum -y install mongodb-org
  else
    echo "Sorry, this OS is not yet supported"
    exit 1  # unsupported distro is an error, not success
  fi
elif [[ "$unamestr" == 'Darwin' ]]; then
  # Install Homebrew if it doesn't exist
  if which brew >/dev/null; then
    echo "Homebrew already installed"
  else
    echo -e "\033[34;1m Installing Homebrew \033[0m"
    /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
  fi
  # Install the latest version of MongoDB
  echo -e "\033[34;1m Installing the latest version of MongoDB \033[0m"
  brew install mongodb
else
  echo "Sorry, this OS is not yet supported"
  exit 1  # unsupported OS is an error, not success
fi
const axios = require('axios');
/**
 * Periodically checks a configured list of domains and builds a Discord
 * embed summarising which sites are down or returning bad status codes.
 */
class StatusTracker {
  /**
   * @typedef StatusTrackerOpts
   * @property {boolean} userInitialized
   * @property {IThimbleBot} client
   */

  /**
   * @constructor
   * @param {StatusTrackerOpts} opts
   */
  constructor(opts) {
    this.config = opts.client.config.custom && opts.client.config.custom.StatusTracker;
    this.domains = this.config && this.config.domains;
    this.timeout = this.config && this.config.timeout;
    this.down = [];
    this.badStatus = [];
    this.userInitialized = (opts && opts.userInitialized) || false;
  }

  /**
   * Creates an asynchronous loop through an array.
   * @param {array} arr
   * @param {function} callback
   */
  async asyncLoop(arr, callback) {
    for (let i = 0; i < arr.length; i++) {
      await callback(arr[i], i, arr);
    }
  }

  /**
   * Checks whether a given URL is accessible.
   * Rejects with 'BAD_STATUS' when the server answered with an error status,
   * or 'UNREACHABLE' when no response arrived at all.
   * @param {string} url
   * @returns {Promise<string>} resolves with 'OK'
   */
  getStatus(url) {
    // axios already returns a promise — no need for the `new Promise` wrapper.
    return axios.get(url, { timeout: this.timeout * 1000 })
      .then(() => 'OK')
      .catch(err => Promise.reject(err.response ? 'BAD_STATUS' : 'UNREACHABLE'));
  }

  /**
   * Loops through the configured domains and records failures in
   * `this.badStatus` / `this.down`.
   */
  async checkDomains() {
    // Bug fix: reset the result buckets on every run. Previously the arrays
    // kept accumulating across repeated track() calls, so a site that had
    // recovered was still reported (and duplicated on each check).
    this.down = [];
    this.badStatus = [];
    return this.asyncLoop(this.domains, async (domain) => {
      try {
        await this.getStatus(domain);
      } catch (err) {
        switch (err) {
          case 'BAD_STATUS':
            return this.badStatus.push(domain);
          case 'UNREACHABLE':
            return this.down.push(domain);
          default:
            throw new Error(err);
        }
      }
    });
  }

  /**
   * Turns an array of strings into a list.
   * @param {array} arr
   * @returns {string}
   */
  makeList(arr) {
    return ` • ${arr.join('\n • ')}`;
  }

  /**
   * Creates a Discord embed based on the given options.
   * @param {object} opts
   * @returns {object}
   */
  generateEmbed(opts) {
    const {
      fields,
      description,
      icon,
      color
    } = opts;
    return {
      embed: {
        title: 'Server Status Tracker',
        description,
        fields,
        thumbnail: {
          url: icon
        },
        color,
        timestamp: new Date(),
        footer: {
          text: '<3'
        }
      }
    };
  }

  /**
   * Initiates the process of checking.
   * @returns {Promise<?object>} the embed, or null when quiet mode suppresses it
   */
  async track() {
    await this.checkDomains();
    const fields = [];
    if (this.badStatus && this.badStatus.length) {
      fields.push({
        name: 'The following websites return a bad status code:',
        value: this.makeList(this.badStatus)
      });
    }
    if (this.down && this.down.length) {
      fields.push({
        name: 'The following websites are inaccessible:',
        value: this.makeList(this.down)
      });
    }
    const hasIssues = fields.length > 0;
    const embed = this.generateEmbed({
      fields,
      description: hasIssues
        ? 'There are some issues with one or more websites.'
        : 'Woo-hoo, all websites are perfectly functional!',
      icon: hasIssues
        ? 'https://thimble-bot.vercel.app/assets/serverstatus/server_error.png'
        : 'https://thimble-bot.vercel.app/assets/serverstatus/server_ok.png',
      color: hasIssues ? 0xd72828 : 0x41ad49
    });
    // In quiet mode an automated all-clear check produces no message.
    return !hasIssues && this.config.quiet && !this.userInitialized ? null : embed;
  }
}
module.exports = StatusTracker;
|
<filename>dubbo-afi/src/main/java/com/zebra/net/NIOClient.java<gh_stars>0
package com.zebra.net;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.zebra.domain.Engin2Info;
import org.apache.log4j.Logger;
/**
 * Non-blocking NIO client that multiplexes outbound connections to remote
 * engines over a single shared {@link Selector}. Channels are cached per
 * remote host so repeated sends reuse the same connection. The selector loop
 * runs in its own thread via {@link #run()}.
 */
public class NIOClient implements Runnable{
    private static Logger logger = Logger.getLogger(NIOClient.class);
    // Shared selector driving all channel events for this process.
    private static Selector selc = null;
    // Cache of open channels keyed by remote-engine info, so a channel to the
    // same host/port can be reused across sendData() calls.
    private static Map<Engin2Info,SocketChannel> channelMap = null;
    static{
        try {
            channelMap = new HashMap<Engin2Info, SocketChannel>();
            selc = Selector.open();
        } catch (IOException e) {
            e.printStackTrace();
            logger.error(e.getMessage());
        }
    }
    /**
     * Registers write interest for the remote host described by
     * {@code e2info}; the selector loop performs the actual send by draining
     * the attachment's data queue.
     */
    public static void sendData(Engin2Info e2info){
        try {
            if(channelMap.containsKey(e2info)){
                // A channel to this remote host already exists; reuse it.
                SocketChannel sc = channelMap.get(e2info);
                sc.register(selc, SelectionKey.OP_WRITE,e2info);
            }else{
                // No channel to this remote host yet; create a new one and
                // ask to be notified when the connect completes.
                SocketChannel sc = SocketChannel.open();
                sc.configureBlocking(false);
                sc.connect(new InetSocketAddress(e2info.getIp(), e2info.getPort()));
                sc.register(selc, SelectionKey.OP_CONNECT | SelectionKey.OP_WRITE ,e2info);
            }
        } catch (Exception e) {
            e.printStackTrace();
            logger.error(e.getMessage());
        }
    }
    /**
     * Selector loop: finishes pending connects (caching the channel for
     * reuse) and flushes each writable channel's queued messages. Each
     * message is sent as a "&lt;length&gt;\r\n" header followed by the body bytes.
     */
    @Override
    public void run() {
        try {
            while(true){
                // Wake up at least once per second even when idle, so newly
                // registered interest ops are picked up promptly.
                selc.select(1 * 1000);
                Set<SelectionKey> set = selc.selectedKeys();
                Iterator<SelectionKey> it = set.iterator();
                while(it.hasNext()){
                    SelectionKey key = it.next();
                    if(key.isConnectable()){
                        // Complete the connection handshake.
                        SocketChannel sc = (SocketChannel) key.channel();
                        // NOTE(review): busy-waits until the connect finishes;
                        // a failed connect would throw from finishConnect().
                        while(!sc.finishConnect()){}
                        // Cache the channel so later sends can reuse it.
                        Engin2Info e2info = (Engin2Info) key.attachment();
                        channelMap.put(e2info, sc);
                        logger.info("创建了连接到远程主机的通道:"+e2info.getIp()+":"+e2info.getPort());
                        // Drop OP_CONNECT so this key is not reported as
                        // connectable again.
                        sc.register(selc, key.interestOps() & (~SelectionKey.OP_CONNECT),e2info);
                    }
                    if(key.isWritable()){
                        SocketChannel sc = (SocketChannel) key.channel();
                        Engin2Info e2info = (Engin2Info) key.attachment();
                        byte [] body = null;
                        // Drain every queued message: length header then body,
                        // written as a gathering write.
                        while((body = e2info.getDataQueue().poll())!=null){
                            ByteBuffer head_buf = ByteBuffer.wrap((body.length+"\r\n").getBytes());
                            ByteBuffer body_buf = ByteBuffer.wrap(body);
                            while(body_buf.hasRemaining()){
                                sc.write(new ByteBuffer[]{head_buf,body_buf});
                            }
                            logger.info("向远程主机["+e2info.getIp()+"#"+e2info.getPort()+"]发送的了数据:"+body.length+"字节。。");
                        }
                        // Done writing for now; drop OP_WRITE until more data
                        // is queued via sendData().
                        sc.register(selc, key.interestOps() & (~SelectionKey.OP_WRITE));
                    }
                    it.remove();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            logger.error(e.getMessage());
        }
    }
}
|
package com.qweex.openbooklikes.fragment;
import android.app.Activity;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.PorterDuff;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.klinker.android.link_builder.LinkConsumableTextView;
import com.qweex.linkspan.LinkSpan;
import com.qweex.openbooklikes.ApiClient;
import com.qweex.openbooklikes.Titleable;
import com.qweex.openbooklikes.LoadingViewManager;
import com.qweex.openbooklikes.activity.MainActivity;
import com.qweex.openbooklikes.R;
import com.qweex.openbooklikes.model.ModelBase;
import com.qweex.openbooklikes.model.Shareable;
/**
 * Base fragment for screens that display a single primary model object.
 * Handles saving/restoring the model, the shared "share" menu item, link
 * click/long-click handling, and the loading/empty/error view scaffolding.
 *
 * @param <Primary> concrete model type shown by the subclass
 */
abstract public class FragmentBase<Primary extends ModelBase> extends Fragment implements Toolbar.OnMenuItemClickListener, Titleable,
        LinkSpan.OnLinkClickListener, LinkSpan.OnLinkLongClickListener{
    // The model instance this fragment displays; may be null until loaded.
    Primary primary;
    // Handler subclasses use for API responses.
    ApiClient.ApiResponseHandler responseHandler;
    // Switches between the loading / empty / error / content views.
    LoadingViewManager loadingManager = new LoadingViewManager();

    /** Tints a menu item's icon white so it is visible on the dark toolbar. */
    public static void optionIcon(MenuItem mi) {
        if(mi.getIcon()!=null)
            mi.getIcon().setColorFilter(0xffffffff, PorterDuff.Mode.SRC_ATOP);
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        // Persist the primary model so it can be restored after process death.
        if(primary!=null)
            primary.wrapInBundle(outState);
        super.onSaveInstanceState(outState);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if(savedInstanceState!=null) {
            // Reuse the saved state as the fragment's arguments on restore.
            setArguments(savedInstanceState);
            Log.d("oac:" + getClass().getSimpleName(), "Saved data: " + primary.apiName());
        } else {
            Log.d("oac:" + getClass().getSimpleName(), "No saved, fetching data");
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(false); // Children should set this if they have one
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        menu.clear();
        MenuItem mi;
        //getActivity().onCreateOptionsMenu(menu);
        // Offer a share action when either the model or the fragment itself
        // is shareable.
        if(primary instanceof Shareable || this instanceof Shareable) {
            mi = menu.add(Menu.NONE, R.id.option_share, Menu.NONE, R.string.option_share)
                    .setIcon(R.drawable.share_np341334);
            optionIcon(mi);
            mi.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // The share source is the model when it is Shareable, else the fragment.
        if(item.getItemId()==R.id.option_share) {
            startActivity(Intent.createChooser(
                    ((Shareable) (primary instanceof Shareable ? primary : this))
                            .share(), getResources().getString(R.string.option_share)));
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Sets (possibly HTML) text on the TextView with the given id, converting
     * URL spans to LinkSpans; hides the view entirely when the text is empty.
     */
    protected TextView setOrHide(View container, int tvId, String text) {
        TextView tv = ((TextView)container.findViewById(tvId));
        ModelBase.unHTML(tv, text);
        if(tv instanceof LinkConsumableTextView)
            LinkSpan.replaceURLSpans(tv, this, this);
        tv.setVisibility(tv.getText() == null || tv.getText().length()==0 ? View.GONE : View.VISIBLE);
        return tv;
    }

    /** Convenience cast of the host activity. */
    protected MainActivity getMainActivity() {
        return (MainActivity) getActivity();
    }

    /**
     * Wraps the fragment's content view in a vertical LinearLayout together
     * with loading / empty / error views, and wires them all into the
     * LoadingViewManager.
     */
    protected View createProgressView(LayoutInflater inflater, ViewGroup container, View childView) {
        ViewGroup loadingView = (ViewGroup) inflater.inflate(R.layout.loading, null);
        View emptyView = inflater.inflate(R.layout.empty, null),
                errorView = inflater.inflate(R.layout.error, null);
        childView.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT));
        LinearLayout linearLayout = new LinearLayout(getActivity());
        linearLayout.setLayoutParams(new LinearLayout.LayoutParams(LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT));
        linearLayout.setOrientation(LinearLayout.VERTICAL);
        linearLayout.addView(loadingView);
        linearLayout.addView(emptyView);
        linearLayout.addView(errorView);
        linearLayout.addView(childView);
        loadingManager.setInitial(loadingView, childView, emptyView, errorView);
        loadingManager.changeState(LoadingViewManager.State.INITIAL);
        loadingManager.content();
        return linearLayout;
    }

    @Override
    public boolean onMenuItemClick(MenuItem item) {
        // Toolbar menu clicks are routed through the standard options handler.
        return onOptionsItemSelected(item);
    }

    /** Converts density-independent pixels to raw pixels for this display. */
    final protected float dpToPx(float dp) {
        return TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp, getResources().getDisplayMetrics());
    }

    public LoadingViewManager getLoadingManager() { return loadingManager; }

    @Override
    public void onClick(LinkSpan linkSpan, String label, String link, LinkConsumableTextView textView) {
        // Plain click: open the link in the external browser.
        Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(link));
        startActivity(browserIntent);
    }

    @Override
    public void onLongClick(final LinkSpan linkSpan, final String label, final String link, final LinkConsumableTextView textView) {
        // Long click: offer Open / Copy / Share choices in a dialog.
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle(link);
        builder.setItems(R.array.click_link, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                Object choice = ((AlertDialog)dialog).getListView().getAdapter().getItem(which);
                switch(which) { //heheheheheheh
                    default:
                    case 0: // Open in Browser
                        FragmentBase.this.onClick(linkSpan, label, link, textView);
                        break;
                    case 1: // Copy to clipboard
                        ClipboardManager clipboard = (ClipboardManager) getActivity().getSystemService(Activity.CLIPBOARD_SERVICE);
                        ClipData clip = ClipData.newPlainText(label, link);
                        clipboard.setPrimaryClip(clip);
                        Toast.makeText(getActivity(), R.string.copied_to_clipboard, Toast.LENGTH_SHORT).show();
                        break;
                    case 2: // Share
                        Intent intent = new Intent(android.content.Intent.ACTION_SEND)
                                .setType("text/plain")
                                .putExtra(Intent.EXTRA_TEXT, link);
                        startActivity(Intent.createChooser(intent, getResources().getString(R.string.option_share)));
                        break;
                }
            }
        });
        builder.show();
    }
}
|
<reponame>muddessir/framework<filename>machine/qemu/sources/u-boot/test/py/tests/test_ofplatdata.py
# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
import pytest
import u_boot_utils as util
@pytest.mark.boardspec('sandbox')
@pytest.mark.buildconfigspec('spl_of_platdata')
def test_spl_devicetree(u_boot_console):
    """Check the SPL device tree kept/dropped the expected properties/nodes."""
    cons = u_boot_console
    build_dir = cons.config.build_dir
    dtb = build_dir + '/spl/u-boot-spl.dtb'
    fdtgrep = build_dir + '/tools/fdtgrep'
    output = util.run_and_log(cons, [fdtgrep, '-l', dtb])
    # These markers and test nodes must have been stripped from the SPL dtb.
    for absent in ('u-boot,dm-pre-reloc', 'u-boot,dm-pre-proper',
                   'u-boot,dm-spl', 'u-boot,dm-tpl',
                   'spl-test5', 'spl-test6'):
        assert absent not in output
    # This node is expected to survive into the SPL dtb.
    assert 'spl-test7' in output
|
// Public entry point: re-export the helper modules so consumers can import
// everything from the package root.
export * from './buildQueryURL'
export * from './buildRepositoryURL'
export * from './defaultEndpoint'
// Namespaced re-exports keep each of these groups under a single identifier.
export * as predicate from './predicate'
export * as cookie from './cookie'
export * from './types'
export * as Response from './types-response'
|
public static void sortAscending(int[] array) {
int temp;
for (int i = 0; i < array.length; i++) {
for (int j = i; j < array.length; j++) {
if (array[j] < array[i]) {
temp = array[i];
array[i] = array[j];
array[j] = temp;
}
}
}
} |
<gh_stars>0
package org.adligo.models.core.shared;
import org.adligo.models.core.shared.util.VersionedValidator;
import org.adligo.models.core.shared.util.VersionValidator;
/**
 * Mutable, versioned variant of {@code OrgMutant}: adds a version number
 * (optimistic-locking style) on top of the base organisation data.
 */
public class OrgVersionedMutant extends OrgMutant implements I_OrgVersionedMutant {
    // Version counter; null until the instance has been given one.
    private Integer version;

    public OrgVersionedMutant() {
    }

    /** Copy from a plain (unversioned) org; the version starts at 0. */
    public OrgVersionedMutant(I_Org p) throws InvalidParameterException {
        super(p);
        // Integer.valueOf avoids the deprecated Integer(int) constructor and
        // reuses the cached boxed value.
        version = Integer.valueOf(0);
    }

    /** Copy from another versioned org, carrying its version over. */
    public OrgVersionedMutant(I_OrgVersioned p) throws InvalidParameterException {
        super(p);
        version = p.getVersion();
    }

    public Integer getVersion() {
        return version;
    }

    /**
     * @throws InvalidParameterException when the validator rejects the value
     */
    public void setVersion(Integer version) throws InvalidParameterException {
        VersionValidator.validate(version, this);
        this.version = version;
    }

    public void isValid() throws ValidationException {
        VersionedValidator.validate(this);
        super.isValid();
    }

    /** @return true only when the base entity is stored AND a version exists. */
    public boolean isStored() throws ValidationException {
        return super.isStored() && version != null;
    }
}
|
#!/bin/bash
# Provision a Debian/Ubuntu host with the build and runtime dependencies for
# Greenplum, tune kernel/IPC settings, and raise per-user resource limits.

sudo apt-get update
sudo apt-get install -y \
  bison \
  ccache \
  cmake \
  curl \
  flex \
  git-core \
  gcc \
  g++ \
  inetutils-ping \
  krb5-kdc \
  krb5-admin-server \
  libapr1-dev \
  libbz2-dev \
  libcurl4-gnutls-dev \
  libevent-dev \
  libkrb5-dev \
  libpam-dev \
  libperl-dev \
  libreadline-dev \
  libssl-dev \
  libxerces-c-dev \
  libxml2-dev \
  libyaml-dev \
  libzstd-dev \
  locales \
  net-tools \
  ninja-build \
  openssh-client \
  openssh-server \
  openssl \
  python3-dev \
  python3-pip \
  python3-psutil \
  python3-pygresql \
  python3-yaml \
  zlib1g-dev

# Conan resolves C/C++ package dependencies during the build.
pip3 install conan

# Kernel & networking tunables (shared memory, semaphores, message queues,
# socket buffers, overcommit policy) required by Greenplum.
sudo tee -a /etc/sysctl.conf << EOF
kernel.shmmax = 5000000000000
kernel.shmmni = 32768
kernel.shmall = 40000000000
kernel.sem = 1000 32768000 1000 32768
kernel.msgmnb = 1048576
kernel.msgmax = 1048576
kernel.msgmni = 32768
net.core.netdev_max_backlog = 80000
net.core.rmem_default = 2097152
net.core.rmem_max = 16777216
net.core.wmem_max = 16777216
vm.overcommit_memory = 2
vm.overcommit_ratio = 95
EOF
sudo sysctl -p

# Raise open-file and process limits for all users (applies to new logins).
sudo mkdir -p /etc/security/limits.d
sudo tee -a /etc/security/limits.d/90-greenplum.conf << EOF
* soft nofile 1048576
* hard nofile 1048576
* soft nproc 1048576
* hard nproc 1048576
EOF

# Bug fix: `ulimit -n` takes a single value; the original passed two
# ("ulimit -n 65536 65536"), which bash rejects with "too many arguments".
ulimit -n 65536
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.