text stringlengths 1 1.05M |
|---|
package dao
import (
"database/sql"
)
// A is a DAO whose fields are populated by a dependency-injection
// container via the `di` struct tags.
type A struct {
// Db and Db1 both request the bean named "db" — presumably this duplication
// exercises repeated injection of the same singleton; TODO confirm intent.
Db *sql.DB `di:"db"`
Db1 *sql.DB `di:"db"`
// B and B1 request bean "b" in prototype scope, i.e. a fresh instance
// is created for each injected field.
B *B `di:"b,prototype"`
B1 *B `di:"b,prototype"`
}
// NewA returns an empty A; its dependency fields are expected to be
// injected afterwards (see the di tags on A).
func NewA() *A {
	a := new(A)
	return a
}
// Version runs "SELECT VERSION()" against the injected database handle and
// returns the server version string. If the query succeeds but yields no
// rows, it returns an empty string and a nil error.
func (p *A) Version() (string, error) {
	result, err := p.Db.Query("SELECT VERSION() as version")
	if err != nil {
		return "", err
	}
	defer result.Close()

	var v string
	if result.Next() {
		if scanErr := result.Scan(&v); scanErr != nil {
			return "", scanErr
		}
	}
	// Surface any error encountered during row iteration.
	if iterErr := result.Err(); iterErr != nil {
		return "", iterErr
	}
	return v, nil
}
|
<reponame>Niccodedev/React-redux-work
// ToDo model: a single task item keyed by its string id.
// The static `schema` object is a Realm object-schema definition; its shape
// (name/primaryKey/properties) follows the Realm JS schema convention.
export class ToDo {
static schema = {
name: 'ToDo',
primaryKey: 'id',
properties: {
id: { type: 'string' },
name: { type: 'string' },
// New tasks start uncompleted.
completed: { type: 'bool', default: false }
}
};
}
|
<reponame>Neapovil/deathrecap
package com.github.neapovil.deathrecap;
import java.text.DecimalFormat;
import java.util.Collection;
import org.bukkit.Material;
import org.bukkit.attribute.Attribute;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.Damageable;
import org.bukkit.inventory.meta.PotionMeta;
import org.bukkit.plugin.java.JavaPlugin;
import org.bukkit.potion.PotionType;
import net.kyori.adventure.text.Component;
import net.kyori.adventure.text.event.HoverEvent;
import net.kyori.adventure.text.format.NamedTextColor;
import net.kyori.adventure.text.format.TextDecoration;
/**
 * Bukkit/Paper plugin that broadcasts a "battle recap" when a player is
 * killed by another player. The broadcast message carries hover text for
 * both victim and killer summarizing health, armor durability and potions.
 */
public final class DeathRecap extends JavaPlugin implements Listener
{
    // Shared health formatter ("12.34"). DecimalFormat is not thread-safe,
    // but Bukkit death events fire on the main server thread only —
    // NOTE(review): confirm no async callers are added later.
    private static final DecimalFormat HEALTH_FORMAT = new DecimalFormat("0.00");

    private static DeathRecap instance;

    @Override
    public void onEnable()
    {
        instance = this;
        this.getServer().getPluginManager().registerEvents(this, this);
    }

    @Override
    public void onDisable()
    {
    }

    /** Returns the singleton plugin instance set in {@link #onEnable()}. */
    public static DeathRecap getInstance()
    {
        return instance;
    }

    @EventHandler
    private void playerDeath(PlayerDeathEvent event)
    {
        // getKiller() is already typed as Player (or null), so the original
        // "instanceof Player" re-check was redundant; only PvP deaths
        // produce a recap. Fetch the killer once instead of four times.
        final Player killer = event.getPlayer().getKiller();
        if (killer == null)
        {
            return;
        }
        final HoverEvent<Component> victimRecap = HoverEvent.showText(this.getRecap(event.getPlayer()));
        final HoverEvent<Component> killerRecap = HoverEvent.showText(this.getRecap(killer));
        final Component built = Component.text("BATTLE RECAP ! ", NamedTextColor.GREEN).decoration(TextDecoration.BOLD, true)
                .append(Component.text(event.getPlayer().getName(), NamedTextColor.LIGHT_PURPLE).decoration(TextDecoration.BOLD, false).hoverEvent(victimRecap))
                .append(Component.text(" - ", NamedTextColor.GRAY))
                .append(Component.text(killer.getName(), NamedTextColor.LIGHT_PURPLE).hoverEvent(killerRecap));
        this.getServer().broadcast(built);
    }

    /**
     * Formats an item's remaining durability as "remaining / max", or
     * "Missing" when the slot is empty.
     */
    private String getDurability(ItemStack itemStack)
    {
        if (itemStack == null)
        {
            return "Missing";
        }
        final short max = itemStack.getType().getMaxDurability();
        // NOTE(review): assumes the item's meta is Damageable — true for the
        // armor pieces this is called with; confirm before reusing elsewhere.
        final int remaining = max - ((Damageable) itemStack.getItemMeta()).getDamage();
        // The original wrapped this in String.valueOf(), a no-op on a value
        // that string concatenation has already made a String.
        return remaining + " / " + max;
    }

    /** Reads the base potion type off a potion item's meta. */
    private PotionType getPotionType(ItemStack itemStack)
    {
        return ((PotionMeta) itemStack.getItemMeta()).getBasePotionData().getType();
    }

    /**
     * Builds the hover component for one player: current/max health, armor
     * durabilities, and counts of heal/speed/fire-resistance potions
     * (splash potions plus, where applicable, drinkable ones).
     */
    private Component getRecap(Player player)
    {
        final String helmet = this.getDurability(player.getInventory().getHelmet());
        final String chestplate = this.getDurability(player.getInventory().getChestplate());
        final String leggings = this.getDurability(player.getInventory().getLeggings());
        final String boots = this.getDurability(player.getInventory().getBoots());
        final Collection<? extends ItemStack> splash = player.getInventory().all(Material.SPLASH_POTION).values();
        final Collection<? extends ItemStack> drink = player.getInventory().all(Material.POTION).values();
        final long instantheal = splash.stream().filter(i -> this.getPotionType(i).equals(PotionType.INSTANT_HEAL)).count();
        final long speed = splash.stream().filter(i -> this.getPotionType(i).equals(PotionType.SPEED)).count();
        final long speed1 = drink.stream().filter(i -> this.getPotionType(i).equals(PotionType.SPEED)).count();
        final long fire = splash.stream().filter(i -> this.getPotionType(i).equals(PotionType.FIRE_RESISTANCE)).count();
        final long fire1 = drink.stream().filter(i -> this.getPotionType(i).equals(PotionType.FIRE_RESISTANCE)).count();
        final String s = HEALTH_FORMAT.format(player.getHealth());
        final String s1 = HEALTH_FORMAT.format(player.getAttribute(Attribute.GENERIC_MAX_HEALTH).getValue());
        return Component.text(player.getName() + "'s recap", NamedTextColor.LIGHT_PURPLE)
                .append(Component.text("\n\u2764 ", NamedTextColor.RED))
                .append(Component.text(s, NamedTextColor.WHITE))
                .append(Component.text(" / ", NamedTextColor.GRAY))
                .append(Component.text("\u2764 ", NamedTextColor.RED))
                .append(Component.text(s1, NamedTextColor.WHITE))
                .append(Component.text("\n\n< -- Armor -- >", NamedTextColor.DARK_AQUA))
                .append(Component.text("\nHelmet: " + helmet, NamedTextColor.GRAY))
                .append(Component.text("\nChestplate: " + chestplate, NamedTextColor.GRAY))
                .append(Component.text("\nLeggings: " + leggings, NamedTextColor.GRAY))
                .append(Component.text("\nBoots: " + boots, NamedTextColor.GRAY))
                .append(Component.text("\n\n< -- Potions -- >", NamedTextColor.DARK_AQUA))
                .append(Component.text("\nInstant Heal: " + instantheal, NamedTextColor.GRAY))
                .append(Component.text("\nSpeed: " + (speed + speed1), NamedTextColor.GRAY))
                .append(Component.text("\nFire Resistance: " + (fire + fire1), NamedTextColor.GRAY));
    }
}
|
#!/bin/bash
# Copyright 2017 Johns Hopkins University (Shinji Watanabe)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)

# ESPnet WSJ ASR recipe: stage 0 data prep, 1 fbank features, 2 dictionary
# and JSON prep, 3 LM training, 4 E2E ASR training, 5 decoding + scoring.
. ./path.sh || exit 1;
. ./cmd.sh || exit 1;
# general configuration
backend=pytorch
stage=0 # start from 0 if you need to start from data preparation
stop_stage=100
ngpu=1 # number of gpus ("0" uses cpu, otherwise use gpu)
debugmode=1
dumpdir=dump # directory to dump full features
N=0 # number of minibatches to be used (mainly for debugging). "0" uses all minibatches.
verbose=0 # verbose option
resume= # Resume the training from snapshot
seed=1
# feature configuration
do_delta=false
# sample filtering
min_io_delta=4 # samples with `len(input) - len(output) * min_io_ratio < min_io_delta` will be removed.
# config files
preprocess_config=conf/no_preprocess.yaml # use conf/specaug.yaml for data augmentation
train_config=conf/train.yaml
lm_config=conf/lm.yaml
decode_config=conf/decode.yaml
# rnnlm related
skip_lm_training=false # for only using end-to-end ASR model without LM
use_wordlm=true # false means to train/use a character LM
lm_vocabsize=65000 # effective only for word LMs
lm_resume= # specify a snapshot file to resume LM training
lmtag= # tag for managing LMs
# decoding parameter
recog_model=model.acc.best # set a model to be used for decoding: 'model.acc.best' or 'model.loss.best'
# model average related (only for transformer)
n_average=10 # the number of ASR models to be averaged
use_valbest_average=false # if true, the validation `n_average`-best ASR models will be averaged.
# if false, the last `n_average` ASR models will be averaged.
# data: paths to the LDC WSJ corpora (site-specific)
wsj0=/export/corpora5/LDC/LDC93S6B
wsj1=/export/corpora5/LDC/LDC94S13B
# exp tag
tag="" # tag for managing experiments.
. utils/parse_options.sh || exit 1;
# Set bash to 'debug' mode, it will exit on :
# -e 'error', -u 'undefined variable', -o ... 'error in pipeline', -x 'print commands',
set -e
set -u
set -o pipefail
# Data-set names used throughout the recipe (Kaldi WSJ conventions).
train_set=train_si284
train_dev=test_dev93
train_test=test_eval92
recog_set="test_dev93 test_eval92"
if [ ${stage} -le 0 ] && [ ${stop_stage} -ge 0 ]; then
### Task dependent. You have to make data the following preparation part by yourself.
### But you can utilize Kaldi recipes in most cases
echo "stage 0: Data preparation"
local/wsj_data_prep.sh ${wsj0}/??-{?,??}.? ${wsj1}/??-{?,??}.?
local/wsj_format_data.sh
fi
# Feature dump directories, one per set, parameterized by delta on/off.
feat_tr_dir=${dumpdir}/${train_set}/delta${do_delta}; mkdir -p ${feat_tr_dir}
feat_dt_dir=${dumpdir}/${train_dev}/delta${do_delta}; mkdir -p ${feat_dt_dir}
if [ ${stage} -le 1 ] && [ ${stop_stage} -ge 1 ]; then
### Task dependent. You have to design training and dev sets by yourself.
### But you can utilize Kaldi recipes in most cases
echo "stage 1: Feature Generation"
fbankdir=fbank
# Generate the fbank features; by default 80-dimensional fbanks with pitch on each frame
for x in train_si284 test_dev93 test_eval92; do
steps/make_fbank_pitch.sh --cmd "$train_cmd" --nj 10 --write_utt2num_frames true \
data/${x} exp/make_fbank/${x} ${fbankdir}
utils/fix_data_dir.sh data/${x}
done
# compute global CMVN
compute-cmvn-stats scp:data/${train_set}/feats.scp data/${train_set}/cmvn.ark
# dump features for training
# The clsp.jhu.edu blocks spread the dump storage across cluster disks;
# they are no-ops on other sites.
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d ${feat_tr_dir}/storage ]; then
utils/create_split_dir.pl \
/export/b{10,11,12,13}/${USER}/espnet-data/egs/wsj/asr1/dump/${train_set}/delta${do_delta}/storage \
${feat_tr_dir}/storage
fi
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d ${feat_dt_dir}/storage ]; then
utils/create_split_dir.pl \
/export/b{10,11,12,13}/${USER}/espnet-data/egs/wsj/asr1/dump/${train_dev}/delta${do_delta}/storage \
${feat_dt_dir}/storage
fi
dump.sh --cmd "$train_cmd" --nj 32 --do_delta ${do_delta} \
data/${train_set}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/train ${feat_tr_dir}
dump.sh --cmd "$train_cmd" --nj 4 --do_delta ${do_delta} \
data/${train_dev}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/dev ${feat_dt_dir}
for rtask in ${recog_set}; do
feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}; mkdir -p ${feat_recog_dir}
dump.sh --cmd "$train_cmd" --nj 4 --do_delta ${do_delta} \
data/${rtask}/feats.scp data/${train_set}/cmvn.ark exp/dump_feats/recog/${rtask} \
${feat_recog_dir}
done
fi
dict=data/lang_1char/${train_set}_units.txt
nlsyms=data/lang_1char/non_lang_syms.txt
echo "dictionary: ${dict}"
if [ ${stage} -le 2 ] && [ ${stop_stage} -ge 2 ]; then
### Task dependent. You have to check non-linguistic symbols used in the corpus.
echo "stage 2: Dictionary and Json Data Preparation"
mkdir -p data/lang_1char/
echo "make a non-linguistic symbol list"
cut -f 2- data/${train_set}/text | tr " " "\n" | sort | uniq | grep "<" > ${nlsyms}
cat ${nlsyms}
echo "make a dictionary"
echo "<unk> 1" > ${dict} # <unk> must be 1, 0 will be used for "blank" in CTC
text2token.py -s 1 -n 1 -l ${nlsyms} data/${train_set}/text | cut -f 2- -d" " | tr " " "\n" \
| sort | uniq | grep -v -e '^\s*$' | awk '{print $0 " " NR+1}' >> ${dict}
wc -l ${dict}
echo "make json files"
data2json.sh --feat ${feat_tr_dir}/feats.scp --nlsyms ${nlsyms} \
data/${train_set} ${dict} > ${feat_tr_dir}/data.json
data2json.sh --feat ${feat_dt_dir}/feats.scp --nlsyms ${nlsyms} \
data/${train_dev} ${dict} > ${feat_dt_dir}/data.json
for rtask in ${recog_set}; do
feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}
data2json.sh --feat ${feat_recog_dir}/feats.scp \
--nlsyms ${nlsyms} data/${rtask} ${dict} > ${feat_recog_dir}/data.json
done
### Filter out short samples which lead to `loss_ctc=inf` during training
### with the specified configuration.
# Samples satisfying `len(input) - len(output) * min_io_ratio < min_io_delta` will be pruned.
local/filtering_samples.py \
--config ${train_config} \
--preprocess-conf ${preprocess_config} \
--data-json ${feat_tr_dir}/data.json \
--mode-subsample "asr" \
${min_io_delta:+--min-io-delta $min_io_delta} \
--output-json-path ${feat_tr_dir}/data.json
fi
# It takes a few days. If you just want to end-to-end ASR without LM,
# you can skip this by setting skip_lm_training=true
if [ ${stage} -le 3 ] && [ ${stop_stage} -ge 3 ] && ! ${skip_lm_training}; then
echo "stage 3: LM Preparation"
if [ -z ${lmtag} ]; then
lmtag=$(basename ${lm_config%.*})
if [ ${use_wordlm} = true ]; then
lmtag=${lmtag}_word${lm_vocabsize}
fi
fi
lmexpname=train_rnnlm_${backend}_${lmtag}
lmexpdir=exp/${lmexpname}
mkdir -p ${lmexpdir}
# Word LM: vocabulary built from transcripts; char LM: reuse the ASR dict.
if [ ${use_wordlm} = true ]; then
lmdatadir=data/local/wordlm_train
lmdict=${lmdatadir}/wordlist_${lm_vocabsize}.txt
mkdir -p ${lmdatadir}
cut -f 2- -d" " data/${train_set}/text > ${lmdatadir}/train_trans.txt
zcat ${wsj1}/13-32.1/wsj1/doc/lng_modl/lm_train/np_data/{87,88,89}/*.z \
| grep -v "<" | tr "[:lower:]" "[:upper:]" > ${lmdatadir}/train_others.txt
cut -f 2- -d" " data/${train_dev}/text > ${lmdatadir}/valid.txt
cut -f 2- -d" " data/${train_test}/text > ${lmdatadir}/test.txt
cat ${lmdatadir}/train_trans.txt ${lmdatadir}/train_others.txt > ${lmdatadir}/train.txt
text2vocabulary.py -s ${lm_vocabsize} -o ${lmdict} ${lmdatadir}/train.txt
else
lmdatadir=data/local/lm_train
lmdict=${dict}
mkdir -p ${lmdatadir}
text2token.py -s 1 -n 1 -l ${nlsyms} data/${train_set}/text \
| cut -f 2- -d" " > ${lmdatadir}/train_trans.txt
zcat ${wsj1}/13-32.1/wsj1/doc/lng_modl/lm_train/np_data/{87,88,89}/*.z \
| grep -v "<" | tr "[:lower:]" "[:upper:]" \
| text2token.py -n 1 | cut -f 2- -d" " > ${lmdatadir}/train_others.txt
text2token.py -s 1 -n 1 -l ${nlsyms} data/${train_dev}/text \
| cut -f 2- -d" " > ${lmdatadir}/valid.txt
text2token.py -s 1 -n 1 -l ${nlsyms} data/${train_test}/text \
| cut -f 2- -d" " > ${lmdatadir}/test.txt
cat ${lmdatadir}/train_trans.txt ${lmdatadir}/train_others.txt > ${lmdatadir}/train.txt
fi
${cuda_cmd} --gpu ${ngpu} ${lmexpdir}/train.log \
lm_train.py \
--config ${lm_config} \
--ngpu ${ngpu} \
--backend ${backend} \
--verbose 1 \
--outdir ${lmexpdir} \
--tensorboard-dir tensorboard/${lmexpname} \
--train-label ${lmdatadir}/train.txt \
--valid-label ${lmdatadir}/valid.txt \
--test-label ${lmdatadir}/test.txt \
--resume ${lm_resume} \
--dict ${lmdict}
fi
# Derive the experiment name from the configs unless an explicit tag is given.
if [ -z ${tag} ]; then
expname=${train_set}_${backend}_$(basename ${train_config%.*})_$(basename ${preprocess_config%.*})
if ${do_delta}; then
expname=${expname}_delta
fi
else
expname=${train_set}_${backend}_${tag}
fi
expdir=exp/${expname}
mkdir -p ${expdir}
if [ ${stage} -le 4 ] && [ ${stop_stage} -ge 4 ]; then
echo "stage 4: Network Training"
${cuda_cmd} --gpu ${ngpu} ${expdir}/train.log \
asr_train.py \
--config ${train_config} \
--preprocess-conf ${preprocess_config} \
--ngpu ${ngpu} \
--backend ${backend} \
--outdir ${expdir}/results \
--tensorboard-dir tensorboard/${expname} \
--debugmode ${debugmode} \
--dict ${dict} \
--debugdir ${expdir} \
--minibatches ${N} \
--verbose ${verbose} \
--resume ${resume} \
--seed ${seed} \
--train-json ${feat_tr_dir}/data.json \
--valid-json ${feat_dt_dir}/data.json
fi
if [ ${stage} -le 5 ] && [ ${stop_stage} -ge 5 ]; then
echo "stage 5: Decoding"
nj=32
# Transformer-family models decode from an averaged checkpoint rather
# than the single recog_model snapshot.
if [[ $(get_yaml.py ${train_config} model-module) = *transformer* ]] || \
[[ $(get_yaml.py ${train_config} model-module) = *conformer* ]] || \
[[ $(get_yaml.py ${train_config} model-module) = *maskctc* ]] || \
[[ $(get_yaml.py ${train_config} etype) = transformer ]] || \
[[ $(get_yaml.py ${train_config} dtype) = transformer ]]; then
average_opts=
if ${use_valbest_average}; then
recog_model=model.val${n_average}.avg.best
average_opts="--log ${expdir}/results/log"
else
recog_model=model.last${n_average}.avg.best
fi
average_checkpoints.py --backend ${backend} \
--snapshots ${expdir}/results/snapshot.ep.* \
--out ${expdir}/results/${recog_model} \
--num ${n_average} \
${average_opts}
fi
pids=() # initialize pids
# Decode each evaluation set in a background subshell, then wait on all.
for rtask in ${recog_set}; do
(
recog_opts=
if ${skip_lm_training}; then
if [ -z ${lmtag} ]; then
lmtag="nolm"
fi
else
if [ ${use_wordlm} = true ]; then
recog_opts="--word-rnnlm ${lmexpdir}/rnnlm.model.best"
else
recog_opts="--rnnlm ${lmexpdir}/rnnlm.model.best"
fi
fi
decode_dir=decode_${rtask}_$(basename ${decode_config%.*})_${lmtag}
feat_recog_dir=${dumpdir}/${rtask}/delta${do_delta}
# split data
splitjson.py --parts ${nj} ${feat_recog_dir}/data.json
#### use CPU for decoding
ngpu=0
${decode_cmd} JOB=1:${nj} ${expdir}/${decode_dir}/log/decode.JOB.log \
asr_recog.py \
--config ${decode_config} \
--ngpu ${ngpu} \
--backend ${backend} \
--recog-json ${feat_recog_dir}/split${nj}utt/data.JOB.json \
--result-label ${expdir}/${decode_dir}/data.JOB.json \
--model ${expdir}/results/${recog_model} \
${recog_opts}
score_sclite.sh --wer true --nlsyms ${nlsyms} ${expdir}/${decode_dir} ${dict}
) &
pids+=($!) # store background pids
done
i=0; for pid in "${pids[@]}"; do wait ${pid} || ((++i)); done
[ ${i} -gt 0 ] && echo "$0: ${i} background jobs are failed." && false
echo "Finished"
fi
|
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//using NUnit.Framework;
package com.quantconnect.lean.tests.common.securities;
// NOTE(review): this file appears to be auto-converted from C# (NUnit
// attributes, decimal "m" literals, typeless local assignments survive the
// translation). The edits below fix defects while keeping that style.
/** Unit tests for Security: construction defaults, holdings and price updates. */
[TestFixture]
public class SecurityTests {
    [Test]
    public void SimplePropertiesTests() {
        exchangeHours = SecurityExchangeHours.AlwaysOpen(TimeZones.NewYork);
        config = CreateTradeBarConfig();
        security = new Security(exchangeHours, config, new Cash(CashBook.AccountCurrency, 0, 1m), SymbolProperties.GetDefault(CashBook.AccountCurrency));
        Assert.AreEqual(config, security.Subscriptions.Single());
        Assert.AreEqual(config.Symbol, security.Symbol);
        Assert.AreEqual(config.SecurityType, security.Type);
        Assert.AreEqual(config.Resolution, security.Resolution);
        Assert.AreEqual(config.FillDataForward, security.IsFillDataForward);
        Assert.AreEqual(exchangeHours, security.Exchange.Hours);
    }
    [Test]
    public void ConstructorTests() {
        security = GetSecurity();
        Assert.IsNotNull(security.Exchange);
        Assert.IsInstanceOf<SecurityExchange>(security.Exchange);
        Assert.IsNotNull(security.Cache);
        Assert.IsInstanceOf<SecurityCache>(security.Cache);
        Assert.IsNotNull(security.PortfolioModel);
        Assert.IsInstanceOf<SecurityPortfolioModel>(security.PortfolioModel);
        Assert.IsNotNull(security.FillModel);
        Assert.IsInstanceOf<ImmediateFillModel>(security.FillModel);
        // Fixed copy-paste bug: the original re-checked PortfolioModel here
        // instead of null-checking the FeeModel it inspects on the next line.
        Assert.IsNotNull(security.FeeModel);
        // Fixed mangled type name "IntegereractiveBrokersFeeModel" (a bad
        // "Int" -> "Integer" text replacement) back to the real model type.
        Assert.IsInstanceOf<InteractiveBrokersFeeModel>(security.FeeModel);
        Assert.IsNotNull(security.SlippageModel);
        Assert.IsInstanceOf<SpreadSlippageModel>(security.SlippageModel);
        Assert.IsNotNull(security.SettlementModel);
        Assert.IsInstanceOf<ImmediateSettlementModel>(security.SettlementModel);
        Assert.IsNotNull(security.MarginModel);
        Assert.IsInstanceOf<SecurityMarginModel>(security.MarginModel);
        Assert.IsNotNull(security.DataFilter);
        Assert.IsInstanceOf<SecurityDataFilter>(security.DataFilter);
    }
    [Test]
    public void HoldingsTests() {
        security = GetSecurity();
        // Long 100 stocks test
        security.Holdings.SetHoldings(100m, 100);
        Assert.AreEqual(100m, security.Holdings.AveragePrice);
        Assert.AreEqual(100, security.Holdings.Quantity);
        Assert.IsTrue(security.HoldStock);
        Assert.IsTrue(security.Invested);
        Assert.IsTrue(security.Holdings.IsLong);
        Assert.IsFalse(security.Holdings.IsShort);
        // Short 100 stocks test
        security.Holdings.SetHoldings(100m, -100);
        Assert.AreEqual(100m, security.Holdings.AveragePrice);
        Assert.AreEqual(-100, security.Holdings.Quantity);
        Assert.IsTrue(security.HoldStock);
        Assert.IsTrue(security.Invested);
        Assert.IsFalse(security.Holdings.IsLong);
        Assert.IsTrue(security.Holdings.IsShort);
        // Flat test
        security.Holdings.SetHoldings(100m, 0);
        Assert.AreEqual(100m, security.Holdings.AveragePrice);
        Assert.AreEqual(0, security.Holdings.Quantity);
        Assert.IsFalse(security.HoldStock);
        Assert.IsFalse(security.Invested);
        Assert.IsFalse(security.Holdings.IsLong);
        Assert.IsFalse(security.Holdings.IsShort);
    }
    [Test]
    public void UpdatingSecurityPriceTests() {
        security = GetSecurity();
        // Update security price with a TradeBar
        security.SetMarketPrice(new TradeBar(DateTime.Now, Symbols.SPY, 101m, 103m, 100m, 102m, 100000));
        Assert.AreEqual(101m, security.Open);
        Assert.AreEqual(103m, security.High);
        Assert.AreEqual(100m, security.Low);
        Assert.AreEqual(102m, security.Close);
        Assert.AreEqual(100000, security.Volume);
        // High/Close property is only modified by IBar instances
        security.SetMarketPrice(new Tick(DateTime.Now, Symbols.SPY, 104m, 104m, 104m));
        Assert.AreEqual(103m, security.High);
        Assert.AreEqual(102m, security.Close);
        Assert.AreEqual(104m, security.Price);
        // Low/Close property is only modified by IBar instances
        security.SetMarketPrice(new Tick(DateTime.Now, Symbols.SPY, 99m, 99m, 99m));
        Assert.AreEqual(100m, security.Low);
        Assert.AreEqual(102m, security.Close);
        Assert.AreEqual(99m, security.Price);
    }
    [Test]
    public void SetLeverageTest() {
        security = GetSecurity();
        security.SetLeverage(4m);
        Assert.AreEqual(4m,security.Leverage);
        security.SetLeverage(5m);
        Assert.AreEqual(5m, security.Leverage);
        Assert.That(() -> security.SetLeverage(0.1m),
            Throws.TypeOf<ArgumentException>().With.Message.EqualTo( "Leverage must be greater than or equal to 1."));
    }
    /** Builds a minute-resolution SPY equity with default models for the tests. */
    private Security GetSecurity() {
        return new Security(SecurityExchangeHours.AlwaysOpen(TimeZones.NewYork), CreateTradeBarConfig(), new Cash(CashBook.AccountCurrency, 0, 1m), SymbolProperties.GetDefault(CashBook.AccountCurrency));
    }
    /** Standard SPY TradeBar subscription used by every test fixture. */
    private static SubscriptionDataConfig CreateTradeBarConfig() {
        return new SubscriptionDataConfig(typeof(TradeBar), Symbols.SPY, Resolution.Minute, TimeZones.NewYork, TimeZones.NewYork, true, true, false);
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.policy.followthesun;
import static org.testng.Assert.assertEquals;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.Test;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.Entities;
import brooklyn.entity.basic.EntityLocal;
import brooklyn.location.Location;
import brooklyn.location.basic.SimulatedLocation;
import brooklyn.policy.loadbalancing.BalanceableContainer;
import brooklyn.policy.loadbalancing.MockContainerEntity;
import brooklyn.policy.loadbalancing.MockItemEntity;
import brooklyn.policy.loadbalancing.MockItemEntityImpl;
import brooklyn.policy.loadbalancing.Movable;
import brooklyn.test.Asserts;
import brooklyn.util.collections.MutableMap;
import com.google.common.base.Function;
import com.google.common.base.Predicates;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
/**
 * Soak/stress tests for the follow-the-sun policy: builds simulated
 * locations, containers and locked/movable items, repeatedly makes one
 * location the "busiest", and asserts that every movable item eventually
 * migrates to that location. Cycle counts / sizes are set per test via
 * {@link RunConfig}.
 */
public class FollowTheSunPolicySoakTest extends AbstractFollowTheSunPolicyTest {
private static final Logger LOG = LoggerFactory.getLogger(FollowTheSunPolicySoakTest.class);
// Default per-cycle wait for items to settle in the busiest location.
private static final long TIMEOUT_MS = 10*1000;
@Test
public void testFollowTheSunQuickTest() {
RunConfig config = new RunConfig();
config.numCycles = 1;
config.numLocations=3;
config.numContainersPerLocation = 5;
config.numLockedItemsPerLocation = 2;
config.numMovableItems = 10;
runFollowTheSunSoakTest(config);
}
@Test
public void testLoadBalancingManyItemsQuickTest() {
RunConfig config = new RunConfig();
config.numCycles = 1;
config.numLocations=2;
config.numContainersPerLocation = 3;
config.numLockedItemsPerLocation = 2;
config.numMovableItems = 10;
config.numContainerStopsPerCycle = 1;
config.numItemStopsPerCycle = 1;
runFollowTheSunSoakTest(config);
}
@Test(groups={"Integration"}) // takes ~2s
public void testLoadBalancingManyItemsNotTooLongTest() {
RunConfig config = new RunConfig();
config.numCycles = 1;
config.numLocations=3;
config.numContainersPerLocation = 5;
config.numLockedItemsPerLocation = 2;
config.numMovableItems = 500;
config.numContainerStopsPerCycle = 1;
config.numItemStopsPerCycle = 1;
runFollowTheSunSoakTest(config);
}
@Test(groups={"Integration","Acceptance"}) // integration group, because it's slow to run many cycles
public void testLoadBalancingSoakTest() {
RunConfig config = new RunConfig();
config.numCycles = 100;
config.numLocations=3;
config.numContainersPerLocation = 5;
config.numLockedItemsPerLocation = 2;
config.numMovableItems = 10;
runFollowTheSunSoakTest(config);
}
@Test(groups={"Integration","Acceptance"}) // integration group, because it's slow to run many cycles
public void testLoadBalancingManyItemsSoakTest() {
RunConfig config = new RunConfig();
config.numCycles = 100;
config.numLocations=3;
config.numContainersPerLocation = 5;
config.numLockedItemsPerLocation = 2;
config.numMovableItems = 100;
config.numContainerStopsPerCycle = 3;
config.numItemStopsPerCycle = 10;
runFollowTheSunSoakTest(config);
}
@Test(groups={"Integration","Acceptance"}) // integration group, because it's slow to run many cycles
public void testLoadBalancingManyManyItemsTest() {
RunConfig config = new RunConfig();
config.numCycles = 1;
config.numLocations=10;
config.numContainersPerLocation = 5;
config.numLockedItemsPerLocation = 100;
config.numMovableItems = 1000;
config.numContainerStopsPerCycle = 0;
config.numItemStopsPerCycle = 0;
config.timeout_ms = 30*1000;
config.verbose = false;
runFollowTheSunSoakTest(config);
}
/**
 * Core soak loop: set up the simulated topology, then for each cycle churn
 * items/containers, point the load at a random "busiest" location, and wait
 * until every movable item has moved there.
 */
private void runFollowTheSunSoakTest(RunConfig config) {
int numCycles = config.numCycles;
int numLocations = config.numLocations;
int numContainersPerLocation = config.numContainersPerLocation;
int numLockedItemsPerLocation = config.numLockedItemsPerLocation;
int numMovableItems = config.numMovableItems;
int numContainerStopsPerCycle = config.numContainerStopsPerCycle;
int numItemStopsPerCycle = config.numItemStopsPerCycle;
long timeout_ms = config.timeout_ms;
final boolean verbose = config.verbose;
// Reset the global move counter so per-run totals are meaningful.
MockItemEntityImpl.totalMoveCount.set(0);
List<Location> locations = new ArrayList<Location>();
Multimap<Location,MockContainerEntity> containers = HashMultimap.<Location,MockContainerEntity>create();
Multimap<Location,MockItemEntity> lockedItems = HashMultimap.<Location,MockItemEntity>create();
final List<MockItemEntity> movableItems = new ArrayList<MockItemEntity>();
// Build locations, each with its containers and immovable ("locked") items.
for (int i = 1; i <= numLocations; i++) {
String locName = "loc"+i;
Location loc = new SimulatedLocation(MutableMap.of("name",locName));
locations.add(loc);
for (int j = 1; j <= numContainersPerLocation; j++) {
MockContainerEntity container = newContainer(app, loc, "container-"+locName+"-"+j);
containers.put(loc, container);
}
for (int j = 1; j <= numLockedItemsPerLocation; j++) {
MockContainerEntity container = Iterables.get(containers.get(loc), j%numContainersPerLocation);
MockItemEntity item = newLockedItem(app, container, "item-locked-"+locName+"-"+j);
lockedItems.put(loc, item);
}
}
// Movable items are spread round-robin across all containers.
for (int i = 1; i <= numMovableItems; i++) {
MockContainerEntity container = Iterables.get(containers.values(), i%containers.size());
MockItemEntity item = newItem(app, container, "item-movable"+i);
movableItems.add(item);
}
for (int i = 1; i <= numCycles; i++) {
LOG.info("{}: cycle {}", FollowTheSunPolicySoakTest.class.getSimpleName(), i);
// Stop movable items, and start others
for (int j = 1; j <= numItemStopsPerCycle; j++) {
int itemIndex = random.nextInt(numMovableItems);
MockItemEntity itemToStop = movableItems.get(itemIndex);
itemToStop.stop();
LOG.debug("Unmanaging item {}", itemToStop);
Entities.unmanage(itemToStop);
movableItems.set(itemIndex, newItem(app, Iterables.get(containers.values(), 0), "item-movable"+itemIndex));
}
// Choose a location to be busiest
int locIndex = random.nextInt(numLocations);
final Location busiestLocation = locations.get(locIndex);
// Repartition the load across the items
// Each movable item reports high usage (~1000 +/- jitter) against the
// locked items in the busiest location, low (~100) elsewhere.
for (int j = 0; j < numMovableItems; j++) {
MockItemEntity item = movableItems.get(j);
Map<Entity, Double> workrates = Maps.newLinkedHashMap();
for (Map.Entry<Location,MockItemEntity> entry : lockedItems.entries()) {
Location location = entry.getKey();
MockItemEntity source = entry.getValue();
double baseWorkrate = (location == busiestLocation ? 1000 : 100);
double jitter = 10;
double jitteredWorkrate = Math.max(0, baseWorkrate + (random.nextDouble()*jitter*2 - jitter));
workrates.put(source, jitteredWorkrate);
}
((EntityLocal)item).setAttribute(MockItemEntity.ITEM_USAGE_METRIC, workrates);
}
// Stop containers, and start others
// This offloads the "immovable" items to other containers in the same location!
for (int j = 1; j <= numContainerStopsPerCycle; j++) {
int containerIndex = random.nextInt(containers.size());
MockContainerEntity containerToStop = Iterables.get(containers.values(), containerIndex);
Location location = Iterables.get(containerToStop.getLocations(), 0);
MockContainerEntity otherContainerInLocation = Iterables.find(containers.get(location), Predicates.not(Predicates.equalTo(containerToStop)), null);
containerToStop.offloadAndStop(otherContainerInLocation);
LOG.debug("Unmanaging container {}", containerToStop);
Entities.unmanage(containerToStop);
containers.remove(location, containerToStop);
MockContainerEntity containerToAdd = newContainer(app, location, "container-"+location.getDisplayName()+"-new."+i+"."+j);
containers.put(location, containerToAdd);
}
// Assert that the items all end up in the location with maximum load-generation
Asserts.succeedsEventually(MutableMap.of("timeout", timeout_ms), new Runnable() {
public void run() {
Iterable<Location> itemLocs = Iterables.transform(movableItems, new Function<MockItemEntity, Location>() {
public Location apply(MockItemEntity input) {
BalanceableContainer<?> container = input.getAttribute(Movable.CONTAINER);
Collection<Location> locs = (container != null) ? container.getLocations(): null;
return (locs != null && locs.size() > 0) ? Iterables.get(locs, 0) : null;
}});
Iterable<String> itemLocNames = Iterables.transform(itemLocs, new Function<Location, String>() {
public String apply(Location input) {
return (input != null) ? input.getDisplayName() : null;
}});
String errMsg;
if (verbose) {
errMsg = verboseDumpToString()+"; itemLocs="+itemLocNames;
} else {
Set<String> locNamesInUse = Sets.newLinkedHashSet(itemLocNames);
errMsg = "locsInUse="+locNamesInUse+"; totalMoves="+MockItemEntityImpl.totalMoveCount;
}
assertEquals(ImmutableList.copyOf(itemLocs), Collections.nCopies(movableItems.size(), busiestLocation), errMsg);
}});
}
}
/** Tunable knobs for one soak run; defaults give a small, fast run. */
static class RunConfig {
int numCycles = 1;
int numLocations = 3;
int numContainersPerLocation = 5;
int numLockedItemsPerLocation = 5;
int numMovableItems = 5;
int numContainerStopsPerCycle = 0;
int numItemStopsPerCycle = 0;
long timeout_ms = TIMEOUT_MS;
boolean verbose = true;
}
}
|
def extract_code_info(code_snippet):
    """Parse a dataset-style snippet header and return its pieces.

    The header has the form ``<reponame>owner/repo<gh_stars>lo-hi``.

    Args:
        code_snippet: the raw header string.

    Returns:
        dict with keys:
            repository_name: the full "owner/repo" string
            github_username: the "owner" segment of the repository name
            stars_range:     the "lo-hi" stars bucket

    Note: the original derived github_username from parts[2].split("/")[0],
    which actually yielded "gh_stars>lo-hi" (parts[2] contains no "/");
    it is now taken from the repository name's owner segment.
    """
    # parts[1] == "reponame>owner/repo", parts[2] == "gh_stars>lo-hi"
    parts = code_snippet.split("<")
    repository_name = parts[1].split(">", 1)[1]
    github_username = repository_name.split("/", 1)[0]
    stars_range = parts[2].split(">", 1)[1]
    return {
        "repository_name": repository_name,
        "github_username": github_username,
        "stars_range": stars_range,
    }


# Demo: parse the canonical example header.
code_snippet = "<reponame>xxao/pero<gh_stars>10-100"
result = extract_code_info(code_snippet)
print(result)  # {'repository_name': 'xxao/pero', 'github_username': 'xxao', 'stars_range': '10-100'}
package com.github.yupc.example;
import com.github.yupc.example.BaseExample.Criterion;
import java.util.ArrayList;
import java.util.List;
/**
* @author yupc
* @createTime 2017-12-08 11:36
*/
/**
 * Base container for the list of {@link BaseExample.Criterion} objects that a
 * generated Example class accumulates to build a dynamic WHERE clause.
 */
public abstract class BaseGeneratedCriteria {
    /** Conditions combined by the SQL generator. */
    protected List<Criterion> criteria;

    public BaseGeneratedCriteria() {
        super();
        criteria = new ArrayList<BaseExample.Criterion>();
    }

    /** @return true once at least one criterion has been added */
    public boolean isValid() {
        return criteria.size() > 0;
    }

    public List<Criterion> getAllCriteria() {
        return criteria;
    }

    public List<Criterion> getCriteria() {
        return criteria;
    }

    // No-value condition (e.g. "col is null"); null/empty fragments are ignored.
    protected void addCriterion(String condition) {
        if (condition == null || condition.isEmpty()) {
            return;
        }
        criteria.add(new Criterion(condition));
    }

    // Single-value condition. Null or empty-string values are silently skipped.
    // NOTE(review): `property` is unused — MyBatis-generated code normally uses
    // it in an exception message for null values; confirm the silent skip here
    // is intentional.
    protected void addCriterion(String condition, Object value, String property) {
        if (value == null) {
            return;
        }
        if (value instanceof String && ((String) value).isEmpty()) {
            return;
        }
        criteria.add(new Criterion(condition, value));
    }

    // Two-value condition (e.g. "col between ? and ?"); skipped when either
    // bound is null or an empty string. `property` unused as above.
    protected void addCriterion(String condition, Object value1, Object value2, String property) {
        if (value1 == null || value2 == null) {
            return;
        }
        if (value1 instanceof String && ((String) value1).isEmpty()) {
            return;
        }
        if (value2 instanceof String && ((String) value2).isEmpty()) {
            return;
        }
        criteria.add(new Criterion(condition, value1, value2));
    }
}
|
<reponame>NYCMOTI/open-bid<gh_stars>10-100
require 'rails_helper'
# View-model spec for a row in the admin "open auctions" list.
# Relies on FactoryBot factories; the :with_bids / :sealed_bid traits are
# defined in the project's auction factory.
describe Admin::OpenListItem do
  describe 'bid_count' do
    it 'should return the total number of bids when the auction has bids' do
      auction = create(:auction, :with_bids)
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.bid_count).to eq(auction.bids.count)
    end

    it 'should return N/A when the auction has no bids' do
      auction = create(:auction)
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.bid_count).to eq('N/A')
    end
  end

  describe 'lowest_bid_amount' do
    it 'should return the lowest bid when there is one' do
      auction = create(:auction, :sealed_bid, :with_bids)
      # NOTE(review): assumes the factory orders bids so the last one is the
      # lowest — confirm against the :with_bids trait.
      bid = auction.bids.last
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.lowest_bid_amount).to eq(Currency.new(bid.amount).to_s)
    end

    it 'should return N/A when the auction has no bids' do
      auction = create(:auction, :sealed_bid)
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.lowest_bid_amount).to eq('N/A')
    end
  end

  describe 'current_winner' do
    it 'should return a name when the user has a name' do
      auction = create(:auction, :sealed_bid, :with_bids)
      bid = auction.bids.last
      user = bid.bidder
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.current_winner).to eq(user.name)
    end

    it 'should return a github_login when the user has no name' do
      auction = create(:auction, :sealed_bid, :with_bids)
      bid = auction.bids.last
      user = bid.bidder
      user.update_attribute(:name, '')
      # Reload the auction so the view model sees the cleared name.
      auction = Auction.find(auction.id)
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.current_winner).to eq(user.github_login)
    end

    it 'should return N/A if there are no bids' do
      auction = create(:auction, :sealed_bid)
      view_model = Admin::OpenListItem.new(auction)
      expect(view_model.current_winner).to eq('N/A')
    end
  end
end
|
package com.codbuzz.springguru;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/** Spring Boot entry point; bootstraps the application context. */
@SpringBootApplication
public class SpringGuruApplication {
    public static void main(String[] args) {
        SpringApplication.run(SpringGuruApplication.class, args);
    }
}
|
package admissionregistration
import (
"bytes"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"encoding/pem"
)
// KeyPair represents a public/private key pair. PublicKey and PrivateKey
// hold the PEM-encoded forms; key is the underlying RSA key.
type KeyPair struct {
	PublicKey  string
	PrivateKey string
	key        *rsa.PrivateKey
}

// Key returns the RSA private key for this private key pair.
func (keyPair *KeyPair) Key() *rsa.PrivateKey {
	return keyPair.key
}

// encodePEM renders a single PEM block of the given type into a string.
func encodePEM(blockType string, der []byte) (string, error) {
	buf := new(bytes.Buffer)
	if err := pem.Encode(buf, &pem.Block{Type: blockType, Bytes: der}); err != nil {
		return "", err
	}
	return buf.String(), nil
}

// newPrivateKey generates a fresh RSA key pair of keyBitSize bits and
// returns it together with its PEM-encoded public and private keys.
func newPrivateKey(keyBitSize int) (*KeyPair, error) {
	rsaKey, err := rsa.GenerateKey(rand.Reader, keyBitSize)
	if err != nil {
		return nil, err
	}
	pubDER, err := x509.MarshalPKIXPublicKey(&rsaKey.PublicKey)
	if err != nil {
		return nil, err
	}
	pubPEM, err := encodePEM("PUBLIC KEY", pubDER)
	if err != nil {
		return nil, err
	}
	privPEM, err := encodePEM("RSA PRIVATE KEY", x509.MarshalPKCS1PrivateKey(rsaKey))
	if err != nil {
		return nil, err
	}
	return &KeyPair{
		PublicKey:  pubPEM,
		PrivateKey: privPEM,
		key:        rsaKey,
	}, nil
}
|
<gh_stars>0
/*
* Copyright 2015 Textocat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.textocat.textokit.morph.lemmatizer.util;
import org.apache.commons.io.FileUtils;
import org.apache.uima.analysis_engine.AnalysisEngineDescription;
import org.apache.uima.resource.ResourceInitializationException;
import org.xml.sax.SAXException;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngineDescription;
/**
* @author <NAME>
*/
public class GenerateNormalizedTextWriterDescriptor {
    /**
     * Generates the UIMA analysis-engine XML descriptor for
     * {@link NormalizedTextWriter} and writes it under src/main/resources,
     * mirroring the class's package path.
     */
    public static void main(String[] args) throws ResourceInitializationException, IOException, SAXException {
        AnalysisEngineDescription anDesc = createEngineDescription(NormalizedTextWriter.class);
        // BUG FIX: make the BufferedOutputStream the managed resource so its
        // buffer is flushed when the try block closes it. Previously only the
        // underlying FileOutputStream was closed, so any bytes still sitting
        // in the buffer after toXML() could be lost (truncated descriptor).
        try (BufferedOutputStream out = new BufferedOutputStream(FileUtils.openOutputStream(new File(
                "src/main/resources/" +
                        NormalizedTextWriter.class.getName().replace('.', '/') + ".xml")))) {
            anDesc.toXML(out);
        }
    }
}
|
// Build and publish one package: node publish.js --pkg <name>
// (Removed unused `promisify`/`fs` requires and the pointless `async` —
// nothing here was awaited.)
const argv = require('yargs').argv
const shelljs = require('shelljs')
const {pkg} = argv

process.nextTick(() => {
  if (!pkg) {
    // No --pkg given: keep the original behavior of doing nothing.
    return
  }
  shelljs.cd(`packages/${pkg}`)
  // Clear any previously compiled output before rebuilding.
  shelljs.exec(`rm -f *.js`)
  // BUG FIX: abort instead of publishing a broken artifact when the
  // compile step fails (exec's exit code was previously ignored).
  if (shelljs.exec(`babel src -d .`).code !== 0) {
    console.error(`babel failed for ${pkg}; skipping publish`)
    process.exit(1)
  }
  shelljs.exec(`npm publish --access=public`)
  // Remove the compiled files again so the working tree stays clean.
  shelljs.exec(`rm -f *.js`)
})
|
docker node ps c1n1 c1n2 c1n3 c1n4 c1n5 |
#!/bin/bash
# Cross-compile Lua 5.1 (+ optionally LuaSocket) for Windows with mingw-w64,
# then post-process the result with python/hack.py.
# Pass "nosocket" as the sole argument to build without LuaSocket.
LUASOCKET_C="deps/luasocket/*.c -D LUASOCKET_C -lws2_32";
if [[ "$*" == "nosocket" ]]; then
    LUASOCKET_C="";
fi
mkdir -p bin
# -ffunction-sections/-fdata-sections + --gc-sections drop unused code;
# -Os -s minimize and strip the binary.
i686-w64-mingw32-gcc -v -ffunction-sections -fdata-sections -Xlinker --gc-sections -Os -s -Ideps/lua-5.1.5 -Ideps/luasocket main.c deps/lua-5.1.5/*.c $LUASOCKET_C && python2 python/hack.py
|
#!/bin/bash
# BUG FIX: arrays are a bash feature, not POSIX sh. With the original
# "#!/bin/sh" shebang this script is a syntax error on systems where sh is
# dash (e.g. Debian/Ubuntu): "arr1=(1 2 3 4)" cannot be parsed.

# Three equivalent ways to declare an indexed array.
arr1=(1 2 3 4)
arr2=(
1
2
3
4
)
arr3[0]=1
arr3[1]=2
arr3[2]=3
arr3[3]=4
# ${arr[@]} expands to all elements; ${#arr[@]} to the element count.
echo 'arr1内容:' ${arr1[@]}
echo 'arr2内容:' ${arr2[@]}
echo 'arr3内容:' ${arr3[@]}
echo 'length:' ${#arr1[@]}
// Cypress E2E: selectable-rows demo — clicking a row selects it and
// deselects the other (selection is exclusive).
describe('Data List Demo Test', () => {
  it('Navigate to demo section', () => {
    cy.visit('http://localhost:3000/data-list-demo-nav-link');
  });
  it('Verify rows selectable', () => {
    // Both rows carry the selectable modifier; neither starts selected.
    cy.get('#row1.pf-m-selectable').should('exist');
    cy.get('#row2.pf-m-selectable').should('exist');
    cy.get('#row1.pf-m-selected').should('not.exist');
    cy.get('#row2.pf-m-selected').should('not.exist');
    // Selecting row1, then row2, moves the selected modifier accordingly.
    cy.get('#row1').click();
    cy.get('#row1.pf-m-selected').should('exist');
    cy.get('#row2.pf-m-selected').should('not.exist');
    cy.get('#row2').click();
    cy.get('#row1.pf-m-selected').should('not.exist');
    cy.get('#row2.pf-m-selected').should('exist');
  });
});
// Cypress E2E: keyboard-driven drag-and-drop — Space picks the row up,
// arrow keys move it (the row shows a "ghost" modifier while in flight),
// Enter drops it.
describe('Data List Draggable Demo Test', () => {
  it('Navigate to demo section', () => {
    cy.visit('http://localhost:3000/data-list-draggable-demo-nav-link');
  });
  it('Verify drag', () => {
    cy.get('#data1').contains('Item 1');
    cy.get('#drag1').type(' ');
    cy.get('#drag1').type('{downarrow}');
    cy.get('#data1').should('have.class', 'pf-m-ghost-row');
    cy.get('#drag1').type('{downarrow}');
    cy.get('#drag1').type('{enter}');
    // Dropping clears the ghost modifier.
    cy.get('#data1').should('not.have.class', 'pf-m-ghost-row');
  });
});
|
from torch.utils.data import DataLoader
import importlib
from tqdm import tqdm
import torch.backends.cudnn as cudnn
from utils.utils import *
from utils.utils_datasets import TrainSetDataLoader, MultiTestSetDataLoader
from collections import OrderedDict
import imageio
def main(args):
    """Full training driver: build data loaders, load/initialize the model,
    then alternate train / checkpoint / validate for args.epoch epochs.
    All component classes (Logger, TrainSetDataLoader, ExcelFile, ...) come
    from the project's utils modules."""
    ''' Create Dir for Save'''
    log_dir, checkpoints_dir, val_dir = create_dir(args)
    ''' Logger '''
    logger = Logger(log_dir, args)
    ''' CPU or Cuda'''
    device = torch.device(args.device)
    if 'cuda' in args.device:
        torch.cuda.set_device(device)
    ''' DATA Training LOADING '''
    logger.log_string('\nLoad Training Dataset ...')
    train_Dataset = TrainSetDataLoader(args)
    logger.log_string("The number of training data is: %d" % len(train_Dataset))
    train_loader = torch.utils.data.DataLoader(dataset=train_Dataset, num_workers=args.num_workers,
                                               batch_size=args.batch_size, shuffle=True,)
    ''' DATA Validation LOADING '''
    logger.log_string('\nLoad Validation Dataset ...')
    test_Names, test_Loaders, length_of_tests = MultiTestSetDataLoader(args)
    logger.log_string("The number of validation data is: %d" % length_of_tests)
    ''' MODEL LOADING '''
    logger.log_string('\nModel Initial ...')
    # Model module resolved dynamically as model.<task>.<model_name>.
    MODEL_PATH = 'model.' + args.task + '.' + args.model_name
    MODEL = importlib.import_module(MODEL_PATH)
    net = MODEL.get_model(args)
    ''' Load Pre-Trained PTH '''
    if args.use_pre_ckpt == False:
        net.apply(MODEL.weights_init)
        start_epoch = 0
        logger.log_string('Do not use pre-trained model!')
    else:
        try:
            ckpt_path = args.path_pre_pth
            checkpoint = torch.load(ckpt_path, map_location='cpu')
            start_epoch = checkpoint['epoch']
            # First try remapping keys for a DataParallel-wrapped model
            # ('module.' prefix); fall back to loading keys as-is.
            try:
                new_state_dict = OrderedDict()
                for k, v in checkpoint['state_dict'].items():
                    name = 'module.' + k  # add `module.`
                    new_state_dict[name] = v
                # load params
                net.load_state_dict(new_state_dict)
                logger.log_string('Use pretrain model!')
            except:
                new_state_dict = OrderedDict()
                for k, v in checkpoint['state_dict'].items():
                    new_state_dict[k] = v
                # load params
                net.load_state_dict(new_state_dict)
                logger.log_string('Use pretrain model!')
        except:
            # Any failure loading the checkpoint falls back to fresh init.
            net = MODEL.get_model(args)
            net.apply(MODEL.weights_init)
            start_epoch = 0
            logger.log_string('No existing model, starting training from scratch...')
            pass
        pass
    net = net.to(device)
    cudnn.benchmark = True
    ''' Print Parameters '''
    logger.log_string('PARAMETER ...')
    logger.log_string(args)
    ''' LOSS LOADING '''
    criterion = MODEL.get_loss(args).to(device)
    ''' Optimizer '''
    # Only parameters with requires_grad participate in optimization.
    optimizer = torch.optim.Adam(
        [paras for paras in net.parameters() if paras.requires_grad == True],
        lr=args.lr,
        betas=(0.9, 0.999),
        eps=1e-08,
        weight_decay=args.decay_rate
    )
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.n_steps, gamma=args.gamma)
    ''' TRAINING & TEST '''
    logger.log_string('\nStart training...')
    for idx_epoch in range(start_epoch, args.epoch):
        logger.log_string('\nEpoch %d /%s:' % (idx_epoch + 1, args.epoch))
        ''' Training '''
        loss_epoch_train, psnr_epoch_train, ssim_epoch_train = train(train_loader, device, net, criterion, optimizer)
        logger.log_string('The %dth Train, loss is: %.5f, psnr is %.5f, ssim is %.5f' %
                          (idx_epoch + 1, loss_epoch_train, psnr_epoch_train, ssim_epoch_train))
        ''' Save PTH '''
        # Only rank 0 writes checkpoints (multi-process training).
        if args.local_rank == 0:
            save_ckpt_path = str(checkpoints_dir) + '/%s_%dx%d_%dx_epoch_%02d_model.pth' % (
                args.model_name, args.angRes_in, args.angRes_in, args.scale_factor, idx_epoch + 1)
            state = {
                'epoch': idx_epoch + 1,
                'state_dict': net.module.state_dict() if hasattr(net, 'module') else net.state_dict(),
            }
            torch.save(state, save_ckpt_path)
            logger.log_string('Saving the epoch_%02d model at %s' % (idx_epoch + 1, save_ckpt_path))
        ''' Validation '''
        # step == 1 validates after every epoch.
        step = 1
        if (idx_epoch + 1)%step==0 or idx_epoch > args.epoch-step:
            with torch.no_grad():
                ''' Create Excel for PSNR/SSIM '''
                excel_file = ExcelFile()
                psnr_testset = []
                ssim_testset = []
                for index, test_name in enumerate(test_Names):
                    test_loader = test_Loaders[index]
                    epoch_dir = val_dir.joinpath('VAL_epoch_%02d' % (idx_epoch + 1))
                    epoch_dir.mkdir(exist_ok=True)
                    save_dir = epoch_dir.joinpath(test_name)
                    save_dir.mkdir(exist_ok=True)
                    psnr_iter_test, ssim_iter_test, LF_name = test(test_loader, device, net, save_dir)
                    excel_file.write_sheet(test_name, LF_name, psnr_iter_test, ssim_iter_test)
                    psnr_epoch_test = float(np.array(psnr_iter_test).mean())
                    ssim_epoch_test = float(np.array(ssim_iter_test).mean())
                    psnr_testset.append(psnr_epoch_test)
                    ssim_testset.append(ssim_epoch_test)
                    logger.log_string('The %dth Test on %s, psnr/ssim is %.2f/%.3f' % (
                        idx_epoch + 1, test_name, psnr_epoch_test, ssim_epoch_test))
                    pass
                psnr_mean_test = float(np.array(psnr_testset).mean())
                ssim_mean_test = float(np.array(ssim_testset).mean())
                logger.log_string('The mean psnr on testsets is %.5f, mean ssim is %.5f'
                                  % (psnr_mean_test, ssim_mean_test))
                excel_file.xlsx_file.save(str(epoch_dir) + '/evaluation.xls')
                pass
            pass
        ''' scheduler '''
        scheduler.step()
        pass
    pass
def train(train_loader, device, net, criterion, optimizer):
    ''' training one epoch '''
    # Per-iteration metrics; averaged into epoch-level scalars on return.
    psnr_iter_train = []
    loss_iter_train = []
    ssim_iter_train = []
    for idx_iter, (data, label, data_info) in tqdm(enumerate(train_loader), total=len(train_loader), ncols=70):
        # Unpack the angular-resolution metadata into plain ints (it arrives
        # as batched tensors from the DataLoader).
        [Lr_angRes_in, Lr_angRes_out] = data_info
        data_info[0] = Lr_angRes_in[0].item()
        data_info[1] = Lr_angRes_out[0].item()
        data = data.to(device)  # low resolution
        label = label.to(device)  # high resolution
        out = net(data, data_info)
        loss = criterion(out, label, data_info)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
        torch.cuda.empty_cache()
        loss_iter_train.append(loss.data.cpu())
        # NOTE(review): cal_metrics reads the module-global `args` (imported
        # in the __main__ guard), not a parameter.
        psnr, ssim = cal_metrics(args, label, out)
        psnr_iter_train.append(psnr)
        ssim_iter_train.append(ssim)
        pass
    loss_epoch_train = float(np.array(loss_iter_train).mean())
    psnr_epoch_train = float(np.array(psnr_iter_train).mean())
    ssim_epoch_train = float(np.array(ssim_iter_train).mean())
    return loss_epoch_train, psnr_epoch_train, ssim_epoch_train
def test(test_loader, device, net, save_dir=None):
    """Evaluate `net` on one test set: crop each light field into patches,
    super-resolve them in minibatches, stitch the patches back together and
    score PSNR/SSIM. When save_dir is given, also writes the reconstructed
    RGB views to disk. Reads the module-global `args` for all sizes."""
    LF_iter_test = []
    psnr_iter_test = []
    ssim_iter_test = []
    for idx_iter, (Lr_SAI_y, Hr_SAI_y, Sr_SAI_cbcr, data_info, LF_name) in tqdm(enumerate(test_loader), total=len(test_loader), ncols=70):
        [Lr_angRes_in, Lr_angRes_out] = data_info
        data_info[0] = Lr_angRes_in[0].item()
        data_info[1] = Lr_angRes_out[0].item()
        Lr_SAI_y = Lr_SAI_y.squeeze().to(device)  # numU, numV, h*angRes, w*angRes
        Hr_SAI_y = Hr_SAI_y
        Sr_SAI_cbcr = Sr_SAI_cbcr
        ''' Crop LFs into Patches '''
        subLFin = LFdivide(Lr_SAI_y, args.angRes_in, args.patch_size_for_test, args.stride_for_test)
        numU, numV, H, W = subLFin.size()
        subLFin = rearrange(subLFin, 'n1 n2 a1h a2w -> (n1 n2) 1 a1h a2w')
        subLFout = torch.zeros(numU * numV, 1, args.angRes_in * args.patch_size_for_test * args.scale_factor,
                               args.angRes_in * args.patch_size_for_test * args.scale_factor)
        ''' SR the Patches '''
        # Process patches in minibatches to bound GPU memory use.
        for i in range(0, numU * numV, args.minibatch_for_test):
            tmp = subLFin[i:min(i + args.minibatch_for_test, numU * numV), :, :, :]
            with torch.no_grad():
                net.eval()
                torch.cuda.empty_cache()
                out = net(tmp.to(device), data_info)
                subLFout[i:min(i + args.minibatch_for_test, numU * numV), :, :, :] = out
        subLFout = rearrange(subLFout, '(n1 n2) 1 a1h a2w -> n1 n2 a1h a2w', n1=numU, n2=numV)
        ''' Restore the Patches to LFs '''
        Sr_4D_y = LFintegrate(subLFout, args.angRes_out, args.patch_size_for_test * args.scale_factor,
                              args.stride_for_test * args.scale_factor, Hr_SAI_y.size(-2)//args.angRes_out, Hr_SAI_y.size(-1)//args.angRes_out)
        Sr_SAI_y = rearrange(Sr_4D_y, 'a1 a2 h w -> 1 1 (a1 h) (a2 w)')
        ''' Calculate the PSNR & SSIM '''
        psnr, ssim = cal_metrics(args, Hr_SAI_y, Sr_SAI_y)
        psnr_iter_test.append(psnr)
        ssim_iter_test.append(ssim)
        LF_iter_test.append(LF_name[0])
        ''' Save RGB '''
        if save_dir is not None:
            save_dir_ = save_dir.joinpath(LF_name[0])
            save_dir_.mkdir(exist_ok=True)
            views_dir = save_dir_.joinpath('views')
            views_dir.mkdir(exist_ok=True)
            # Recombine the super-resolved luma with the upsampled chroma,
            # convert YCbCr -> RGB and quantize to 8-bit.
            Sr_SAI_ycbcr = torch.cat((Sr_SAI_y, Sr_SAI_cbcr), dim=1)
            Sr_SAI_rgb = (ycbcr2rgb(Sr_SAI_ycbcr.squeeze().permute(1, 2, 0).numpy()).clip(0,1)*255).astype('uint8')
            Sr_4D_rgb = rearrange(Sr_SAI_rgb, '(a1 h) (a2 w) c -> a1 a2 h w c', a1=args.angRes_out, a2=args.angRes_out)
            # save the SAI
            # path = str(save_dir_) + '/' + LF_name[0] + '_SAI.bmp'
            # imageio.imwrite(path, Sr_SAI_rgb)
            # save the center view
            img = Sr_4D_rgb[args.angRes_out // 2, args.angRes_out // 2, :, :, :]
            path = str(save_dir_) + '/' + LF_name[0] + '_' + 'CenterView.bmp'
            imageio.imwrite(path, img)
            # save all views
            for i in range(args.angRes_out):
                for j in range(args.angRes_out):
                    img = Sr_4D_rgb[i, j, :, :, :]
                    path = str(views_dir) + '/' + LF_name[0] + '_' + str(i) + '_' + str(j) + '.bmp'
                    imageio.imwrite(path, img)
                    pass
                pass
            pass
        pass
    return psnr_iter_test, ssim_iter_test, LF_iter_test
if __name__ == '__main__':
    # `args` is imported at module level here so train()/test() can read it
    # as a global.
    from option import args
    main(args)
|
#!/usr/bin/env bash
# Copyright © 2018 The Havener
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Fail fast on errors, unset variables, and broken pipes.
set -euo pipefail

export GO111MODULE=on

echo -e 'Vetting packages'
# Vet every package in the module.
go list ./... | xargs go vet
|
package com.lakeel.altla.android.log.example;
import com.lakeel.altla.android.log.LogFactory;
import android.app.Application;
/** Application subclass; configures the logging facade once per process. */
public class App extends Application {
    @Override
    public void onCreate() {
        super.onCreate();
        // Common log tag for the whole app; debug output only in debug builds.
        LogFactory.setTag("AndroidLog");
        LogFactory.setDebug(BuildConfig.DEBUG);
    }
}
|
#!/bin/bash
# Invoked by udev with the optical-drive device name (e.g. "sr0") as $1.
DEVNAME=$1
#######################################################################################
# YAML Parser to read Config
#
# From: https://stackoverflow.com/questions/5014632/how-can-i-parse-a-yaml-file-from-a-linux-shell-script
#######################################################################################
# Parse a simple two-space-indented YAML file into flat shell assignments:
# each scalar becomes <prefix><nested_keys_joined_by_underscore>="value".
# Handles plain scalars/nesting only — no lists, anchors or multi-line values.
function parse_yaml {
   local prefix=$2
   # fs is the ASCII 0x1C field separator, safe against values with spaces.
   local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
   sed -ne "s|^\($s\):|\1|" \
        -e "s|^\($s\)\($w\)$s:$s[\"']\(.*\)[\"']$s\$|\1$fs\2$fs\3|p" \
        -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 |
   awk -F$fs '{
      indent = length($1)/2;
      vname[indent] = $2;
      for (i in vname) {if (i > indent) {delete vname[i]}}
      if (length($3) > 0) {
         vn=""; for (i=0; i<indent; i++) {vn=(vn)(vname[i])("_")}
         printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
      }
   }'
}
# Load /etc/arm/arm.yaml into CONFIG_* shell variables.
eval $(parse_yaml /etc/arm/arm.yaml "CONFIG_")
#######################################################################################
# Log Discovered Type and Start Rip
#######################################################################################
# udev supplies these media-type flags in the environment:
# ID_CDROM_MEDIA_BD = Bluray
# ID_CDROM_MEDIA_CD = CD
# ID_CDROM_MEDIA_DVD = DVD
if [ "$ID_CDROM_MEDIA_DVD" == "1" ]; then
    if [ "$CONFIG_PREVENT_99" != "false" ]; then
        # A disc advertising 99 titles is a known DVD copy-protection scheme;
        # eject it instead of attempting a rip.
        numtracks=$(lsdvd /dev/${DEVNAME} 2> /dev/null | sed 's/,/ /' | cut -d ' ' -f 2 | grep -E '[0-9]+' | sort -r | head -n 1)
        if [ "$numtracks" == "99" ]; then
            echo "[ARM] ${DEVNAME} has 99 Track Protection. Bailing out and ejecting." | logger -t ARM -s
            eject ${DEVNAME}
            exit
        fi
    fi
    echo "[ARM] Starting ARM for DVD on ${DEVNAME}" | logger -t ARM -s
elif [ "$ID_CDROM_MEDIA_BD" == "1" ]; then
    echo "[ARM] Starting ARM for Bluray on ${DEVNAME}" | logger -t ARM -s
elif [ "$ID_CDROM_MEDIA_CD" == "1" ]; then
    echo "[ARM] Starting ARM for CD on ${DEVNAME}" | logger -t ARM -s
elif [ "$ID_FS_TYPE" != "" ]; then
    echo "[ARM] Starting ARM for Data Disk on ${DEVNAME} with File System ${ID_FS_TYPE}" | logger -t ARM -s
else
    echo "[ARM] Not CD, Bluray, DVD or Data. Bailing out on ${DEVNAME}" | logger -t ARM -s
    exit #bail out
fi
# Queue the ripper as the 'arm' user via at(1) so this udev hook returns fast.
/bin/su -l -c "echo /usr/bin/python3 /opt/arm/arm/ripper/main.py -d ${DEVNAME} | at now" -s /bin/bash arm
#######################################################################################
# Check to see if the admin page is running, if not, start it
#######################################################################################
if ! pgrep -f "runui.py" > /dev/null; then
    echo "[ARM] ARM Webgui not running; starting it " | logger -t ARM -s
    /bin/su -l -c "/usr/bin/python3 /opt/arm/arm/runui.py " -s /bin/bash arm
fi
|
def add_two_numbers(a, b):
    """Return the sum of a and b.

    BUG FIX: the previous version returned ``a + b + 0.0``, which silently
    converted integer results to float. Its comment claimed this prevented
    integer overflow and improved speed — both false: Python ints are
    arbitrary precision, and the extra addition only costs time.
    """
    return a + b
#!/usr/bin/env bash
# One-time setup for async-profiler: relax kernel perf restrictions, then
# download and unpack v1.5 into ./profiler (skipped when already present).
if [[ ! -d "profiler" ]];
then
    # Lower perf-event restrictions and expose kernel symbol addresses so the
    # profiler can sample without extra privileges.
    sudo bash -c "echo 1 > /proc/sys/kernel/perf_event_paranoid"
    sudo bash -c "echo 0 > /proc/sys/kernel/kptr_restrict"
    mkdir profiler
    cd profiler
    wget https://github.com/jvm-profiling-tools/async-profiler/releases/download/v1.5/async-profiler-1.5-linux-x64.tar.gz
    tar -zxvf async-profiler*
fi
# Example: 30s flame graph of the Cassandra JVM.
# sudo ./profiler.sh -d 30 -f /tmp/flamegraph.svg $(cat /var/run/cassandra/cassandra.pid)
<reponame>elviscruz45/curso-practico-javascript<gh_stars>0
// Compute the mode (most frequent element) of a list, two ways.
const lista1 = [1,2,3,1,2,3,4,2,2,2,1,];

// Way 1: count occurrences into an object, then sort the entries by count.
const lista1Count = {};
lista1.map(function (elemento) {if (lista1Count[elemento]) {lista1Count[elemento] += 1;}else {lista1Count[elemento] = 1;}});
// lista1Count ends up as {1: 3, 2: 5, 3: 2, 4: 1}
const lista1Array = Object.entries(lista1Count).sort(function (elementoA, elementoB) {return elementoA[1] - elementoB[1];});
// Object.entries(...) yields [['1', 3], ['2', 5], ...]; sort() orders them by
// ascending count, so the last entry is the mode.
const moda = lista1Array[lista1Array.length - 1];

// pruebas
// BUG FIX: this section re-declared `const lista1`, which is a SyntaxError
// ("Identifier 'lista1' has already been declared") and made the whole file
// fail to load. It also assigned the implicit global `casa` while mutating
// lista1 in place; sort a copy into a proper const instead.
const casa = [...lista1].sort((a, b) => (a - b));

// Way 2: sort by frequency and pop the last (most frequent) element.
const NUMBERS = [2, 2, 2, 2, 2, 4, 5, 5, 5, 5, 5, 5, 5, 5, 9];
function mode(arr){return arr.sort((a,b) =>arr.filter(v => v===a).length- arr.filter(v => v===b).length).pop();}
console.log(mode(NUMBERS));
// 5
// Author: <NAME>
package com.piercelbrooks.common;
import androidx.annotation.NonNull;
/**
 * Callbacks for the three standard button choices of a {@link Prompt}.
 */
public interface PromptListener
{
    /** The user chose the positive (confirm) action. */
    public void onPositive(@NonNull Prompt sender);
    /** The user chose the negative (decline) action. */
    public void onNegative(@NonNull Prompt sender);
    /** The user chose the neutral action. */
    public void onNeutral(@NonNull Prompt sender);
}
|
#!/bin/bash
# Install (and cache) a prebuilt CMake under $CACHE, then put it on PATH.
# Re-downloads when the cached binary is missing or UPDATE_CACHE is set.
PREFIX="$CACHE/cmake-$CMAKE_VERSION"
if [[ ! -f "$PREFIX/bin/cmake" || -n "$UPDATE_CACHE" ]] ; then
    rm -fr "$PREFIX"
    mkdir -p "$PREFIX"
    # NOTE(review): plain-http download with no checksum verification —
    # consider https and a sha256 check.
    curl -L "http://cmake.org/files/v${CMAKE_SHORT_VERSION}/cmake-${CMAKE_VERSION}-Linux-x86_64.tar.gz" | gunzip -c | tar -x -C "$PREFIX" --strip-components 1
fi
export PATH="$PREFIX/bin:$PATH"
|
"""
The forecast script generates metrics traces needed for workload forecasting.
"""
from pathlib import Path
from typing import List
from xml.etree import ElementTree
from ..oltpbench.test_case_oltp import TestCaseOLTPBench
from ..oltpbench.test_oltpbench import TestOLTPBench
from ..util.common import run_command
from ..util.constants import LOG, ErrorCode
from .constants import (DEFAULT_OLTP_SERVER_ARGS, DEFAULT_OLTP_TEST_CASE,
DEFAULT_QUERY_TRACE_FILE, DEFAULT_TPCC_TIME_SEC,
DEFAULT_TPCC_WEIGHTS, DEFAULT_PIPELINE_METRICS_FILE,
DEFAULT_PIPELINE_METRICS_SAMPLE_RATE)
def config_forecast_data(xml_config_file: str, rate_pattern: List[int]) -> None:
    """
    Modify an OLTPBench config file to follow a certain pattern in its duration.

    Parameters
    ----------
    xml_config_file : str
        The file to be modified.
    rate_pattern : List[int]
        The pattern to be used.
    """
    tree = ElementTree.parse(xml_config_file)
    works_node = tree.getroot().find("works")
    works_node.clear()

    # Emit one <work> phase per requested arrival rate.
    for phase_rate in rate_pattern:
        work_node = ElementTree.SubElement(works_node, "work")
        # NOTE: OLTPBench requires <rate> to appear before <weights>.
        for tag, value in (
            ("time", str(DEFAULT_TPCC_TIME_SEC)),
            ("rate", str(phase_rate)),
            ("weights", DEFAULT_TPCC_WEIGHTS),
        ):
            ElementTree.SubElement(work_node, tag).text = value

    # Write back result
    tree.write(xml_config_file)
def gen_oltp_trace(
        tpcc_weight: str, tpcc_rates: List[int], pattern_iter: int, record_pipeline_metrics_with_counters: bool) -> bool:
    """
    Generate the trace by running OLTPBench's TPCC benchmark on the built DBMS.
    Parameters
    ----------
    tpcc_weight : str
        The weight for the TPCC workload.
    tpcc_rates : List[int]
        The arrival rates for each phase in a pattern.
    pattern_iter : int
        The number of patterns.
    record_pipeline_metrics_with_counters : bool
        Record the pipeline metrics instead of query traces
    Returns
    -------
    True on success.
    """
    # Server is running when this returns
    oltp_server = TestOLTPBench(DEFAULT_OLTP_SERVER_ARGS)
    db_server = oltp_server.db_instance
    db_server.run_db()
    # Download the OLTP repo and build it
    oltp_server.run_pre_suite()
    # Load the workload pattern - based on the tpcc.json in
    # testing/oltpbench/config
    # NOTE(review): this aliases (and mutates) the module-level default dict
    # rather than copying it — confirm that is intended.
    test_case_config = DEFAULT_OLTP_TEST_CASE
    test_case_config["weights"] = tpcc_weight
    test_case = TestCaseOLTPBench(test_case_config)
    # Prep the test case build the result dir
    test_case.run_pre_test()
    # Repeat the rate pattern pattern_iter times and bake it into the XML.
    rates = tpcc_rates * pattern_iter
    config_forecast_data(test_case.xml_config, rates)
    if record_pipeline_metrics_with_counters:
        # Turn on pipeline metrics recording
        db_server.execute("SET pipeline_metrics_enable='true'", expect_result=False)
        db_server.execute("SET pipeline_metrics_sample_rate={}".format(DEFAULT_PIPELINE_METRICS_SAMPLE_RATE),
                          expect_result=False)
        db_server.execute("SET counters_enable='true'", expect_result=False)
        result_file = DEFAULT_PIPELINE_METRICS_FILE
    else:
        # Turn on query trace metrics tracing
        db_server.execute("SET query_trace_metrics_enable='true'", expect_result=False)
        result_file = DEFAULT_QUERY_TRACE_FILE
    # Remove the old result file
    Path(result_file).unlink(missing_ok=True)
    # Run the actual test
    ret_val, _, stderr = run_command(test_case.test_command,
                                     cwd=test_case.test_command_cwd)
    if ret_val != ErrorCode.SUCCESS:
        LOG.error(stderr)
        return False
    # Clean up, disconnect the DB
    db_server.stop_db()
    db_server.delete_wal()
    # The benchmark must have produced the metrics file to count as success.
    if not Path(result_file).exists():
        LOG.error(
            f"Missing {result_file} at CWD after running OLTP TPCC")
        return False
    return True
|
# Limit OpenBLAS to one thread per process (avoids oversubscription when the
# application manages its own parallelism).
# BUG FIX: exported so child processes actually see it — the previous plain
# assignment only set the variable in the current shell.
export OPENBLAS_NUM_THREADS=1
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/lib/native
// https://cses.fi/problemset/task/1670
#include <bits/stdc++.h>
using namespace std;
using ii = tuple<int, int>;
using vi = vector<int>;
// Total number of permutations of the 9 tiles (9!).
int n = 9*8*7*6*5*4*3*2;
// Lehmer-code rank of permutation v in [0, 9!). m is a bitmask of values
// already consumed (bit 0 pre-set as a sentinel); e counts how many unused
// values are smaller than the current one.
int index(vi &v) {
    int m = 1, r = 0, k = n / 9;
    for (int i = 0; i < 8; i++) {
        int d = v[i];
        int e = (d - __builtin_popcount(m & ((1 << d) - 1)));
        r += e * k;
        k /= 8 - i;
        m |= (1 << d);
    }
    return r;
}
// Inverse of index(): rebuild the permutation with rank r.
vi permutation(int r) {
    vi v(9);
    int m = 1, k = n / 9;
    for (int i = 0; i <= 8; i++) {
        int d = r / k;
        // Scan for the (d+1)-th value not yet marked used in m.
        int j = 0, l = 1;
        while (j <= d) {
            if (!(m & (1 << l))) j++;
            if (j > d) break;
            l++;
        }
        v[i] = l;
        if (i == 8) break;
        r %= k;
        k /= 8 - i;
        m |= (1 << l);
    }
    return v;
}
// BFS from the solved state over permutation ranks until the input state is
// reached; prints the minimum number of swaps (CSES 1670).
int main() {
    int x, a, b;
    vi v(9, 0);
    // Read the 3x3 grid row-major.
    for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) cin >> x, v[i*3+j] = x;
    int t = index(v);
    // d[r] = BFS distance from the solved state + 1; 0 means unvisited.
    vi d(n);
    d[0] = 1;
    queue<int> q;
    q.push(0);
    while (1) {
        // FIX: was `tie(x) = q.front();` — wrapping a scalar in std::tie only
        // works through an implicit int -> tuple<int&> conversion; a plain
        // assignment is equivalent and far clearer.
        x = q.front();
        q.pop();
        if (t == x) {
            cout << d[x] - 1 << "\n";
            break;
        }
        // The 12 legal swaps: horizontal and vertical neighbours in the grid.
        ii z[] = {{0,1},{1,2},{3,4},{4,5},{6,7},{7,8},{0,3},{1,4},{2,5},{3,6},{4,7},{5,8}};
        vi v = permutation(x);
        for (ii y : z) {
            tie(a, b) = y;
            swap(v[a], v[b]);
            int i = index(v);
            if (!d[i]) {
                d[i] = d[x] + 1;
                q.push(i);
            }
            // Undo the swap before trying the next move.
            swap(v[a], v[b]);
        }
    }
}
|
<gh_stars>1-10
// Problem Description :
// Find the maximum sum such that it is smaller than or equal to x;
// N <= 40, X <= 10^9, A[i] <= 10^9;
#include <bits/stdc++.h>
#define fast ios_base::sync_with_stdio(0);cin.tie(NULL);cout.tie(NULL)
#define ll long long int
#define ld long double
using namespace std;
const int N = 1e6 + 5;    // unused here; contest-template leftover
const int MOD = 1e9 + 7;  // unused here; contest-template leftover
// Meet-in-the-middle state: the input split into two halves (leftt/rightt)
// and all non-empty subset sums of each half.
vector<long long int> sum_left, sum_right, leftt, rightt;
// Enumerate every subset sum of leftt into sum_left. Zero (empty-subset)
// sums are skipped; solve() covers that case by starting ans at 0.
long long int generate_left(int i, long long int sum) {
    if (i >= leftt.size()) {
        if (sum) sum_left.push_back(sum);
        return 0;
    }
    generate_left(i + 1, sum + leftt[i]);  // take element i
    generate_left(i + 1, sum);             // skip element i
    return 0;
}
// Mirror of generate_left for the right half.
long long int generate_right(int i, long long int sum) {
    if (i >= rightt.size()) {
        if (sum) sum_right.push_back(sum);
        return 0;
    }
    generate_right(i + 1, sum + rightt[i]);
    generate_right(i + 1, sum);
    return 0;
}
// Meet in the middle: for every subset sum of one half, binary-search the
// largest partner sum in the other half keeping the total <= x.
void solve() {
    long long int n, x;
    cin >> n >> x;
    long long int a[n];
    for (int i = 0; i < n; ++i)
        cin >> a[i];
    // Split the input into two halves.
    for (int i = 0; i < n; ++i) {
        if (i < (n / 2)) leftt.push_back(a[i]);
        else rightt.push_back(a[i]);
    }
    long long int ans = 0;
    generate_left(0, 0);
    generate_right(0, 0);
    // Sorted so each loop can break early and binary-search the other side.
    sort(sum_left.begin(), sum_left.end());
    sort(sum_right.begin(), sum_right.end());
    for (int i = 0; i < sum_left.size(); ++i) {
        if (sum_left[i] > x) break;
        ans = max(ans, sum_left[i]);
        long long int tar = x - sum_left[i];
        // BUG FIX: `add` was declared int here while sum_right stores
        // long long subset sums (and the symmetric loop below already uses
        // long long) — a latent truncation if the constraints grow.
        long long int add = -1;
        int low = 0, high = sum_right.size() - 1;
        // Largest sum_right value <= tar.
        while (low <= high) {
            int mid = (low + high) / 2;
            if (sum_right[mid] > tar)
                high = mid - 1;
            else {
                add = sum_right[mid];
                low = mid + 1;
            }
        }
        if (add != -1)
            ans = max(ans, sum_left[i] + add);
    }
    // Symmetric pass seeded from the right half (also covers right-only sums).
    for (int i = 0; i < sum_right.size(); ++i) {
        if (sum_right[i] > x) break;
        ans = max(ans, sum_right[i]);
        long long int tar = x - sum_right[i];
        long long int add = -1, low = 0, high = sum_left.size() - 1;
        while (low <= high) {
            int mid = (low + high) / 2;
            if (sum_left[mid] > tar)
                high = mid - 1;
            else {
                add = sum_left[mid];
                low = mid + 1;
            }
        }
        if (add != -1)
            ans = max(ans, sum_right[i] + add);
    }
    cout << ans << '\n';
}
int main() {
    fast;
    // Single test case for this problem; uncomment to read a count.
    int t = 1;
    // cin >> t;
    while (t--)
        solve();
    return 0;
}
|
#!/bin/bash
# Open an interactive shell inside the running "artis" container.
docker exec -it artis sh
|
# Sets all char[] text strings to const char[] if they're used in certain functions.
# This makes code readability a little better.
# For use in Oddworld: Abe's Exoddus NoStubs Edition.
# sub_4630F0 is a good place to make sure this script works.
import idautils
import idc
import idaapi
def set_chars_to_const_good(ref_address):
    # Preferred strategy: for each call-site xref to ref_address, ask IDA for
    # the resolved argument addresses and retype the first string argument.
    for x in XrefsTo(ref_address):
        if DecodeInstruction(x.frm).itype != 16: # If not a call func, continue. (16 == call itype — TODO confirm across IDA versions/arches)
            continue;
        func_args = idaapi.get_arg_addrs(x.frm)
        if func_args is not None and len(func_args) > 0:
            paramAddr = GetOperandValue(func_args[0], 0)
            # Skip operands that are not addresses inside the loaded image.
            if paramAddr < idaapi.get_imagebase():
                continue
            paramType = GetType(paramAddr)
            # Only retype untyped data or plain char[]; leave anything else alone.
            if paramType == "char[]" or paramType is None:
                print "Setting", hex(paramAddr), GetString(paramAddr), "called from", hex(x.frm), "to const char[]"
                SetType(paramAddr, "const char[]")
def set_chars_to_const_hacky(ref_address):
    # Fallback heuristic: assume the argument was pushed by the instruction
    # 5 bytes before the call (x86 "push imm32" is 5 bytes) — may misfire on
    # other calling sequences.
    for x in XrefsTo(ref_address):
        if DecodeInstruction(x.frm).itype != 16: # If not a call func, continue.
            continue;
        paramAddr = GetOperandValue(x.frm - 5, 0)
        if paramAddr < idaapi.get_imagebase():
            continue
        paramType = GetType(paramAddr)
        if paramType == "char[]" or paramType is None:
            print "Setting", hex(paramAddr), GetString(paramAddr), "called from", hex(x.frm), "to const char[]"
            SetType(paramAddr, "const char[]")
def set_chars_to_const(ref_address):
    # Run both strategies: the 'good' one uses IDA's argument analysis, the
    # 'hacky' one catches call sites that analysis misses.
    print "Searching for char[] args called from", hex(ref_address)
    set_chars_to_const_good(ref_address)
    set_chars_to_const_hacky(ref_address)

# String-printing functions in Abe's Exoddus whose first argument is a text
# pointer (addresses specific to this binary).
set_chars_to_const(0x49C170)
set_chars_to_const(0x4DBE00)
set_chars_to_const(0x4DBE70)
#!/bin/bash
# SPDX-License-Identifier: Apache-2.0
# Copyright (C) 2020 The LineageOS Project

# If we're being sourced by the common script that we called,
# stop right here. No need to go down the rabbit hole.
if [ "${BASH_SOURCE[0]}" != "${0}" ]; then
    return
fi

set -e

# Required! Device identifiers consumed by the shared extract script.
export DEVICE=z3q
export DEVICE_COMMON=sm8250-common
export VENDOR=samsung
export DEVICE_BRINGUP_YEAR=2020

# Delegate to the common per-SoC extractor, forwarding all arguments.
"./../../${VENDOR}/${DEVICE_COMMON}/extract-files.sh" "$@"
|
#!/usr/bin/env sh
# Clear the ADE20K training logs for every network variant.
for net in alex caffe google resnet vgg; do
	rm ./examples/ade20k/log/$net/*
done
|
<reponame>KimJeongYeon/jack2_android
#include <cerrno>
#include <cstring>
#include <stdexcept>
#include <fcntl.h>
#include <unistd.h>
#include "JackALSARawMidiUtil.h"
// Create a pipe and put both ends into non-blocking mode.
// fds[0] receives the read end, fds[1] the write end.
// Throws std::runtime_error (with the errno message) on any failure;
// on failure after the pipe was created, both descriptors are closed
// before rethrowing so no fds leak.
void
Jack::CreateNonBlockingPipe(int *fds)
{
    if (pipe(fds) == -1) {
        throw std::runtime_error(strerror(errno));
    }
    try {
        SetNonBlocking(fds[0]);
        SetNonBlocking(fds[1]);
    } catch (...) {
        // Clean up the half-initialized pipe, then propagate the error.
        close(fds[1]);
        close(fds[0]);
        throw;
    }
}
// Close both ends of a pipe created with CreateNonBlockingPipe
// (write end first, mirroring that function's error-path cleanup).
void
Jack::DestroyNonBlockingPipe(int *fds)
{
    for (int i = 1; i >= 0; --i) {
        close(fds[i]);
    }
}
// Add O_NONBLOCK to an existing descriptor's status flags, preserving the
// other flags. Throws std::runtime_error with the errno message if either
// fcntl call fails.
void
Jack::SetNonBlocking(int fd)
{
    int flags = fcntl(fd, F_GETFL);
    if (flags == -1) {
        throw std::runtime_error(strerror(errno));
    }
    if (fcntl(fd, F_SETFL, flags | O_NONBLOCK) == -1) {
        throw std::runtime_error(strerror(errno));
    }
}
|
<reponame>uwap/BahnhofsAbfahrten
// @flow
const rawStations = require('db-stations/data.json');

// Station names to resolve, compared case-insensitively.
const mostUsedNames = [
    'Hannover Hbf',
    'Wuppertal Hbf',
    'Düsseldorf Hbf',
    'Hamburg Hbf',
    'Kempen (Niederrhein)',
    'Frankfurt (Main) Hbf',
    'Rheda-Wiedenbrück',
    'Braunschweig Hbf',
    'Köln Hbf',
    'Wolfsburg Hbf',
    'Opladen',
    'Mannheim Hbf',
    'Berlin-Spandau',
    'Duisburg Hbf',
    'München Hbf',
].map(n => n.toLowerCase());

// Index the matching stations by their lowercased name.
const mostUsedStations = {};
for (const station of rawStations) {
    const key = station.name.toLowerCase();
    if (mostUsedNames.includes(key)) {
        mostUsedStations[key] = station;
    }
}

// Emit {title, id} pairs in the order of mostUsedNames, dropping misses.
// eslint-disable-next-line no-console
console.log(
    JSON.stringify(
        mostUsedNames
            .map(n => mostUsedStations[n])
            .filter(Boolean)
            .map(s => ({
                title: s.name,
                id: s.id,
            }))
    )
);
|
/**
 * Validate the signup form's name, email and password fields.
 * Alerts on the first failing field and returns false; returns true
 * when all fields pass.
 *
 * Improvements over the original: strict equality (`===`) instead of
 * loose `==`, and `const` instead of `var` for the field values.
 */
function validateInput() {
    const name = document.getElementById("name").value;
    if (name === "") {
        alert("Please enter your name");
        return false;
    }
    const email = document.getElementById("email").value;
    if (email === "") {
        alert("Please enter your email address");
        return false;
    }
    const password = document.getElementById("password").value;
    if (password.length < 6) {
        alert("Your password must be of at least 6 characters");
        return false;
    }
    return true;
}
app.config(['$routeProvider', '$locationProvider', '$stateProvider' ,'$urlRouterProvider', 'LOGIN', 'LOGOUT', 'REGISTER', 'COMMUNITY', 'PROFILE', function($routeProvider, $locationProvider, $stateProvider, $urlRouterProvider, LOGIN, LOGOUT, REGISTER, COMMUNITY, PROFILE){
    $locationProvider.hashPrefix('');
    $urlRouterProvider.otherwise('/');

    // Declarative table of ui-router states; registered in order below.
    var states = [
        { name: 'index',     url: '/',       templateUrl: "home.html",      controller: "homeCtrl" },
        { name: 'login',     url: LOGIN,     templateUrl: "login.html",     controller: "loginCtrl" },
        { name: 'logout',    url: LOGOUT,    templateUrl: "logout.html",    controller: "logoutCtrl" },
        { name: 'register',  url: REGISTER,  templateUrl: "register.html",  controller: "registerCtrl" },
        { name: 'community', url: COMMUNITY, templateUrl: "community.html", controller: "communityCtrl" },
        { name: 'profile',   url: PROFILE,   templateUrl: "profile.html",   controller: "profileCtrl" }
    ];

    states.forEach(function(state) {
        $stateProvider.state(state);
    });
}]);
<filename>spec/controllers/public_controller_spec.rb
require "rails_helper"

# Controller specs for PublicController#index: layout selection and the
# redirect applied to authenticated users.
# NOTE(review): `login_user` is a spec macro defined elsewhere (presumably a
# Devise controller-spec helper) -- confirm its source.
RSpec.describe PublicController, :type => :controller do
  it 'uses public layout' do
    get :index
    expect(response).to render_template(layout: 'public')
  end

  context 'logged in user' do
    login_user
    it 'redirects to dashboard' do
      get :index
      expect(response).to redirect_to(dashboard_path)
    end
  end

  it 'does not redirect if user is not logged in' do
    get :index
    expect(response).to render_template(:index)
  end
end
|
const mysql = require('mysql');

// connect to sql database
const con = mysql.createConnection({
    host: "localhost",
    user: "user",
    password: "password",
    database: "test"
});

con.connect(function(err) {
    if (err) throw err;
    console.log("Connected!");
    // create query to retrieve data from the posts table
    con.query("SELECT * FROM posts", function (err, result) {
        if (err) throw err;
        console.log("Result: ");
        // iterate over the result set and log the number of comments and likes
        for (const {postId, numComments, numLikes} of result) {
            console.log(`Post ${postId} has ${numComments} comments and ${numLikes} likes`);
        }
        con.end();
    });
});
<filename>Tweets_Display_Filter-app/MyTwitter/app/src/main/java/com/ee5453/mytwitter/StatusProvider.java<gh_stars>0
package com.ee5453.mytwitter;
import android.content.ContentProvider;
import android.content.ContentUris;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import android.util.Log;
/**
* Created by kkx358 on 4/16/2015.
*/
/**
 * ContentProvider exposing the locally cached Twitter statuses stored in
 * twitterStatus.db. Only insert and query are supported; update/delete
 * are no-ops.
 */
public class StatusProvider extends ContentProvider {
    public static final String AUTHORITY = "content://com.ee5453.mytwitter";
    public static final Uri CONTENT_URI = Uri.parse(AUTHORITY);
    public static final String DB_NAME = "twitterStatus.db";
    static int DB_VERSION = 2;
    public static final String TABLE_NAME = "twitterStatus";
    // Column names of the status table.
    public static final String C_ID = "_id";
    public static final String C_CREATED_AT = "created_at";
    public static final String C_TEXT = "status_text";
    public static final String C_USER = "user_name";

    TwitterDBHelper dbHelper;
    SQLiteDatabase statusDB;

    @Override
    public boolean onCreate() {
        dbHelper = new TwitterDBHelper(getContext());
        return true;
    }

    @Override
    public String getType(Uri uri) {
        // This provider does not advertise MIME types.
        return null;
    }

    /**
     * Inserts a status row (ignoring conflicts on duplicate ids) and
     * notifies observers of the new row's URI.
     *
     * BUG FIX: the original computed {@code itemUri} and fired the change
     * notification but then returned {@code null}; ContentProvider.insert
     * is documented to return the URI of the newly inserted item.
     */
    @Override
    public Uri insert(Uri uri, ContentValues values) {
        statusDB = dbHelper.getWritableDatabase();
        long id = statusDB.insertWithOnConflict(TABLE_NAME, null,
                values, SQLiteDatabase.CONFLICT_IGNORE);
        Uri itemUri = ContentUris.withAppendedId(uri, id);
        getContext().getContentResolver().
                notifyChange(itemUri, null);
        return itemUri;
    }

    @Override
    public Cursor query(Uri uri, String[] projection, String selection,
                        String[] selectionArgs, String sortOrder) {
        Cursor tweets;
        statusDB = dbHelper.getReadableDatabase();
        tweets = statusDB.query(TABLE_NAME,
                projection, selection, selectionArgs, null, null, sortOrder);
        return tweets;
    }

    @Override
    public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) {
        // Updates are not supported.
        return 0;
    }

    @Override
    public int delete(Uri uri, String selection, String[] selectionArgs) {
        // Deletes are not supported.
        return 0;
    }
}
/**
 * SQLiteOpenHelper creating and (destructively) upgrading the
 * twitterStatus table used by StatusProvider.
 */
class TwitterDBHelper extends SQLiteOpenHelper {
    static String TAG = "TwitterDBHelper";

    /* public TwitterDBHelper(Context context, String name,
            SQLiteDatabase.CursorFactory factory, int version) {
        super(context, name, factory, version);
    }
    */
    public TwitterDBHelper(Context context) {
        super(context, StatusProvider.DB_NAME, null, StatusProvider.DB_VERSION);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        // Schema: _id (pk), created_at (integer timestamp), status_text, user_name.
        String sql = String.format("create table %s (%s integer primary key, " +
                        "%s integer, %s text, %s text)",
                StatusProvider.TABLE_NAME, StatusProvider.C_ID,
                StatusProvider.C_CREATED_AT, StatusProvider.C_TEXT,
                StatusProvider.C_USER);
        Log.d(TAG, "onCreate: " + sql);
        db.execSQL(sql);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // Drops all cached statuses on version bump and recreates the table.
        //don't do this
        String sql = String.format("drop table if exists %s",
                StatusProvider.TABLE_NAME);
        db.execSQL(sql);
        Log.d(TAG, "onUpgrade");
        onCreate(db);
    }
} |
# One-time global git configuration.

# Use git template directory when creating or cloning projects.
git config --global init.templatedir '~/.git_template'
# global gitignore
git config --global core.excludesfile ~/.gitignore_global
# autosquash: interactive rebase auto-orders fixup!/squash! commits
git config --global rebase.autosquash true
# rebase to reconcile pulls
git config --global pull.rebase true
|
#!/bin/bash
# Take each cluster node's network interface down and back up, waiting 10s
# after every transition (network fault-injection for the swarm nodes).
#
# Improvement over the original: the five copy-pasted down/up sequences are
# folded into a single helper, eliminating ~25 duplicated lines.

# bounce NODE IFACE -- log and toggle IFACE down, then up, 10s apart.
bounce() {
    echo $(date --utc "+%Y-%m-%d %H:%M:%S") - $1 interface down
    ifconfig $2 down
    sleep 10s
    echo $(date --utc "+%Y-%m-%d %H:%M:%S") - $1 interface up
    ifconfig $2 up
    sleep 10s
}

bounce manager1 eth1
bounce manager2 eth2
bounce manager3 eth3
bounce worker1 eth4
bounce worker2 eth5
|
import colorsys
def get_closeness_to_target(source_rgb, target_rgb):
    """
    Compute the closeness of a given color to a target color.

    Both colors are converted from RGB to HLS and compared by Euclidean
    distance over the (hue, lightness, saturation) components; smaller
    values mean closer colors.
    """
    source_hls = colorsys.rgb_to_hls(*source_rgb)
    target_hls = colorsys.rgb_to_hls(*target_rgb)
    # Sum of squared component differences, then the square root.
    squared_error = sum((a - b) ** 2 for a, b in zip(source_hls, target_hls))
    return squared_error ** 0.5
#!/bin/bash
# Battery indicator for a status bar: prints an icon (while discharging) and
# the current charge percentage. The icon string literals are font glyphs
# (e.g. Nerd Font) and may render as blanks in a plain editor.
#
# Fixes over the original:
#  * shebang changed sh -> bash: the script uses [[ ]] and ${var:0:-1},
#    which are bashisms and fail under a POSIX /bin/sh.
#  * the final printf had broken quoting
#    (printf ""%s%s%% " $status " "$capacity ") which passed stray
#    quote/space fragments as arguments.

capacity="$(cat /sys/class/power_supply/BAT1/capacity)%"
# Collapse the kernel status to a flag: "d" while discharging or full,
# empty while charging.
status=$(sed "s/[Dd]ischarging/d/;s/[Cc]harging//;s/[Ff]ull/d/" /sys/class/power_supply/BAT1/status)

if [[ $status == "d" ]]; then
    # Numeric charge with the trailing "%" stripped.
    charge=${capacity:0:-1}
    # Pick an icon bucket by remaining charge.
    if [[ $charge -gt 90 ]]; then
        status=""
    elif [[ $charge -gt 75 ]]; then
        status=""
    elif [[ $charge -gt 60 ]]; then
        status=""
    elif [[ $charge -gt 40 ]]; then
        status=""
    elif [[ $charge -gt 35 ]]; then
        status=""
    else
        status=""
    fi
    # Desktop notification when the battery is getting low.
    if [[ $charge -lt 40 ]]; then
        twmnc -t "Warning" -c "Low battery, please connect the charger" -d 10000
    fi
fi

printf "%s%s " "$status" "$capacity"
|
<gh_stars>1-10
import { Component, h, Host, Prop } from "@stencil/core";

/**
 * `<fl-spinner>`: a purely presentational loading spinner rendered in
 * shadow DOM and styled by spinner.scss.
 */
@Component({
  tag: "fl-spinner",
  styleUrl: "spinner.scss",
  shadow: true,
})
export class Spinner {
  // Visual size of the spinner.
  // NOTE(review): `size` is not referenced in render(); presumably
  // spinner.scss styles off the reflected host attribute -- confirm.
  @Prop() size: "small" | "medium" | "large" = "medium";

  render() {
    return (
      <Host>
        <div class="spinner"></div>
      </Host>
    );
  }
}
|
/** Payload for a checklist item's state change on a card. */
export interface IUpdateCheckItemStateOnCard {
  checkItem: {
    state: string;
    name: string;
    id: string;
  };
  checklist: {
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
  };
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
}

/** Payload for adding a member (by id) to a card. */
export interface IAddMemberToCard {
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
  };
  idMember: string;
}

/** Payload for creating a card on a list. */
export interface ICreateCard {
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
  list: {
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
  };
}

/** Payload for moving a card between lists (old list id kept in `old`). */
export interface IUpdateCardMoveList {
  listAfter: {
    name: string;
    id: string;
  };
  listBefore: {
    name: string;
    id: string;
  };
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
    idList: string;
  };
  old: {
    idList: string;
  };
}

/** Payload for reordering a card within a list (previous position in `old`). */
export interface IUpdateCardMovePos {
  list: {
    name: string;
    id: string;
  };
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
    pos: number;
  };
  old: {
    pos: number;
  };
}

/** Payload for attaching a file/link (with preview metadata) to a card. */
export interface IAddAttachmentToCard {
  board: {
    shortLink: string;
    name: string;
    id: string;
  };
  list: {
    name: string;
    id: string;
  };
  card: {
    shortLink: string;
    idShort: number;
    name: string;
    id: string;
  };
  attachment: {
    url: string;
    name: string;
    id: string;
    edgeColor: string;
    previewUrl: string;
    previewUrl2x: string;
  };
}
|
<reponame>oishefarhan/OSC-recorder
# -*- coding: utf-8 -*-
"""Unit test package for osc_recorder."""
|
<reponame>cjx2328/gocms
package file
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
)
// GetCurrentDirectory returns the absolute directory containing the running
// executable, with backslashes normalized to forward slashes.
func GetCurrentDirectory() string {
	abs, _ := filepath.Abs(filepath.Dir(os.Args[0]))
	return strings.ReplaceAll(abs, "\\", "/")
}
// GetRootDir returns the executable's directory with a trailing path
// separator, falling back to "./" (OS-specific separator) when the
// absolute path cannot be resolved.
func GetRootDir() string {
	dir, err := filepath.Abs(filepath.Dir(os.Args[0]))
	if err != nil {
		return "." + string(os.PathSeparator)
	}
	return dir + string(os.PathSeparator)
}
// GetExecFilePath returns the absolute path of the running executable as
// resolved via PATH lookup of os.Args[0], or "./" (OS-specific separator)
// when the lookup fails.
func GetExecFilePath() string {
	path, err := exec.LookPath(os.Args[0])
	if err != nil {
		return "." + string(os.PathSeparator)
	}
	abs, _ := filepath.Abs(path)
	return abs
}
|
#!/bin/bash
# Load DOCKER_NAME / DOCKER_IMAGE from docker.conf, then launch the
# container detached with an always-restart policy.
. docker.conf
docker run -d --restart always --name "${DOCKER_NAME}" "${DOCKER_IMAGE}"
|
# proxy enable on Gnome
# BUG FIX: `local` is only valid inside a function; at file scope zsh
# rejects it ("local: can only be used in a function"). A plain assignment
# gives the same shell-wide variable the proxy functions below read.
proxy_path="/home/$(whoami)/clash"
# Export the proxy environment variables in both lower- and upper-case forms
# (SOCKS on 7891 for all_proxy, HTTP on 7890 for the rest).
function set-proxy(){
    local socks_url="socks://127.0.0.1:7891/"
    local http_url="http://127.0.0.1:7890/"
    export all_proxy="$socks_url" ALL_PROXY="$socks_url"
    export http_proxy="$http_url" HTTP_PROXY="$http_url"
    export https_proxy="$http_url" HTTPS_PROXY="$http_url"
}

# Remove every proxy variable exported by set-proxy.
function unset-proxy(){
    unset all_proxy http_proxy https_proxy ALL_PROXY HTTPS_PROXY HTTP_PROXY
}
# network mode
# manual|auto|none
function start-proxy(){
    gsettings set org.gnome.system.proxy mode 'manual' # set the GNOME network mode to manual
    set-proxy && ${proxy_path}/clash -d ${proxy_path}/ & # run the clash client in the background
    echo "start clash successfully!" # startup confirmation
}
function stop-proxy(){
    gsettings set org.gnome.system.proxy mode 'none' # disable
    # var=$(ps | grep "clash" | awk '{print $1}') # grab clash's process id
    var=$(ps -ef| grep -m1 "clash" | awk '{print $2}') # grab clash's process id
    # NOTE(review): `grep -m1 "clash"` may match the grep process itself or an
    # unrelated command line -- confirm; pkill/pgrep would be more robust.
    kill -9 $var && echo "stop clash successfully!"
    unset-proxy && echo "unset all proxy variables"
}
function restart-proxy(){
    stop-proxy
    start-proxy
}
# take functions
# mkcd is equivalent to takedir
# (zsh's `function name1 name2 () { ... }` binds the same body to both names)
function mkcd takedir() {
    # Create the directory (and parents) and cd into the last one given.
    mkdir -p $@ && cd ${@:$#}
}
function takeurl() {
    # Download a tarball, extract it, and cd into its top-level directory.
    local data thedir
    data="$(mktemp)"
    curl -L "$1" > "$data"
    tar xf "$data"
    # First entry of the archive listing is assumed to be the root directory.
    thedir="$(tar tf "$data" | head -n 1)"
    rm "$data"
    cd "$thedir"
}
function takegit() {
    # Clone a git repo and cd into the checkout (URL minus trailing .git).
    git clone "$1"
    cd "$(basename ${1%%.git})"
}
function take() {
    # Dispatch on the argument: tarball URL -> takeurl, git URL -> takegit,
    # anything else is treated as a directory name.
    if [[ $1 =~ ^(https?|ftp).*\.tar\.(gz|bz2|xz)$ ]]; then
        takeurl "$1"
    elif [[ $1 =~ ^([A-Za-z0-9]\+@|https?|git|ssh|ftps?|rsync).*\.git/?$ ]]; then
        takegit "$1"
    else
        takedir "$@"
    fi
}
# Delete the current working directory: step up one level, then remove it.
function bombs {
    # BUG FIX: the original expanded $to_rm unquoted, so a path containing
    # spaces word-split and the wrong targets were removed. Also made the
    # temporary local so it no longer leaks into the shell environment.
    local to_rm="$(pwd)"
    cd .. && rm -rf "$to_rm"
}

# Create the file if needed, then open it in Typora.
function Typora {
    # BUG FIX: quote "$1" so filenames with spaces survive intact.
    touch "$1" && typora "$1"
}
#############
# bat & fzf #
#############
# Show the unstaged diff of changed (non-deleted) files, rendered by bat.
function batdiff() {
    git diff --name-only --diff-filter=d | xargs bat --diff
}
# Fuzzy-pick a file with a bat-rendered preview of its first 500 lines.
function preview(){
    fzf --preview 'bat --style=numbers --color=always --line-range :500 {}'
} |
const Discord = require("discord.js");

// Command: show the Spotify track the mentioned user (or the author) is
// currently listening to, read from their rich-presence data.
// NOTE(review): uses the discord.js v11 API surface (RichEmbed,
// user.presence.game) -- confirm the installed discord.js version.
module.exports.run = async (bot, message, args) => {
    // Target user: first mention, falling back to the message author.
    let user = message.mentions.users.first() || message.author;
    if (user.presence.game != null) {
        if (user.presence.game.name === "Spotify") {
            // Spotify rich-presence fields: album art, track, album, artist.
            var trackImg = user.presence.game.assets.largeImageURL;
            var trackUrl = `https://open.spotify.com/track/${user.presence.game.syncID}`;
            var trackName = user.presence.game.details;
            var trackAlbum = user.presence.game.assets.largeText;
            var trackAuthor = user.presence.game.state;
            let embed = new Discord.RichEmbed()
                .setTitle(`${user.username}#${user.discriminator} | Track Info`) // | Listening to music on Spotify, "https://cdn.discordapp.com/emojis/525426257853284352.gif?v=1")
                .setColor("#ffa500")
                .setThumbnail(trackImg)
                .addField("Song Name", `${trackName}`, true)
                .addField("Album", `${trackAlbum}`, true)
                .addField("Author", `${trackAuthor}`, true)
                .addField(
                    "Listen to this track here:",
                    `[Open Spotify](${trackUrl})`,
                    true
                )
                .setTimestamp()
                .setFooter(
                    /* "FoxBot, a bot by Akemi#4040" */ "Listening to music on spotify",
                    "https://cdn.discordapp.com/emojis/525404673843134464.png?v=1"
                );
            message.channel.send(embed);
        } else {
            message.channel.send(
                `Sorry, that user isn't listening to anything on spotify right now.`
            );
        }
    }
    // NOTE(review): nothing is sent when the user has no presence at all
    // (the outer if) -- confirm whether a fallback reply is intended.
};

module.exports.help = {
    name: "spotify"
};
|
<filename>discojs/src/privacy.ts
import { List } from 'immutable'
import { Weights } from '@/types'
import { Task } from '@/task'
import * as tf from '@tensorflow/tfjs'
/**
 * Add task-parametrized Gaussian noise to and clip the weights update between the previous and current rounds.
 * The previous round's weights are the last weights pulled from server/peers.
 * The current round's weights are obtained after a single round of training, from the previous round's weights.
 * @param updatedWeights weights from the current round
 * @param staleWeights weights from the previous round
 * @param task the task
 * @returns the noised weights for the current round
 */
export function addDifferentialPrivacy (updatedWeights: Weights, staleWeights: Weights, task: Task): Weights {
  const noiseScale = task.trainingInformation?.noiseScale
  const clippingRadius = task.trainingInformation?.clippingRadius

  // Per-layer update: current minus previous round's weights.
  // BUG FIX: the original used `w1.add(-w2)` -- unary minus on a tf.Tensor
  // coerces it to NaN, turning every entry of the diff into NaN. sub() is
  // the correct elementwise subtraction.
  const weightsDiff = List(updatedWeights)
    .zip(List(staleWeights))
    .map(([w1, w2]) => w1.sub(w2))

  let newWeightsDiff: List<tf.Tensor>
  if (clippingRadius !== undefined) {
    // Frobenius norm across all layers, used for global-norm clipping.
    const norm = Math.sqrt(weightsDiff.map((w) => w.square().sum().dataSync()[0]).reduce((a: number, b) => a + b))

    newWeightsDiff = weightsDiff.map((w) => {
      const clipped = w.div(Math.max(1, norm / clippingRadius))
      if (noiseScale !== undefined) {
        // Add clipping and noise.
        // NOTE(review): tf.randomNormal's third argument is a standard
        // deviation, but a squared (variance-like) quantity is passed here
        // -- confirm the intended noise magnitude.
        const noise = tf.randomNormal(w.shape, 0, (noiseScale * noiseScale) * (clippingRadius * clippingRadius))
        return clipped.add(noise)
      } else {
        // Add clipping without any noise
        return clipped
      }
    })
  } else {
    if (noiseScale !== undefined) {
      // Add noise without any clipping; note the original update direction
      // is discarded in this branch (noise only), as before.
      newWeightsDiff = weightsDiff.map((w) => tf.randomNormal(w.shape, 0, (noiseScale * noiseScale)))
    } else {
      // Neither clipping nor noise configured: return the update unchanged.
      return updatedWeights
    }
  }

  // Re-apply the (clipped/noised) diff on top of the previous round's weights.
  return List(staleWeights)
    .zip(newWeightsDiff)
    .map(([w, d]) => w.add(d))
    .toArray()
}
|
<reponame>neggas/blog
import { Controller, Patch } from '@nestjs/common';
import { SubscriptionService } from './Subscription.service';

/**
 * HTTP controller for the /subscriptions route. Currently exposes no
 * endpoints; the PATCH handler is stubbed out below and the injected
 * SubscriptionService is held for future use.
 */
@Controller('subscriptions')
export class SubscriptionController {
  constructor(private readonly subscriptionService: SubscriptionService) {}
  /* @Patch(':id') */
}
|
// Thin wrapper around AES encryption with a per-instance key.
// NOTE(review): `Crypto` is neither defined nor imported in this file --
// presumably the crypto-js global (Crypto.AES.encrypt/decrypt). Confirm the
// dependency, or this code throws a ReferenceError at runtime.
class Encrypter {
  constructor(key) {
    // Key used for both encryption and decryption.
    this.key = key;
  }

  // Encrypt `data` with the stored key; returns Crypto.AES.encrypt's result.
  encrypt(data) {
    const encryptedData = Crypto.AES.encrypt(data, this.key);
    return encryptedData;
  }

  // Decrypt previously encrypted data with the same key.
  decrypt(data) {
    const decryptedData = Crypto.AES.decrypt(data, this.key);
    return decryptedData;
  }
}

// Example usage: round-trip a string through the encrypter.
const myEncrypter = new Encrypter('mySecureKey');
const encryptedData = myEncrypter.encrypt('my secret data');
const decryptedData = myEncrypter.decrypt(encryptedData); |
exports.wrapPageElement = ({ element, props }) => {
// dont ssr errors deck
if (props["*"] && props["*"].startsWith("errors")) return "";
return element;
};
|
define(function(require, exports, module) {
    var project = require("../project");

    // Fuzzy file finder: matches `pattern` (e.g. "src/foo") against a list of
    // file paths. The pattern's last "/"-segment matches the basename; any
    // earlier segments match the directory path. Returns scored matches with
    // "runs" marking which character spans were matched (for highlighting).
    module.exports = function(files, pattern) {
        var fileRegex, pathRegex;
        // Cache of per-directory path-match results (all files in the same
        // directory share the same path score).
        var pathMatches = {};

        // Convert a regex match into alternating outside/inside runs plus a
        // score: (matched chars / total chars) * (pattern segments / runs).
        function buildResult(match, segments) {
            var runs = [];
            var insideRuns = [];
            var lastRun = false;
            var insideChars = 0;
            var totalChars = 0;
            match.shift(); // drop the full-match entry; groups alternate filler/matched
            for (var index = 0; index < match.length; index++) {
                var capture = match[index];
                if (capture.length) {
                    // Odd-numbered groups are the pattern characters ("inside").
                    var inside = index % 2 !== 0;
                    capture = capture.replace('/', '');
                    totalChars += capture.length;
                    if (inside) {
                        insideChars += capture.length;
                    }
                    // Merge consecutive captures of the same kind into one run.
                    if (lastRun && lastRun.inside === inside) {
                        lastRun.string += capture;
                    } else {
                        lastRun = {
                            string: capture,
                            inside: inside
                        };
                        runs.push(lastRun);
                        if (inside) {
                            insideRuns.push(lastRun);
                        }
                    }
                }
            }
            var charRatio = totalChars > 0 ? insideChars / totalChars : 1;
            var runRatio = insideRuns.length > 0 ? segments / insideRuns.length : 1;
            return {
                score: runRatio * charRatio,
                result: runs,
                missed: false
            };
        }

        // Match the directory part of a filename (memoized per directory).
        function matchPath(filename) {
            var segments = filename.split('/').length - 1;
            var dirname = project.dirname(filename);
            if (pathMatches[dirname]) {
                return pathMatches[dirname];
            }
            if (pathRegex) {
                var match = pathRegex.exec(filename);
                // missed=true marks directories that failed the path pattern.
                return match ? (pathMatches[dirname] = buildResult(match, segments)) : (pathMatches[dirname] = {
                    score: 1,
                    result: dirname,
                    missed: true
                });
            } else {
                // No directory part in the pattern: every path matches fully.
                return (pathMatches[dirname] = {
                    score: 1,
                    result: dirname,
                    missed: false
                });
            }
        }

        // Match the basename and combine its score with the path score.
        function matchFile(filename, pathMatch) {
            var basename = project.filename(filename);
            var dirname = project.dirname(filename);
            var match = fileRegex.exec(basename);
            if (match) {
                var matchResult = buildResult(match, 1);
                return {
                    path: filename,
                    dirname: dirname,
                    name: basename,
                    pathRuns: pathMatch.result,
                    fileRuns: matchResult.result,
                    score: pathMatch.score * matchResult.score
                };
            } else {
                return false;
            }
        }

        // Turn "abc" into "(a)([^/]*?)(b)([^/]*?)(c)": each pattern character
        // becomes a capture group, separated by lazy non-slash filler groups.
        function makePattern(part) {
            function charToPattern(pattern, character) {
                if (pattern.length) {
                    pattern += '([^/]*?)';
                }
                return pattern += '(' + character + ')';
            }
            return part.split('').reduce(charToPattern, '');
        }

        // Split the pattern into directory segments and a basename segment;
        // a trailing "/" means "match any basename in that directory".
        pattern = pattern.replace(/ /g, '');
        var parts = pattern.split('/');
        if (pattern.match(/\/$/)) {
            parts.push('');
        }
        var filePart = parts.pop();
        if (parts.length) {
            pathRegex = new RegExp('^(.*?)' + parts.map(makePattern).join('(.*?/.*?)') + '(.*?)$', 'i');
        }
        fileRegex = new RegExp("^(.*?)" + (makePattern(filePart)) + "(.*)$", "i");

        // Score every file whose directory passed the path pattern.
        var matches = [];
        files.forEach(function(filename) {
            var pathMatch = matchPath(filename);
            if (!pathMatch.missed) {
                var fileMatch = matchFile(filename, pathMatch);
                if (fileMatch) {
                    matches.push(fileMatch);
                }
            }
        });
        return matches;
    };
}); |
module SASS
  module Writer
    class << self
      # Print +result+ inside a <pre> block of a minimal HTML page.
      # NOTE(review): html_header/html_footer are not defined in this file --
      # presumably provided by the surrounding environment. Confirm.
      def render(title, result)
        html_header(title)
        puts '<pre>'
        puts result
        puts '</pre>'
        html_footer
      end

      # Convert the selected CSS text to the given Sass syntax and print it.
      # NOTE(review): SELECTED_TEXT is a constant supplied externally
      # (TextMate-style command environment, presumably) -- confirm.
      def convert(type)
        puts Sass::CSS.new(SELECTED_TEXT).render(type)
      end
    end
  end
end
|
-- BigQuery JS UDF wrapping the h3-js library's `compact`: collapses a set of
-- H3 cell indexes into an equivalent, minimal set of mixed-resolution cells.
-- `library` must point at a GCS copy of the h3-js bundle.
CREATE OR REPLACE FUNCTION your_project_id.your_dataset.h3_compact(h3_set ARRAY<STRING>)
RETURNS ARRAY<STRING>
LANGUAGE js AS
"""
return h3.compact(h3_set);
"""
OPTIONS (
  library=['gs://file_path']
);
|
import { terser as pluginTerser } from 'rollup-plugin-terser';
import pluginNodeResolve from '@rollup/plugin-node-resolve';
import pluginCommonjs from '@rollup/plugin-commonjs';
import pluginTypescript from '@rollup/plugin-typescript';
import * as pkg from './package.json';

// Build parameters passed in via environment variables:
//   format: output module format (umd/esm/iife/...)
//   min:    whether to minify with terser
//   dev:    dev mode -- also builds the examples bundle and serves it
const env = {
    format: process.env.format,
    min: process.env.min === 'true',
    dev: process.env.mode === 'dev',
};

// the build hangs when these are imported
const pluginServe = env.dev ? require('rollup-plugin-serve') : () => null;
const pluginLivereload = env.dev ? require('rollup-plugin-livereload') : () => null;

// Banner prepended to every output file.
const copyright = `// ${pkg.homepage} v${pkg.version} Copyright ${new Date().getFullYear()} ${
    pkg.author.name
}`;

// Output name suffix: "" for umd, ".esm"/".iife" otherwise, plus ".min".
const formatExtension = env.format === 'umd' ? '' : `.${env.format}`;
const fullExtension = env.min ? formatExtension + '.min' : formatExtension;

// In dev mode all dependencies are bundled in; otherwise they are externals.
const bundleDeps = env.dev;

const mainConfig = {
    input: 'src/index.ts',
    output: {
        dir: 'dist',
        entryFileNames: `${pkg.name}${fullExtension}.js`,
        name: pkg.name,
        format: process.env.format,
        banner: copyright,
        sourcemap: true,
        // Global names for the externalized d3/webcola packages.
        globals: bundleDeps
            ? {}
            : {
                  webcola: 'cola',
                  'd3-color': 'd3',
                  'd3-dispatch': 'd3',
                  'd3-drag': 'd3',
                  'd3-ease': 'd3',
                  'd3-interpolate': 'd3',
                  'd3-path': 'd3',
                  'd3-selection': 'd3',
                  'd3-shape': 'd3',
                  'd3-timer': 'd3',
                  'd3-transition': 'd3',
                  'd3-zoom': 'd3',
              },
    },
    plugins: [
        ...(env.min ? [pluginTerser()] : []),
        pluginNodeResolve({ browser: env.format === 'iife' }),
        pluginCommonjs(),
        pluginTypescript(),
    ],
    external: bundleDeps
        ? []
        : [
              'webcola',
              'd3-color',
              'd3-drag',
              'd3-dispatch',
              'd3-drag',
              'd3-ease',
              'd3-interpolate',
              'd3-path',
              'd3-selection',
              'd3-shape',
              'd3-timer',
              'd3-transition',
              'd3-zoom',
          ],
};

// Dev-only bundle of the examples page, served with live reload on :8080.
const examplesConfig = {
    input: ['examples/example.ts'],
    output: {
        dir: 'examples/dist',
        name: 'examples',
        format: 'iife',
        globals: {
            '..': 'algorithmx',
        },
        sourcemap: true,
    },
    plugins: [
        pluginTypescript({ tsconfig: 'examples/tsconfig.json' }),
        pluginServe({
            open: true,
            contentBase: ['examples', 'dist'],
            port: 8080,
        }),
        pluginLivereload(),
    ],
    watch: {
        clearScreen: false,
    },
};

export default env.dev ? [mainConfig, examplesConfig] : mainConfig;
|
import { Component, OnInit, Inject, NgZone } from '@angular/core';
import { MAT_SNACK_BAR_DATA, MatSnackBarRef } from '@angular/material/snack-bar';
import { ColidMatSnackBarData } from '../colid-mat-snack-bar-data.model';
import { ColidMatSnackBarType } from '../colid-mat-snack-bar-type.model';

/**
 * Body component for COLID snackbars: renders the injected message data and
 * offers copy-to-clipboard and dismiss actions.
 */
@Component({
  selector: 'app-colid-mat-snack-bar',
  templateUrl: './colid-mat-snack-bar.component.html',
  styleUrls: ['./colid-mat-snack-bar.component.css']
})
export class ColidMatSnackBarComponent implements OnInit {
  // Exposed so the template can switch on the snackbar type enum.
  colidSnackBarType = ColidMatSnackBarType;

  constructor(public snackbarref: MatSnackBarRef<ColidMatSnackBarComponent>, private zone: NgZone, @Inject(MAT_SNACK_BAR_DATA) public data: ColidMatSnackBarData) { }

  ngOnInit() {
  }

  // Copy the snackbar payload (as JSON) to the clipboard via an invisible
  // off-screen textarea and document.execCommand('copy').
  // NOTE(review): execCommand is deprecated; consider navigator.clipboard.
  copy() {
    const selBox = document.createElement('textarea');
    selBox.style.position = 'fixed';
    selBox.style.left = '0';
    selBox.style.top = '0';
    selBox.style.opacity = '0';
    selBox.value = JSON.stringify(this.data.data);
    document.body.appendChild(selBox);
    selBox.focus();
    selBox.select();
    document.execCommand('copy');
    document.body.removeChild(selBox);
  }

  // Dismiss inside Angular's zone so change detection runs afterwards.
  dismiss() {
    this.zone.run(() => {
      this.snackbarref.dismiss()
    });
  }
}
|
<filename>tests/test_record.py
import unittest
from bibrecord import record
class TestRecord(unittest.TestCase):
    """Tests for record.Record: to_string formatting and BibTeX output.

    Author-name fixtures use the dataset's <NAME> anonymization
    placeholders; assertions compare exact formatted strings.
    """

    def setUp(self):
        self.record = record.Record()

    def test_instantiate_class(self):
        pass

    def test_key_property_can_be_set_by_instantiation(self):
        record_ = record.Record(key='foo')
        self.assertEqual('foo', record_.key)

    def test_has_to_string_method(self):
        self.assertTrue(hasattr(self.record, 'to_string'))
        self.assertTrue(callable(self.record.to_string))

    def test_to_string_returns_string(self):
        self.assertTrue(isinstance(self.record.to_string(), str))

    def test_to_string_with_format_and_properties_returns_string(self):
        self.record.format = 'author: title'
        self.record.author = ['<NAME>']
        self.record.title = 'Lorem ipsum'
        output = f"{self.record.author[0]}: {self.record.title}"
        self.assertEqual(output, self.record.to_string())

    def test_to_string_with_format_and_authors_returns_string(self):
        # Multiple authors are joined with ", " in to_string output.
        self.record.format = 'author: title'
        self.record.author = ['<NAME>', '<NAME>']
        self.record.title = 'Lorem ipsum'
        authors = ', '.join(self.record.author)
        output = f"{authors}: {self.record.title}"
        self.assertEqual(output, self.record.to_string())

    def test_to_string_with_format_and_author_with_suffix_returns_string(self):
        self.record.format = 'author: title'
        self.record.author = ['Doe, Jr., John']
        self.record.title = 'Lorem ipsum'
        output = f"<NAME>, Jr.: {self.record.title}"
        self.assertEqual(output, self.record.to_string())

    def test_to_string_with_format_and_author_and_reverse_returns_string(self):
        self.record.format = 'author: title'
        self.record.author = ['<NAME>']
        self.record.title = 'Lorem ipsum'
        self.record.reverse = True
        output = f"<NAME>: {self.record.title}"
        self.assertEqual(output, self.record.to_string())

    def test_has_to_bib_method(self):
        self.assertTrue(hasattr(self.record, 'to_bib'))
        self.assertTrue(callable(self.record.to_bib))

    def test_to_bib_returns_string(self):
        self.assertTrue(isinstance(self.record.to_string(), str))

    def test_to_bib_returns_bibtex_record(self):
        # An empty record serializes to a bare @Record{...} stanza.
        output = "@Record{,\n\n}"
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_key_returns_bibtex_record_with_key(self):
        output = "@Record{foo,\n\n}"
        self.record.key = 'foo'
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_property_adds_property_to_bibtex_record(self):
        self.record.author = ['<NAME>']
        output = "@Record{,\n\tauthor = {<NAME>}\n}"
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_empty_property_returns_empty_bibtex_record(self):
        # Empty-string properties are omitted from the BibTeX output.
        output = "@Record{,\n\n}"
        self.record.title = ''
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_properties_adds_properties_to_bibtex_record(self):
        self.record.author = ['<NAME>']
        self.record.title = 'Lorem ipsum'
        output = "@Record{,\n\tauthor = {<NAME>},\n\ttitle = {Lorem ipsum}\n}"
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_author_reversed_adds_author_to_bibtex_record(self):
        self.record.author = ['<NAME>']
        self.record.reverse = True
        output = "@Record{,\n\tauthor = {Doe, John}\n}"
        self.assertEqual(self.record.to_bib(), output)

    def test_to_bib_with_authors_adds_authors_to_bibtex_record(self):
        # Multiple authors are joined with " AND " in BibTeX output.
        self.record.author = ['<NAME>', '<NAME>']
        output = "@Record{,\n\tauthor = {<NAME> AND <NAME>mann}\n}"
        self.assertEqual(self.record.to_bib(), output)
class TestPerson(unittest.TestCase):
    """Tests for record.Person: parsing BibTeX name forms (first/last,
    reversed, particle, suffix) and serializing back via to_string/to_bib.
    """

    def setUp(self):
        self.person = record.Person()
        # Fixture name parts reused across tests.
        self.first = 'John'
        self.last = 'Doe'
        self.particle = 'van der'
        self.suffix = 'Jr.'

    def test_instantiate_class(self):
        pass

    def test_has_properties(self):
        for prop in ['first', 'last', 'particle', 'suffix']:
            self.assertTrue(hasattr(self.person, prop))

    def test_has_from_bib_method(self):
        self.assertTrue(hasattr(self.person, 'from_bib'))
        self.assertTrue(callable(self.person.from_bib))

    def test_from_bib_sets_first_and_last(self):
        self.person.from_bib('{} {}'.format(self.first, self.last))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_additional_spaces_sets_first_and_last(self):
        self.person.from_bib('{}  {}'.format(self.first, self.last))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_leading_spaces_sets_first_and_last(self):
        self.person.from_bib(' {} {}'.format(self.first, self.last))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_trailing_spaces_sets_first_and_last(self):
        self.person.from_bib('{} {} '.format(self.first, self.last))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_to_first_names_sets_first_and_last(self):
        # Two first names (with an initial) stay together in `first`.
        self.first = '<NAME>.'
        self.person.from_bib('{} {}'.format(self.first, self.last))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_names_reversed_sets_first_and_last(self):
        # "Last, First" BibTeX form.
        self.person.from_bib('{}, {}'.format(self.last, self.first))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)

    def test_from_bib_with_particle_sets_particle(self):
        # "particle Last, First" form, e.g. "van der Doe, John".
        self.person.from_bib('{} {}, {}'.format(self.particle, self.last,
                                                self.first))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)
        self.assertEqual(self.particle, self.person.particle)

    def test_from_bib_with_suffix_sets_particle(self):
        # "particle Last, Suffix, First" form.
        self.person.from_bib('{} {}, {}, {}'.format(self.particle, self.last,
                                                    self.suffix, self.first))
        self.assertEqual(self.first, self.person.first)
        self.assertEqual(self.last, self.person.last)
        self.assertEqual(self.particle, self.person.particle)
        self.assertEqual(self.suffix, self.person.suffix)

    def test_has_to_string_method(self):
        self.assertTrue(hasattr(self.person, 'to_string'))
        self.assertTrue(callable(self.person.to_string))

    def test_to_string_returns_string(self):
        self.assertTrue(isinstance(self.person.to_string(), str))

    def test_to_string_with_first_and_last(self):
        string = '{} {}'.format(self.first, self.last)
        self.person.from_bib(string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_particle(self):
        # Natural order puts the particle between first and last name.
        string = '{} {} {}'.format(self.first, self.particle, self.last)
        bib_string = '{} {}, {}'.format(self.particle, self.last, self.first)
        self.person.from_bib(bib_string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_suffix(self):
        string = '{} {}, {}'.format(self.first, self.last, self.suffix)
        bib_string = '{}, {}, {}'.format(self.last, self.suffix, self.first)
        self.person.from_bib(bib_string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_particle_and_suffix(self):
        string = '{} {} {}, {}'.format(self.first, self.particle, self.last,
                                       self.suffix)
        bib_string = '{} {}, {}, {}'.format(self.particle, self.last,
                                            self.suffix, self.first)
        self.person.from_bib(bib_string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_first_and_last_reversed(self):
        # With reverse=True, output keeps the "Last, First" BibTeX order.
        string = '{}, {}'.format(self.last, self.first)
        self.person.reverse = True
        self.person.from_bib(string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_particle_reversed(self):
        string = '{} {}, {}'.format(self.particle, self.last, self.first)
        self.person.reverse = True
        self.person.from_bib(string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_suffix_reversed(self):
        string = '{}, {}, {}'.format(self.last, self.suffix, self.first)
        self.person.reverse = True
        self.person.from_bib(string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_to_string_with_particle_and_suffix_reversed(self):
        string = '{} {}, {}, {}'.format(self.particle, self.last,
                                        self.suffix, self.first)
        self.person.reverse = True
        self.person.from_bib(string)
        output = self.person.to_string()
        self.assertEqual(string, output)

    def test_has_to_bib_method(self):
        self.assertTrue(hasattr(self.person, 'to_bib'))
        self.assertTrue(callable(self.person.to_bib))

    def test_to_bib_returns_string(self):
        self.assertTrue(isinstance(self.person.to_bib(), str))

    def test_to_bib_with_particle_returns_reversed(self):
        # to_bib always emits the reversed BibTeX form, even when
        # the reverse property is False.
        string = '{} {}, {}'.format(self.particle, self.last, self.first)
        self.person.reverse = False
        self.person.from_bib(string)
        output = self.person.to_bib()
        self.assertEqual(string, output)

    def test_to_bib_with_suffix_returns_reversed(self):
        string = '{}, {}, {}'.format(self.last, self.suffix, self.first)
        self.person.reverse = False
        self.person.from_bib(string)
        output = self.person.to_bib()
        self.assertEqual(string, output)

    def test_to_bib_with_particle_resets_reverse_property(self):
        # to_bib must not leave the reverse flag flipped as a side effect.
        string = '{} {}, {}'.format(self.particle, self.last, self.first)
        self.person.reverse = False
        self.person.from_bib(string)
        self.person.to_bib()
        self.assertFalse(self.person.reverse)
class TestArticle(unittest.TestCase):
    """Tests for record.Article formatting and BibTeX output."""

    def setUp(self):
        self.article = record.Article()
        self.author = ['<NAME>']
        self.title = 'Lorem ipsum'
        self.journal = 'J. Peculiar Res.'
        self.volume = '42'
        self.pages = '0'
        self.year = '1968'

    def test_instantiate_class(self):
        pass

    def test_has_properties(self):
        expected_properties = ['author', 'title', 'journal', 'year',
                               'volume', 'pages', 'doi']
        for prop in expected_properties:
            self.assertTrue(hasattr(self.article, prop))

    def test_properties_can_be_set_by_instantiation(self):
        article = record.Article(author=self.author)
        self.assertEqual(self.author, article.author)

    def test_has_sensible_default_format(self):
        article = record.Article(author=self.author, title=self.title,
                                 journal=self.journal, volume=self.volume,
                                 pages=self.pages, year=self.year)
        names = ', '.join(self.author)
        expected = (f"{names}: {self.title}. {self.journal} "
                    f"{self.volume}:{self.pages}, {self.year}.")
        self.assertEqual(expected, article.to_string())

    def test_to_bib_returns_bibtex_record_with_correct_type(self):
        self.assertEqual(self.article.to_bib(), "@Article{,\n\n}")
class TestBook(unittest.TestCase):
    """Tests for record.Book formatting and BibTeX output."""

    def setUp(self):
        self.book = record.Book()
        self.author = ['<NAME>']
        self.title = 'Lorem ipsum'
        self.publisher = 'Springer'
        self.year = '1968'
        self.address = 'Berlin'
        self.edition = '1'

    def test_instantiate_class(self):
        pass

    def test_has_properties(self):
        expected_properties = ['author', 'editor', 'title', 'publisher',
                               'year', 'address', 'edition']
        for prop in expected_properties:
            self.assertTrue(hasattr(self.book, prop))

    def test_properties_can_be_set_by_instantiation(self):
        book = record.Book(author=self.author)
        self.assertEqual(self.author, book.author)

    def test_has_sensible_default_format(self):
        book = record.Book(author=self.author, title=self.title,
                           publisher=self.publisher, year=self.year,
                           address=self.address)
        names = ', '.join(self.author)
        expected = (f"{names}: {self.title}. {self.publisher}, "
                    f"{self.address} {self.year}.")
        self.assertEqual(expected, book.to_string())

    def test_with_editor_has_sensible_default_format(self):
        book = record.Book(editor=self.author, title=self.title,
                           publisher=self.publisher, year=self.year,
                           address=self.address)
        names = ', '.join(self.author)
        expected = (f"{names} (Ed.): {self.title}. {self.publisher}, "
                    f"{self.address} {self.year}.")
        self.assertEqual(expected, book.to_string())

    def test_to_bib_returns_bibtex_record_with_correct_type(self):
        self.assertEqual(self.book.to_bib(), "@Book{,\n\n}")
|
#include <iostream>

// Read whitespace-separated integers from standard input until EOF (or an
// unparsable token) and print their sum.
int main()
{
    int value = 0;
    int total = 0;
    while (std::cin >> value)
        total += value;
    std::cout << "The sum is: " << total << '\n';
    return 0;
}
# Import the necessary packages
import tensorflow as tf
import numpy as np
import pandas as pd
import random
# Initialize the neural network: 8 inputs -> two hidden ReLU layers ->
# single sigmoid output (binary decision).
model = tf.keras.Sequential([
    tf.keras.layers.Dense(8, input_shape=(8,), activation='relu'),
    tf.keras.layers.Dense(8, activation='relu'),
    tf.keras.layers.Dense(1, activation='sigmoid')
])
# Compile the model for binary classification.
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
# Load the dataset
# NOTE(review): assumes data.csv has exactly 8 feature columns — TODO confirm.
x_data = pd.read_csv('data.csv')
# Train the model
# NOTE(review): fit() is called without target labels (y); binary_crossentropy
# needs them — this will fail at runtime. Supply (features, labels).
model.fit(x_data, epochs=10)
# Start a conversation
while True:
    user_input = input("User:")
    if user_input == "Goodbye":
        break
    else:
        # Generate a random response
        # NOTE(review): `responses` is never defined in this script — NameError.
        response_index = random.randint(0, len(responses) - 1)
        bot_response = responses[response_index]
        # Use the model to predict the user response to the bot's message
        # NOTE(review): the model expects 8 numeric features, but a raw string
        # is passed here — the input must be vectorized first; verify intent.
        prediction = model.predict([[user_input]])
        # If prediction is high, generate a recommendation
        if prediction[0][0] > 0.5:
            # NOTE(review): generate_recommendation() is not defined anywhere
            # in this script — NameError if this branch is reached.
            recommendation = generate_recommendation()
            bot_response = recommendation
        print("Bot:", bot_response)
# Launch a REINFORCE (2A variant) training run on the pygame "catch" task for
# 50000 episodes, saving checkpoints under the name catch_REINFORCE_2A.
python3 ./execute.py reinforcement source_pygame_catch player_reinforce_2A --save catch_REINFORCE_2A --epis 50000
|
// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#pragma once

#include <memory>
#include <string>

#include <swganh_core/object/object.h>

namespace swganh {
namespace badge {
	class BadgeService;

	/**
	 * A world object that awards a named badge to any object that
	 * collides with it.
	 */
	class BadgeRegion : public swganh::object::Object
	{
	public:
		BadgeRegion(std::string badge_name, swganh::badge::BadgeService* service);
		~BadgeRegion(void);

		/**
		 * Called when a object collides with the BadgeRegion. Awards the target with
		 * approprate badge.
		 */
		void OnCollisionEnter(std::shared_ptr<swganh::object::Object> collider);

	private:
		swganh::badge::BadgeService* badge_service_;   // service used to award the badge (not owned)
		std::string badge_name_;                       // badge awarded on collision
	};
}} // swganh::badge
#!/usr/bin/env bash
# Build the production bundle and docker image against a clean, production-only
# node_modules, then restore the original development node_modules.
# Abort on the first failing command; unset vars and pipe failures are errors.
set -euo pipefail

gulp prod-build
mv node_modules node_modules_tmp
# From this point on, always restore the original node_modules — even if
# npm ci or docker build fails (the original script left it destroyed).
trap 'rm -rf node_modules; mv node_modules_tmp node_modules' EXIT
npm ci --production
docker build -t seyranmamutov/backoffice_frontend:latest .
#!/bin/bash
# Deploy the xraylab workload locally via the agnosticd ocp-workload playbook.
# NOTE(review): assumes ../agnosticd and ./config/xraylab_base.yaml exist
# relative to the invocation directory — verify before running.
ansible-playbook -i localhost, -c local ../agnosticd/ansible/configs/ocp-workloads/ocp-workload.yml -e @./config/xraylab_base.yaml -e ACTION=create -e guid=0001 -e ocp_username=opentlc-mgr
package com.mycom.app.repository;

import com.mycom.app.domain.entity.Game;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

/**
 * Spring Data JPA repository providing CRUD operations for {@link Game}
 * entities keyed by their {@code Integer} id.
 */
@Repository
public interface GameRepository extends JpaRepository<Game, Integer> {
}
|
#!/bin/bash
# Run the Zprime dilepton limit fits for one detector version and channel.
# Usage: ./scripts/Zprime_ll/run_limits.sh <version> <ee|mumu|ll>
myversion=$1
lepton=$2
#for ene in 4 6 8 10 15 20 25 30 35 40 45 50;
for ene in 20;
do
	echo "$lepton"
	# All fit modes read the same per-energy config file.
	config="config_FCC/Zprime_ll/${myversion}/Zprime_${lepton}_${ene}TeV.config"
	# Quote $lepton: an empty/missing argument would otherwise break the test.
	if [ "$lepton" == "ll" ]
	then
		./myFit.exe mwfl "$config"
	else
		# Single-flavour channels run the full sequence of fit modes.
		for mode in h w f d p l
		do
			./myFit.exe "$mode" "$config"
		done
	fi
done
|
#!/bin/bash
# platform = multi_platform_fedora,Red Hat Enterprise Linux 8
# profiles = xccdf_org.ssgproject.content_profile_ospp, xccdf_org.ssgproject.content_profile_standard

# Make Kerberos follow the system-wide crypto policy: replace any local
# override with a symlink to the crypto-policies krb5 back-end config.
rm -f /etc/krb5.conf.d/crypto-policies
ln -s /etc/crypto-policies/back-ends/krb5.config /etc/krb5.conf.d/crypto-policies
|
def find_sqrt(arr, x):
    """Return the index of the first occurrence of x in arr, or -1 if absent.

    Note: despite the name, this is a plain linear search, not a square-root
    computation (the public name is kept for caller compatibility).
    """
    # enumerate is the idiomatic replacement for range(len(arr)) indexing.
    for index, value in enumerate(arr):
        if value == x:
            return index
    return -1
<reponame>p-tylczak/adventofcode-2021
package com.softhale.day15;
import com.softhale.utils.BoardUtils;
import com.softhale.utils.ParserUtils;
import org.jgrapht.alg.shortestpath.DijkstraShortestPath;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleDirectedWeightedGraph;
import java.awt.*;
import java.util.List;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
 * Advent of Code 2021, day 15: find the minimum total risk of a path from the
 * top-left to the bottom-right cell of a risk grid, modelled as a shortest
 * path on a directed weighted graph (Dijkstra).
 */
public class Day15 {

    private final ParserUtils parserUtils = new ParserUtils();
    private final BoardUtils boardUtils = new BoardUtils();

    /** Part 1: shortest-path weight over the grid exactly as parsed. */
    public long part1(String filePath, int maxX, int maxY) {
        var cells = boardUtils.toCells(
                parserUtils.readLines(filePath),
                line -> Arrays.stream(line.split(""))
                        .map(Integer::valueOf)
                        .toList());
        return findShortestPathWeight(maxX, maxY, cells);
    }

    /**
     * Part 2: the grid is tiled into a 5x5 super-grid; each tile's risk values
     * are the originals incremented by the tile index, wrapping 9 back to 1.
     */
    public long part2(String filePath, int maxX, int maxY) {
        var cells = boardUtils.toCells(
                parserUtils.readLines(filePath),
                line -> Arrays.stream(line.split(""))
                        .map(Integer::valueOf)
                        .toList());

        var cellByLocation = cells.stream()
                .collect(Collectors.toMap(this::toKey, Function.identity()));

        // First pass: build a single horizontal strip of shifted copies.
        // NOTE(review): index runs to 15 so the strip is wide enough for the
        // later 5x5 reassembly (tile row r needs tiles r..r+4) — confirm.
        var extendedCells = new ArrayList<>(cells);
        for (int index = 1; index < 15; index++) {
            for (int y = 0; y < maxY; y++) {
                for (int x = 0; x < maxX; x++) {
                    var cell = cellByLocation.get(toKey(x, y));
                    // Risk increases by the tile index and wraps 9 -> 1.
                    var newContent = cell.content() + index;
                    if (newContent > 9) {
                        newContent = newContent - 9;
                    }
                    var newX = cell.location().x + (index * maxX);
                    var newCell = new BoardUtils.Cell<>(
                            new Point(newX, cell.location().y),
                            newContent);
                    extendedCells.add(newCell);
                }
            }
        }

        cellByLocation = extendedCells.stream()
                .collect(Collectors.toMap(this::toKey, Function.identity()));

        // Second pass: cut the wide strip into five row-bands and stack them
        // vertically, producing the final (5*maxX) x (5*maxY) grid.
        var newX = 0;
        var newY = 0;
        var newCells = new ArrayList<BoardUtils.Cell<Integer>>();
        for (int index = 0; index < 5; index++) {
            for (int y = 0; y < maxY; y++) {
                for (int x = index * maxX; x < maxX * 5 + (index * maxX); x++) {
                    // if (x % maxX == 0)
                    //     System.out.print(" ");
                    var cell = cellByLocation.get(toKey(x, y));
                    // System.out.print(cell.content());
                    newCells.add(new BoardUtils.Cell<>(
                            new Point(newX, newY),
                            cell.content()));
                    newX++;
                }
                newX = 0;
                newY++;
                // NOTE(review): leftover debug output (prints a blank line per
                // row); consider removing along with the commented prints.
                System.out.println();
            }
        }

        return findShortestPathWeight(maxX * 5, maxY * 5, newCells);
    }

    /**
     * Builds a directed weighted graph (edge weight = risk of the target
     * cell) and returns the Dijkstra shortest-path weight from (0,0) to
     * (maxX-1, maxY-1).
     */
    private long findShortestPathWeight(int maxX, int maxY, List<BoardUtils.Cell<Integer>> cells) {
        var graph = new SimpleDirectedWeightedGraph<>(DefaultWeightedEdge.class);
        var cellByLocation = cells.stream()
                .peek(graph::addVertex)
                .collect(Collectors.toMap(this::toKey, Function.identity()));

        for (int y = 0; y < maxY; y++) {
            for (int x = 0; x < maxX; x++) {
                var adjacent = findAdjacentCells(x, y, cellByLocation);
                var thisCell = cellByLocation.get(toKey(x, y));
                adjacent.forEach(a -> {
                    // Entering cell `a` costs that cell's risk value.
                    var edge = graph.addEdge(thisCell, a);
                    graph.setEdgeWeight(edge, a.content());
                });
            }
        }

        var alg = new DijkstraShortestPath<>(graph);
        var path = alg.getPath(
                cellByLocation.get("0_0"),
                cellByLocation.get(toKey((maxX - 1), (maxY - 1))));
        return (long) path.getWeight();
    }

    /** Returns the orthogonal (N/S/E/W) neighbours that exist in the grid. */
    private List<BoardUtils.Cell<Integer>> findAdjacentCells(int x, int y, Map<
            String, BoardUtils.Cell<Integer>> cells) {
        var n = cells.get(toKey(x, y - 1));
        var s = cells.get(toKey(x, y + 1));
        var e = cells.get(toKey(x + 1, y));
        var w = cells.get(toKey(x - 1, y));
        return Stream.of(n, s, e, w)
                .filter(Objects::nonNull)
                .toList();
    }

    /** Map key for a cell, derived from its location. */
    private String toKey(BoardUtils.Cell<Integer> cell) {
        return toKey(cell.location().x, cell.location().y);
    }

    /** Map key "x_y" for a coordinate pair. */
    private String toKey(int x, int y) {
        return x + "_" + y;
    }
}
|
<reponame>Ruivalim/project-manager<gh_stars>0
#!/usr/bin/env node
const chalk = require('chalk');
const cli = require('cli');
const args = process.argv.slice(2);
const fs = require("fs");
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const path = require('path');
// Paths are resolved relative to the installed entry script: the tool's own
// checkout (pmPath) and its bundled templates directory (projectsPath).
// NOTE(review): process.mainModule is deprecated in modern Node.js —
// consider __dirname / require.main.path; confirm supported Node versions.
const pmPath = path.join(process.mainModule.path, "../");
const projectsPath = path.join(process.mainModule.path, "../templates/");
// Print an informational message in cyan.
const message = (text) => console.log(chalk.cyan(text));
// Print a success message in green.
const success = (text) => console.log(chalk.green(text));
// Print a highlighted message in magenta.
const highlight = (text) => console.log(chalk.magenta(text));
// Print a message with no colouring.
const normal = (text) => console.log(text);
// Print an error in red, then terminate the CLI.
const error = (text) => {
	console.log(chalk.red(text));
	cli.exit();
};
// Run a shell command from inside the project-manager install directory.
const actionInPM = async (command) => {
	await exec(`cd ${pmPath} && ${command}`);
};
// Command dispatcher. Supported invocations:
//   pm new project <template> <dir>            scaffold a project from a template
//   pm new template <git-url> <name> [--save]  register a template (git submodule)
//   pm update                                  pull latest templates and push
//   pm help                                    usage and available templates
cli.main(async () => {
	switch(args[0]){
		case "new":
			switch(args[1]){
				case "project":
					const template = args[2];
					// NOTE(review): this `path` shadows the `path` module
					// required above for the rest of this case block.
					const path = args[3];
					const current_path = process.cwd();
					const project_path = current_path + "/" + path;
					// Validate template exists and the target dir does not.
					if (!fs.existsSync(projectsPath + template)) {
						error(`Error: ${template} doesn't exists`);
					}
					if (fs.existsSync(project_path)) {
						error(`Error: ${project_path} already exists`);
					}
					// Copy the template (dropping its .git), install deps,
					// and start a fresh git history.
					await exec(`mkdir ${path}`);
					await exec(`cp -R ${projectsPath}/${template}/ ./${path} && rm -rf ./${path}/.git`);
					await exec(`cd ./${path} && yarn && git init && git add --all && git commit -m "First commit"`);
					break;
				case "template":
					message("Installing template...");
					const template_url = args[2];
					const template_name = args[3];
					const save = args[4] == "--save" ? true : false;
					// Templates are tracked as git submodules of the tool itself.
					await actionInPM(`git submodule --quiet add ${template_url} templates/${template_name}`);
					await actionInPM(`git submodule --quiet update --init --recursive`);
					if( save ){
						// Optionally persist the new template to the tool's remote.
						await actionInPM(`git add --all && git commit -m "feat: template ${template_name} added to the project" && git push`);
					}
					success("Templated saved successfully");
					break;
				default:
					break;
			}
			break;
		case "update":
			message("Updating templates...");
			// Sync every template submodule to its origin/master, then push
			// the updated submodule pointers.
			await actionInPM(`git submodule --quiet update --init --recursive`);
			await actionInPM(`git submodule foreach git pull origin master`);
			await actionInPM(`git pull`);
			await actionInPM(`git add --all && git commit -m "feat: templates version update" && git push`);
			message("Templates updated successfully");
			break;
		case "help":
			success("Usage:")
			highlight("$ pm new project $template $project_name");
			message("Templates available:");
			// List every directory under the bundled templates folder.
			const templates = fs.readdirSync(projectsPath);
			templates.map((file) => {
				normal("\t -> "+file);
			});
			message("\n");
			highlight("$ pm new template $template_url $template_name --save?\n");
			highlight("$ pm update\n");
			break;
		default:
			break;
	}
});
|
#!/bin/bash
# Terminate any running polybar instances
killall -q polybar
# Wait until the running processes have actually exited
while pgrep -u $UID -x polybar >/dev/null; do sleep 1; done
# Launch polybar with the default config ~/.config/polybar/config
polybar -r bar
|
# Rack entry point: load the application and hand it to the Rack server.
# require_relative resolves against this file's directory rather than the
# process working directory, so `rackup` works from anywhere.
require_relative 'server'
run AerynApp
|
// Module for astronomical event information (skeleton — fields and methods
// are intentionally unimplemented placeholders).
mod astroevent {
    // Request payload describing which astronomical event to look up.
    pub struct AstroEventRequest {
        // Define the fields for the event request
        // ...
    }

    impl AstroEventRequest {
        // Implement methods for requesting event information
        // ...
    }
}
// Module for celestial body positions (skeleton — fields and methods are
// intentionally unimplemented placeholders).
mod astroposition {
    // Request payload describing which celestial body position to look up.
    pub struct AstroPositionRequest {
        // Define the fields for the position request
        // ...
    }

    impl AstroPositionRequest {
        // Implement methods for requesting celestial body positions
        // ...
    }
}
<filename>src/actions/registrationconfirmation/index.js
import { confirmRegistration } from 'api/Services'
import { storeUserCredentials } from 'actions/auth'

// Redux action types for the registration-confirmation flow.
export const REQUEST_CONFIRMATION = 'CONFIRM_REGISTRATION_REQUEST'
export const SUCCESSFULLY_CONFIRMED_REGISTRATION = 'SUCCESSFULLY_CONFIRMED_REGISTRATION'
export const FAILED_TO_CONFIRM_REGISTRATION = 'FAILED_TO_CONFIRM_REGISTRATION'
// Action: a registration-confirmation request has started.
export const isRequestingConfirmation = () => {
  return { type: REQUEST_CONFIRMATION }
}
// Action: the registration was confirmed successfully.
export const successfullyRequestedConfirmation = () => {
  return { type: SUCCESSFULLY_CONFIRMED_REGISTRATION }
}
// Action: confirming the registration failed; carries the error.
export const failedToRequestConfirmation = (error) => {
  return { type: FAILED_TO_CONFIRM_REGISTRATION, error }
}
// Thunk: confirm a registration token with the API, store the returned
// credentials, and dispatch success/failure. The promise resolves/rejects
// so callers can chain on the outcome.
export const performRequestToConfirmRegistration = (token) => {
  return (dispatch, getState) => {
    dispatch(isRequestingConfirmation())
    return confirmRegistration({token})
      // Persist the credentials carried in the API response.
      .then((response) => dispatch(storeUserCredentials(response)))
      .then((response) => {
        dispatch(successfullyRequestedConfirmation())
        return Promise.resolve(response)
      })
      .catch((error) => {
        dispatch(failedToRequestConfirmation(error))
        // Re-reject so the caller can react to the failure too.
        return Promise.reject(error)
      })
  }
}
|
#!/usr/bin/env bash
# Mirror the local bwhpc database data onto the grid01 OpenFOAM server.
# rsync flags: -u skip files newer on the receiver, -a archive, -r recursive.
SERVER_FOLDER="asaramet@grid01.hs-esslingen.de:/opt/openfoam/bwUniData"
DATA_FOLDER="/opt/bwhpc/es/dbdata/"
rsync -uar ${DATA_FOLDER} ${SERVER_FOLDER}
|
package ch.heigvd.res.lab01.impl;
import ch.heigvd.res.lab01.impl.explorers.DFSFileExplorer;
import ch.heigvd.res.lab01.impl.transformers.CompleteFileTransformer;
import ch.heigvd.res.lab01.interfaces.IApplication;
import ch.heigvd.res.lab01.interfaces.IFileExplorer;
import ch.heigvd.res.lab01.interfaces.IFileVisitor;
import ch.heigvd.res.lab01.quotes.QuoteClient;
import ch.heigvd.res.lab01.quotes.Quote;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.io.FileUtils;
/**
*
* @author <NAME>
*/
public class Application implements IApplication {

  /**
   * This constant defines where the quotes will be stored. The path is
   * relative to where the Java application is invoked.
   */
  public static String WORKSPACE_DIRECTORY = "./workspace/quotes";

  private static final Logger LOG = Logger.getLogger(Application.class.getName());

  /**
   * Entry point: parses the number of quotes to fetch from args[0], then
   * clears the workspace, fetches/stores the quotes, prints the resulting
   * file tree and applies the text transformations.
   */
  public static void main(String[] args) {
    /*
     * I prefer to have LOG output on a single line, it's easier to read. Being able
     * to change the formatting of console outputs is one of the reasons why it is
     * better to use a Logger rather than using System.out.println
     */
    System.setProperty("java.util.logging.SimpleFormatter.format", "%4$s: %5$s%6$s%n");

    int numberOfQuotes = 0;
    try {
      numberOfQuotes = Integer.parseInt(args[0]);
    } catch(Exception e) {
      System.err.println("The command accepts a single numeric argument (number of quotes to fetch)");
      System.exit(-1);
    }

    Application app = new Application();
    try {
      /*
       * Step 1 : clear the output directory
       */
      app.clearOutputDirectory();

      /*
       * Step 2 : use the QuotesClient to fetch quotes; store each quote in a file
       */
      app.fetchAndStoreQuotes(numberOfQuotes);

      /*
       * Step 3 : use a file explorer to traverse the file system; print the name of each directory and file
       */
      Writer writer = new StringWriter(); // we create a special writer that will send characters into a string (memory)
      app.printFileNames(writer);         // we hand over this writer to the printFileNames method
      LOG.info(writer.toString());        // we dump the whole result on the console

      /*
       * Step 4 : process the quote files, by applying 2 transformations to their content
       * (convert to uppercase and add line numbers)
       */
      app.processQuoteFiles();
    } catch(IOException ex) {
      LOG.log(Level.SEVERE, "Could not fetch quotes. {0}", ex.getMessage());
      ex.printStackTrace();
    }
  }

  /**
   * Fetches the requested number of quotes from the web service and stores
   * each one in a tag-derived subdirectory of the workspace.
   *
   * @param numberOfQuotes how many quotes to fetch
   * @throws IOException if fetching or storing a quote fails
   */
  @Override
  public void fetchAndStoreQuotes(int numberOfQuotes) throws IOException {
    clearOutputDirectory();
    QuoteClient client = new QuoteClient();
    for(int i = 0; i < numberOfQuotes; i++) {
      Quote quote = client.fetchQuote();
      storeQuote(quote, "quote-" + i + ".utf8");
      LOG.info(quote.getSource());
      for(String tag : quote.getTags()) {
        LOG.info("> " + tag);
      }
    }
  }

  /**
   * This method deletes the WORKSPACE_DIRECTORY and its content. It uses the
   * apache commons-io library. You should call this method in the main
   * method.
   *
   * @throws IOException
   */
  void clearOutputDirectory() throws IOException {
    FileUtils.deleteDirectory(new File(WORKSPACE_DIRECTORY));
  }

  /**
   * This method stores the content of a quote in the local file system. It
   * has 2 responsibilities:
   *
   * - with quote.getTags(), it gets a list of tags and uses it to create
   * sub-folders (for instance, if a quote has three tags "A", "B" and "C", it
   * will be stored in /quotes/A/B/C/quotes-n.utf8.
   *
   * - with quote.getQuote(), it has access to the text of the quote. It
   * stores this text in UTF-8 file.
   *
   * @param quote the quote object, with tags and text
   * @param filename the name of the file to create and where to store the
   * quote text
   * @throws IOException
   */
  void storeQuote(Quote quote, String filename) throws IOException {
    // Build the target directory from the tags: ./workspace/quotes/A/B/C
    StringBuilder dir = new StringBuilder(WORKSPACE_DIRECTORY);
    for(String tag : quote.getTags()) {
      dir.append('/').append(tag);
    }
    // Bug fix: the previous version only created an empty file and never
    // wrote the quote text. writeStringToFile creates any missing parent
    // directories and stores the text encoded as UTF-8, as documented above.
    FileUtils.writeStringToFile(new File(dir.toString(), filename), quote.getQuote(), "UTF-8");
  }

  /**
   * This method uses a IFileExplorer to explore the file system and prints
   * the name of each encountered file and directory.
   */
  void printFileNames(final Writer writer) {
    IFileExplorer explorer = new DFSFileExplorer();
    explorer.explore(new File(WORKSPACE_DIRECTORY), new IFileVisitor() {
      @Override
      public void visit(File file) {
        try {
          // Write the full path of each visited file/directory, one per line.
          writer.write(file.getPath() + System.getProperty("line.separator"));
        } catch(IOException ex) {
          Logger.getLogger(Application.class.getName()).log(Level.WARNING, null, ex);
        }
      }
    });
  }

  @Override
  public String getAuthorEmail() {
    return "<EMAIL>";
  }

  /**
   * Applies the complete transformation chain (uppercase + line numbers) to
   * every quote file in the workspace.
   */
  @Override
  public void processQuoteFiles() throws IOException {
    IFileExplorer explorer = new DFSFileExplorer();
    explorer.explore(new File(WORKSPACE_DIRECTORY), new CompleteFileTransformer());
  }
}
|
#!/bin/bash

#############################################################
# A script to build and run the API with a test configuration
#############################################################

cd "$(dirname "${BASH_SOURCE[0]}")"

#
# Download development SSL certificates if required
#
./downloadcerts.sh
if [ $? -ne 0 ]; then
  exit
fi

#
# Copy down the test configuration, to point the API to Wiremock rather than AWS Cognito
#
cp environments/test.config.json ./api.config.json

#
# Get the platform
#
case "$(uname -s)" in

  Darwin)
    PLATFORM='MACOS'
  ;;

  MINGW64*)
    PLATFORM='WINDOWS'
  ;;
esac

#
# Run the API in a child window
# NOTE(review): on Windows this relies on GIT_BASH being set in the
# environment — it is not defined in this script; confirm it is exported.
#
echo 'Running API ...'
if [ "$PLATFORM" == 'MACOS' ]; then
  open -a Terminal ./run_wiremock.sh
  open -a Terminal ./run_api.sh
else
  "$GIT_BASH" -c ./run_wiremock.sh &
  "$GIT_BASH" -c ./run_api.sh &
fi

#
# Wait for endpoints to become available (poll every 2s until the expected
# HTTP status is returned: 200 from Wiremock admin, 401 from the secured API)
#
echo 'Waiting for Wiremock endpoints to come up ...'
WIREMOCK_URL='https://login.authsamples-dev.com:446/__admin/mappings'
while [ "$(curl -k -s -X GET -o /dev/null -w '%{http_code}' "$WIREMOCK_URL")" != '200' ]; do
  sleep 2
done

echo 'Waiting for API endpoints to come up ...'
API_URL='https://api.authsamples-dev.com:445/api/companies'
while [ "$(curl -k -s -X GET -o /dev/null -w '%{http_code}' "$API_URL")" != '401' ]; do
  sleep 2
done

#
# Restore the API configuration once the API is loaded
#
cp environments/api.config.json ./api.config.json

#
# Indicate success
#
echo "Start tests via './gradlew test --rerun-tasks' or './gradlew loadtest --rerun-tasks' ..."
|
#
# Initializes OCaml package management.
#
# Authors:
#   Sebastian Wiesner <lunaryorn@gmail.com>
#

# Return if requirements are not found.
if [[ ! -f "$HOME/.opam/opam-init/init.zsh" ]]; then
  return 1
fi

# Initialize OPAM.
source "$HOME/.opam/opam-init/init.zsh"

# support Merlin's Locate on all libs:
# keep build dirs and emit .cmt typed-AST annotations for every build.
export OPAMKEEPBUILDDIR=true
export OCAMLPARAM="_,bin-annot=1"
|
<gh_stars>1-10
//
// Created by JinWen on 2019/4/27.
//
#ifndef JETANALYSOR_UTILS_H
#define JETANALYSOR_UTILS_H
#include <fastjet/PseudoJet.hh>
#include <HepMC/IO_GenEvent.h>
#include <HepMC/GenEvent.h>
// True if p is an up quark (|PDG ID| == 2).
inline bool isUp (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 2;
}
// True if p is a down quark (|PDG ID| == 1).
inline bool isDown (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 1;
}
// True if p is a charm quark (|PDG ID| == 4).
inline bool isCharm (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 4;
}
// True if p is a strange quark (|PDG ID| == 3).
inline bool isStrange (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 3;
}
// True if p is a top quark (|PDG ID| == 6).
inline bool isTop (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 6;
}
// True if p is a bottom quark (|PDG ID| == 5).
inline bool isBottom (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 5;
}
// True if p is an electron or positron (|PDG ID| == 11).
inline bool isElectron (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 11;
}
// True if p is a muon or antimuon (|PDG ID| == 13).
inline bool isMuon (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 13;
}
// True if p is a tau or antitau (|PDG ID| == 15).
inline bool isTau (const HepMC::GenParticle* p ){
    const int id = abs(p->pdg_id());
    return id == 15;
}
// True if p is a charged lepton: e (11), mu (13) or tau (15).
inline bool isLepton( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 11 || id == 13 || id == 15;
}
// True if p is a neutrino: nu_e (12), nu_mu (14) or nu_tau (16).
inline bool isNeutrino(const HepMC::GenParticle* p){
    const int id = abs(p->pdg_id());
    return id == 12 || id == 14 || id == 16;
}
// True if p is a gluon (PDG ID 21; self-conjugate, no abs needed).
inline bool isGluon( const HepMC::GenParticle* p ) {
    const int id = p->pdg_id();
    return id == 21;
}
// True if p is a photon (PDG ID 22; self-conjugate, no abs needed).
inline bool isPhoton( const HepMC::GenParticle* p ) {
    const int id = p->pdg_id();
    return id == 22;
}
// True if p is a Z boson (PDG ID 23; self-conjugate, no abs needed).
inline bool isZ( const HepMC::GenParticle* p ) {
    const int id = p->pdg_id();
    return id == 23;
}
// True if p is a W+ or W- boson (|PDG ID| == 24).
inline bool isW( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 24;
}
// True if p is a SM Higgs boson (PDG ID 25; self-conjugate, no abs needed).
inline bool isHiggs( const HepMC::GenParticle* p ) {
    const int id = p->pdg_id();
    return id == 25;
}
//@TODO: Zprime,Wprime,Zprimeprime,H0,A0,H+
// True if p is a charged pion (|PDG ID| == 211).
inline bool isChargedPion( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 211;
}
// True if p is the lightest chargino (|PDG ID| == 1000024).
inline bool isChargino1( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 1000024;
}
// True if p is the second-lightest neutralino (|PDG ID| == 1000023).
inline bool isNeutralino2( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 1000023;
}
// True if p is the lightest neutralino (|PDG ID| == 1000022).
inline bool isNeutralino1( const HepMC::GenParticle* p ) {
    const int id = abs(p->pdg_id());
    return id == 1000022;
}
//no way to access beams by status code (4 and 11 appear)
//but only beams have px = py = 0
inline bool isBeam( const HepMC::GenParticle* p ) {
    // Bug fix: the original tested py twice; per the comment above a beam is
    // identified by BOTH transverse components being zero, so check px AND py.
    return p->status() == 4 || (p->momentum().px() == 0 && p->momentum().py() == 0);
}
// returns true if the GenParticle does not decay
inline bool isFinal( const HepMC::GenParticle* p ) {
    // Final-state particles carry status 1 and have no decay vertex.
    return p->status() == 1 && !p->end_vertex();
}
//returns true if the GenParticle is about to decay (no more showering)
inline bool isLast( const HepMC::GenParticle* p ) {
    if ( p->end_vertex() ) {
        // Walk every descendant: if the same PDG ID appears further down the
        // chain, the particle is still being showered (it "decays" to itself),
        // so this is not the last copy before the real decay.
        for ( HepMC::GenVertex::particle_iterator desc = p->end_vertex()->particles_begin(HepMC::descendants);
              desc != p->end_vertex()-> particles_end(HepMC::descendants);
              ++desc ) {
            //particle still showering if it decays to itself
            if ( p->pdg_id() == (*desc)->pdg_id() ) return false;
        }
        return true;
    }
    // No end vertex: the particle never decays, so it is not "about to decay".
    return false;
}
//returns true if particle not yet showered
inline bool isFirst(const HepMC::GenParticle* p){
    if ( p->production_vertex() ) {
        // Walk every parent: if the same PDG ID appears upstream, this copy
        // originated from showering of an earlier instance of itself.
        for ( HepMC::GenVertex::particle_iterator mother = p->production_vertex()->particles_begin(HepMC::parents);
              mother != p->production_vertex()-> particles_end(HepMC::parents);
              ++mother ) {
            //particle is showered if it has originated from itself
            if ( p->pdg_id() == (*mother)->pdg_id() ) return false;
        }
        return true;
    }
    // No production vertex at all (e.g. initial entries): treat as not first.
    return false;
}
//returns true if particle entry is from hard process i.e has 2 mothers
//wish there was a status code for this...
inline bool notFromShower( const HepMC::GenParticle* p ) {
    if ( p->production_vertex() ) {
        // Count the parents at the production vertex; a 2->N hard scattering
        // vertex is recognised by having exactly two incoming particles.
        int nmoth=0;
        for ( HepMC::GenVertex::particle_iterator mother = p->production_vertex()->particles_begin(HepMC::parents);
              mother != p->production_vertex()-> particles_end(HepMC::parents);
              ++mother ) nmoth++;
        if (nmoth == 2) return true;
    }
    return false;
}
// Returns true if the GenParticle originates from a meson decay.
inline bool fromMeson( const HepMC::GenParticle* p ) {
  if ( p->production_vertex() ) {
    for ( HepMC::GenVertex::particle_iterator mother = p->production_vertex()->particles_begin(HepMC::parents);
          mother != p->production_vertex()-> particles_end(HepMC::parents);
          ++mother ) {
      // NOTE(review): |pdg_id| > 100 matches far more than mesons (baryons,
      // diquarks, nuclei, ...). A proper check would apply the isMeson
      // predicate to the mother once declaration order allows it - confirm
      // the intended semantics before relying on this.
      if ( abs((*mother)->pdg_id()) > 100 ) return true; //@TODO: this line is wrong!
    }
    return false;
  }
  return false;
}
// Isospin-1 light unflavoured mesons (pi/rho/a families), matched against
// the PDG Monte Carlo numbering scheme.
// FIX: the original OR-chain listed 9010113 and 9010213 twice; the
// duplicates are removed here (a switch would not even compile with them).
inline bool iso1Meson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 111: case 211:   // pi
    case 113: case 213:   // rho(770)
    case 115: case 215:   // a_2(1320)
    case 117: case 217:   // rho_3(1690)
    case 119: case 219:   // a_4(2040)
    case 9000111: case 9000211:
    case 100111: case 100211:
    case 10111: case 10211:
    case 9010111: case 9010211:
    case 10113: case 10213:
    case 20113: case 20213:
    case 9000113: case 9000213:
    case 9010113: case 9010213:
    case 100113: case 100213:
    case 9020113: case 9020213:
    case 30113: case 30213:
    case 9030113: case 9030213:
    case 9040113: case 9040213:
    case 10115: case 10215:
    case 9000115: case 9000215:
    case 9010115: case 9010215:
    case 9000117: case 9000217:
    case 9010117: case 9010217:
      return true;
    default:
      return false;
  }
}
// Isospin-0 light unflavoured mesons (eta/omega/phi/f families), matched
// against the PDG Monte Carlo numbering scheme.
// FIX: the original OR-chain listed 339 twice; the duplicate is removed.
inline bool iso0Meson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 221: case 331:   // eta, eta'
    case 223: case 333:   // omega, phi
    case 225: case 335:
    case 227: case 337:
    case 229: case 339:
    case 100221: case 10221: case 100331: case 10331:
    case 10223: case 20223: case 10333: case 20333:
    case 100223: case 30223: case 100333:
    case 10225: case 10335:
    case 9000221: case 9010221: case 9020221: case 9030221:
    case 9040221: case 9050221: case 9060221: case 9070221:
    case 9080221:
    case 9000223: case 9010223:
    case 9000225: case 9010225: case 9020225: case 9030225:
    case 9040225: case 9050225: case 9060225: case 9070225:
    case 9080225: case 9090225:
    case 9000229: case 9010229:
      return true;
    default:
      return false;
  }
}
// Strange (kaon-family) mesons, per the PDG Monte Carlo numbering scheme.
inline bool strangeMeson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 130: case 310: case 311: case 321:
    case 313: case 323: case 315: case 325:
    case 317: case 327: case 319: case 329:
    case 9000311: case 9000321: case 10311: case 10321:
    case 100311: case 100321: case 9010311: case 9010321:
    case 9020311: case 9020321: case 10313: case 10323:
    case 20313: case 20323: case 100313: case 100323:
    case 9000313: case 9000323: case 30313: case 30323:
    case 9000315: case 9000325: case 10315: case 10325:
    case 20315: case 20325: case 9010315: case 9010325:
    case 9020315: case 9020325: case 9010317: case 9010327:
    case 9010319: case 9010329:
      return true;
    default:
      return false;
  }
}
// Open-charm (D-family) mesons, per the PDG Monte Carlo numbering scheme.
inline bool charmedMeson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 411: case 421: case 413: case 423:
    case 415: case 425: case 431: case 433: case 435:
    case 10411: case 10421: case 10413: case 10423:
    case 20413: case 20423: case 10431: case 10433: case 20433:
      return true;
    default:
      return false;
  }
}
// Open-bottom (B-family) mesons, per the PDG Monte Carlo numbering scheme.
inline bool bottomMeson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 511: case 521: case 513: case 523: case 515: case 525:
    case 531: case 533: case 535: case 541: case 543: case 545:
    case 10511: case 10521: case 10513: case 10523:
    case 20513: case 20523: case 10531: case 10533: case 20533:
    case 10541: case 10543: case 20543:
      return true;
    default:
      return false;
  }
}
// Charmonium (c-cbar) states, per the PDG Monte Carlo numbering scheme.
inline bool ccMeson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 441: case 443: case 445:
    case 10441: case 100441: case 10443: case 100443:
    case 20443: case 30443: case 9000443: case 9010443:
    case 9020443: case 100445:
      return true;
    default:
      return false;
  }
}
// Bottomonium (b-bbar) states, per the PDG Monte Carlo numbering scheme.
inline bool bbMeson(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 551: case 553: case 555: case 557:
    case 10551: case 100551: case 110551: case 200551: case 210551:
    case 10553: case 20553: case 30553: case 100553: case 110553:
    case 120553: case 130553: case 200553: case 210553: case 220553:
    case 300553: case 9000553: case 9010553:
    case 10555: case 20555: case 100555: case 110555: case 120555:
    case 200555: case 100557:
      return true;
    default:
      return false;
  }
}
// A particle is a meson if any of the meson-category predicates above match.
inline bool isMeson(const HepMC::GenParticle* p){
  if (iso1Meson(p) || iso0Meson(p)) return true;
  if (strangeMeson(p) || charmedMeson(p) || bottomMeson(p)) return true;
  return ccMeson(p) || bbMeson(p);
}
// Nucleons and Delta(1232) baryons (the light unflavoured baryons used here).
inline bool lightBaryon(const HepMC::GenParticle* p){
  switch (abs(p->pdg_id())) {
    case 2212: case 2112:                        // p, n
    case 2224: case 2214: case 2114: case 1114:  // Delta(1232)
      return true;
    default:
      return false;
  }
}
// Strange, charmed and bottom baryons occupy contiguous PDG id blocks.
inline bool strangeBaryon(const HepMC::GenParticle* p){
  const int id = abs(p->pdg_id());
  return 3000 <= id && id < 4000;
}
inline bool charmedBaryon(const HepMC::GenParticle* p){
  const int id = abs(p->pdg_id());
  return 4000 <= id && id < 5000;
}
inline bool bottomBaryon(const HepMC::GenParticle* p){
  const int id = abs(p->pdg_id());
  return 5000 <= id && id < 6000;
}
// A particle is a baryon if any of the baryon-category predicates match.
inline bool isBaryon(const HepMC::GenParticle* p){
  return lightBaryon(p) || charmedBaryon(p) || strangeBaryon(p) || bottomBaryon(p);
}
// Any meson or baryon.
inline bool isHadron(const HepMC::GenParticle* p){
  return isMeson(p) || isBaryon(p);
}
// Any hadron containing a b quark: open bottom, bottomonium, or b-baryon.
inline bool isBHadron(const HepMC::GenParticle* p){
  return bottomMeson(p) || bbMeson(p) || bottomBaryon(p);
}
// Particles that leave a detector signature: e, mu, photons and hadrons.
inline bool isVisible(const HepMC::GenParticle* p){
  return isElectron(p) || isMuon(p) || isPhoton(p) || isHadron(p);
}
inline bool bTagged(fastjet::PseudoJet jet ){
for (int i=0; i < jet.constituents().size(); i++){
if ( jet.constituents()[i].user_index() == 1) return true;
}
return false;
}
#endif //JETANALYSOR_UTILS_H
|
#! /bin/bash
# Demo: print a multi-line message using a here-document delimited by "enof".
# NOTE(review): the text below contains typos ("mulite", "delimeter"); it is
# runtime output, so it is left untouched here - confirm whether intended.
cat << enof
This is mulite line here doc delimeter
this is second line
enof
|
<reponame>Jinhaihan/WLW_LAB<gh_stars>0
package com.example.jinha.wlwlab.app;
import android.content.Context;
import com.tencent.mmkv.MMKV;
import java.util.logging.Handler;
/**
 * @ProjectName: SportsHealth
 * @Package: cn.ac.ia.iot.healthlibrary.app
 * @ClassName: ApplicationDelegate
 * @Author: mebee
 * @CreateDate: 2018/8/21 10:15
 * @Description: Delegate implementation for the Application class.
 */
public final class ApplicationDelegate {
    /**
     * Initializes the application delegate: sets up MMKV key-value storage
     * and registers the application context with the global configurator.
     *
     * @param context  any Context; only its application context is retained
     * @param isRealse release-build flag.
     *                 NOTE(review): currently unused (and likely a typo for
     *                 "isRelease") - confirm whether it should gate anything.
     * @return the AppConfigurator singleton, for chained configuration
     */
    public static AppConfigurator init(Context context, boolean isRealse) {
        MMKV.initialize(context);
        return AppConfigurator.getInstance()
                .withAppContext(context.getApplicationContext());
    }

    /** Returns the global configurator singleton. */
    public static AppConfigurator getAppConfigurator() {
        return AppConfigurator.getInstance();
    }

    /** Looks up a configuration value by key; the caller chooses the type. */
    public static <T> T getConfiguration(Object key) {
        return getAppConfigurator().getConfiguration(key);
    }

    /** Returns the application Context registered in {@link #init}. */
    public static Context getAppContext() {
        return getConfiguration(AppConfigKeys.APP_CONTEXT);
    }

    /**
     * Returns the configured Handler.
     * NOTE(review): the import resolves this to java.util.logging.Handler;
     * on Android an android.os.Handler was probably intended - confirm.
     */
    public static Handler getHandler() {
        return getConfiguration(AppConfigKeys.HANDLER);
    }
}
|
<reponame>ChrisJones100/dig4639-mobile-dev
import React, { Component } from "react";
import logo from "./logo.svg";
import "./App.css";
import Card from "./components/Card/index.js";
///Implement card class
class App extends Component {
render() {
return <Card content="This is a card!"></Card>
};
}
export default App; |
<filename>examples/parse_openapi3_json.py
"""
In this example `openapi` version is used instead of `swagger` version.
This example will test validation when post
"""
from flask import Flask, jsonify, request
from flasgger import Swagger, swag_from
from flask_restful import Api, Resource
app = Flask(__name__)
api = Api(app)
# Load the OpenAPI 3 template and let flasgger validate request bodies
# automatically (parse=True).
# NOTE(review): 'test' is not a documented spec key - confirm it is needed.
swag = Swagger(app, template_file='parse_openapi3_json_product_schema.yml',
               parse=True, config={
                   'headers': [],
                   'specs': [
                       {
                           'endpoint': 'apispec',
                           'route': '/apispec.json',
                           'test': 'test'
                       }
                   ],
                   'openapi': '3.0.1'
               })
class NewProduct(Resource):
    def post(self):
        # Request validation is handled by flasgger (parse=True) against the
        # schema in parse_openapi3_json_product_schema.yml; no body needed.
        pass
api.add_resource(NewProduct, '/product')
def test_swag(client, specs_data):
    """
    This test runs automatically in Travis CI

    :param client: Flask app test client
    :param specs_data: {'url': {swag_specs}} for every spec in app
    """
    # Valid payload (both fields integers) passes schema validation.
    res = client.post('product', json={'id': 0, 'upc': 1})
    assert res.status_code == 200
    # 'upc' as a string violates the schema, so flasgger rejects with 400.
    res = client.post('product', json={'id': 0, 'upc': 'astring'})
    assert res.status_code == 400
if __name__ == '__main__':
app.run(debug=True)
|
#!/bin/bash
# Select the platform-specific build configuration, substitute the install
# prefix into it, then build and install the Python package.
# FIX: removed the 'platform' variable, which was assigned but never used,
# and quoted the copied paths.

unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
  cp "$RECIPE_DIR/build_mac.conf" build.conf
else
  cp "$RECIPE_DIR/build_linux.conf" build.conf
fi

# Point the build configuration at the conda environment prefix.
sed -i'' -e "s:PREFIX:$PREFIX:g" build.conf

export LIBRARY_PATH="${PREFIX}/lib"
export LD_LIBRARY_PATH="${PREFIX}/lib"

$PYTHON setup.py build
$PYTHON setup.py install
|
<gh_stars>1-10
export * from './segment-tree';
export * from './cases';
|
package gen
import (
"sort"
"strings"
"github.com/go-faster/errors"
"github.com/ogen-go/ogen/internal/ir"
)
// nodes is a list of child route nodes, sortable by head byte so lookups
// and generated code are deterministic.
type nodes []*RouteNode

// Len implements sort.Interface.
func (e nodes) Len() int {
	return len(e)
}

// Less implements sort.Interface, ordering nodes by their head byte.
func (e nodes) Less(i, j int) bool {
	return e[i].head < e[j].head
}

// Swap implements sort.Interface.
func (e nodes) Swap(i, j int) {
	e[i], e[j] = e[j], e[i]
}

// Sort sorts the nodes in place by head byte.
func (e nodes) Sort() {
	sort.Sort(e)
}
// RouteNode is a node of the Radix tree of routes.
type RouteNode struct {
	prefix    string        // static path prefix matched by this node
	head      byte          // first byte of prefix, used as the child key
	child     nodes         // children, kept sorted by head byte
	paramName string        // path parameter name, set only for param nodes
	param     *ir.Parameter // nil-able; non-nil marks a parameter node
	op        *ir.Operation // operation attached to this node, if any
}
// Prefix returns the common static prefix matched by this node.
func (n *RouteNode) Prefix() string {
	return n.prefix
}

// Head returns the first byte of the prefix.
func (n *RouteNode) Head() byte {
	return n.head
}

// IsStatic reports whether the node is not a parameter node.
func (n *RouteNode) IsStatic() bool {
	return n.param == nil
}

// IsLeaf reports whether the node has no children.
func (n *RouteNode) IsLeaf() bool {
	return n.child.Len() == 0
}

// IsParam reports whether the node is a parameter node.
func (n *RouteNode) IsParam() bool {
	return n.param != nil
}

// Children returns the child nodes, sorted by head byte.
func (n *RouteNode) Children() []*RouteNode {
	return n.child
}
// StaticChildren returns the slice of child static nodes.
func (n *RouteNode) StaticChildren() (r []*RouteNode) {
	for _, c := range n.child {
		if c.IsStatic() {
			r = append(r, c)
		}
	}
	return r
}

// ParamChildren returns the slice of child parameter nodes.
func (n *RouteNode) ParamChildren() (r []*RouteNode) {
	for _, c := range n.child {
		if c.IsParam() {
			r = append(r, c)
		}
	}
	return r
}

// Tails returns heads of the child static nodes.
//
// Used for matching the end of a parameter node between two static nodes.
func (n *RouteNode) Tails() (r []byte) {
	for _, c := range n.child {
		if !c.IsParam() {
			r = append(r, c.head)
		}
	}
	return r
}

// ParamName returns the parameter name, if any ("" otherwise).
func (n *RouteNode) ParamName() string {
	return n.paramName
}

// Param returns the associated parameter, if any.
//
// May be nil.
func (n *RouteNode) Param() *ir.Parameter {
	return n.param
}

// Operation returns the associated operation (nil if none attached).
func (n *RouteNode) Operation() *ir.Operation {
	return n.op
}
// nextPathPart finds the first "{param}" placeholder in s.
//
// paramStart/paramEnd delimit the placeholder including both brackets
// (end exclusive); tail is the byte immediately after it, or 0 when the
// placeholder ends the string.
func nextPathPart(s string) (hasParam bool, paramStart, paramEnd int, tail byte, _ error) {
	paramStart = strings.IndexByte(s, '{')
	if paramStart < 0 {
		return false, 0, 0, 0, nil
	}
	// A '}' before the '{' (or no '}' at all) means the brace is unbalanced.
	if paramEnd = strings.IndexByte(s, '}'); paramEnd < paramStart {
		return false, paramStart, paramEnd, tail, errors.Errorf("unclosed '{' at %d", paramStart)
	}
	paramEnd++ // include the closing bracket in the matched span
	if paramEnd < len(s) {
		tail = s[paramEnd]
	}
	return true, paramStart, paramEnd, tail, nil
}
// addChild parses path, attaches ch under n, and recursively creates the
// static/parameter sub-nodes implied by any "{param}" placeholders.
//
// It returns the leaf node that the operation should be attached to.
func (n *RouteNode) addChild(path string, op *ir.Operation, ch *RouteNode) (r *RouteNode, _ error) {
	r = ch
	hasParam, start, end, _, err := nextPathPart(path)
	if err != nil {
		// FIX: wrap the underlying error instead of discarding it
		// (the original errors.Errorf dropped err entirely).
		return nil, errors.Wrapf(err, "parse %q", path)
	}
	if hasParam {
		// Parameter name without the surrounding brackets.
		paramName := path[start+1 : end-1]
		p, ok := findParam(op, paramName)
		if !ok {
			return nil, errors.Errorf("unknown parameter %q", paramName)
		}
		if start == 0 { // Route starts with a param.
			ch.paramName = paramName
			ch.param = p
			// Handle tail of path.
			if len(path[end:]) > 0 {
				path = path[end:]
				n, err := ch.addChild(path, op, &RouteNode{
					prefix: path,
					head:   path[0],
				})
				if err != nil {
					return nil, err
				}
				r = n
			}
		} else { // Route contains param.
			// Set prefix to static part of path.
			ch.prefix = path[:start]
			// Get parameterized part.
			path = path[start:]
			// Add parameterized child node.
			n, err := ch.addChild(path, op, &RouteNode{
				head:      path[0],
				paramName: paramName,
				param:     p,
			})
			if err != nil {
				return nil, err
			}
			r = n
		}
	}
	// Keep children sorted by head byte for deterministic traversal.
	n.child = append(n.child, ch)
	n.child.Sort()
	return r, nil
}
// childIdx returns the index of the child whose head byte equals head,
// reporting false when no such child exists.
func (n *RouteNode) childIdx(head byte) (int, bool) {
	for i, c := range n.child {
		if c.head == head {
			return i, true
		}
	}
	return 0, false
}
// replaceChild replaces the child whose head byte equals head.
//
// NOTE(review): the ok result of childIdx is discarded - if no child with
// this head exists, child[0] is silently overwritten (or this panics on an
// empty child list). Callers appear to guarantee presence; confirm before
// relying on this elsewhere.
func (n *RouteNode) replaceChild(head byte, child *RouteNode) {
	idx, _ := n.childIdx(head)
	n.child[idx] = child
}
// getChild returns the child with the given head byte, or nil when absent.
func (n *RouteNode) getChild(head byte) *RouteNode {
	// childIdx already guarantees the head matches when ok is true, so the
	// original's second head comparison is unnecessary.
	if idx, ok := n.childIdx(head); ok {
		return n.child[idx]
	}
	return nil
}
// walk visits n and all of its descendants depth-first, invoking cb with
// the depth of each node. A nil receiver is a no-op.
func (n *RouteNode) walk(level int, cb func(level int, n *RouteNode)) {
	if n == nil {
		return
	}
	cb(level, n)
	for _, c := range n.child {
		c.walk(level+1, cb)
	}
}
|
#!/bin/bash
# Update code and restart server (run on server)
set -e

WWW=/home/feross/www
BUILD="$WWW/build-instant.io"
OLD="$WWW/old-instant.io"
LIVE="$WWW/instant.io"

if [ -d "$BUILD" ]; then
  echo "ERROR: Build folder already exists. Is another build in progress?"
  exit 1
fi

if [ -d "$OLD" ]; then
  echo "ERROR: Old folder exists. Did a previous build crash?"
  exit 1
fi

# Build in a copy so the live site keeps serving until the swap below.
cp -R "$LIVE" "$BUILD"
cd "$BUILD" && git pull
cd "$BUILD" && rm -rf node_modules
cd "$BUILD" && npm ci --no-progress
cd "$BUILD" && npm run build
cd "$BUILD" && npm prune --production --no-progress

# Swap the freshly built tree into place with minimal downtime.
sudo supervisorctl stop instant
cd "$WWW" && mv instant.io old-instant.io
cd "$WWW" && mv build-instant.io instant.io
sudo supervisorctl start instant
cd "$WWW" && rm -rf old-instant.io
|
import request2 from '@/utils/request2'
import request from '@/utils/request'
/**
 * Fetch a page of business records.
 * @param {Object} query - filter/paging criteria, sent JSON-encoded.
 * @returns {Promise} resolving to the page payload.
 */
export function getPage(query) {
  return request2({
    url: '/business/list',
    method: 'get',
    // NOTE(review): .replace("\'", '"') only rewrites the FIRST single
    // quote in the JSON string - confirm whether a global replace (or no
    // replace at all) was intended.
    params: { data: JSON.stringify(query).replace("\'", '"') }
  })
}
/** Fetch the photo list attached to a business record. */
export function getPhotoList(id) {
  return request2({
    url: '/business/getPhotoList',
    method: 'get',
    params: { id }
  })
}

/** Fetch the SQL data associated with a business record. */
export function getSQLData(id) {
  return request2({
    url: '/business/getSQLData',
    method: 'get',
    params: { id }
  })
}

/** Fetch one article's detail by id (separate notes backend). */
export function fetchArticle(id) {
  return request({
    url: '/programmer-notes/article/detail',
    method: 'get',
    params: { id }
  })
}

/** Report a page-view count for an article. */
export function fetchPv(pv) {
  return request({
    url: '/programmer-notes/article/pv',
    method: 'get',
    params: { pv }
  })
}

/** Create or update a business record. */
export function save(data) {
  return request2({
    url: '/business/update',
    method: 'post',
    data
  })
}

/** Update the SQL payload of a business record. */
export function updateSQL(data) {
  return request2({
    url: '/business/updateSQL',
    method: 'post',
    data
  })
}

/** Delete a business record. */
export function deleteData(data) {
  return request2({
    url: '/business/delete',
    method: 'post',
    data
  })
}
|
package io.eventuate.examples.tram.sagas.ordersandcustomers.orders.common;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Embeddable;
@Embeddable
@Access(AccessType.FIELD)
/**
 * Embeddable value object describing a product line in an order:
 * which product and how many units. Field access is used so JPA maps the
 * private fields directly.
 */
public class ProductDetails {
    private Long productId;
    private int productAmount;

    /** Required by JPA. */
    public ProductDetails() {
    }

    public ProductDetails(Long productId, int productAmount) {
        this.productId = productId;
        this.productAmount = productAmount;
    }

    /** Returns the id of the ordered product. */
    public Long getProductId() {
        return productId;
    }

    /** Returns the number of units ordered. */
    public int getProductAmount() {
        return productAmount;
    }
}
<reponame>Tahul/strapi-module<gh_stars>0
import type {
Strapi4RequestParams,
Strapi4Response
} from '../types/v4'
import { useStrapiVersion } from './useStrapiVersion'
import { useStrapiClient } from './useStrapiClient'
// Composable exposing typed CRUD helpers for the Strapi v4 REST API.
export const useStrapi4 = () => {
  const version = useStrapiVersion()

  if (version !== 'v4') {
    // eslint-disable-next-line no-console
    console.warn('useStrapi4 is only available for v4')
  }

  /**
   * Get a list of {content-type} entries
   *
   * @param  {string} contentType - Content type's name pluralized
   * @param  {Strapi4RequestParams} params? - Query parameters
   * @returns Promise<Strapi4Response<T>>
   */
  const find = <T>(contentType: string, params?: Strapi4RequestParams): Promise<Strapi4Response<T>> => {
    const client = useStrapiClient()

    return client(`/${contentType}`, { method: 'GET', params })
  }

  /**
   * Get a specific {content-type} entry
   *
   * @param  {string} contentType - Content type's name pluralized
   * @param  {string|number} id - ID of entry
   * @param  {Strapi4RequestParams} params? - Query parameters
   * @returns Promise<Strapi4Response<T>>
   */
  const findOne = <T>(contentType: string, id: string | number, params?: Strapi4RequestParams): Promise<Strapi4Response<T>> => {
    const client = useStrapiClient()

    return client(`/${contentType}/${id}`, { method: 'GET', params })
  }

  /**
   * Create a {content-type} entry
   *
   * @param  {string} contentType - Content type's name pluralized
   * @param  {Record<string, any>} data - Form data
   * @returns Promise<Strapi4Response<T>>
   */
  const create = <T>(contentType: string, data: Partial<T>): Promise<Strapi4Response<T>> => {
    const client = useStrapiClient()

    return client(`/${contentType}`, { method: 'POST', body: { data } })
  }

  /**
   * Update an entry
   *
   * Callable as update(contentType, id, data) or - for single types -
   * as update(contentType, data); in the latter case the second argument
   * is detected as the data object and no id segment is appended.
   *
   * @param  {string} contentType - Content type's name pluralized
   * @param  {string|number} id - ID of entry to be updated
   * @param  {Record<string, any>} data - Form data
   * @returns Promise<Strapi4Response<T>>
   */
  const update = <T>(contentType: string, id: string | number | Partial<T>, data?: Partial<T>): Promise<Strapi4Response<T>> => {
    const client = useStrapiClient()

    if (typeof id === 'object') {
      data = id
      // Clear id so the path below omits it (filter(Boolean) drops it).
      // @ts-ignore
      id = undefined
    }

    const path = [contentType, id].filter(Boolean).join('/')

    return client(path, { method: 'PUT', body: { data } })
  }

  /**
   * Delete an entry
   *
   * @param  {string} contentType - Content type's name pluralized
   * @param  {string|number} id - ID of entry to be deleted
   * @returns Promise<Strapi4Response<T>>
   */
  const _delete = <T>(contentType: string, id?: string | number): Promise<Strapi4Response<T>> => {
    const client = useStrapiClient()

    const path = [contentType, id].filter(Boolean).join('/')

    return client(path, { method: 'DELETE' })
  }

  return {
    find,
    findOne,
    create,
    update,
    delete: _delete
  }
}
|
<reponame>Carnis/CMtest
package com.cm.cmtest.modtut;
import com.cm.cmtest.modtut.commands.TeleportCommand;
import com.cm.cmtest.modtut.proxy.CommonProxy;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.FMLInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPostInitializationEvent;
import net.minecraftforge.fml.common.event.FMLPreInitializationEvent;
import net.minecraftforge.fml.common.event.FMLServerStartingEvent;
import org.apache.logging.log4j.Logger;
@Mod(modid = ModTut.MODID, name = ModTut.MODNAME, version = ModTut.MODVERSION, dependencies = "required-after:forge@[13.19.0.2129,)", useMetadata = true)
/**
 * Main Forge mod entry point. Forge drives the lifecycle via the
 * {@code @Mod.EventHandler} methods below, which delegate to a
 * side-specific proxy (client or dedicated server).
 */
public class ModTut {
    // Mod identity constants referenced by the @Mod annotation above.
    public static final String MODID = "modtut";
    public static final String MODNAME = "Mod tutorials";
    public static final String MODVERSION = "0.0.1";

    // Forge injects ClientProxy or ServerProxy depending on the side.
    @SidedProxy(clientSide = "com.cm.cmtest.modtut.proxy.ClientProxy", serverSide = "com.cm.cmtest.modtut.proxy.ServerProxy")
    public static CommonProxy proxy;

    // Singleton instance populated by Forge.
    @Mod.Instance
    public static ModTut instance;

    public static Logger logger;

    /** Pre-initialization: capture the mod logger, then delegate. */
    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event){
        logger = event.getModLog();
        proxy.preInit(event);
    }

    /** Main initialization phase, delegated to the proxy. */
    @Mod.EventHandler
    public void init(FMLInitializationEvent e) {
        proxy.init(e);
    }

    /** Post-initialization phase, delegated to the proxy. */
    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent e) {
        proxy.postInit(e);
    }

    /** Registers the teleport command each time a server starts. */
    @Mod.EventHandler
    public void serverLoad(FMLServerStartingEvent event) {
        event.registerServerCommand(new TeleportCommand());
    }
}
|
<filename>test/queries/network_event_contacts_query_test.rb
require 'test_helper'
class NetworkEventContactsQueryTest < ActiveSupport::TestCase
  # Runs the query under test for the fixture event with the given input.
  def execute_query(input)
    NetworkEventContactsQuery.new(@network_event.id, input).run
  end

  # Display string the query is expected to produce for a member.
  def name_and_email(member)
    "#{member.first_name} #{member.last_name} (#{member.email})"
  end

  setup do
    @network_event = network_events(:tuggle_network)
    # Ensure at least one volunteer exists for the volunteer-matching tests.
    @network_event.volunteers << members(:malachi)
  end

  test "returns an empty set if there are no matches" do
    assert_empty execute_query("this-should-not-match-anything")
  end

  test "returns a matching site contact record if first_name matches" do
    member = @network_event.site_contacts.first
    assert_includes execute_query(member.first_name),
      {"id" => member.id, "type" => "Site Contact", "text" => name_and_email(member)}
  end

  test "returns a matching volunteer record if first_name matches" do
    member = @network_event.volunteers.first
    assert_includes execute_query(member.first_name),
      {"id" => member.id, "type" => "Volunteer", "text" => name_and_email(member)}
  end

  test "returns a matching school contact record if last_name matches" do
    member = @network_event.school_contacts.first
    assert_includes execute_query(member.last_name),
      {"id" => member.id, "type" => "School Contact", "text" => name_and_email(member)}
  end

  test "returns a matching volunteer record if email matches" do
    member = @network_event.volunteers.first
    assert_includes execute_query(member.email),
      {"id" => member.id, "type" => "Volunteer", "text" => name_and_email(member)}
  end

  # Searching by the contact-type label itself ("Volun...") should return
  # every record of that type.
  test "returns matching record based on type" do
    volunteers = @network_event.volunteers
    result = execute_query("Volun")
    volunteers.each do |volunteer|
      assert_includes result, {"id" => volunteer.id, "type" => "Volunteer", "text" => name_and_email(volunteer)}
    end
  end
end
|
def load_data(file_name):
    """Read the entire contents of ``file_name`` and return it as a string.

    Prints an error message and returns ``None`` if the file cannot be
    opened or read.
    """
    try:
        # "with" guarantees the file handle is closed even on error; the
        # original bare open() leaked the handle.
        with open(file_name) as f:
            return f.read()
    except IOError:
        print("Error: File not found")
        return None
<reponame>yamatokataoka/file-service<gh_stars>1-10
package com.yamatokataoka.xroaddrive.api;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.mongodb.config.EnableMongoAuditing;
@Configuration
@EnableMongoAuditing
// Enables Spring Data MongoDB auditing (e.g. @CreatedDate/@LastModifiedDate
// fields are populated automatically). No further configuration is needed.
public class MongoConfig {}
require('dotenv').config();
//
const ModelClient = require('./model-client.js');
const TagClient = require( './tag-client.js');
const JobClient = require('./job-client.js');
const ResultClient = require('./result-client.js');
const AccountingClient = require('./accounting-client.js');
const ModzyClient = require('./modzy-client.js');
const ApiError = require('./api-error.js');
/**
 * Public package entry point: re-exports every Modzy API client class.
 * @type {{TagClient: TagClient, ResultClient: ResultClient, ModelClient: ModelClient, AccountingClient: AccountingClient, ModzyClient: ModzyClient, ApiError: ApiError, JobClient: JobClient}}
 */
module.exports = {
  ModelClient,
  TagClient,
  JobClient,
  ResultClient,
  AccountingClient,
  ModzyClient,
  ApiError
};
package sensors
import "github.com/mickael-carl/hue_exporter/pkg/common"
// SensorState mirrors the "state" object reported by the Hue API for a
// sensor. Fields are pointers because different sensor models report
// different subsets; fields absent from the payload stay nil.
type SensorState struct {
	Temperature *int
	Presence    *bool
	ButtonEvent *int
	Status      *int
	Flag        *bool
	LightLevel  *int
	Dark        *bool
	Daylight    *bool
	LastUpdated *string
}

// SensorConfig mirrors the sensor "config" object. Only On is always
// present; the remaining fields depend on the sensor model.
type SensorConfig struct {
	On             bool
	Battery        *int
	Reachable      *bool
	Alert          *string
	LedIndication  *bool
	UserTest       *bool
	Pending        []string
	Sensitivity    *int
	SensitivityMax *int
	SunriseOffset  *int
	SunsetOffset   *int
}

// SensorCapabilities describes static capabilities of the device.
type SensorCapabilities struct {
	Certified bool
}

// SensorAttributes is the full JSON payload for a single sensor.
type SensorAttributes struct {
	State            SensorState
	SoftwareUpdate   *common.SoftwareUpdate `json:"swupdate"`
	Config           SensorConfig
	Name             string
	Type             string
	ModelId          string
	ManufacturerName string
	ProductName      string
	SoftwareVersion  string `json:"swversion"`
	UniqueId         string
	Capabilities     SensorCapabilities
}

// Sensors maps the bridge-assigned sensor id to its attributes.
type Sensors map[int]SensorAttributes
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.