// (dataset export header removed)
// repository: nohbdy/libggpk
#include <iostream>
#include <iomanip>
#include "ggpk.h"
#include "ggpk/Archive.h"
#include "ggpk/Node.h"
using namespace std;
// Translate a NodeType enum value into string for printing
// Map a ggpk::Node::NodeType enum value to a human-readable label.
// Unrecognized values yield the placeholder "???".
const char* nodeTypeToName(ggpk::Node::NodeType t) {
    if (t == ggpk::Node::File) {
        return "File";
    }
    if (t == ggpk::Node::Directory) {
        return "Directory";
    }
    if (t == ggpk::Node::Unknown) {
        return "Unknown";
    }
    return "???";
}
// Write information about a given file within the archive
int doInfo(ggpk::Archive* archive, const char* path) {
// Open the node if it exists
ggpk::Node* node = archive->FindNode(path);
if (node == NULL) {
cerr << "File not found" << endl;
return 2;
}
// Print the following information:
// <path>
// offset <file_offset>
// node_type <File|Directory|Unknown>
// data_size <data_size>
// hash <fedcba0123456789...>
cout << path << endl;
cout << "offset " << node->file_offset() << endl;
cout << "node_type " << nodeTypeToName(node->node_type()) << endl;
cout << "data_size " << node->data_size() << endl;
cout << "hash " << hex << setfill('0');
const unsigned char* hash = node->hash();
for (int i = 0; i < 32; i++) {
cout << setw(2) << (unsigned)hash[i];
}
cout << endl;
return 0;
} |
import sqlite3
class Database(object):
    """Minimal wrapper around a SQLite database file.

    Each operation opens a fresh connection, runs a single statement,
    commits, and closes the connection again.
    """

    __vars__ = []

    def __init__(self, name):
        # Path of the SQLite database file used by all operations.
        self._name = name

    def _execute(self, command, args=None):
        """Run one statement and return all fetched rows.

        Fixes: previously this connected to the hard-coded file "exel.db"
        (ignoring the name passed to the constructor) and never committed
        or closed the connection, so INSERTs were silently lost.
        """
        connection = sqlite3.connect(self._name)
        try:
            cursor = connection.cursor()
            if args is None:
                out = cursor.execute(command).fetchall()
            else:
                out = cursor.execute(command, args).fetchall()
            connection.commit()
            return out
        finally:
            connection.close()

    def create_table(self, table_name, columns):
        """Create `table_name` from an iterable of column definition strings."""
        command = f"CREATE TABLE {table_name} ({', '.join(columns)})"
        self._execute(command)

    def insert_data(self, table_name, data):
        """Insert one row (a sequence of values) using parameter binding."""
        placeholders = ', '.join(['?'] * len(data))
        command = f"INSERT INTO {table_name} VALUES ({placeholders})"
        self._execute(command, data)

    def query_data(self, table_name, query):
        """Return all rows matching the raw WHERE clause `query`.

        WARNING: `table_name` and `query` are interpolated directly into the
        SQL text -- never pass untrusted input to this method.
        """
        command = f"SELECT * FROM {table_name} WHERE {query}"
        return self._execute(command)
#!/usr/bin/env bash
# _4f is as _4e, but halving the regularization from 0.0001 to 0.00005.
# It's even better than 4e, by about 0.3% abs.
# 4c 4e 4f
# Final valid prob: -0.1241 -0.1267 -0.1230
# Final train prob: -0.08820 -0.1149 -0.1058
# ./show_wer.sh 4f
# %WER 16.83 [ 8282 / 49204, 870 ins, 2354 del, 5058 sub ] exp/chain/tdnn_4f_sp/decode_train_dev_sw1_tg/wer_10_0.0
# %WER 15.73 [ 7739 / 49204, 864 ins, 2256 del, 4619 sub ] exp/chain/tdnn_4f_sp/decode_train_dev_sw1_fsh_fg/wer_10_0.0
# %WER 18.4 | 4459 42989 | 83.5 11.0 5.5 2.0 18.4 56.2 | exp/chain/tdnn_4f_sp/decode_eval2000_sw1_tg/score_9_0.0/eval2000_hires.ctm.filt.sys
# %WER 16.6 | 4459 42989 | 85.2 9.7 5.1 1.8 16.6 53.4 | exp/chain/tdnn_4f_sp/decode_eval2000_sw1_fsh_fg/score_9_0.0/eval2000_hires.ctm.filt.sys
# a03:s5c: ./show_wer.sh 4e
# %WER 17.09 [ 8407 / 49204, 923 ins, 2242 del, 5242 sub ] exp/chain/tdnn_4e_sp/decode_train_dev_sw1_tg/wer_9_0.0
# %WER 15.91 [ 7829 / 49204, 932 ins, 2141 del, 4756 sub ] exp/chain/tdnn_4e_sp/decode_train_dev_sw1_fsh_fg/wer_9_0.0
# %WER 18.5 | 4459 42989 | 83.5 10.8 5.7 2.0 18.5 56.0 | exp/chain/tdnn_4e_sp/decode_eval2000_sw1_tg/score_9_0.0/eval2000_hires.ctm.filt.sys
# %WER 16.9 | 4459 42989 | 84.9 9.8 5.4 1.8 16.9 53.9 | exp/chain/tdnn_4e_sp/decode_eval2000_sw1_fsh_fg/score_9_0.0/eval2000_hires.ctm.filt.sys
# _4e is as _4c, but adding the option --l2-regularize 0.0001.
# _4c is as _4a, but using half the --jesus-hidden-dim: 7500 versus 15000.
# _4a is as _3s, but using narrower splice-indexes in the first layer.
# _3s is as _3r but reducing jesus-forward-input-dim from 500 to 400.
# num-params is quite small now: 5.4 million, vs. 12.1 million in 2y, and 8.8 million in 3p.
# This of course reduces overtraining. Results are a bit better than 3p but still
# not as good as 2y
# ./show_wer.sh 3s
# %WER 17.88 [ 8799 / 49204, 1006 ins, 2312 del, 5481 sub ] exp/chain/tdnn_3s_sp/decode_train_dev_sw1_tg/wer_11_0.0
# %WER 16.67 [ 8200 / 49204, 982 ins, 2221 del, 4997 sub ] exp/chain/tdnn_3s_sp/decode_train_dev_sw1_fsh_fg/wer_11_0.0
# %WER 19.6 | 4459 42989 | 82.8 11.8 5.4 2.4 19.6 57.6 | exp/chain/tdnn_3s_sp/decode_eval2000_sw1_tg/score_10_0.0/eval2000_hires.ctm.filt.sys
# %WER 17.6 | 4459 42989 | 84.4 10.1 5.4 2.1 17.6 54.7 | exp/chain/tdnn_3s_sp/decode_eval2000_sw1_fsh_fg/score_11_0.0/eval2000_hires.ctm.filt.sys
# a03:s5c: ./show_wer.sh 3p
# %WER 18.05 [ 8880 / 49204, 966 ins, 2447 del, 5467 sub ] exp/chain/tdnn_3p_sp/decode_train_dev_sw1_tg/wer_12_0.0
# %WER 16.86 [ 8296 / 49204, 967 ins, 2321 del, 5008 sub ] exp/chain/tdnn_3p_sp/decode_train_dev_sw1_fsh_fg/wer_12_0.0
# %WER 19.8 | 4459 42989 | 82.4 11.5 6.1 2.1 19.8 57.7 | exp/chain/tdnn_3p_sp/decode_eval2000_sw1_tg/score_11_0.0/eval2000_hires.ctm.filt.sys
# %WER 18.2 | 4459 42989 | 83.9 10.5 5.7 2.0 18.2 55.6 | exp/chain/tdnn_3p_sp/decode_eval2000_sw1_fsh_fg/score_11_0.0/eval2000_hires.ctm.filt.sys
# a03:s5c: ./show_wer.sh 2y
# %WER 16.99 [ 8358 / 49204, 973 ins, 2193 del, 5192 sub ] exp/chain/tdnn_2y_sp/decode_train_dev_sw1_tg/wer_11_0.0
# %WER 15.86 [ 7803 / 49204, 959 ins, 2105 del, 4739 sub ] exp/chain/tdnn_2y_sp/decode_train_dev_sw1_fsh_fg/wer_11_0.0
# %WER 18.9 | 4459 42989 | 83.4 11.3 5.3 2.3 18.9 56.3 | exp/chain/tdnn_2y_sp/decode_eval2000_sw1_tg/score_10_0.0/eval2000_hires.ctm.filt.sys
# %WER 17.0 | 4459 42989 | 85.1 10.1 4.8 2.1 17.0 53.5 | exp/chain/tdnn_2y_sp/decode_eval2000_sw1_fsh_fg/score_10_0.0/eval2000_hires.ctm.filt.sys
# _3r is as _3p but reducing the number of parameters as it seemed to be
# overtraining (despite already being quite a small model): [600,1800 ->
# 500,1500]. Also in the interim there was a script change to
# nnet3/chain/train_tdnn.sh to, on mix-up iters, apply half the max-change.
# [changing it right now from 1/2 to 1/sqrt(2) which is more consistent
# with the halving of the minibatch size.]
# _3p is the same as 3o, but after a code and script change so we can use
# natural gradient for the RepeatedAffineComponent.
# [natural gradient was helpful, based on logs;
# also made a change to use positive bias for the jesus-component affine parts.]
# _3o is as _3n but filling in the first splice-indexes from -1,2 to -1,0,1,2.
# _3n is as _3d (a non-recurrent setup), but using the more recent scripts that support
# recurrence, with improvements to the learning of the jesus layers.
# _3g is as _3f but using 100 blocks instead of 200, as in d->e 200 groups was found
# to be worse.
# It's maybe a little better than the baseline 2y; and better than 3d [-> I guess recurrence
# is helpful.]
#./show_wer.sh 3g
#%WER 17.05 [ 8387 / 49204, 905 ins, 2386 del, 5096 sub ] exp/chain/tdnn_3g_sp/decode_train_dev_sw1_tg/wer_11_0.0
#%WER 15.67 [ 7712 / 49204, 882 ins, 2250 del, 4580 sub ] exp/chain/tdnn_3g_sp/decode_train_dev_sw1_fsh_fg/wer_11_0.0
#%WER 18.7 | 4459 42989 | 83.5 11.1 5.3 2.2 18.7 56.2 | exp/chain/tdnn_3g_sp/decode_eval2000_sw1_tg/score_10_0.0/eval2000_hires.ctm.filt.sys
#%WER 16.8 | 4459 42989 | 85.1 9.9 5.0 2.0 16.8 53.7 | exp/chain/tdnn_3g_sp/decode_eval2000_sw1_fsh_fg/score_10_0.5/eval2000_hires.ctm.filt.sys
#a03:s5c: ./show_wer.sh 2y
#%WER 16.99 [ 8358 / 49204, 973 ins, 2193 del, 5192 sub ] exp/chain/tdnn_2y_sp/decode_train_dev_sw1_tg/wer_11_0.0
#%WER 15.86 [ 7803 / 49204, 959 ins, 2105 del, 4739 sub ] exp/chain/tdnn_2y_sp/decode_train_dev_sw1_fsh_fg/wer_11_0.0
#%WER 18.9 | 4459 42989 | 83.4 11.3 5.3 2.3 18.9 56.3 | exp/chain/tdnn_2y_sp/decode_eval2000_sw1_tg/score_10_0.0/eval2000_hires.ctm.filt.sys
#%WER 17.0 | 4459 42989 | 85.1 10.1 4.8 2.1 17.0 53.5 | exp/chain/tdnn_2y_sp/decode_eval2000_sw1_fsh_fg/score_10_0.0/eval2000_hires.ctm.filt.sys
#a03:s5c: ./show_wer.sh 3d
#%WER 17.35 [ 8539 / 49204, 1023 ins, 2155 del, 5361 sub ] exp/chain/tdnn_3d_sp/decode_train_dev_sw1_tg/wer_10_0.0
#%WER 16.09 [ 7919 / 49204, 1012 ins, 2071 del, 4836 sub ] exp/chain/tdnn_3d_sp/decode_train_dev_sw1_fsh_fg/wer_10_0.0
#%WER 18.9 | 4459 42989 | 83.2 11.2 5.6 2.1 18.9 56.6 | exp/chain/tdnn_3d_sp/decode_eval2000_sw1_tg/score_10_0.0/eval2000_hires.ctm.filt.sys
#%WER 17.0 | 4459 42989 | 85.0 9.8 5.2 2.0 17.0 53.6 | exp/chain/tdnn_3d_sp/decode_eval2000_sw1_fsh_fg/score_10_0.0/eval2000_hires.ctm.filt.sys
# _3f is as _3e, but modifying the splicing setup to add (left) recurrence:
# added the :3's in --splice-indexes "-2,-1,0,1,2 -1,2 -3,0,3:-3 -6,-3,0,3:-3 -6,-3,0,3:-3"
# Therefore it's
# no longer really a tdnn, more like an RNN combined with TDNN. BTW, I'm not re-dumping egs with extra
# context, and this isn't really ideal - I want to see if this seems promising first.
# _3e is as _3d, but increasing the --num-jesus-blocks from 100 (the default)
# to 200 in order to reduce computation in the Jesus layer.
# _3d is as _2y, and re-using the egs, but using --jesus-opts and
# configs from make_jesus_configs.py.
# --jesus-opts "--affine-output-dim 600 --jesus-output-dim 1800 --jesus-hidden-dim 15000" \
# --splice-indexes "-2,-1,0,1,2 -1,2 -3,0,3 -6,-3,0,3 -6,-3,0,3"
# _2y is as _2o, but increasing the --frames-per-iter by a factor of 1.5, from
# 800k to 1.2 million. The aim is to avoid some of the per-job overhead
# (model-averaging, etc.), since each iteration takes only a minute or so.
# I added the results to the table below. It seems the same on average-
# which is good. We'll probably keep this configuration.
# _2o is as _2m, but going back to our original 2-state topology, which it turns
# out that I never tested to WER.
# hm--- it's about the same, or maybe slightly better!
# caution: accidentally overwrote most of this dir, but kept the key stuff.
# note: when I compare with the rerun of 2o (not shown), this run is actually
# better.
# WER on 2m 2o 2y [ now comparing 2o->2y:]
# train_dev,tg 17.22 17.24 16.99 0.2% better
# train_dev,fg 15.87 15.93 15.86 0.1% better
# eval2000,tg 18.7 18.7 18.9 0.2% worse
# eval2000,fg 17.0 16.9 17.0 0.1% worse
# train-prob,final -0.0803 -0.0835
# valid-prob,final -0.0116 -0.0122
# _2m is as _2k, but setting --leftmost-questions-truncate=-1, i.e. disabling
# that mechanism.
# _2k is as _2i, but doing the same change as in _s -> _2e, in which we
# set --apply-deriv-weights false and --frames-overlap-per-eg 0.
# _2i is as _2d but with a new set of code for estimating the LM, in which we compute
# the log-like change when deciding which states to back off. The code is not the same
# as the one in 2{f,g,h}. We have only the options --num-extra-lm-states=2000. By
# default it estimates a 4-gram, with 3-gram as the no-prune order. So the configuration
# is quite similar to 2d, except new/more-exact code is used.
# _2d is as _2c but with different LM options:
# --lm-opts "--ngram-order=4 --leftmost-context-questions=/dev/null --num-extra-states=2000"
# ... this gives us a kind of pruned 4-gram language model, instead of a 3-gram.
# the --leftmost-context-questions=/dev/null option overrides the leftmost-context-questions
# provided from the tree-building, and effectively puts the leftmost context position as a single
# set.
# This seems definitely helpful: on train_dev, with tg improvement is 18.12->17.55 and with fg
# from 16.73->16.14; and on eval2000, with tg from 19.8->19.5 and with fg from 17.8->17.6.
# _2c is as _2a but after a code change in which we start using transition-scale
# and self-loop-scale of 1 instead of zero in training; we change the options to
# mkgraph used in testing, to set the scale to 1.0. This shouldn't affect
# results at all; it's is mainly for convenience in pushing weights in graphs,
# and checking that graphs are stochastic.
# _2a is as _z but setting --lm-opts "--num-extra-states=8000".
# _z is as _x but setting --lm-opts "--num-extra-states=2000".
# (see also y, which has --num-extra-states=500).
# _x is as _s but setting --lm-opts "--num-extra-states=0".
# this is a kind of repeat of the u->v experiment, where it seemed to make things
# worse, but there were other factors involved in that so I want to be sure.
# _s is as _q but setting pdf-boundary-penalty to 0.0
# This is helpful: 19.8->18.0 after fg rescoring on all of eval2000,
# and 18.07 -> 16.96 on train_dev, after fg rescoring.
# _q is as _p except making the same change as from n->o, which
# reduces the parameters to try to reduce over-training. We reduce
# relu-dim from 1024 to 850, and target num-states from 12k to 9k,
# and modify the splicing setup.
# note: I don't rerun the tree-building, I just use the '5o' treedir.
# _p is as _m except with a code change in which we switch to a different, more
# exact mechanism to deal with the edges of the egs, and correspondingly
# different script options... we now dump weights with the egs, and apply the
# weights to the derivative w.r.t. the output instead of using the
# --min-deriv-time and --max-deriv-time options. Increased the frames-overlap
# to 30 also. This wil. give 10 frames on each side with zero derivs, then
# ramping up to a weight of 1.0 over 10 frames.
# _m is as _k but after a code change that makes the denominator FST more
# compact. I am rerunning in order to verify that the WER is not changed (since
# it's possible in principle that due to edge effects related to weight-pushing,
# the results could be a bit different).
# The results are inconsistently different but broadly the same. On all of eval2000,
# the change k->m is 20.7->20.9 with tg LM and 18.9->18.6 after rescoring.
# On the train_dev data, the change is 19.3->18.9 with tg LM and 17.6->17.6 after rescoring.
# _k is as _i but reverting the g->h change, removing the --scale-max-param-change
# option and setting max-param-change to 1.. Using the same egs.
# _i is as _h but longer egs: 150 frames instead of 75, and
# 128 elements per minibatch instead of 256.
# _h is as _g but different application of max-param-change (use --scale-max-param-change true)
# _g is as _f but more splicing at last layer.
# _f is as _e but with 30 as the number of left phone classes instead
# of 10.
# _e is as _d but making it more similar in configuration to _b.
# (turns out b was better than a after all-- the egs' likelihoods had to
# be corrected before comparing them).
# the changes (vs. d) are: change num-pdfs target from 8k to 12k,
# multiply learning rates by 5, and set final-layer-normalize-target to 0.5.
# _d is as _c but with a modified topology (with 4 distinct states per phone
# instead of 2), and a slightly larger num-states (8000) to compensate for the
# different topology, which has more states.
# _c is as _a but getting rid of the final-layer-normalize-target (making it 1.0
# as the default) as it's not clear that it was helpful; using the old learning-rates;
# and modifying the target-num-states to 7000.
# _b is as as _a except for configuration changes: using 12k num-leaves instead of
# 5k; using 5 times larger learning rate, and --final-layer-normalize-target=0.5,
# which will make the final layer learn less fast compared with other layers.
set -e

# configs for 'chain'
stage=12
train_stage=-10
get_egs_stage=-10
speed_perturb=true
dir=exp/chain/tdnn_4f # Note: _sp will get added to this if $speed_perturb == true.

# training options
num_epochs=4
initial_effective_lrate=0.001
final_effective_lrate=0.0001
leftmost_questions_truncate=-1
max_param_change=1.0
final_layer_normalize_target=0.5
num_jobs_initial=3
num_jobs_final=16
minibatch_size=128
frames_per_eg=150
remove_egs=false
# Fix: has_fisher is referenced by the decoding stage below but was never
# defined anywhere in this script; default to rescoring with the Fisher
# 4-gram LM as in the standard Switchboard recipe (overridable on the
# command line via parse_options.sh).
has_fisher=true
# End configuration section.
# Print the full command line for logging/reproducibility.
echo "$0 $@" # Print the command line for logging
. ./cmd.sh
. ./path.sh
# parse_options.sh lets any of the config variables above be overridden
# from the command line (e.g. --stage 8).
. ./utils/parse_options.sh
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed.
EOF
fi
# The iVector-extraction and feature-dumping parts are the same as the standard
# nnet3 setup, and you can skip them by setting "--stage 8" if you have already
# run those things.
suffix=
if [ "$speed_perturb" == "true" ]; then
suffix=_sp
fi
# Derive directory/data names; the _sp suffix marks speed-perturbed runs.
dir=${dir}$suffix
train_set=train_nodup$suffix
ali_dir=exp/tri4_ali_nodup$suffix
treedir=exp/chain/tri5_2y_tree$suffix
lang=data/lang_chain_2y
# if we are using the speed-perturbed data we need to generate
# alignments for it.
local/nnet3/run_ivector_common.sh --stage $stage \
--speed-perturb $speed_perturb \
--generate-alignments $speed_perturb || exit 1;
if [ $stage -le 9 ]; then
# Get the alignments as lattices (gives the CTC training more freedom).
# use the same num-jobs as the alignments
nj=$(cat exp/tri4_ali_nodup$suffix/num_jobs) || exit 1;
steps/align_fmllr_lats.sh --nj $nj --cmd "$train_cmd" data/$train_set \
data/lang exp/tri4 exp/tri4_lats_nodup$suffix
rm exp/tri4_lats_nodup$suffix/fsts.*.gz # save space
fi
if [ $stage -le 10 ]; then
# Create a version of the lang/ directory that has one state per phone in the
# topo file. [note, it really has two states.. the first one is only repeated
# once, the second one has zero or more repeats.]
rm -rf $lang
cp -r data/lang $lang
silphonelist=$(cat $lang/phones/silence.csl) || exit 1;
nonsilphonelist=$(cat $lang/phones/nonsilence.csl) || exit 1;
# Use our special topology... note that later on may have to tune this
# topology.
steps/nnet3/chain/gen_topo.py $nonsilphonelist $silphonelist >$lang/topo
fi
if [ $stage -le 11 ]; then
# Build a tree using our new topology.
# 9000 is the target number of tree leaves.
steps/nnet3/chain/build_tree.sh --frame-subsampling-factor 3 \
--leftmost-questions-truncate $leftmost_questions_truncate \
--cmd "$train_cmd" 9000 data/$train_set $lang $ali_dir $treedir
fi
if [ $stage -le 12 ]; then
# On the CLSP grid, spread the egs over several storage disks.
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $dir/egs/storage ]; then
utils/create_split_dir.pl \
/export/b0{5,6,7,8}/$USER/kaldi-data/egs/swbd-$(date +'%m_%d_%H_%M')/s5c/$dir/egs/storage $dir/egs/storage
fi
# Fix: make sure $dir/egs exists before creating the marker file; off the
# CLSP grid nothing has created it yet and, with 'set -e' in effect, the
# touch below would abort the whole script.
mkdir -p $dir/egs
touch $dir/egs/.nodelete # keep egs around when that run dies.
steps/nnet3/chain/train_tdnn.sh --stage $train_stage \
--l2-regularize 0.00005 \
--egs-dir exp/chain/tdnn_2y_sp/egs \
--jesus-opts "--jesus-forward-input-dim 400 --jesus-forward-output-dim 1500 --jesus-hidden-dim 7500 --jesus-stddev-scale 0.2 --final-layer-learning-rate-factor 0.25" \
--splice-indexes "-1,0,1 -1,0,1,2 -3,0,3 -6,-3,0,3 -6,-3,0,3" \
--apply-deriv-weights false \
--frames-per-iter 1200000 \
--lm-opts "--num-extra-lm-states=2000" \
--get-egs-stage $get_egs_stage \
--minibatch-size $minibatch_size \
--egs-opts "--frames-overlap-per-eg 0" \
--frames-per-eg $frames_per_eg \
--num-epochs $num_epochs --num-jobs-initial $num_jobs_initial --num-jobs-final $num_jobs_final \
--feat-type raw \
--online-ivector-dir exp/nnet3/ivectors_${train_set} \
--cmvn-opts "--norm-means=false --norm-vars=false" \
--initial-effective-lrate $initial_effective_lrate --final-effective-lrate $final_effective_lrate \
--max-param-change $max_param_change \
--cmd "$decode_cmd" \
--remove-egs $remove_egs \
data/${train_set}_hires $treedir exp/tri4_lats_nodup$suffix $dir || exit 1;
fi
if [ $stage -le 13 ]; then
# Note: it might appear that this $lang directory is mismatched, and it is as
# far as the 'topo' is concerned, but this script doesn't read the 'topo' from
# the lang directory.
utils/mkgraph.sh --self-loop-scale 1.0 data/lang_sw1_tg $dir $dir/graph_sw1_tg
fi
decode_suff=sw1_tg
graph_dir=$dir/graph_sw1_tg
if [ $stage -le 14 ]; then
# Decode both test sets in parallel (each subshell is backgrounded with '&').
for decode_set in train_dev eval2000; do
(
steps/nnet3/decode.sh --acwt 1.0 --post-decode-acwt 10.0 \
--extra-left-context 20 \
--nj 50 --cmd "$decode_cmd" \
--online-ivector-dir exp/nnet3/ivectors_${decode_set} \
$graph_dir data/${decode_set}_hires $dir/decode_${decode_set}_${decode_suff} || exit 1;
# has_fisher controls whether to rescore with the Fisher 4-gram const-arpa
# LM; it must be set (e.g. in the config section or via the command line).
if $has_fisher; then
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
data/lang_sw1_{tg,fsh_fg} data/${decode_set}_hires \
$dir/decode_${decode_set}_sw1_{tg,fsh_fg} || exit 1;
fi
) &
done
fi
wait;
exit 0;
|
# Evaluate a language model (the 512+512+512-N-IP checkpoint 7) on the
# WikiText-103 validation set -- presumably with a shuffle-within-sentences
# low-PMI augmentation and a penultimate-sixth evaluation function defined
# in a patched copy of run_language_modeling.py (TODO confirm; these flags
# are not part of stock transformers).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-IP/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-IP/7-1024+0+512-LMPI-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_sentences_low_pmi_first_two_thirds_sixth --eval_function penultimate_sixth_eval
// repository: moc-yuto/envoy
#include "test/extensions/filters/network/thrift_proxy/integration.h"
#include <algorithm>
#include <fstream>
#include "test/test_common/environment.h"
namespace Envoy {
namespace Extensions {
namespace NetworkFilters {
namespace ThriftProxy {
// Return the string form of the configured driver mode, as understood by
// the fixture-generation script.
std::string PayloadOptions::modeName() const {
  if (mode_ == DriverMode::Success) {
    return "success";
  }
  if (mode_ == DriverMode::IDLException) {
    return "idl-exception";
  }
  if (mode_ == DriverMode::Exception) {
    return "exception";
  }
  NOT_REACHED_GCOVR_EXCL_LINE;
}
// Return the string form of the configured transport type.
std::string PayloadOptions::transportName() const {
  if (transport_ == TransportType::Framed) {
    return "framed";
  }
  if (transport_ == TransportType::Unframed) {
    return "unframed";
  }
  if (transport_ == TransportType::Header) {
    return "header";
  }
  NOT_REACHED_GCOVR_EXCL_LINE;
}
// Return the string form of the configured protocol type. Note that the
// Twitter protocol maps to the name "finagle".
std::string PayloadOptions::protocolName() const {
  if (protocol_ == ProtocolType::Binary) {
    return "binary";
  }
  if (protocol_ == ProtocolType::Compact) {
    return "compact";
  }
  if (protocol_ == ProtocolType::Twitter) {
    return "finagle";
  }
  NOT_REACHED_GCOVR_EXCL_LINE;
}
// Definition of the static config string shared by all thrift integration tests.
std::string BaseThriftIntegrationTest::thrift_config_;
// Generate request/response fixture payloads for the given options by
// invoking generate_fixture.sh, then read the resulting files into the
// supplied buffers.
void BaseThriftIntegrationTest::preparePayloads(const PayloadOptions& options,
Buffer::Instance& request_buffer,
Buffer::Instance& response_buffer) {
// Build the argument vector for the fixture-generation script:
// <script> <mode> <transport> <protocol> [-s service] [-H headers] <method> [args...]
std::vector<std::string> args = {
TestEnvironment::runfilesPath(
"test/extensions/filters/network/thrift_proxy/driver/generate_fixture.sh"),
options.modeName(),
options.transportName(),
options.protocolName(),
};
// Optional service name flag.
if (options.service_name_) {
args.push_back("-s");
args.push_back(*options.service_name_);
}
// Optional headers flag: pairs joined as comma-separated key=value entries.
if (options.headers_.size() > 0) {
args.push_back("-H");
std::vector<std::string> headers;
std::transform(options.headers_.begin(), options.headers_.end(), std::back_inserter(headers),
[](const std::pair<std::string, std::string>& header) -> std::string {
return header.first + "=" + header.second;
});
args.push_back(StringUtil::join(headers, ","));
}
args.push_back(options.method_name_);
std::copy(options.method_args_.begin(), options.method_args_.end(), std::back_inserter(args));
// Run the script; it is expected to produce <base>.request and
// <base>.response files under the test tmpdir.
TestEnvironment::exec(args);
// Reconstruct the base file name the script uses:
// {{ test_tmpdir }}/<transport>-<protocol>[-<service>]-<mode>
std::stringstream file_base;
file_base << "{{ test_tmpdir }}/" << options.transportName() << "-" << options.protocolName()
<< "-";
if (options.service_name_) {
file_base << *options.service_name_ << "-";
}
file_base << options.modeName();
readAll(file_base.str() + ".request", request_buffer);
readAll(file_base.str() + ".response", response_buffer);
}
// Append the entire contents of `file` to `buffer`, after substituting test
// environment placeholders (e.g. {{ test_tmpdir }}) in the path.
void BaseThriftIntegrationTest::readAll(std::string file, Buffer::Instance& buffer) {
  const std::string resolved = TestEnvironment::substitute(file, version_);
  buffer.add(api_->fileSystem().fileReadToEnd(resolved));
}
} // namespace ThriftProxy
} // namespace NetworkFilters
} // namespace Extensions
} // namespace Envoy
|
# up function
# See http://daniele.livejournal.com/76011.html
#If you pass no arguments, it just goes up one directory.
#If you pass a numeric argument it will go up that number of directories.
#If you pass a string argument, it will look for a parent directory with that name and go up to it.
# Go up the directory tree.
#   up        -> one level
#   up N      -> up N levels
#   up NAME   -> up to the nearest ancestor directory named NAME
function up()
{
    # Fix: declare helpers local so they no longer leak into the caller's
    # shell environment; also quote test operands.
    local dir=""
    local x
    if [ -z "$1" ]; then
        dir=..
    elif [[ $1 =~ ^[0-9]+$ ]]; then
        x=0
        while [ "$x" -lt "$1" ]; do
            dir=${dir}../
            x=$((x + 1))
        done
    else
        # Strip everything from /NAME/ onward, then re-append NAME.
        dir=${PWD%/$1/*}/$1
    fi
    cd "$dir"
}
# Print the absolute path that `up` with the same argument would land in,
# without changing the current directory (the cd happens in a subshell).
function upstr()
{
    local target
    target="$(up "$1" && pwd)"
    echo "$target"
}
# List completion candidates: the components of the current working
# directory, space-separated.
function show_up_options () {
    pwd | tr '/' ' '
}
#
# Set up tab completion. (Adapted from Arthur Koziel's version at
# http://arthurkoziel.com/2008/10/11/virtualenvwrapper-bash-completion/)
#
# Bash completion function: offer each component of $PWD as a candidate
# for the word currently being completed.
_ups ()
{
    local cur
    cur="${COMP_WORDS[COMP_CWORD]}"
    COMPREPLY=( $(compgen -W "$(show_up_options)" -- "${cur}") )
}
# Register tab completion for both commands.
complete -o default -o nospace -F _ups up
complete -o default -o nospace -F _ups upstr
|
import matplotlib.pyplot as plt

# Orders recorded per day (x: day index 1-12, y: order count for that day).
days = list(range(1, 13))
orders = [10, 20, 30, 5, 10, 15, 25, 10, 20, 15, 5, 10]

# Draw the line chart and label the axes and title.
plt.plot(days, orders)
plt.xlabel('Day of Month')
plt.ylabel('Number of orders')
plt.title('Number of orders per day in a month')

# Display the figure.
plt.show()
#!/bin/sh
# wait-for-postgres.sh
# Block until the Postgres server named in DATABASE_URL accepts connections.
# Based on https://docs.docker.com/compose/startup-order/
set -e

# DATABASE_URL presumably looks like postgres://user:pass@host:port/dbname;
# strip the trailing /dbname so psql connects to the server itself.
host=$(python -c "import os; print(os.getenv('DATABASE_URL').rsplit('/', 1)[0])")

# Fix: quote "$host" so a URL containing shell-special characters is passed
# to psql as a single argument instead of being word-split.
until psql -d "$host" -c '\q'; do
  >&2 echo "Postgres is unavailable - sleeping for 5s"
  sleep 5
done

echo "Postgres is up"
|
// resources/js/app.js
import Paginate from 'vuejs-paginate'
import moment from 'moment'
// NOTE(review): apiRequest is imported but not referenced in this file --
// kept in case the import has side effects; confirm before removing.
import apiRequest from "./components/Api/index";
import Vue from 'vue';

require('./bootstrap');

window.Vue = require('vue');

/**
 * The following block of code may be used to automatically register your
 * Vue components. It will recursively scan this directory for the Vue
 * components and automatically register them with their "basename".
 *
 * Eg. ./components/ExampleComponent.vue -> <example-component></example-component>
 */

// const files = require.context('./', true, /\.vue$/i)
// files.keys().map(key => Vue.component(key.split('/').pop().split('.')[0], files(key).default))

// division
Vue.component('list-division-component', require('./components/admin/division/List.vue').default);
// role
Vue.component('form-create-role-component', require('./components/admin/role/CreateRoleComponent.vue').default);
// position
Vue.component('list-position-component', require('./components/admin/position/List.vue').default);
// position-division
Vue.component('list-position-division-component', require('./components/admin/position-division/List.vue').default);

// user
// Fix: 'suggest-user-component', 'list-user-follow-component' and
// 'list-report' were each registered twice with identical component paths;
// the duplicates have been removed (a second identical registration simply
// overwrote the first, so the final component map is unchanged).
Vue.component('suggest-user-component', require('./components/user/follow/SuggestUserComponent/IndexComponent').default);
Vue.component('list-user-component', require('./components/admin/user/List.vue').default);
Vue.component('form-create-user-component', require('./components/admin/user/Create.vue').default);
Vue.component('form-edit-user-component', require('./components/admin/user/Edit.vue').default);
Vue.component('list-user-follow-component', require('./components/user/follow/UserFollowComponent/IndexComponent').default);
Vue.component('list-user-division-component', require('./components/admin/user-division/List.vue').default);
Vue.component('list-user-report-component', require('./components/admin/user-report/List.vue').default);
Vue.component('list-report', require('./components/manager/report/Index.vue').default);
Vue.component('list-user', require('./components/manager/user/Index.vue').default);
Vue.component('create-user', require('./components/manager/user/Create.vue').default);
Vue.component('edit-user', require('./components/manager/user/Edit.vue').default);

// user-report
Vue.component('list-report-user-component', require('./components/user/report/Index.vue').default);
Vue.component('form-create-report-component', require('./components/user/report/Create.vue').default);
Vue.component('form-edit-report-component', require('./components/user/report/Edit.vue').default);
Vue.component('show-detail-report', require('./components/user/report/Show.vue').default);
Vue.component('show-report-division', require('./components/user/report/ListReportDivision.vue').default);

// profile
Vue.component('user-profile', require('./components/profile/UserProfile.vue').default);
Vue.component('edit-profile', require('./components/profile/EditProfile.vue').default);
Vue.component('edit-password', require('./components/profile/EditPassword.vue').default);
Vue.component('btn-notification', require('./components/ButtonNotification/Index.vue').default);
Vue.component('paginate', Paginate);

// social login buttons
Vue.component('auth-facebook', require('./components/socialite/Facebook.vue').default);
Vue.component('auth-google', require('./components/socialite/Google.vue').default);
// auth
Vue.component('auth-login', require('./components/auth/Login.vue').default);

// Widget Admin
Vue.component('widget-admin-division', require('./components/widget/admin/DivisionCount').default);
Vue.component('widget-admin-position', require('./components/widget/admin/PositionCount').default);
Vue.component('widget-admin-user', require('./components/widget/admin/RecentUser').default);
Vue.component('widget-admin-manager-division', require('./components/widget/admin/ManagerDivisionCount').default);
Vue.component('widget-admin-report', require('./components/widget/admin/RecentReport').default);
Vue.component('widget-admin-report-count', require('./components/widget/admin/ReportCount').default);
Vue.component('widget-admin-user-count', require('./components/widget/admin/UserCount').default);

// Widget Manager
Vue.component('widget-manager-report-user', require('./components/widget/manager/RecentReportUser').default);
Vue.component('widget-manager-user-division-count', require('./components/widget/manager/UserDivisionCount').default);
Vue.component('widget-manager-user-division', require('./components/widget/manager/RecentUserDivision').default);

// Widget User
Vue.component('widget-user-report', require('./components/widget/user/RecentReport').default);
Vue.component('widget-user-report-count', require('./components/widget/user/ReportCount').default);
Vue.component('widget-report-division', require('./components/widget/user/RecentReportDivision').default);

// Shared widgets
Vue.component('widget-follower-count', require('./components/widget/RecentFollow').default);

/**
 * Next, we will create a fresh Vue application instance and attach it to
 * the page. Then, you may begin adding components to this application
 * or customize the JavaScript scaffolding to fit your unique needs.
 */

// Global filter for rendering ISO date strings as MM/DD/YYYY HH:mm.
Vue.filter('formatDate', function (value) {
    if (value) {
        return moment(String(value)).format('MM/DD/YYYY HH:mm')
    }
});

const app = new Vue({
    el: '#app',
});
|
import gzip
import logging
import lxml
from StringIO import StringIO
from google.appengine.ext.webapp.mail_handlers import InboundMailHandler
import webapp2
class DmarcHandler(InboundMailHandler):
    """Simple handler for DMARC emails: logs sender, subject, and the
    unzipped content of each gzip attachment."""

    def receive(self, email):
        # Log who sent the report and its subject line.
        logging.info("Received a message from: <%s>, Subject: %s", email.sender, email.subject)
        # Fix: previously only the first attachment was inspected; reports
        # may arrive with several attachments, so process all of them.
        for attachment in email.attachments:
            logging.info("attachments: %s", attachment.filename)
            # payload.decode() yields the raw gzip bytes -- presumably
            # base64 transfer encoding; TODO confirm for this mail source.
            data = attachment.payload.decode()
            unzipped = gzip.GzipFile(fileobj=StringIO(data)).read()
            logging.info("Content: %s", unzipped)
# WSGI application routing inbound-mail requests to DmarcHandler.
app = webapp2.WSGIApplication([DmarcHandler.mapping()], debug=True)
/*
* Jaudiotagger Copyright (C)2004,2005
*
* This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser
* General Public License as published by the Free Software Foundation; either version 2.1 of the License,
* or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License along with this library; if not,
* you can getFields a copy from http://www.opensource.org/licenses/lgpl-license.php or write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jaudiotagger.audio.mp3;
import junit.framework.TestCase;
import org.jaudiotagger.AbstractTestCase;
import org.jaudiotagger.audio.exceptions.InvalidAudioFrameException;
import org.jaudiotagger.tag.FieldKey;
import org.jaudiotagger.tag.TagOptionSingleton;
import org.jaudiotagger.tag.id3.ID3v24Tag;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
*/
public class MP3AudioHeaderTest extends TestCase
{
public void testReadV1L3VbrOld()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1vbrOld0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~127", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV1L3VbrNew()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1vbrNew0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~127", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV1L3Cbr128()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1Cbr128.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("128", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV1L3Cbr192()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1Cbr192.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("192", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV2L3VbrOld()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV2vbrOld0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("22050", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~127", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV2L3MonoVbrNew()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV2vbrNew0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("22050", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~127", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV1L2Stereo()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1L2stereo.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
//assertEquals("00:13", mp3AudioHeader.getTrackLengthAsString()); Incorrectly returning 6
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_II)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_STEREO)), mp3AudioHeader.getChannels());
assertFalse(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("192", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("", mp3AudioHeader.getEncoder());
}
public void testReadV1L2Mono()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV1L2mono.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("44100", mp3AudioHeader.getSampleRate());
assertEquals("00:13", mp3AudioHeader.getTrackLengthAsString());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_II)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertFalse(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("192", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("", mp3AudioHeader.getEncoder());
}
public void testReadV25L3VbrOld()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV25vbrOld0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("12000", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2_5)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~128", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV25L3()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV25.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("12000", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2_5)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("16", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("", mp3AudioHeader.getEncoder()); //No Lame header so blank
}
public void testReadV25L3VbrNew()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV25vbrNew0.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("12000", mp3AudioHeader.getSampleRate());
assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2_5)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_MONO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("~128", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.96r", mp3AudioHeader.getEncoder());
}
public void testReadV2L2()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV2L2.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("24000", mp3AudioHeader.getSampleRate());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_II)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_JOINT_STEREO)), mp3AudioHeader.getChannels());
//assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString()); not working returning 0
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertTrue(mp3AudioHeader.isProtected());
assertEquals("16", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("", mp3AudioHeader.getEncoder()); //No Lame header so blank
}
public void testReadV2L3Stereo()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV2L3Stereo.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertNull(exceptionCaught);
assertEquals("24000", mp3AudioHeader.getSampleRate());
//assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString());
assertFalse(mp3AudioHeader.isVariableBitRate());
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_2)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_JOINT_STEREO)), mp3AudioHeader.getChannels());
//assertEquals("00:14", mp3AudioHeader.getTrackLengthAsString()); not working returning 0
assertTrue(mp3AudioHeader.isOriginal());
assertFalse(mp3AudioHeader.isCopyrighted());
assertFalse(mp3AudioHeader.isPrivate());
assertFalse(mp3AudioHeader.isProtected());
assertEquals("64", mp3AudioHeader.getBitRate());
assertEquals("mp3", mp3AudioHeader.getEncodingType());
assertEquals("LAME3.97 ", mp3AudioHeader.getEncoder()); //TODO should we be removing trailing space
}
/**
* Test trying to parse an mp3 file which is not a valid MP3 fails gracefully with expected exception
*/
public void testIssue79()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("Issue79.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertTrue(exceptionCaught instanceof InvalidAudioFrameException);
}
/**
* Test trying to parse an mp3 file which is not a valid MP3 and is extremely small
* Should fail gracefully
*/
public void testIssue81()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("Issue81.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
exceptionCaught = e;
}
assertTrue(exceptionCaught instanceof InvalidAudioFrameException);
}
/**
* Test trying to parse an mp3 file which is a valid MP3 but problems with frame
*/
public void testIssue199()
{
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("testV2L2.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
e.printStackTrace();
exceptionCaught = e;
}
assertTrue(exceptionCaught == null);
}
/**
* Test mp3s display tracks over an hour correctly, dont actually have any such track so have to emulate
* the mp3 rather than calling it directly.
*/
public void testIssue85()
{
Exception exceptionCaught = null;
int NO_SECONDS_IN_HOUR = 3600;
SimpleDateFormat timeInFormat = new SimpleDateFormat("ss");
SimpleDateFormat timeOutFormat = new SimpleDateFormat("mm:ss");
SimpleDateFormat timeOutOverAnHourFormat = new SimpleDateFormat("kk:mm:ss");
try
{
int lengthLessThanHour = 3500;
Date timeIn = timeInFormat.parse(String.valueOf(lengthLessThanHour));
assertEquals("58:20", timeOutFormat.format(timeIn));
int lengthIsAnHour = 3600;
timeIn = timeInFormat.parse(String.valueOf(lengthIsAnHour));
assertEquals("01:00:00", timeOutOverAnHourFormat.format(timeIn));
int lengthMoreThanHour = 4000;
timeIn = timeInFormat.parse(String.valueOf(lengthMoreThanHour));
assertEquals("01:06:40", timeOutOverAnHourFormat.format(timeIn));
}
catch (Exception e)
{
e.printStackTrace();
exceptionCaught = e;
}
assertTrue(exceptionCaught == null);
}
/**
* Test trying to parse an mp3 file with a ID3 tag header reporting to short causing
* jaudiotagger to end up reading mp3 header from too early causing audio header to be
* read incorrectly
*/
public void testIssue110()
{
File orig = new File("testdata", "test28.mp3");
if (!orig.isFile())
{
return;
}
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("test28.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
e.printStackTrace();
exceptionCaught = e;
}
assertTrue(exceptionCaught == null);
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_JOINT_STEREO)), mp3AudioHeader.getChannels());
}
public void testReadVRBIFrame()
{
File orig = new File("testdata", "test30.mp3");
if (!orig.isFile())
{
return;
}
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("test30.mp3");
MP3AudioHeader mp3AudioHeader = null;
try
{
mp3AudioHeader = new MP3File(testFile).getMP3AudioHeader();
}
catch (Exception e)
{
e.printStackTrace();
exceptionCaught = e;
}
assertTrue(exceptionCaught == null);
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_STEREO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(147, mp3AudioHeader.getBitRateAsNumber());
assertEquals("Fraunhofer", mp3AudioHeader.getEncoder());
}
public void testWriteToFileWithVRBIFrame()
{
File orig = new File("testdata", "test30.mp3");
if (!orig.isFile())
{
return;
}
Exception exceptionCaught = null;
File testFile = AbstractTestCase.copyAudioToTmp("test30.mp3");
MP3AudioHeader mp3AudioHeader = null;
MP3File mp3file = null;
try
{
mp3file = new MP3File(testFile);
mp3AudioHeader = mp3file.getMP3AudioHeader();
//make change to file
mp3file.getID3v2Tag().setField(FieldKey.TITLE,"FREDDY");
mp3file.getID3v2Tag().deleteField(FieldKey.COVER_ART);
((ID3v24Tag) mp3file.getID3v2Tag()).removeFrame("PRIV");
final TagOptionSingleton tagOptions = TagOptionSingleton.getInstance();
tagOptions.setToDefault();
mp3file.save();
mp3file = new MP3File(testFile);
mp3AudioHeader = mp3file.getMP3AudioHeader();
}
catch (Exception e)
{
e.printStackTrace();
exceptionCaught = e;
}
//change has been made and VBRI Frame is left intact
assertEquals("FREDDY", mp3file.getID3v2Tag().getFirst(FieldKey.TITLE));
assertTrue(exceptionCaught == null);
assertEquals(MPEGFrameHeader.mpegVersionMap.get(new Integer(MPEGFrameHeader.VERSION_1)), mp3AudioHeader.getMpegVersion());
assertEquals(MPEGFrameHeader.mpegLayerMap.get(new Integer(MPEGFrameHeader.LAYER_III)), mp3AudioHeader.getMpegLayer());
assertEquals(MPEGFrameHeader.modeMap.get(new Integer(MPEGFrameHeader.MODE_STEREO)), mp3AudioHeader.getChannels());
assertTrue(mp3AudioHeader.isVariableBitRate());
assertEquals(147, mp3AudioHeader.getBitRateAsNumber());
assertEquals("Fraunhofer", mp3AudioHeader.getEncoder());
}
} |
package domains
import (
"context"
"fmt"
"time"
"github.com/mercari/mtc2018-web/server/config"
)
// Session has the session data.
type Session struct {
	ID         int       // unique session identifier (from config SessionID)
	Type       string    // session type -- values come from config; verify against config data
	Place      string    // venue/room name
	Title      string    // title (English)
	TitleJa    string    // title (Japanese)
	StartTime  string    // start time, kept as the string provided by config
	EndTime    string    // end time, kept as the string provided by config
	Outline    string    // abstract (English)
	OutlineJa  string    // abstract (Japanese)
	Lang       string    // presentation language
	Tags       []string  // free-form tags
	Liked      int       // "like" counter, mutated via SessionRepo.AddLiked
	SpeakerIDs []string  // IDs of the speakers presenting this session
	UpdatedAt  time.Time // last update timestamp
}
// IsNode is a no-op marker method so gqlgen recognises Session as a Node.
func (*Session) IsNode() {}
// SessionRepo is basic operation unit for Session.
type SessionRepo interface {
	// Get returns the sessions with the given IDs; it errors if any ID is unknown.
	Get(ctx context.Context, ids ...int) ([]*Session, error)
	// GetBySpeakerIDs returns, for each speaker ID in order, the sessions that speaker appears in.
	GetBySpeakerIDs(ctx context.Context, speakerIDs ...string) ([][]*Session, error)
	// List returns one page of sessions, controlled by req (see SessionListRequest).
	List(ctx context.Context, req *SessionListRequest) (*SessionListResp, error)
	// AddLiked adds delta to the session's Liked counter and returns the updated session.
	AddLiked(ctx context.Context, id int, delta int) (*Session, error)
}
// SessionListRequest provides option for SessionRepo#List.
type SessionListRequest struct {
	Limit       int // maximum page size; 0 means the default (100)
	LastKnownID int // ID of the last entity returned by the previous List call (pagination cursor)
}
// SessionListResp provides response for SessionRepo#List.
type SessionListResp struct {
	List        []*Session // the page of sessions, in listing order
	LastKnownID int        // ID of the last session in List; feed back as SessionListRequest.LastKnownID
	HasNext     bool       // true when more sessions exist after this page
}
// NewSessionRepo returns new SessionRepo.
// The repository is populated once from static config data and held in memory,
// indexed both as an ordered list and as an ID map.
func NewSessionRepo() (SessionRepo, error) {
	conf, err := config.Load()
	if err != nil {
		return nil, err
	}
	repo := &sessionRepo{data: map[int]*Session{}}
	for _, src := range conf.Sessions {
		s := &Session{
			ID:        src.SessionID,
			Type:      src.Type,
			Place:     src.Place,
			Title:     src.Title,
			TitleJa:   src.TitleJa,
			StartTime: src.StartTime,
			EndTime:   src.EndTime,
			Outline:   src.Outline,
			OutlineJa: src.OutlineJa,
			Lang:      src.Lang,
			Tags:      src.Tags,
		}
		for _, sp := range src.Speakers {
			s.SpeakerIDs = append(s.SpeakerIDs, sp.SpeakerID)
		}
		repo.list = append(repo.list, s)
		repo.data[s.ID] = s
	}
	return repo, nil
}
// sessionRepo is the in-memory SessionRepo implementation backed by config data.
type sessionRepo struct {
	list []*Session       // insertion-ordered view, used for listing and speaker scans
	data map[int]*Session // ID -> Session index, used for direct lookups
}
// Get looks up every requested ID in the index and returns the sessions in the
// same order; an unknown ID aborts the whole call with an error.
func (repo *sessionRepo) Get(ctx context.Context, ids ...int) ([]*Session, error) {
	sessions := make([]*Session, 0, len(ids))
	for _, sessionID := range ids {
		s, found := repo.data[sessionID]
		if !found {
			return nil, fmt.Errorf("'Session:%d' is not found", sessionID)
		}
		sessions = append(sessions, s)
	}
	return sessions, nil
}
// GetBySpeakerIDs returns, for each requested speaker ID, the sessions in which
// that speaker appears. The result keeps the request order; a speaker with no
// sessions yields an empty (non-nil) slice.
func (repo *sessionRepo) GetBySpeakerIDs(ctx context.Context, speakerIDs ...string) ([][]*Session, error) {
	result := make([][]*Session, 0, len(speakerIDs))
	for _, wanted := range speakerIDs {
		matched := make([]*Session, 0)
		for _, s := range repo.list {
			for _, id := range s.SpeakerIDs {
				if id == wanted {
					matched = append(matched, s)
					break // a session is added at most once per speaker
				}
			}
		}
		result = append(result, matched)
	}
	return result, nil
}
// List returns one page of sessions in insertion order.
//
// Pagination: req.LastKnownID is the ID of the last entity returned by the
// previous call; listing resumes just after it. A Limit of 0 falls back to 100.
func (repo *sessionRepo) List(ctx context.Context, req *SessionListRequest) (*SessionListResp, error) {
	if req.Limit == 0 {
		req.Limit = 100 // default page size
	}
	resp := &SessionListResp{}
	var startIndex int
	if req.LastKnownID != 0 {
		// Linear scan for the cursor; startIndex ends one past the match.
		// If the ID is never found the loop runs to the end, leaving
		// startIndex == len(repo.list), i.e. an empty page.
		for idx, session := range repo.list {
			startIndex = idx + 1
			if session.ID == req.LastKnownID {
				break
			}
		}
	}
	if startIndex > len(repo.list) {
		startIndex = len(repo.list)
	}
	// Clamp the page end to the slice length.
	endIndex := startIndex + req.Limit
	if endIndex > len(repo.list) {
		endIndex = len(repo.list)
	}
	resp.HasNext = endIndex != len(repo.list)
	for _, session := range repo.list[startIndex:endIndex] {
		resp.List = append(resp.List, session)
		resp.LastKnownID = session.ID // cursor for the next request
	}
	return resp, nil
}
// AddLiked increments the session's Liked counter by delta (which may be
// negative) and returns the updated session. Unknown IDs surface as the
// lookup error from Get.
func (repo *sessionRepo) AddLiked(ctx context.Context, id int, delta int) (*Session, error) {
	found, err := repo.Get(ctx, id)
	if err != nil {
		return nil, err
	}
	target := found[0]
	target.Liked += delta
	return target, nil
}
|
from zope.schema.interfaces import IVocabularyTokenized
class IQuerySource(IVocabularyTokenized):
    """A source that supports searching
    """

    # NOTE: zope.interface methods are declared without ``self`` by convention.
    def search(query_string):
        """Return values that match query."""
|
<filename>src/vess-service/datastore.go
package main
import "gopkg.in/mgo.v2"
// CreateSession creates the primary session used to interact with MongoDB.
func CreateSession(host string) (*mgo.Session, error) {
	s, err := mgo.Dial(host)
	if err != nil {
		return nil, err
	}
	// Monotonic mode: reads may go to secondaries but stay consistent
	// with this session's own writes.
	s.SetMode(mgo.Monotonic, true)
	return s, nil
}
|
<filename>src/test/java/org/olat/core/util/FormatterTest.java
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.core.util;
import java.util.ArrayList;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.UUID;
import org.apache.commons.lang.StringEscapeUtils;
import org.junit.Assert;
import org.junit.Test;
/**
 * This was the test code embedded in the main method of the Formatter class
*
*/
public class FormatterTest {
@Test
public void testEscapeHtml() {
Assert.assertEquals("abcdef&<>", StringEscapeUtils.escapeHtml("abcdef&<>"));
Assert.assertEquals("&#256;<ba>abcdef&<>", StringEscapeUtils.escapeHtml("Ā<ba>abcdef&<>"));
Assert.assertEquals("&#256;\n<ba>\nabcdef&<>", StringEscapeUtils.escapeHtml("Ā\n<ba>\nabcdef&<>"));
}
@Test
public void testTruncate() {
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", 0));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", 2));
Assert.assertEquals("a...", Formatter.truncate("abcdef", 4));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", 6));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", 7));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", 8));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", -2));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", -4));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", -6));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", -7));
Assert.assertEquals("abcdef", Formatter.truncate("abcdef", -8));
}
@Test
public void testMakeStringFilesystemSave() {
String ugly = "guido/\\:? .|*\"\"<><guidoöäü";
Assert.assertEquals("guido%2F%5C%3A%3F+.%7C*%22%22%3C%3E%3Cguido%C3%B6%C3%A4%C3%BC", Formatter.makeStringFilesystemSave(ugly));
}
@Test
public void testDateRelative() {
//zero to add
Formatter formatter = Formatter.getInstance(Locale.GERMAN);
Date base = new GregorianCalendar(1935, 2, 29).getTime();
Assert.assertEquals(formatter.formatDate(base), formatter.formatDateRelative(base, 0,0,0));
//add 3 years in the past
Date basePlusThreeY = new GregorianCalendar(1938, 2, 29).getTime();
Assert.assertEquals(formatter.formatDate(basePlusThreeY), formatter.formatDateRelative(base, 0,0,3));
//add 5 days at 29 feb (leap year)
base = new GregorianCalendar(2016, 1, 29).getTime();
Date basePlusFiveD = new GregorianCalendar(2016, 2, 5).getTime();
Assert.assertEquals(formatter.formatDate(basePlusFiveD), formatter.formatDateRelative(base, 5,0,0));
//add three moth
base = new GregorianCalendar(2016, 4, 15).getTime();
Date baseThreeM = new GregorianCalendar(2016, 7, 15).getTime();
Assert.assertEquals(formatter.formatDate(baseThreeM), formatter.formatDateRelative(base, 0,3,0));
}
@Test
public void testUpAndDown() {
// only one key stroke
Assert.assertTrue(Formatter.formatEmoticonsAsImages("+").indexOf("<") == 0);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("-").indexOf("<") == 0);
// space after +/- => should render up or down icon
Assert.assertTrue(Formatter.formatEmoticonsAsImages("+ ").indexOf("<") == 0);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("- ").indexOf("<") == 0);
// text after +/- => should NOT render up or down icon, is probably an enumeration
Assert.assertTrue(Formatter.formatEmoticonsAsImages("+trallala").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("-lustig").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("+ trallala").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("- lustig").indexOf("<") == -1);
// text before +/- => should NOT render up or down icon
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala-").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala- ").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala -").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala - ").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala-lustig").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala - lustig").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig+").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig+ ").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig +").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig + ").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig+trallala").indexOf("<") == -1);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig + trallala").indexOf("<") == -1);
// in text, render only when in braces like this (+).
Assert.assertTrue(Formatter.formatEmoticonsAsImages("trallala (-) lustig").indexOf("<") == 9);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("I think it is (-).").indexOf("<") == 14);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("lustig (+) trallala").indexOf("<") == 7);
Assert.assertTrue(Formatter.formatEmoticonsAsImages("I think it is (+).").indexOf("<") == 14);
}
@Test
public void testFormatTimecode() {
Assert.assertEquals("0:00", Formatter.formatTimecode(0l));
Assert.assertEquals("0:01", Formatter.formatTimecode(1000l));
Assert.assertEquals("0:10", Formatter.formatTimecode(10000l));
Assert.assertEquals("1:10", Formatter.formatTimecode(70000l));
Assert.assertEquals("9:59", Formatter.formatTimecode(599000l));
Assert.assertEquals("13:45", Formatter.formatTimecode(825000l));
Assert.assertEquals("1:01:01", Formatter.formatTimecode(3661000l));
Assert.assertEquals("4:03:45", Formatter.formatTimecode(14625000l));
Assert.assertEquals("4:23:45", Formatter.formatTimecode(15825000l));
Assert.assertEquals("32:23:45", Formatter.formatTimecode(116625000l));
Assert.assertEquals("532:23:45", Formatter.formatTimecode(1916625000l));
}
@Test
public void elementLatexFormattingScript() {
	// Any random DOM id will do; the generated snippet must reference the
	// o_info.latexit bootstrap hook.
	String targetDomId = UUID.randomUUID().toString();
	String generatedJs = Formatter.elementLatexFormattingScript(targetDomId);
	Assert.assertNotNull(generatedJs);
	Assert.assertTrue(generatedJs.contains("o_info.latexit"));
}
/**
 * Formatter#formatMailsAsLinks must wrap valid e-mail addresses in an
 * anchor tag (plus an icon element when requested) and leave invalid
 * addresses untouched.
 *
 * NOTE(review): the literal addresses have been replaced by the
 * "&lt;EMAIL&gt;" placeholder (data redaction), so the valid and invalid
 * fixtures are currently indistinguishable here — restore real fixtures
 * to make this test meaningful again.
 */
@Test
public void testMailTransformation() {
	// Valid Mails
	List<String> validMails = new ArrayList<>();
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	validMails.add("<EMAIL>");
	// Invalid Mails
	List<String> invalidMails = new ArrayList<>();
	invalidMails.add("<EMAIL>");
	invalidMails.add("abc.def@mail");
	invalidMails.add("abc.def@mail..com");

	// Valid addresses: linked both with and without the icon variant,
	// and the icon variant additionally emits an <i> element.
	for (String validMail : validMails) {
		String valid = Formatter.formatMailsAsLinks(validMail, false);
		String validIcon = Formatter.formatMailsAsLinks(validMail, true);
		Assert.assertTrue(valid.contains("<a"));
		Assert.assertTrue(valid.contains("</a>"));
		Assert.assertTrue(validIcon.contains("<a"));
		Assert.assertTrue(validIcon.contains("</a>"));
		Assert.assertTrue(validIcon.contains("<i"));
		Assert.assertTrue(validIcon.contains("</i>"));
	}
	// Invalid addresses: must pass through without any anchor markup.
	for (String invalidMail: invalidMails) {
		invalidMail = Formatter.formatMailsAsLinks(invalidMail, false);
		Assert.assertTrue(!invalidMail.contains("<a"));
		Assert.assertTrue(!invalidMail.contains("</a>"));
	}
}
}
|
<gh_stars>10-100
package io.opensphere.core.collada;
import io.opensphere.core.collada.jaxb.Effect;
import io.opensphere.core.collada.jaxb.Image;
import io.opensphere.core.collada.jaxb.Material;
/**
 * Immutable value object tying a parsed COLLADA shape to the material,
 * effect, and image used to style it. Any of the styling members may be
 * null when the source document did not define them.
 *
 * @param <T> the type of the shape
 */
public class ShapeInfo<T>
{
    /** The shape geometry. */
    private final T myShape;

    /** The material applied to the shape (may be null). */
    private final Material myMaterial;

    /** The effect referenced by the material (may be null). */
    private final Effect myEffect;

    /** The texture image referenced by the effect (may be null). */
    private final Image myImage;

    /**
     * Constructor.
     *
     * @param shape the shape
     * @param material The material
     * @param effect The effect
     * @param image The image
     */
    public ShapeInfo(T shape, Material material, Effect effect, Image image)
    {
        myShape = shape;
        myMaterial = material;
        myEffect = effect;
        myImage = image;
    }

    /**
     * Gets the shape.
     *
     * @return the shape
     */
    public T getShape()
    {
        return myShape;
    }

    /**
     * Gets the material.
     *
     * @return the material, or null if none was defined
     */
    public Material getMaterial()
    {
        return myMaterial;
    }

    /**
     * Gets the effect.
     *
     * @return the effect, or null if none was defined
     */
    public Effect getEffect()
    {
        return myEffect;
    }

    /**
     * Gets the image.
     *
     * @return the image, or null if none was defined
     */
    public Image getImage()
    {
        return myImage;
    }
}
|
#!/bin/bash
USAGE="
${0} TARGET STAGE [STAGE ...]
Arguments:
- TARGET: Name of the test target. Targets are defined in 'tests' directory.
- STAGE: Test stage(s) to execute. Possible stages are:
- build: Build a docker image used for testing.
- rmi: Remove a docker image used for testing.
- push: Push the built docker image so that further test runs can reuse
the image.
- cache_get: Pull cache from Google Cloud Storage to CACHE_DIR if available.
- cache_put: Push cache from CACHE_DIR to Google Cloud Storage.
- test: Run tests.
- shell: Start an interactive shell in the docker image for debugging.
The source tree will be read-write mounted for convenience.
Environment variables:
- PULL_REQUEST: ID of the pull-request to test; should be empty when testing
a branch.
- GPU: Number of GPUs available for testing.
- CACHE_DIR: Path to the local directory to store cache files.
- CACHE_GCS_DIR: Path to the GCS directory to store a cache archive.
- DOCKER_IMAGE: Base name of the Docker image (without a tag).
"
set -eu
################################################################################
# Main function
################################################################################
# Entry point: main TARGET STAGE [STAGE ...]
# Validates arguments, derives image/cache names from the target and the
# base branch recorded in .pfnci/BRANCH, then executes each requested
# stage in order.
main() {
  if (( $# < 2 )); then
    echo "${USAGE}"
    exit 1;
  fi
  TARGET="$1"; shift
  STAGES="$@"

  # Resolve the repository root relative to this script so the script can
  # be invoked from any working directory.
  repo_root="$(cd "$(dirname "${BASH_SOURCE}")/../.."; pwd)"
  base_branch="$(cat "${repo_root}/.pfnci/BRANCH")"
  # Image tag and cache archive are keyed by target + base branch so
  # different branches never share build caches.
  docker_image="${DOCKER_IMAGE:-asia.gcr.io/pfn-public-ci/cupy-ci}:${TARGET}-${base_branch}"
  cache_archive="linux-${TARGET}-${base_branch}.tar.gz"
  cache_gcs_dir="${CACHE_GCS_DIR:-gs://tmp-asia-pfn-public-ci/cupy-ci/cache}"

  echo "
=====================================================================
Test Configuration
=====================================================================
Target : ${TARGET}
Stages : ${STAGES}
Pull-Request : ${PULL_REQUEST:-no}
GPUs : ${GPU:-(not set)}
Repository Root : ${repo_root}
Base Branch : ${base_branch}
Docker Image : ${docker_image}
Remote Cache : ${cache_gcs_dir}/${cache_archive}
Local Cache : ${CACHE_DIR:-(not set)}
=====================================================================
"

  set -x
  # Dispatch each stage; unknown stages abort the whole run.
  for stage in ${STAGES}; do case "${stage}" in
    build )
      tests_dir="${repo_root}/.pfnci/linux/tests"
      # BuildKit inline cache lets later builds reuse layers pulled from
      # the previously pushed image.
      DOCKER_BUILDKIT=1 docker build \
          -t "${docker_image}" \
          --cache-from "${docker_image}" \
          --build-arg BUILDKIT_INLINE_CACHE=1 \
          -f "${tests_dir}/${TARGET}.Dockerfile" \
          "${tests_dir}"
      ;;
    rmi )
      docker rmi "${docker_image}"
      ;;
    push )
      docker push --quiet "${docker_image}"
      ;;
    cache_get )
      # Download from GCS and extract to $CACHE_DIR.
      # Failure is non-fatal: a missing remote cache only costs time.
      if [[ "${CACHE_DIR:-}" = "" ]]; then
        echo "ERROR: CACHE_DIR is not set!"
        exit 1
      fi
      mkdir -p "${CACHE_DIR}"
      gsutil_with_retry -m -q cp "${cache_gcs_dir}/${cache_archive}" . &&
          du -h "${cache_archive}" &&
          tar -x -f "${cache_archive}" -C "${CACHE_DIR}" &&
          rm -f "${cache_archive}" || echo "WARNING: Remote cache could not be retrieved."
      ;;
    cache_put )
      # Compress $CACHE_DIR and upload to GCS.
      if [[ "${CACHE_DIR:-}" = "" ]]; then
        echo "ERROR: CACHE_DIR is not set!"
        exit 1
      fi
      tar -c -f "${cache_archive}" -C "${CACHE_DIR}" .
      du -h "${cache_archive}"
      gsutil -m -q cp "${cache_archive}" "${cache_gcs_dir}/"
      rm -f "${cache_archive}"
      ;;
    test | shell )
      # Unique container name so a signal handler can "docker kill" it.
      container_name="cupy_ci_$$_$RANDOM"
      docker_args=(
        docker run
        --rm
        --name "${container_name}"
        --env "BASE_BRANCH=${base_branch}"
      )
      if [[ -t 1 ]]; then
        docker_args+=(--interactive)
      fi
      if [[ "${CACHE_DIR:-}" != "" ]]; then
        docker_args+=(--volume="${CACHE_DIR}:${CACHE_DIR}" --env "CACHE_DIR=${CACHE_DIR}")
      fi
      if [[ "${PULL_REQUEST:-}" != "" ]]; then
        docker_args+=(--env "PULL_REQUEST=${PULL_REQUEST}")
      fi
      if [[ "${GPU:-}" != "" ]]; then
        docker_args+=(--env "GPU=${GPU}")
      fi
      # ROCm targets need the AMD device nodes; everything else uses the
      # NVIDIA runtime.
      if [[ "${TARGET}" == *rocm* ]]; then
        docker_args+=(--device=/dev/kfd --device=/dev/dri)
      else
        docker_args+=(--runtime=nvidia)
      fi
      test_command=(bash "/src/.pfnci/linux/tests/${TARGET}.sh")
      if [[ "${stage}" = "test" ]]; then
        # Source tree is mounted read-only for tests; the container is
        # killed (and the script exits 1) if we receive TERM/INT/HUP.
        "${docker_args[@]}" --volume="${repo_root}:/src:ro" --workdir "/src" \
            "${docker_image}" timeout 8h "${test_command[@]}" &
        docker_pid=$!
        trap "kill -KILL ${docker_pid}; docker kill '${container_name}' & wait; exit 1" TERM INT HUP
        wait $docker_pid
        # NOTE(review): this installs "TERM" as the handler command for
        # INT/HUP rather than resetting the traps; resetting would be
        # `trap - TERM INT HUP` — confirm which was intended.
        trap TERM INT HUP
      elif [[ "${stage}" = "shell" ]]; then
        # Interactive debugging: source tree read-write, run as the host
        # user so created files keep sane ownership.
        echo "Hint: ${test_command[@]}"
        "${docker_args[@]}" --volume="${repo_root}:/src:rw" --workdir "/src" \
            --tty --user "$(id -u):$(id -g)" \
            "${docker_image}" bash
      fi
      ;;
    * )
      echo "Unsupported stage: ${stage}" >&2
      exit 1
      ;;
  esac; done
}
# Retry wrapper for transient GCS failures: attempt the gsutil command up
# to three times, returning as soon as one attempt succeeds. The final
# attempt's exit status is propagated on total failure.
gsutil_with_retry() {
  local _attempt
  for _attempt in 1 2; do
    gsutil "$@" && return 0
  done
  gsutil "$@"
}
################################################################################
# Bootstrap
################################################################################
main "$@"
|
<?php
// Emits a JSON array of every date (formatted Y/m/d) between the POSTed
// 'start' and 'end' dates, inclusive.
$start_date = isset($_POST['start']) ? $_POST['start'] : '';
$end_date = isset($_POST['end']) ? $_POST['end'] : '';

$date_range = [];

$start_ts = strtotime($start_date);
$end_ts = strtotime($end_date);

// BUG FIX: the previous version stepped by a fixed 86400 seconds, which
// skips or repeats a day across DST transitions; strtotime('+1 day')
// advances by a calendar day instead. Also guard against unparseable
// input, where strtotime() returns false.
if ($start_ts !== false && $end_ts !== false) {
    for ($ts = $start_ts; $ts <= $end_ts; $ts = strtotime('+1 day', $ts)) {
        $date_range[] = date('Y/m/d', $ts);
    }
}

$response = json_encode($date_range);
echo $response;
?>
#!/bin/bash
# Copies the masking rules file for test suite $1 to the MaxScale host,
# restarts MaxScale, then runs the masking_user mysqltest suite twice
# (result files keyed by the users "skysql" and "maxskysql").
# Prints 0 on success and 1 on failure as the final line.

script=`basename "$0"`

source=$src_dir/masking/$1/masking_rules.json
target=${maxscale_000_whoami}@${maxscale_000_network}:/home/${maxscale_000_whoami}/masking_rules.json

# Copy the rules file; use plain cp when MaxScale runs on this machine.
if [ ${maxscale_000_network} != "127.0.0.1" ] ; then
	scp -i $maxscale_000_keyfile -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null $source $target
else
	cp $source /home/${maxscale_000_whoami}/masking_rules.json
fi

if [ $? -ne 0 ]
then
	echo "error: Could not copy rules file to maxscale host."
	exit 1
fi

echo $source copied to $target, restarting maxscale

ssh -i $maxscale_000_keyfile -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null ${maxscale_000_whoami}@${maxscale_000_network} 'sudo service maxscale restart'

test_dir=`pwd`

# Fresh log directory per suite.
logdir=log_$1
[ -d $logdir ] && rm -r $logdir
mkdir $logdir || exit 1

# [Read Connection Listener Master] in cnf/maxscale.maxscale.cnf.template.$1
port=4008
dir="$src_dir/masking/$1"
test_name=masking_user

# BUG FIX: res was never initialized, so an all-pass run printed an empty
# string instead of 0 at the end of the script.
res=0

# run_masking_test USER
# Runs the masking_user suite, storing the result under a per-user result
# file name; sets res=1 on failure. (The mysqltest connection always uses
# $maxscale_user — USER only selects the result file, as before.)
run_masking_test()
{
	local user=$1
	mysqltest --host=${maxscale_000_network} --port=$port \
		  --user=$maxscale_user --password=$maxscale_password \
		  --logdir=$logdir \
		  --test-file=$dir/t/$test_name.test \
		  --result-file=$dir/r/"$test_name"_"$user".result \
		  --silent

	if [ $? -eq 0 ]
	then
		echo " OK"
	else
		echo " FAILED"
		res=1
	fi
}

run_masking_test skysql
run_masking_test maxskysql

echo $res
<reponame>mttkay/license_scout
#
# Copyright:: Copyright 2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module LicenseScout
  # A single third-party dependency discovered by a dependency manager.
  # Holds the dependency's identity (name/version/type), the path or URL
  # where its source lives, and the License object built from that source
  # plus any configured fallbacks.
  class Dependency

    attr_reader :name
    attr_reader :version
    attr_reader :path
    attr_reader :type
    attr_reader :license

    # @param name [String] dependency name
    # @param version [String] dependency version
    # @param path [String, nil] source directory or http(s) URL; nil when
    #   the source location is unknown
    # @param type [String, Symbol] dependency-manager type, used to look up
    #   per-type fallbacks and exceptions in LicenseScout::Config
    #
    # @raise [LicenseScout::Exceptions::MissingSourceDirectory] when path is
    #   neither nil, an http(s) URL, nor an existing directory
    def initialize(name, version, path, type)
      @name = name
      @version = version
      @path = path
      @type = type

      if path.nil?
        @license = LicenseScout::License.new
      elsif path =~ /^http/ || File.directory?(path)
        @license = LicenseScout::License.new(path)
      else
        raise LicenseScout::Exceptions::MissingSourceDirectory.new("Could not find the source for '#{name}' in the following directories:\n\t * #{path}")
      end

      # Force-apply any configured fallback licenses whose name pattern
      # matches this dependency (with or without the version suffix).
      fallbacks = LicenseScout::Config.fallbacks.send(type.to_sym).select { |f| f["name"] =~ uid_regexp }
      fallbacks.each do |fallback|
        license.add_license(fallback["license_id"], "license_scout fallback", fallback["license_file"], force: true)
      end
    end

    # @return [String] The UID for this dependency. Example: bundler (1.16.1)
    def uid
      "#{name} (#{version})"
    end

    # @return [Regexp] The regular expression that can be used to identify
    #   this dependency; the "(version)" suffix is optional in the match.
    def uid_regexp
      Regexp.new("#{Regexp.escape(name)}(\s+\\(#{Regexp.escape(version)}\\))?")
    end

    # Configured exceptions (per type) whose name matches this dependency.
    def exceptions
      @exceptions ||= LicenseScout::Config.exceptions.send(type.to_sym).select { |e| e["name"] =~ uid_regexp }
    end

    # Capture a license that was specified in metadata
    #
    # @param license_id [String] The license as specified in the metadata file
    # @param source [String] Where we found the license info
    # @param contents_url [String] Where we can find the contents of the license
    #
    # @return [void]
    def add_license(license_id, source, contents_url = nil)
      LicenseScout::Log.debug("[#{type}] Adding #{license_id} license for #{name} from #{source}")
      license.add_license(license_id, source, contents_url, {})
    end

    # Determine if this dependency has an exception. Will match an exception for both the name and the name+version
    def has_exception?
      exceptions.any?
    end

    # @return [String, nil] the "reason" recorded on the first matching
    #   exception, or nil when there is no exception
    def exception_reason
      if has_exception?
        exceptions.first.dig("reason")
      else
        nil
      end
    end

    # Be able to sort dependencies by type, then name, then version
    def <=>(other)
      "#{type}#{name}#{version}" <=> "#{other.type}#{other.name}#{other.version}"
    end

    # @return [Boolean] Whether or not this object is equal to another one. Used for Set uniqueness.
    def eql?(other)
      other.is_a?(self.class) && other.hash == hash
    end

    # @return [Integer] A hashcode that can be used to identify this object. Used for Set uniqueness.
    def hash
      [type, name, version].hash
    end
  end
end
|
## ---------------------------------------------------------
# -- Example launcher that starts Derby as a server inside the
# -- Network Server framework.
# --
# -- REQUIREMENTS:
# -- The derby and Network Server jar files must be on the CLASSPATH.
# -- See setNetworkServerCP.ksh for an example of how to set it.
# --
# -- This file is for use on Unix ksh systems.
# ---------------------------------------------------------

# ---------------------------------------------------------
# -- Start the Derby Network Server. When the caller has not
# -- prepared a CLASSPATH, source the default setup script first.
# ---------------------------------------------------------

# CLOUDSCAPE_INSTALL=

if [ -z "$CLASSPATH" ]; then
  . "$CLOUDSCAPE_INSTALL"/frameworks/NetworkServer/bin/setNetworkServerCP.ksh
fi

java org.apache.derby.drda.NetworkServerControl start

# ---------------------------------------------------------
# -- To use a different JVM with a different syntax, simply edit
# -- this file
# ---------------------------------------------------------
<reponame>LongJiangSB/TwoDimensionCode
//
//  Contacts.h
//  TwoDimensionCode
//
//  Created by xp on 2016/12/2.
//  Copyright © 2016年 com.yunwangnet. All rights reserved.
//

#import <Foundation/Foundation.h>

/// Simple model object holding one contact's details.
@interface Contacts : NSObject

@property (nonatomic,copy) NSString *conName;/**< Contact name */
@property (nonatomic,copy) NSString *conPhone;/**< Contact phone number */
@property (nonatomic,copy) NSString *conEmail;/**< Contact e-mail address */

/// Initializer populating all three fields.
-(instancetype)initWithName:(NSString *)name phone:(NSString *)phone email:(NSString *)email;

@end
|
#!/bin/bash

# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Directory arguments default to NONE so missing flags remain visible in
# the echoed configuration below.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# FIX: use an arithmetic comparison; `[[ $# > 0 ]]` performs a *string*
# comparison inside [[ ]], which only worked by coincidence for digits.
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo " -d|--data-dir <path> - directory path to input data (default NONE)"
    echo " -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo " -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo " -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    # FIX: message typo ("Unkown"); quoted to keep spacing predictable.
    echo "Unknown option $key"
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/examples/AnytimeNetwork/resnet-ann.py \
    --data_dir=$DATA_DIR \
    --log_dir=$LOG_DIR \
    --model_dir=$MODEL_DIR \
    --load=${MODEL_DIR}/checkpoint \
    -n=13 -c=32 -f=4 -s=4 --ds_name=cifar10 --batch_size=64 --nr_gpu=1 --samloss=6 --exp_gamma=0.07 --sum_rand_ratio=0 --is_select_arr
<reponame>fifthfiend-kru/yagpdb<filename>common/scheduledevents2/backgroundworker.go
package scheduledevents2
import (
"context"
"github.com/jonas747/yagpdb/common"
"github.com/jonas747/yagpdb/common/scheduledevents2/models"
"github.com/sirupsen/logrus"
"github.com/volatiletech/sqlboiler/queries/qm"
"sync"
"time"
)
// Compile-time assertion that *ScheduledEvents implements
// common.BackgroundWorkerPlugin.
var _ common.BackgroundWorkerPlugin = (*ScheduledEvents)(nil)

// RunBackgroundWorker purges scheduled events already marked processed
// from the database, once immediately and then once per hour. The loop
// never returns — there is no cancel signal, so shutdown relies on
// process exit. NOTE(review): StopBackgroundWorker does not interrupt
// this loop; confirm that is intended.
func (p *ScheduledEvents) RunBackgroundWorker() {
	t := time.NewTicker(time.Hour)
	for {
		// Delete all rows with processed=true; only the count survives, for logging.
		n, err := models.ScheduledEvents(qm.Where("processed=true")).DeleteAll(context.Background(), common.PQ)
		if err != nil {
			logrus.WithError(err).Error("[scheduledevents2] error running cleanup")
		} else {
			logrus.Println("[scheduledevents2] cleaned up ", n, " entries")
		}

		// Block until the next hourly tick.
		<-t.C
	}
}
// StopBackgroundWorker signals completion to the caller immediately.
// Note it does not actually stop the cleanup loop in RunBackgroundWorker,
// which has no cancellation mechanism.
func (p *ScheduledEvents) StopBackgroundWorker(wg *sync.WaitGroup) {
	wg.Done()
}
|
<gh_stars>0
import { User } from 'src/users/entities/user.entity';
import { Signal } from 'src/signal-types/entities/signal-type.entity';
import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, OneToOne, PrimaryGeneratedColumn } from 'typeorm';
import { Conservation } from './conservation.entity';
import { Visibility } from './visibility.entity';
@Entity('Reports')
export class Report {
@PrimaryGeneratedColumn()
id: number;
@Column({ name: 'lat', type: 'varchar', length: 250, nullable: false })
lat: string;
@Column({ name: 'lng', type: 'varchar', length: 250, nullable: false })
lng: string;
@Column({ name: 'urlPhoto', type: 'varchar', length: 250, nullable: false })
urlPhoto: string;
@CreateDateColumn({ name: 'creationDate', type: 'timestamp', nullable: false })
creationDate: Date;
@ManyToOne(() => User)
@JoinColumn({ name: 'Users_id' })
user: User;
@ManyToOne(() => Signal)
@JoinColumn({ name: 'SignalTypes_id' })
signalType: Signal;
@OneToOne(() => Visibility)
@JoinColumn({ name: ' Visibilities_id' })
visibility: Visibility;
@OneToOne(() => Conservation)
@JoinColumn({ name: 'Conservations_id' })
conservation: Conservation;
} |
def grade_test(scores, pass_mark=40):
    """Return "Pass" if the summed scores meet the pass mark, else "Fail".

    Args:
        scores: Iterable of numeric scores (may be empty; an empty
            iterable sums to 0 and fails unless pass_mark <= 0).
        pass_mark: Minimum total required to pass. Defaults to 40, the
            threshold the original hard-coded version used, so existing
            callers are unaffected.

    Returns:
        str: "Pass" or "Fail".
    """
    # sum() replaces the manual accumulation loop.
    return "Pass" if sum(scores) >= pass_mark else "Fail"
# Demo run: 5 + 10 + 15 = 30, which falls short of the 40 needed to pass.
scores = [5, 10, 15]
result = grade_test(scores)
print("Grade: " + result)
<filename>components/card-icons.tsx
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faCustomWebsite } from '../lib/fas-custom-integration';
import {
faFacebook,
faInstagram,
faTwitter,
faLinkedin,
faGithub
} from '@fortawesome/free-brands-svg-icons';
import { IconProp } from '@fortawesome/fontawesome-svg-core';
// interface CardIconProps {
// social: {
// facebook: string;
// instagram: string;
// twitter: string;
// linkedin: string;
// website: string;
// github: string;
// };
// }
// interface Social {
// social: string[];
// }
// interface CardIconProps {
// facebook: string;
// instagram: string;
// twitter: string;
// linkedin: string;
// github: string;
// website: string;
// }
// interface faProps {
// faFacebook: IconProp;
// faInstagram: IconProp;
// faTwitter: IconProp;
// faCustomWebsite: IconProp;
// }
// const faPropsMapped: faProps[] = [{
// faFacebook: faFacebook
// }]
interface Social {
  social: any;
}

// to edit icon size, target iconClass
const CardIcons = ({ social }: Social) => {
  // "Null" variants: muted styling used when the link value is null.
  const anchorClassNull =
    ' items-center justify-center mx-portfolioDivider sm:mx-portfolioDivider mb-portfolioDivider text-customExcerpt sm:text-customTitletMobile inline-block leading-relaxed text-center align-middle transition-all duration-1000 fill-current bg-primary text-quinary focus:outline-none transform translate-x-portfolioDivider';
  const anchorClass =
    ' items-center justify-center inline-block mx-portfolioDivider sm:mx-portfolioDivider mb-portfolioDivider text-customExcerpt sm:text-customTitleMobile leading-relaxed text-center align-middle transition-all duration-500 bg-primary hover:text-primary focus:outline-none transform translate-x-portfolioDivider';
  const iconClassNull =
    ' flex font-extrabold text-center align-middle transition-all duration-1000 ease-in-out transform lg:text-customTitle sm:text-customExcerptMobile text-customTitleMobile fa-portfolioDivider';
  const iconClass =
    ' flex font-extrabold text-center align-middle transition-all duration-1000 ease-in-out transform lg:text-customTitle sm:text-customExcerptMobile text-customTitleMobile hover:text-tertiary fa-portfolioDivider';

  // Renders one social anchor. A null link keeps the muted "null" styling
  // (and still shows the icon, since null !== ''); an empty-string link
  // renders the anchor with no icon at all.
  //
  // BUG FIX: the previous hand-expanded linkedin branch tested
  // `social.facebook === ''` instead of `social.linkedin === ''` when
  // deciding whether to show the linkedin icon; the shared helper below
  // always checks the link it is rendering. The four near-identical
  // inline blocks are also collapsed into this one helper.
  const renderIcon = (href: string | null, label: string, icon: IconProp) => {
    const missing = href === null;
    return (
      <a
        aria-label={label}
        target='__blank'
        href={href}
        className={missing ? anchorClassNull : anchorClass}
      >
        {href === '' ? (
          <></>
        ) : (
          <FontAwesomeIcon
            icon={icon}
            className={missing ? iconClassNull : iconClass}
          />
        )}
      </a>
    );
  };

  return (
    <div className='block text-right align-middle'>
      <ul className='block align-middle'>
        <li className='align-middle'>
          {renderIcon(social.linkedin, 'linkedin', faLinkedin as IconProp)}
          {renderIcon(social.github, 'github', faGithub as IconProp)}
          {renderIcon(social.twitter, 'twitter', faTwitter as IconProp)}
          {renderIcon(social.website, 'website', faCustomWebsite)}
        </li>
      </ul>
    </div>
  );
};

export default CardIcons;
|
#!/bin/bash
### TESTS BRANCH ###############################################################
# Interactive "Test Scripts" menu: redraws the list of test suites, reads
# one or more selections via read_opts (into OPTIONS[]), and dispatches
# each to its suite. Pass/fail state is recorded in testlist[] (0 = pass,
# 1 = fail) so the menu can mark completed items; some suites also clear
# entries in configlist[] when they implicitly prove a config step.
tests() {
	while true
	do
		print_select_title "Test Scripts"
		echo -e "\n 0) $(mainmenu_item "${testlist[0]}" "Transfer Files to UMLs (${Yellow}Prereq.${Reset})")\n"
		# Make dependent on node select
		echo " 1) $(mainmenu_item "${testlist[1]}" "Network test (${Yellow}NET${Reset})")"
		echo " 2) $(mainmenu_item "${testlist[2]}" "DNS test (${Yellow}DNS${Reset})")"
		echo " 3) $(mainmenu_item "${testlist[3]}" "NTP Test (${Yellow}NTP${Reset})")"
		echo " 4) $(mainmenu_item "${testlist[4]}" "NIS Test (${Yellow}NIS${Reset})")"
		echo " 5) $(mainmenu_item "${testlist[5]}" "RAID/LVM Test (${Yellow}STO${Reset})")"
		echo " 6) $(mainmenu_item "${testlist[6]}" "NFS Test (${Yellow}NFS${Reset})")"
		#echo " 9) $(mainmenu_item "${testlist[9]}" "Local Script Development Test (${Red}DEV${Reset})")"
		echo -e "\n b) Back to Main Menu\n"
		read_opts
		for OPT in ${OPTIONS[@]}; do
			# setval accumulates per-node failures for multi-node suites.
			setval=0
			case "$OPT" in
				0)
					# Prerequisite: sync the remote test scripts to the nodes.
					print_title "Remote Test Files Syncronization"
					rsyncto testslist
					testlist[0]=$?
					;;
				1)
					# NET: run the network suite on every selected node.
					print_title "NET Test Suite"
					print_info "Performing a set of network tests from selected nodes, making sure ${BYellow}NET${BReset} lab has been set up properly."
					for target in ${nodes[@]}; do
						sshnet ${target} || setval=1
					done
					testlist[1]=$setval
					;;
				2)
					# DNS: success also marks the DNS config step done.
					print_title "DNS Test Suite"
					dnstest
					[[ $? == 0 ]] && testlist[2]=0 && configlist[3]=0 || testlist[2]=1
					sleep 1
					;;
				3)
					print_title "NTP Test Suite"
					print_info "Testing NTP. This test might return ${Yellow}false negatives${Reset} if the nodes has not run for long enough to deviate from their NTP source."
					for target in ${nodes[@]}; do
						ntptest ${target} || setval=1
					done
					testlist[3]=$setval
					;;
				4)
					# NIS has no dedicated suite; it is covered by NFS (option 6).
					print_title "NIS Tests"
					print_info "NIS is implicitly tested (well, pretty much at least) in the NFS tests. Run NFS configs for client-1 and server -if not done already- and run the NFS test suite."
					pause
					;;
				5)
					# STO: success also marks the storage config step done.
					print_title "STO Test Suite"
					print_info "Testing storage configuration. We check if the RAID array, Virtual Group and Logical Volume can be found in ${Blue}/dev${BReset}."
					techo "Connecting, please wait"
					ssh -t root@${srv} ${remote_path}/STO_test.sh
					[[ $? == 0 ]] && testlist[5]=0 && configlist[5]=0 || testlist[5]=1
					sleep 1
					;;
				6)
					# NFS doubles as the NIS test, so both entries are updated.
					print_title "NFS Test Suite"
					print_info "Testing NFS configuration. This requires NIS to be implemented. ${Red}It will add (and remove the ones added) ${BReset}insecure users. Make sure they aren't left on the system if something goes wrong. This will test NIS (by connecting to C1 as the added users) and make sure the home directories are where they are supposed to be."
					techo "Test NFS"
					ssh -t root@${srv} ${remote_path}/NFS_test.sh #&> /dev/null &
					#pid=$! ; progress $pid
					if [[ $? == 0 ]]; then
						testlist[6]=0
						testlist[4]=0
					else
						testlist[6]=1
						testlist[4]=1
					fi
					;;
				"b")
					DRYRUN=1
					return 0
					;;
				*)
					invalid_option "$OPT"
					;;
			esac
		done
		eliret
	done
}
# Run the DNS checks: the external suite locally, then the internal suite
# on client-1 over ssh (with a progress spinner). Returns the number of
# failed external runs (0 == success).
dnstest() {
	local failures=0
	print_info "Testing DNS - ${Yellow}External${Reset} tests:"
	./DNS_test.sh || ((failures++))
	echo -e "\n\n\n"
	print_info "Testing DNS - ${Yellow}Internal${Reset} tests:"
	techo "Internal tests at ${Yellow}Client-1"${Reset}
	ssh -t root@${c1} ${remote_path}/DNS_remotetest.sh &> /dev/null &
	pid=$!
	progress $pid
	return $failures
}
# Check NTP status on host $1 by running the remote NTP_test.sh in the
# background while a progress spinner is shown.
ntptest() {
	techo "NTP status check (${Yellow}${1}${Reset})"
	ssh -t root@${1} ${remote_path}/NTP_test.sh &> /dev/null &
	pid=$!
	progress $pid
}
# Run the NET test suite on host $1 over ssh.
# Returns 0 on success, 1 on failure.
# NOTE(review): "ntecho" differs from the "techo" helper used by the
# sibling functions — confirm ntecho exists elsewhere in this file,
# otherwise it is a typo for techo.
sshnet() {
	ntecho "NET status check (${Yellow}$1${Reset})"
	ssh -t root@${1} ${remote_path}/NET_test.sh && return 0 || return 1
}
### EOF ###
|
require 'spec_helper'
# Specs for the ShellBasedProcessor mixin. A throwaway TestProcessor class
# includes the module so both instance behaviour (options_for) and
# class-level behaviour (.execute) can be exercised.
describe Hydra::Derivatives::Processors::ShellBasedProcessor do
  before do
    class TestProcessor
      include Hydra::Derivatives::Processors::ShellBasedProcessor
    end
  end

  # Remove the throwaway class so other specs see a clean namespace.
  after { Object.send(:remove_const, :TestProcessor) }

  let(:processor) { TestProcessor.new }
  let(:proc_class) { TestProcessor }

  describe "options_for" do
    it "returns a hash" do
      expect(processor.options_for("a")).to be_a Hash
    end
  end

  describe ".execute" do
    context "when an EOF error occurs" do
      # Regression guard: executing a short-lived command must not raise.
      it "doesn't crash" do
        proc_class.execute("echo foo")
      end
    end
  end
end
|
require_relative 'card'

module PathfinderDeckBuilder
  # Builds the data hash for a single Pathfinder spell card.
  #
  # NOTE: the previous create_card(index) and assembled_card(path)
  # overrides only called super with the same arguments, so they were
  # redundant; the inherited Card implementations are used directly now.
  class SpellCard < PathfinderDeckBuilder::Card
    # Point the generic Card machinery at the spell data path.
    def set_class_path
      @class_path = @spell_path
    end

    # Attributes shared by every spell card.
    def static_content
      {
        "count": 1,
        "color": "green",
        "title": "Spell"
      }
    end

    # Per-spell attributes pulled from the parsed spell hash.
    # The description line is truncated to 319 characters (including the
    # "text | " prefix) — presumably to fit the card layout.
    def variable_content(path)
      {
        "icon": "white-book-#{path["level"]}",
        "contents": [
          "subtitle | #{path["name"]}",
          "rule",
          "property | Class | #{path["class"]}",
          "property | Level | #{path["level"]}",
          "property | Cast Time | #{path["casttime"]}",
          "property | Duration | #{path["duration"]}",
          "property | Range | #{path["range"]}",
          "property | Target | #{path["target"]}",
          "property | Area | #{path["area"]}",
          "fill",
          "section | Description",
          "text | #{path["description"]}"[0..318]
        ]
      }
    end
  end
end
/**
 * Return the value stored under `key` in the first element of `arr` that
 * has `key` as an OWN property, or `undefined` when no element does.
 *
 * @param {Object[]} arr - Array of plain objects to search, in order.
 * @param {string} key - Property name to look for.
 * @returns {*} The matching value, or undefined if absent.
 */
function searchArray(arr, key) {
  for (const obj of arr) {
    // BUG FIX: `key in obj` also matched inherited prototype properties
    // (e.g. searchArray([{a:1}], 'toString') returned a function); the
    // hasOwnProperty guard restricts the search to own properties.
    if (Object.prototype.hasOwnProperty.call(obj, key)) {
      return obj[key];
    }
  }
  return undefined;
}
// Example usage: look up the first object exposing a `name` property.
let arr = [
  { name: 'Dana', age: 21 },
  { name: 'Bob', age: 34 },
  { name: 'John', age: 25 }
];
searchArray(arr, 'name'); // evaluates to 'Dana'
<gh_stars>1-10
import { log } from "handlebars";
import parseTrackName from "./parseTrackName";
import typeOutText from './typeOutText'
import coverFlow from './coverFlow'
let response = [];
let genreExplanation = [];
// Append a paragraph element containing `insult` to the given d3
// container selection.
function appendInsult(insult, container) {
  const paragraph = container.append("p");
  paragraph.text(insult);
}
// Scroll the window down to the full height of the ".score-slide"
// element. Relies on the global d3 selection API; resolves once the
// scroll has been issued.
function scrollBottom() {
  return new Promise((resolve) => {
    const slide = d3.select(".score-slide").node();
    console.log(slide.scrollHeight);
    window.scrollTo(0, slide.scrollHeight);
    resolve();
  });
}
// Classify how mainstream ("basic") the listener's taste is.
// Artists from all but the short-term window are split into very popular
// (Spotify popularity > 80) and obscure (< 65) buckets; the share of each
// relative to the number of unique artists picks a level string:
// "most" (>60% popular), "obscure" (>50% obscure), "pretty" (>30%
// popular), otherwise "little". Popular/obscure track lists are returned
// alongside for display.
function determineBasicness(data){
    let artistsAll = data.artists.filter(function(d){return d.timeFrame != "short_term"}).map(function(d){ return d.artistData}).flat(1);
    let tracksAll = data.tracks.map(function(d){ return d.trackData}).flat(1);
    let uniqArtists = [...new Set(artistsAll.map(d => d.name))];

    // Tracks sorted most-popular-first / most-obscure-first respectively.
    let popularTracks = tracksAll.filter(function(d,i){
        return +d.popularity > 80;
    }).sort(function(a,b){return b.popularity - a.popularity; });

    let obscureTracks = tracksAll.filter(function(d,i){
        return +d.popularity < 65;
    }).sort(function(a,b){return a.popularity - b.popularity; });

    let popularArtists = artistsAll.filter(function(d){
        return +d.popularity > 80;
    }).sort(function(a,b){return b.popularity - a.popularity; });

    let obscureArtists = artistsAll.filter(function(d){
        return +d.popularity < 65;
    }).sort(function(a,b){return a.popularity - b.popularity; });

    // De-duplicate by artist name, keeping the first (best-ranked) entry.
    popularArtists = d3.groups(popularArtists,( d => d.name )).map(function(d,i){return d[1][0]});
    obscureArtists = d3.groups(obscureArtists,( d => d.name )).map(function(d,i){return d[1][0]});

    let percentBasic = popularArtists.length/uniqArtists.length;
    let percentObscure = obscureArtists.length/uniqArtists.length;

    let basicLevel = "little";
    if(percentBasic > .6){
        basicLevel = "most"
    }
    else if(percentObscure > .5){
        basicLevel = "obscure";
    }
    else if(percentBasic > .3){
        basicLevel = "pretty"
    }

    return {"percentBasic":percentBasic,"percentObscure":percentObscure,"level":basicLevel,popularArtists:popularArtists,obscureArtists:obscureArtists,popularTracks:popularTracks,obscureTracks:obscureTracks}
}
/**
 * Join an array of strings into an English list: "a", "a and b",
 * "a, b, and c" (Oxford comma for three or more items).
 *
 * BUG FIX: an empty array previously fell through to the join branch and
 * produced ", and undefined"; it now returns the empty string.
 *
 * @param {string[]} arr - Items to join.
 * @returns {string} The joined sentence fragment.
 */
function arrayToSentence(arr) {
  if (arr.length === 0) return '';
  if (arr.length === 1) return arr[0];
  if (arr.length == 2) {
    return arr[0] + " and " + arr[1];
  }
  const firsts = arr.slice(0, arr.length - 1);
  const last = arr[arr.length - 1];
  return firsts.join(', ') + ', and ' + last;
}
// Collect every track from the long- and short-term listening windows
// whose artist genre list contains the exact tag "pop".
function filterForPopSongs(data) {
  const relevantWindows = data.filter(
    (entry) => entry.timeFrame == "long_term" || entry.timeFrame == "short_term"
  );
  const tracks = relevantWindows.map((entry) => entry.trackData).flat(1);
  return tracks.filter((track) => track["artistGenres"].indexOf("pop") > -1);
}
// Collect every artist from the long- and short-term windows whose genre
// list contains at least one of the requested genres (exact tag match).
function filterForGenres(data, genresForAnalysis) {
  const artists = data
    .filter((entry) => entry.timeFrame == "long_term" || entry.timeFrame == "short_term")
    .map((entry) => entry.artistData)
    .flat(1);

  return artists.filter((artist) =>
    genresForAnalysis.some((genre) => artist["genres"].indexOf(genre) > -1)
  );
}
// Keep only tracks whose album release year is after 2015.
function filterForRecent(songs) {
  return songs.filter((song) => +song.album.release_date.slice(0, 4) > 2015);
}
// Keep only tracks whose album release year is before 1991.
function filterForOld(songs) {
  return songs.filter((song) => +song.album.release_date.slice(0, 4) < 1991);
}
/**
 * Return the most frequent element of `arr` (undefined for an empty
 * array). Ties go to the later-occurring element, matching the old
 * sort-then-pop behaviour.
 *
 * BUG FIX: the previous implementation sorted the caller's array in place
 * AND popped its last element off, mutating the input; this version uses
 * a frequency map and leaves `arr` untouched.
 *
 * @param {Array} arr - Values to tally (compared by identity/SameValue).
 * @returns {*} The modal value, or undefined when arr is empty.
 */
function mode(arr) {
  const counts = new Map();
  let best;
  let bestCount = 0;
  for (const value of arr) {
    const count = (counts.get(value) || 0) + 1;
    counts.set(value, count);
    // >= so that on a tie the later value wins, like the old version.
    if (count >= bestCount) {
      bestCount = count;
      best = value;
    }
  }
  return best;
}
// Analyze when the listener's tracks were released.
// Computes the mean release year, the modal "decade digit" (the third
// character of the release year string, e.g. "2014" -> 1 — note this
// collides across centuries, so 191x and 201x share a digit), the
// de-duplicated tracks in that modal decade, tracks older than the mean
// year, and two special buckets: "obamaEra" (modal 201x decade whose
// modal year is pre-2019) and "presentTracks" (modal year 2019+).
function determineReleaseYears(data){
    // The filter callback currently returns the entry itself (always
    // truthy) — the short_term exclusion is commented out.
    let tracks = data.tracks.filter(function(d){
        return d//d.timeFrame != "short_term"
    })
    .map(function(d){ return d.trackData}).flat(1);

    let releaseDates = tracks.map(d => +d.album.release_date.slice(0,4));
    let avgReleaseDate = Math.floor(d3.mean(releaseDates,d => d));
    // Modal third digit of the release year ("2014" -> 1).
    let avgReleaseDecade = Math.floor(mode(tracks.map(d => +d.album.release_date.slice(2,3))));

    let tracksInAvgReleaseDecade = tracks.filter(function(d,i){
        return +d.album.release_date.slice(2,3) == avgReleaseDecade;
    })

    // De-duplicate by track name, keeping the first occurrence.
    tracksInAvgReleaseDecade = d3.groups(tracksInAvgReleaseDecade,function(d,i){
        return d.name
    }).map(function(d){
        return d[1][0];
    })

    let obamaEra = [];
    let presentTracks = [];

    // Tracks strictly older than the mean year, oldest first, de-duplicated.
    let tracksBelowAvgReleaseDate = tracks.filter(function(d){
        return +d.album.release_date.slice(0,4) < avgReleaseDate
    })
    .sort(function(a,b){
        return +a.album.release_date.slice(0,4) - +b.album.release_date.slice(0,4)
    });

    tracksBelowAvgReleaseDate = d3.groups(tracksBelowAvgReleaseDate,function(d,i){
        return d.name
    }).map(function(d){
        return d[1][0];
    })

    // Decade digit 1 (201x era, per the usage above): pre-2019 modal year
    // means the bulk of the decade's tracks are "Obama era".
    if(avgReleaseDecade == 1){
        let avgYear = Math.floor(mode(tracksInAvgReleaseDecade.map(d => +d.album.release_date.slice(0,4))));
        if(avgYear < 2019){
            obamaEra = tracksInAvgReleaseDecade.filter(function(d,i){
                return +d.album.release_date.slice(0,4) < 2019;
            })
        }
    }

    // Decade digit 1 or 2 with a 2019+ modal year: current-era tracks.
    if(avgReleaseDecade == 1 || avgReleaseDecade == 2){
        let avgYear = Math.floor(mode(tracksInAvgReleaseDecade.map(d => +d.album.release_date.slice(0,4))));
        if(avgYear > 2018){
            presentTracks = tracksInAvgReleaseDecade.filter(function(d,i){
                return +d.album.release_date.slice(0,4) > 2018;
            })
        }
    }

    return {avgReleaseDate:avgReleaseDate,tracksBelowAvgReleaseDate:tracksBelowAvgReleaseDate,avgReleaseDecade:avgReleaseDecade,tracksInAvgReleaseDecade:tracksInAvgReleaseDecade,obamaEra:obamaEra,presentTracks:presentTracks};
}
// Normalize a name for fuzzy matching: lowercase it, drop any parenthetical
// suffix, strip punctuation, collapse then remove whitespace, and remove the
// word "remix".
function trimNames(artist){
let cleaned = artist.toLowerCase();
cleaned = cleaned.split("(")[0];
cleaned = cleaned.replace(/[.,\/#!$%\^&\*;:{}=\-_’'`~()]/g,"");
cleaned = cleaned.replace(/\s{2,}/g," ");
cleaned = cleaned.replace(/\s/g,'');
return cleaned.replace("remix","");
}
// Match the user's heavily-played albums (albums contributing more than 3 top
// tracks, with medium/long-term listening only) against the available album
// insult fragments. Returns [{name, fragments, rank, item}] or [].
function determineAlbumFragment(data,fragments){
let insult = [];
// Normalized names of every album that has at least one insult fragment.
let albumsWithFragments = fragments.album.map(function(d){
return trimNames(d.name)
});
let albumMap = d3.group(fragments.album, d => trimNames(d.name))
let albumsAll = data.tracks.filter(function(d){
return d.timeFrame != "short_term"
})
// NOTE(review): this comparator returns -1 whether a OR b is medium_term,
// which is not a consistent ordering — it looks like the intent is simply
// "medium_term entries first". Confirm before relying on the exact order.
.sort(function(a,b){
if(a.timeFrame == "medium_term"){
return -1
}
if(b.timeFrame == "medium_term"){
return -1;
}
return 1;
//return d.timeFrame
})
.map(function(d){
return d.trackData
}).flat(1);
// Group tracks by normalized album name; only albums with >3 tracks qualify.
let uniqueAlbums = d3.groups(albumsAll,function(d){
return trimNames(d.album.name)
})
.filter(function(d,i){
return d[1].length > 3;
})
if(uniqueAlbums.length > 0){
uniqueAlbums = uniqueAlbums
.map(function(d,i){
return {name:d[0],item:d[1][0]}
});
uniqueAlbums.forEach(function(item,i){
let album = item.name;
if(albumsWithFragments.indexOf(album) > -1){
insult.push({"name":album,fragments:albumMap.get(album), rank:i,item:item});
}
})
}
if(insult.length > 0){
return insult;
}
return [];
}
// Return the user's dominant genres: every non-"pop" genre accounting for
// more than 5% of top artists, always keeping at least the #1 entry.
// `data.genres` entries are [genreName, artistList] pairs.
function getMostPopularGenre(data){
let artistCount = d3.sum(data.artists, d => d.artistData.length);
//limited to just 10% or #1 genre;
console.log(data.genres);
return data.genres
.filter(function(d){
// Anything containing "pop" is excluded (too generic a target).
return !d[0].includes("pop");
})
.filter(function(d,i){
// Keep genres above the 5% threshold, plus the top-ranked genre always.
return d[1].length > artistCount*.05 || i == 0;
})
}
// Match the user's most popular genres against the available genre insult
// fragments. Returns [{name, fragments, rank}] for each match, or [].
function determineGenreFragment(data,fragments){
let insult = [];
// Normalized names of every genre that has at least one insult fragment.
let genresWithFragments = fragments.genre.map(function(d){
return trimNames(d.name)
});
let genreMap = d3.group(fragments.genre, d => trimNames(d.name))
let genresAll = getMostPopularGenre(data);
genresAll = genresAll
.map(function(d){
return trimNames(d[0]);
}).flat(1);
genresAll.forEach(function(genre,i){
if(genresWithFragments.indexOf(genre) > -1){
insult.push({"name":genre,fragments:genreMap.get(genre), rank:i});
}
})
if(insult.length > 0){
return insult;
}
return [];
}
// Match the user's medium/long-term top tracks against the available track
// insult fragments. Returns [{name, fragments, rank, item}] or [].
function determineTrackFragment(data,fragments){
let insult = [];
// Normalized names of every track that has at least one insult fragment.
let trackWithFragments = fragments.track.map(function(d){
return trimNames(d.name)
});
let trackMap = d3.group(fragments.track, d => trimNames(d.name))
let tracksAll = data.tracks.filter(function(d){return d.timeFrame != "short_term"}).map(function(d){ return d.trackData}).flat(1);
// De-duplicate by normalized track name, keeping the first occurrence.
let uniqueTracks = d3.groups(tracksAll,function(d){
return trimNames(d.name)
})
.map(function(d,i){
return {name:d[0],item:d[1][0]}
});
uniqueTracks.forEach(function(item,i){
let track = item.name;
if(trackWithFragments.indexOf(track) > -1){
insult.push({"name":track,fragments:trackMap.get(track), rank:i,item:item});
}
})
//insult = insult.flat(1);
if(insult.length > 0){
return insult;
}
return [];
}
// Match the user's medium/long-term top artists against the available artist
// insult fragments. Returns [{name, fragments, rank, item}] or [].
function determineArtistFragment(data,fragments){
let insult = [];
// Normalized names of every artist that has at least one insult fragment.
let artistsWithFragments = fragments.artist.map(function(d){
return trimNames(d.name)
});
let artistMap = d3.group(fragments.artist, d => trimNames(d.name))
let artistsAll = data.artists.filter(function(d){
return d.timeFrame != "short_term"
})
// NOTE(review): same inconsistent comparator as determineAlbumFragment —
// returns -1 whether a OR b is medium_term. Intent appears to be
// "medium_term entries first"; confirm.
.sort(function(a,b){
if(a.timeFrame == "medium_term"){
return -1
}
if(b.timeFrame == "medium_term"){
return -1;
}
return 1;
//return d.timeFrame
})
artistsAll = artistsAll
.map(function(d){ return d.artistData}).flat(1);
// De-duplicate by normalized artist name, keeping the first occurrence.
let uniqueArtists = d3.groups(artistsAll,function(d){
return trimNames(d.name)
})
.map(function(d,i){
return {name:d[0],item:d[1][0]}
});
uniqueArtists.forEach(function(item,i){
let artist = item.name;
if(artistsWithFragments.indexOf(artist) > -1){
insult.push({"name":artist,fragments:artistMap.get(artist),rank:i,item:item});
}
})
// insult = insult.flat(1);
if(insult.length > 0){
return insult;
}
return [];
}
// Pick one insult fragment at random, templated with the artist's name.
// Used as a fallback when no curated artist fragment matched.
function getGenericArtistFragment(artist){
const options = [
`${artist}-as-your-zoom-background`,
`${artist}-hive-army-stan-yas-um-slay-mama-i-think`,
`constantly-defending-${artist}`,
`annoying-your-neighbors-with-${artist}`,
`omfg-${artist}`,
`${artist}-stan`,
`too-much-${artist}`,
`cool-it-with-the-${artist}`,
`${artist}-on-repeat`
];
const index = Math.floor(Math.random() * options.length);
return options[index];
}
// Pick one insult fragment at random, templated with the genre's name.
// Used as a fallback when no curated genre fragment matched.
function getGenericGenreFragment(genre){
const options = [
`too-much-${genre}`,
`${genre}-overload`,
`${genre}-fan`,
`${genre}-addict`,
`${genre}-${genre}-and-more-${genre}`
];
const index = Math.floor(Math.random() * options.length);
return options[index];
}
// Assemble up to three "special" insult fragments from the matched artist,
// genre, album, and track fragments (roughly in that priority order), falling
// back to generic templated fragments when too few matched. Fragment choice
// within each candidate is random. Returns {specialFragment, specialObsession,
// specialItems, specialIds}.
// NOTE(review): `genreExplanation` is not declared in this function — it
// appears to be a module-level array declared elsewhere in the file. The
// genre-fragment branch pushes into it without clearing it first (only the
// generic-genre fallback resets it); confirm that is intentional.
function constructFragments(trackFragments,artistFragments,genreFragments,albumFragments,data){
// Medium-term artists first, then long-term — used for genre explanations
// and for the generic artist fallback.
let artistsLong = data.artists.filter(function(d){
return d.timeFrame == "medium_term";
})[0].artistData;
artistsLong = artistsLong.concat(data.artists.filter(function(d){
return d.timeFrame == "long_term";
})[0].artistData)
let specialObsession = [];
let specialFragment = [];
let specialItems = [];
let specialIds = [];
// Cache each candidate's fragment ids so used ones can be filtered out.
artistFragments.forEach(function(d){
d.fragmentArray = d.fragments.map(function(d){return d.id;});
})
// 1) Top-ranked artist fragment, chosen at random among its fragments.
if(artistFragments.length > 0){
let fragmentIndex = Math.floor(Math.random() * artistFragments[0].fragments.length);
let artistFragmentId = artistFragments[0].fragments[fragmentIndex].id
specialFragment.push(artistFragments[0].fragments[fragmentIndex].fragment);
specialObsession.push(artistFragments[0].fragments[fragmentIndex].name)
specialItems.push(artistFragments[0].item)
specialIds.push(artistFragmentId);
artistFragments = artistFragments.filter(function(d){
return d.fragmentArray.indexOf(artistFragmentId) == -1;
})
}
// 2) Top-ranked genre fragment, with example artists for the obsession text.
if(genreFragments.length > 0){
for(let artist in artistsLong){
if(artistsLong[artist].hasOwnProperty("genres")){
let artistGenres = artistsLong[artist].genres;
if(artistGenres.indexOf(genreFragments[0].name) > -1){
genreExplanation.push(artistsLong[artist].name);
}
}
}
let fragmentIndex = Math.floor(Math.random() * genreFragments[0].fragments.length);
specialFragment.push(genreFragments[0].fragments[fragmentIndex].fragment);
let obsessionString = `${genreFragments[0].fragments[fragmentIndex].name}`;
if(genreExplanation.length > 0){
obsessionString = `${genreFragments[0].fragments[fragmentIndex].name} (e.g., ${arrayToSentence(genreExplanation.slice(0,2))})`
}
specialObsession.push(obsessionString);
specialIds.push(genreFragments[0].fragments[fragmentIndex].id);
}
// 3) Top-ranked album fragment.
if(albumFragments.length > 0){
let fragmentIndex = Math.floor(Math.random() * albumFragments[0].fragments.length);
specialFragment.push(albumFragments[0].fragments[fragmentIndex].fragment);
specialObsession.push(albumFragments[0].fragments[fragmentIndex].name)
specialItems.push(albumFragments[0].item)
specialIds.push(albumFragments[0].fragments[fragmentIndex].id);
}
// Backfill with track fragments and second/third artist fragments until
// three special fragments have been collected.
if(specialFragment.length < 3 && trackFragments.length > 0){
let fragmentIndex = Math.floor(Math.random() * trackFragments[0].fragments.length);
specialFragment.push(trackFragments[0].fragments[fragmentIndex].fragment);
specialObsession.push(trackFragments[0].fragments[fragmentIndex].name)
specialItems.push(trackFragments[0].item)
specialIds.push(trackFragments[0].fragments[fragmentIndex].id);
}
if(specialFragment.length < 3 && artistFragments.length > 1){
let fragmentIndex = Math.floor(Math.random() * artistFragments[1].fragments.length);
let artistFragmentId = artistFragments[1].fragments[fragmentIndex].id
specialFragment.push(artistFragments[1].fragments[fragmentIndex].fragment);
specialObsession.push(artistFragments[1].fragments[fragmentIndex].name)
specialItems.push(artistFragments[1].item)
specialIds.push(artistFragmentId);
artistFragments = artistFragments.filter(function(d){
return d.fragmentArray.indexOf(artistFragmentId) == -1;
})
}
if(specialFragment.length < 3 && artistFragments.length > 2){
let fragmentIndex = Math.floor(Math.random() * artistFragments[2].fragments.length);
specialFragment.push(artistFragments[2].fragments[fragmentIndex].fragment);
specialObsession.push(artistFragments[2].fragments[fragmentIndex].name)
specialItems.push(artistFragments[2].item)
specialIds.push(artistFragments[2].fragments[fragmentIndex].id);
}
// Generic fallbacks: templated artist fragment, then templated genre fragment.
if(specialFragment.length < 1){
specialFragment.push(getGenericArtistFragment(artistsLong[0].name));
specialObsession.push(artistsLong[0].name)
specialItems.push(artistsLong[0].item)
}
if(specialFragment.length < 2){
genreExplanation = [];
let genresAll = getMostPopularGenre(data);
if(genresAll.length > 0){
specialFragment.push(getGenericGenreFragment(genresAll[0][0]));
for(let artist in artistsLong){
if(artistsLong[artist].hasOwnProperty("genres")){
let artistGenres = artistsLong[artist].genres;
if(artistGenres.indexOf(genresAll[0][0]) > -1){
genreExplanation.push(artistsLong[artist].name);
}
}
}
let obsessionStringTwo = `${genresAll[0][0]}`;
if(genreExplanation.length > 0){
obsessionStringTwo = `${genresAll[0][0]} (e.g., ${arrayToSentence(genreExplanation.slice(0,2))})`
}
specialObsession.push(obsessionStringTwo)
}
}
return {specialFragment:specialFragment,specialObsession:specialObsession,specialItems:specialItems,specialIds:specialIds};
}
// Promise-based delay: resolves (with no value) after `ms` milliseconds.
function sleeper(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
// Return the URL of the image whose width is closest to the 240px display
// target, or null when the array is empty or the best match has no `url`.
//
// Fix: the original sorted `imageArray` in place, silently reordering the
// caller's array (Spotify image arrays are documented largest-first). Sorting
// a copy preserves the caller's data while returning the same result.
function getClosestImage(imageArray){
const goal = 240;
if (imageArray.length > 0) {
const byCloseness = imageArray.slice().sort(function(a, b){
return Math.abs(a.width - goal) - Math.abs(b.width - goal);
});
if (byCloseness[0].hasOwnProperty("url")) {
return byCloseness[0].url;
}
}
return null;
}
// Entry point for the "score" screen. Matches the user's listening data
// against insult fragments, animates the special-score text, then builds the
// full report DOM (top tracks/artists, basicness verdict, release-year
// verdict, sign-off, and newsletter/patron footers).
// Params: `data` = aggregated Spotify top tracks/artists/genres; `token` is
// accepted but not read here; `fragments` = curated insult fragments keyed by
// track/artist/genre/album; `loadingOutput[1]` supplies cover-flow images.
async function init(data,token, fragments, loadingOutput){
// Match each fragment category against the user's listening data.
let trackFragments = determineTrackFragment(data,fragments);
let artistFragments = determineArtistFragment(data,fragments);
let genreFragments = determineGenreFragment(data,fragments);
let albumFragments = determineAlbumFragment(data,fragments);
let coverFlowImages = loadingOutput[1].map(function(d,i){
return d.image;
});
let specialFragmentObject = constructFragments(trackFragments,artistFragments,genreFragments,albumFragments,data);
// Pull cover art for the matched items (album art for tracks, artist images
// otherwise) and merge with the preloaded cover-flow images.
let newCoverImages = specialFragmentObject.specialItems.map(function(d){
let image = null;
if(d.item.type == "track"){
if(d.item.album.hasOwnProperty('images')){
image = getClosestImage(d.item.album.images);
}
}
else {
if(d.item.hasOwnProperty('images')){
image = getClosestImage(d.item.images);
}
}
return image;
})
newCoverImages = [...new Set(newCoverImages)];
newCoverImages = newCoverImages.concat(coverFlowImages).flat(1)
d3.select(".score-chat").style("padding-top","14rem")
d3.select(".score-chat").style("padding-bottom","300px")
// d3.select(".ascii-art").style("display","block")
await sleeper(2000);
coverFlow.init(newCoverImages.slice(0,10),".cover-score");
let releaseYears = determineReleaseYears(data);
let isBasic = determineBasicness(data);
// Type out the headline score, then explain which obsessions caused it.
await typeOutText.specialScore(specialFragmentObject.specialFragment.join("-"),".score-chat-special",500).then(scrollBottom)
//
console.log(genreExplanation);
await typeOutText.typeOut(`> Thank your obsessions with ${arrayToSentence(specialFragmentObject.specialObsession)} for that.`,".score-chat",1500).then(scrollBottom)
let specialFragmentsToRemove = specialFragmentObject.specialObsession.map(function(d){
return trimNames(d);
})
// Collect the remaining fragments, dropping any whose name or fragment id
// was already used in the special score, then keep the 5 best-ranked.
let moreFragments = artistFragments
.concat(trackFragments,genreFragments,albumFragments);
let moreFragmentsCleaned = [];
let idsUsed = specialFragmentObject.specialIds;
moreFragments = moreFragments
.filter(function(d){
let hasFragment = false;
let fragmentArray = [...new Set(d.fragments.map(function(d){return d.id;}))]
d.fragmentArray = fragmentArray;
for (let frag in fragmentArray){
if(specialFragmentObject.specialIds.indexOf(fragmentArray[frag]) > -1){
hasFragment = true;
}
}
return specialFragmentsToRemove.indexOf(d.name) == -1 || hasFragment == false;
})
.sort(function(a,b){return a.rank - b.rank; })
for (let fragment in moreFragments){
let row = moreFragments[fragment];
for (let frag in row.fragmentArray){
if(idsUsed.indexOf(row.fragmentArray[frag]) == -1){
idsUsed = idsUsed.concat(row.fragmentArray);
moreFragmentsCleaned.push(moreFragments[fragment]);
}
}
}
moreFragmentsCleaned = moreFragmentsCleaned
.slice(0,5)
.map(function(d){
let fragment = d.fragments[Math.floor(Math.random() * d.fragments.length)].fragment;
return fragment;
})
;
let uniqFragments = [...new Set(moreFragmentsCleaned)]
if(uniqFragments.length > 0){
await typeOutText.typeOut(`> Based on your listening habits, I can also tell you your spotify was...`,".score-chat",500).then(scrollBottom)
}
for (let fragment in uniqFragments){
await typeOutText.typeOut(`> ${uniqFragments[fragment]} bad`,".score-chat",500).then(scrollBottom)
}
response = ["Here's what else I learned in that hellscape:","Here's what else is going on in your aural trash fire:","Unfortunately that's not all I learned:","But wait, it gets worse:"];
await typeOutText.typeOut(`> ${response[Math.floor(Math.random() * response.length)]}`,".score-chat",1000).then(scrollBottom);
// Build the static report below the chat.
let reportContainer = d3.select(".score-chat").append("div").attr("class","report");
d3.select(".score-chat").style("padding-bottom",null)
reportContainer.append("p")
.attr("class","top-margin bold")
.text("You listen to these tracks too much:")
reportContainer.append("div")
.append("ul")
.selectAll("li")
.data(data.tracks.filter(d => d.timeFrame == "medium_term")[0].trackData.slice(0,5))
.enter()
.append("li")
.text(function(d){
return parseTrackName.parseTrack(d);
})
reportContainer.append("p")
.attr("class","top-margin bold")
.text("You stan these artists to an uncomfortable extent:")
reportContainer.append("div")
.append("ul")
.selectAll("li")
.data(data.artists.filter(d => d.timeFrame == "long_term")[0].artistData.slice(0,5))
.enter()
.append("li")
.text(function(d){
if(d.hasOwnProperty('name')){
return d.name;
}
return null;
})
// Candidate one-liners for each basicness level; one is picked at random
// from the bucket matching isBasic.level.
let basicWordingMap = {
"most":[],
"pretty":[],
"little":[],
"obscure":[]
}
if(isBasic.popularArtists.length > 0){
console.log("pop artists 0");
basicWordingMap["pretty"].push(`You've got some original music, but most of it is mainstream garbage, like ${isBasic.popularArtists[0].name}.`)
}
if(isBasic.popularArtists.length > 1){
console.log("pop artists 1");
basicWordingMap["most"].push(`You listen to what everybody else listens to, like ${isBasic.popularArtists[0].name} and ${isBasic.popularArtists[1].name}.`)
}
if(isBasic.popularArtists.length > 2){
console.log("pop artists 2");
basicWordingMap["pretty"].push(`You listen to a few unique things, but most of it is what everybody else listens to, like ${isBasic.popularArtists[1].name} and ${isBasic.popularArtists[2].name}.`)
}
if(isBasic.popularArtists.length > 3){
console.log("pop artists 3");
basicWordingMap["most"].push(`${isBasic.popularArtists[2].name} and ${isBasic.popularArtists[3].name}? Almost all your music is what somebody else told you to listen to.`)
basicWordingMap["pretty"].push(`Most of your music comes straight from iHeartRadio. lol ${isBasic.popularArtists[3].name}.`)
}
if(isBasic.popularArtists.length > 5){
console.log("pop artists 5");
basicWordingMap["most"].push(`With ${isBasic.popularArtists[4].name} and ${isBasic.popularArtists[5].name}, your music library has all the originality of a discount bin at Walmart.`)
}
if(isBasic.popularArtists.length > 7){
console.log("pop artists 7");
basicWordingMap["most"].push(`All that ${isBasic.popularArtists[6].name} and ${isBasic.popularArtists[7].name} makes your Spotify taste like a Frappuccino.`)
}
if(isBasic.obscureArtists.length > 0 && isBasic.popularArtists.length > 0){
console.log("obscure artists 0");
basicWordingMap["little"].push(`Yeah, you've got some obscure artists like ${isBasic.obscureArtists[0].name}, but your real top ones are ultra-mainstream like ${isBasic.popularArtists[0].name}.`)
}
if(isBasic.obscureArtists.length > 0){
console.log("obscure artists 0,0");
basicWordingMap["little"].push(`You're trying to be cool with ${isBasic.obscureArtists[0].name}, but your favorites are the same as everybody else's.`)
}
if(isBasic.obscureArtists.length > 1){
console.log("obscure artists 0,1");
basicWordingMap["obscure"].push(`Oh wow ${isBasic.obscureArtists[0].name} and ${isBasic.obscureArtists[1].name}! Your taste is so obscure that's so cool I bet you're super interesting.`)
}
if(isBasic.obscureArtists.length > 2){
console.log("obscure artists 2");
basicWordingMap["little"].push(`You're trying to impress with some obscure tracks like ${isBasic.obscureArtists[2].name}. Nobody's fooled.`)
}
if(isBasic.obscureArtists.length > 4){
basicWordingMap["obscure"].push(`${isBasic.obscureArtists[3].name} and ${isBasic.obscureArtists[4].name}? Where do you even find this?`)
}
let basicOutput = `<span class="bold">You are ${Math.round(isBasic.percentBasic*100)}% basic.</span> ${basicWordingMap[isBasic.level][Math.floor(Math.random() * basicWordingMap[isBasic.level].length)]}.`;
if(basicWordingMap[isBasic.level] == "obscure"){
basicOutput = `<span class="bold">You are ${Math.round(isBasic.percentBasic*100)}% basic.</span> ${basicWordingMap[isBasic.level][Math.floor(Math.random() * basicWordingMap[isBasic.level].length)]}. There's a reason nobody listens to the same stuff as you.`
}
reportContainer.append("p")
.attr("class","top-margin")
.html(basicOutput)
// Release-year verdict: decade digits 0-2 map to 2000s/2010s/2020s, the
// rest to 19x0s (see determineReleaseYears).
let decadeString = `19${releaseYears.avgReleaseDecade}0s`
if(releaseYears.avgReleaseDecade < 3){
decadeString = `20${releaseYears.avgReleaseDecade}0s`
}
console.log(releaseYears);
let releaseYearTitleMap = {
"past": `You're stuck in the ${decadeString}.`,
"mid": `You're stuck in the early 2010s.`,
"present": "You're too trendy for your own good."
}
let releaseYearDescMap = {
"past": [],
"mid":[],
"present": []
}
let releaseYearTitle = releaseYearTitleMap["past"];
let releaseYearDesc = releaseYearDescMap["past"];
if(releaseYears.tracksInAvgReleaseDecade.length > 0){
console.log("release 0");
releaseYearDesc.push(`You've gotta get over ${parseTrackName.parseTrack(releaseYears.tracksInAvgReleaseDecade[0])}.`);
}
if(releaseYears.tracksInAvgReleaseDecade.length > 1){
console.log("release 1");
releaseYearDesc.push(`Forget about ${releaseYears.tracksInAvgReleaseDecade[1].artists[0].name}'s ${releaseYears.tracksInAvgReleaseDecade[1].album.name} and move on.`);
}
if(releaseYears.tracksInAvgReleaseDecade.length > 2){
console.log("release 2");
releaseYearDesc.push(`You know there's been good music since ${releaseYears.tracksInAvgReleaseDecade[2].artists[0].name}, right?`);
}
if(releaseYears.avgReleaseDecade == 1 || releaseYears.avgReleaseDecade == 2 ){
releaseYearTitle = releaseYearTitleMap["present"]
releaseYearDesc = releaseYearDescMap["present"]
console.log("decade 1,2",releaseYears.presentTracks);
if(releaseYears.presentTracks.length > 1){
console.log("present 1");
releaseYearDescMap["present"].push(`You only listen to music made in the last year like ${parseTrackName.parseTrack(releaseYears.presentTracks[0])} and ${parseTrackName.parseTrack(releaseYears.presentTracks[1])}.`)
}
if(releaseYears.presentTracks.length > 0){
releaseYearDescMap["present"].push(`You know there's good music from before 2019, right?`)
}
if(releaseYears.obamaEra.length > 1){
console.log("obama length",releaseYears.obamaEra);
releaseYearDescMap["mid"].push(`You only listen to Obama-era jams like ${parseTrackName.parseTrack(releaseYears.obamaEra[0])} and ${parseTrackName.parseTrack(releaseYears.obamaEra[1])}.`)
if(releaseYears.obamaEra.length > 2){
console.log("obama 2");
releaseYearDescMap["mid"].push(`For you, music's been all downhill since ${releaseYears.obamaEra[2].artists[0].name} made ${releaseYears.obamaEra[2].name}.`)
}
if(releaseYears.obamaEra.length > 3){
console.log("obama 3");
releaseYearDescMap["mid"].push(`You must have peaked right around ${releaseYears.obamaEra[3].artists[0].name}'s ${releaseYears.obamaEra[3].album.name}.`)
}
releaseYearTitle = releaseYearTitleMap["mid"]
releaseYearDesc = releaseYearDescMap["mid"]
}
}
reportContainer.append("p")
.attr("class","top-margin")
.html(`<span class="bold">${releaseYearTitle}</span> ${releaseYearDesc[Math.floor(Math.random() * releaseYearDesc.length)]}`)
// Randomized sign-off lines, then the static promo/footer markup.
let endingLineOne = [
"Analysis completed in 4.012 exhausting seconds.",
"Analysis finally complete.",
"That's it. I'm done.",
"Well, that was really something."
]
let endingLineTwo = [
"I need to go sit in silence for a second.",
"I need to go recalibrate my taste levels.",
"I guess the important thing is that your music makes you feel good...",
"Thanks for letting me see your music I guess."
]
let endingLineThree = [
"Shutting down."
]
reportContainer.append("p")
.attr("class","top-margin")
.text(endingLineOne[Math.floor(Math.random() * endingLineOne.length)])
reportContainer.append("p")
.text(endingLineTwo[Math.floor(Math.random() * endingLineTwo.length)])
reportContainer.append("p")
.attr("class","bold")
.text(endingLineThree[Math.floor(Math.random() * endingLineThree.length)])
reportContainer.append("p")
.attr("class","top-margin")
.html('Enjoy this project? Consider helping fund The Pudding on Patreon.')
reportContainer.append("a")
.attr("class","patron-button")
.attr("href","https://patreon.com/thepudding")
.attr("target","_blank")
.html('<button type="button" name="button">Become a Patron</button>')
reportContainer.append("p")
.attr("class","top-margin")
.html('You can disconnect this project from your Spotify account <a href="https://www.spotify.com/account/apps/">here</a> under the app name “Bad Music”. This project does not store any Spotify data.')
reportContainer.append("p")
.attr("class","top-margin")
.html('You should subscribe to our newsletter too.')
reportContainer.append("form")
.attr("action","https://poly-graph.us11.list-manage.com/subscribe/post")
.attr("method","POST")
.html('<input type="hidden" name="u" value="c<PASSWORD>2cde4331<PASSWORD>"> <input type="hidden" name="id" value="9af100ac0f"> <label style="position:absolute;opacity:0" for="MERGE0">Email</label> <input label="email" class="newsletter__input" type="email" autocapitalize="off" autocorrect="off" name="MERGE0" id="MERGE0" size="25" value="" placeholder="<EMAIL>"> <div class="hidden-from-view" style="left:-10000px;position:absolute"><input label="text" type="text" name="b_c70d3c0e372cde433143ffeab_9af100ac0f" tabindex="-1" value=""></div> <input class="btn" style="" type="submit" name="submit" value="Subscribe">')
reportContainer.append("p")
.attr("class","top-margin")
.html('Or follow us on <a target="_blank" href="https://www.instagram.com/the.pudding">Instagram</a>, <a target="_blank" href="https://twitter.com/puddingviz">Twitter</a>, <a target="_blank" href="https://www.facebook.com/pudding.viz">Facebook</a>, and <a href="/feed/index.xml" target="_blank">RSS</a>.')
reportContainer.append("p")
.attr("class","top-margin")
.html('A.I. trained by <a href="https://pudding.cool/author/mike-lacher/"><NAME></a> & <a href="https://pudding.cool/author/matt-daniels/"><NAME></a> for <a href="https://pudding.cool">The Pudding</a>. Additional support from <a href="https://www.omrirolan.com/"><NAME></a>, <a href="https://pudding.cool/author/kevin-litman-navarro/"><NAME></a>.')
d3.select("footer").style("display","block")
}
export default { init };
|
#!/bin/bash

# Release script: bumps/derives the version, builds the gem, tags the release,
# publishes to rubygems.org, and pushes the tag and branch.
# Required env: RELEASE_RUBYGEMS_API_KEY. Optional: RELEASE_USER (defaults to
# GITHUB_ACTOR), RELEASE_VERSION (defaults to the next patch release).
#
# Fix: variable expansions are now quoted — unquoted `[ -z $VAR ]` collapses
# to `[ -z ]` (always true) when the variable is unset on some shells and
# breaks outright when it contains spaces; `mkdir -p` lets the script rerun
# when ~/.gem already exists.
if [ -z "$RELEASE_RUBYGEMS_API_KEY" ]; then
  echo No API key specified for publishing to rubygems.org. Stopping release.
  exit 1
fi
RELEASE_BRANCH=$GITHUB_REF_NAME
if [ -z "$RELEASE_USER" ]; then
  export RELEASE_USER=$GITHUB_ACTOR
fi
# Resolve the releaser's display name from the GitHub API for git authorship.
RELEASE_GIT_NAME=$(curl -s "https://api.github.com/users/$RELEASE_USER" | jq -r .name)
RELEASE_GIT_EMAIL=$RELEASE_USER@users.noreply.github.com
GEMSPEC=$(ls -1 *.gemspec | head -1)
RELEASE_NAME=$(ruby -e "print (Gem::Specification.load '$GEMSPEC').name")
# RELEASE_VERSION must be an exact version number or else it defaults to the next patch release
if [ -z "$RELEASE_VERSION" ]; then
  export RELEASE_VERSION=$(ruby -e "print (Gem::Specification.load '$GEMSPEC').version.then { _1.prerelease? ? _1.release.to_s : (_1.segments.tap {|s| s[-1] += 1 }.join ?.) }")
fi
# configure git to push changes
git config --local user.name "$RELEASE_GIT_NAME"
git config --local user.email "$RELEASE_GIT_EMAIL"
# configure gem command for publishing
mkdir -p "$HOME/.gem"
echo -e "---\n:rubygems_api_key: $RELEASE_RUBYGEMS_API_KEY" > "$HOME/.gem/credentials"
chmod 600 "$HOME/.gem/credentials"
# release! (subshell with `set -e` so any failed step aborts the rest)
(
set -e
ruby tasks/version.rb
git commit -a -m "release $RELEASE_VERSION [no ci]"
git tag -m "version $RELEASE_VERSION" "v$RELEASE_VERSION"
RUBYOPT='-r ./gem-version-patch.rb' gem build "$GEMSPEC"
git push origin "$(git describe --tags --exact-match)"
gem push "$RELEASE_NAME-$RELEASE_VERSION.gem"
git push origin "$RELEASE_BRANCH"
#sed -i 3d README.adoc
#sed -i "$(grep -m 1 -n '^== ' CHANGELOG.adoc | cut -f1 -d:)i == Unreleased\n\n_No changes since previous release._\n" CHANGELOG.adoc
#git commit -a -m 'begin development on next version [no ci]'
#git push origin $RELEASE_BRANCH
)
exit_code=$?
# nuke gem credentials
rm -rf "$HOME/.gem"
git status -s -b
exit $exit_code
|
<filename>src/movies/view/state/reducer/slice/movieSlice.ts<gh_stars>0
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
import { IMovie } from '../../../../domain/models/Movie';
import initialState from '../../initialState';
import type { RootState } from '../../store';
// Redux Toolkit slice holding movie-browsing UI state: the currently selected
// movie (`currentMovie`) and a numeric `step` counter.
export const movieSlice = createSlice({
name: 'movies',
// `createSlice` will infer the state type from the `initialState` argument
initialState,
reducers: {
// Store the movie the user selected as the current one.
setCurrentMovie: (state, action: PayloadAction<IMovie>) => {
state.currentMovie = action.payload;
},
// Advance the step counter by one.
setIncrementStep: (state) => {
state.step += 1;
},
// Move the step counter back by one.
setDecrementStep: (state) => {
state.step -= 1;
},
// Use the PayloadAction type to declare the contents of `action.payload`
setIncrementByAmount: (state, action: PayloadAction<number>) => {
state.step += action.payload;
},
},
});
export const {
setCurrentMovie,
setIncrementStep,
setDecrementStep,
setIncrementByAmount,
} = movieSlice.actions;
// Other code such as selectors can use the imported `RootState` type
// Selector returning the whole movies slice of the store.
export const selectMovie = (state: RootState) => state.movies;
export default movieSlice.reducer;
|
<gh_stars>1-10
import axios from 'axios'
// @ts-ignore
import packageJson from './package.json'
// Preconfigured axios instance for the betting-api.com Marathonbet endpoint.
// Every request carries the client type/version headers so the API can
// identify this JS client library.
const client = axios.create({
timeout: 60000, // 60-second request timeout
headers: {
'client-type': 'js',
'client-version': packageJson.version,
},
baseURL: 'https://api.betting-api.com/marathonbet',
})
export default client
|
#!/bin/bash
# NOTE(review): prints a fixed marker line only — appears to be a placeholder
# or fixture for a "set login" test scenario; confirm intent before removing.
echo test set login
|
// Service-worker precache manifest: appends revision/URL pairs that the
// service worker should cache. NOTE(review): this looks like build output
// (hashed chunk names) — regenerate via the build rather than hand-editing.
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "024451cc07af28e07692291fe8cefe23",
    "url": "./index.html"
  },
  {
    "revision": "33e551e02e5e54e83fa2",
    "url": "./static/css/2.9e3fc118.chunk.css"
  },
  {
    "revision": "0cc2ae88086ffc5d9365",
    "url": "./static/css/main.9bd7b5de.chunk.css"
  },
  {
    "revision": "33e551e02e5e54e83fa2",
    "url": "./static/js/2.ce211316.chunk.js"
  },
  {
    "revision": "7c4b6536cab2d1970758ff058c5fbbf8",
    "url": "./static/js/2.ce211316.chunk.js.LICENSE.txt"
  },
  {
    "revision": "0cc2ae88086ffc5d9365",
    "url": "./static/js/main.81af28fb.chunk.js"
  },
  {
    "revision": "1d6896b13d1316b5acb7",
    "url": "./static/js/runtime-main.2b4c0fc3.js"
  }
]);
# Uses template.html as a template, replacing some of its text to generate a
# new web page for each language.
python3 generateAll.py |
package gov.cms.bfd.model.codegen.codebook;
import com.google.auto.service.AutoService;
import com.google.common.collect.ImmutableSet;
import com.google.common.html.HtmlEscapers;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.ParameterizedTypeName;
import com.squareup.javapoet.TypeSpec;
import com.squareup.javapoet.TypeSpec.Builder;
import gov.cms.bfd.model.codebook.model.CcwCodebookInterface;
import gov.cms.bfd.model.codebook.model.Codebook;
import gov.cms.bfd.model.codebook.model.Variable;
import gov.cms.bfd.model.codebook.unmarshall.CodebookVariableReader;
import gov.cms.bfd.model.codegen.RifLayoutProcessingException;
import gov.cms.bfd.model.codegen.annotations.CodebookVariableEnumGeneration;
import gov.cms.bfd.model.codegen.annotations.RifLayoutsGenerator;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.UncheckedIOException;
import java.io.Writer;
import java.net.URLClassLoader;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Processor;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.PackageElement;
import javax.lang.model.element.TypeElement;
import javax.tools.Diagnostic;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
/**
 * This <code>javac</code> annotation {@link Processor} reads in the available {@link Codebook}s,
 * and then generates an enum with a constant for each {@link Variable} they contain.
 */
@AutoService(Processor.class)
public class CodebookVariablesEnumProcessor extends AbstractProcessor {
/**
* The value to stick in the enum constants' JavaDoc for {@link Variable} fields that aren't
* defined.
*/
private static final String MISSING_VARIABLE_FIELD = "(N/A)";
/**
* Both Maven and Eclipse hide compiler messages, so setting this constant to <code>true</code>
* will also log messages out to a new source file.
*/
private static final boolean DEBUG = true;
// Buffered log messages, flushed to a debug source file when processing completes.
private final List<String> logMessages = new LinkedList<>();
/**
* Declares the single annotation this processor handles.
*
* @see javax.annotation.processing.AbstractProcessor#getSupportedAnnotationTypes()
*/
@Override
public Set<String> getSupportedAnnotationTypes() {
return ImmutableSet.of(CodebookVariableEnumGeneration.class.getName());
}
/**
* Supports whatever the newest source version available at runtime is.
*
* @see javax.annotation.processing.AbstractProcessor#getSupportedSourceVersion()
*/
@Override
public SourceVersion getSupportedSourceVersion() {
return SourceVersion.latestSupported();
}
/**
* Handles each annotation-processing round: finds every package annotated with
* {@link CodebookVariableEnumGeneration} and generates its enum source. All exceptions are
* caught and reported as compiler diagnostics so they never propagate to <code>javac</code>.
*
* @see javax.annotation.processing.AbstractProcessor#process(java.util.Set,
* javax.annotation.processing.RoundEnvironment)
*/
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
try {
logNote(
"Processing triggered for '%s' on root elements '%s'.",
annotations, roundEnv.getRootElements());
logNote("Processor sysprop java.class.path: " + System.getProperty("java.class.path"));
logNote(
"Processor classloader URLs: "
+ Arrays.toString(((URLClassLoader) getClass().getClassLoader()).getURLs()));
Set<? extends Element> annotatedElements =
roundEnv.getElementsAnnotatedWith(CodebookVariableEnumGeneration.class);
for (Element annotatedElement : annotatedElements) {
// The annotation is only meaningful on package declarations (package-info.java).
if (annotatedElement.getKind() != ElementKind.PACKAGE)
throw new RifLayoutProcessingException(
annotatedElement,
"The %s annotation is only valid on packages (i.e. in package-info.java).",
RifLayoutsGenerator.class.getName());
process((PackageElement) annotatedElement);
}
} catch (RifLayoutProcessingException e) {
log(Diagnostic.Kind.ERROR, e.getMessage(), e.getElement());
} catch (Exception e) {
/*
* Don't allow exceptions of any type to propagate to the compiler. Log a
* warning and return, instead.
*/
StringWriter writer = new StringWriter();
e.printStackTrace(new PrintWriter(writer));
log(Diagnostic.Kind.ERROR, "FATAL ERROR: " + writer.toString());
}
// Flush the buffered debug messages once the final round is done.
if (roundEnv.processingOver()) writeDebugLogMessages();
return true;
}
/**
 * Generates the codebook-variables enum in the annotated package: one enum constant per parsed
 * CCW {@link Variable}, each carrying extensive JavaDoc, plus a {@code getVariable()} accessor
 * backed by a lazily-built lookup map.
 *
 * @param annotatedPackage the {@link PackageElement} to process that has been annotated with
 *     {@link CodebookVariableEnumGeneration}
 * @throws IOException An {@link IOException} may be thrown if errors are encountered trying to
 *     generate source files.
 */
private void process(PackageElement annotatedPackage) throws IOException {
  CodebookVariableEnumGeneration annotation =
      annotatedPackage.getAnnotation(CodebookVariableEnumGeneration.class);
  logNote(annotatedPackage, "Processing package annotated with: '%s'.", annotation);
  // Parse the extracted codebook data into Variable models, keyed by CCW variable id.
  Map<String, Variable> variablesById = CodebookVariableReader.buildVariablesMappedById();
  // The generated enum lives in the annotated package, named per the annotation.
  ClassName variableEnumName =
      ClassName.get(annotatedPackage.getQualifiedName().toString(), annotation.enumName());
  TypeSpec.Builder variablesEnumType =
      TypeSpec.enumBuilder(variableEnumName)
          .addModifiers(Modifier.PUBLIC)
          .addSuperinterface(CcwCodebookInterface.class);
  variablesEnumType.addJavadoc(
      "Enumerates the known CCW {@link $T} {@link $T}s, as extracted from the codebook"
          + " PDFs at <a href=\"https://www.ccwdata.org/web/guest/data-dictionaries\">CCW Data"
          + " Dictionaries</a>.\n",
      Codebook.class,
      Variable.class);
  for (Variable variable : variablesById.values()) {
    /*
     * Adds a standard enum constant, but with a lot of JavaDoc. Pulling this info
     * into IDEs should make development a lot easier for folks. (Note: technically,
     * we should HTML-escape everything, but I only bothered with the fields that
     * were actually causing problems, such as descriptions.)
     */
    Builder variableEnumBuilder = TypeSpec.anonymousClassBuilder("");
    variableEnumBuilder.addJavadoc(
        "<p>The {@code $L} CCW variable has the following properties (taken from its codebook PDF at"
            + " <a href=\"https://www.ccwdata.org/web/guest/data-dictionaries\">CCW"
            + " Data Dictionaries</a>):</p>\n",
        variable.getId());
    variableEnumBuilder.addJavadoc("<ul>\n");
    variableEnumBuilder.addJavadoc(
        "<li><strong>Codebook:</strong> $L ($L)</li>\n",
        variable.getCodebook().getName(),
        variable.getCodebook().getVersion());
    variableEnumBuilder.addJavadoc("<li><strong>Label:</strong> $L</li>\n", variable.getLabel());
    variableEnumBuilder.addJavadoc("<li><strong>Description:</strong>\n");
    // Optional, multi-paragraph fields fall back to a placeholder; paragraphs are HTML-escaped.
    for (String paragraph :
        variable.getDescription().orElse(Arrays.asList(MISSING_VARIABLE_FIELD)))
      variableEnumBuilder.addJavadoc("<p>$L</p>\n", HtmlEscapers.htmlEscaper().escape(paragraph));
    variableEnumBuilder.addJavadoc("</li>\n");
    variableEnumBuilder.addJavadoc(
        "<li><strong>Short Name:</strong> $L</li>\n",
        variable.getShortName().orElse(MISSING_VARIABLE_FIELD));
    variableEnumBuilder.addJavadoc(
        "<li><strong>Long Name:</strong> $L</li>\n", variable.getLongName());
    variableEnumBuilder.addJavadoc(
        "<li><strong>Type:</strong> $L</li>\n",
        variable.getType().isPresent()
            ? variable.getType().get().toString()
            : MISSING_VARIABLE_FIELD);
    variableEnumBuilder.addJavadoc(
        "<li><strong>Length:</strong> $L</li>\n", variable.getLength());
    variableEnumBuilder.addJavadoc(
        "<li><strong>Source:</strong> $L</li>\n",
        variable.getSource().orElse(MISSING_VARIABLE_FIELD));
    variableEnumBuilder.addJavadoc(
        "<li><strong>Value Format:</strong> $L</li>\n",
        variable.getValueFormat().orElse(MISSING_VARIABLE_FIELD));
    if (variable.getValueGroups() != null)
      variableEnumBuilder.addJavadoc(
          "<li><strong>Coded Values?:</strong> $L</li>\n", variable.getValueGroups().isPresent());
    variableEnumBuilder.addJavadoc("<li><strong>Comment:</strong>\n");
    for (String paragraph : variable.getComment().orElse(Arrays.asList(MISSING_VARIABLE_FIELD)))
      variableEnumBuilder.addJavadoc("<p>$L</p>\n", HtmlEscapers.htmlEscaper().escape(paragraph));
    variableEnumBuilder.addJavadoc("</li>\n");
    variableEnumBuilder.addJavadoc("</ul>\n");
    variablesEnumType.addEnumConstant(variable.getId(), variableEnumBuilder.build());
  }
  // Backing map for getVariable(): re-parsed at class-init time of the generated enum.
  variablesEnumType.addField(
      FieldSpec.builder(
              ParameterizedTypeName.get(Map.class, String.class, Variable.class),
              "VARIABLES_BY_ID",
              Modifier.PRIVATE,
              Modifier.STATIC,
              Modifier.FINAL)
          .initializer("$T.buildVariablesMappedById()", CodebookVariableReader.class)
          .build());
  // Each enum constant's name is its variable id, so name() doubles as the map key.
  variablesEnumType.addMethod(
      MethodSpec.methodBuilder("getVariable")
          .addModifiers(Modifier.PUBLIC)
          .addStatement("return VARIABLES_BY_ID.get(this.name())")
          .returns(Variable.class)
          .addJavadoc(
              "@return the {@link $T} data (parsed from a codebook PDF) for this {@link $T} constant\n",
              Variable.class,
              variableEnumName)
          .build());
  TypeSpec columnEnumFinal = variablesEnumType.build();
  JavaFile columnsEnumFile =
      JavaFile.builder(annotatedPackage.getQualifiedName().toString(), columnEnumFinal).build();
  // Hand the generated source to the compiler's Filer for this round.
  columnsEnumFile.writeTo(processingEnv.getFiler());
}
/**
 * Reports the specified log message to the compiler's {@link
 * javax.annotation.processing.Messager} and records a copy in {@link #logMessages} so it can
 * later be dumped by {@code writeDebugLogMessages()}.
 *
 * @param logEntryKind the {@link Diagnostic.Kind} of log entry to add
 * @param associatedElement the Java AST {@link Element} that the log entry should be associated
 *     with, or <code>null</code>
 * @param messageFormat the log message format {@link String}
 * @param messageArguments the log message format arguments
 */
private void log(
    Diagnostic.Kind logEntryKind,
    Element associatedElement,
    String messageFormat,
    Object... messageArguments) {
  String logMessage = String.format(messageFormat, messageArguments);
  processingEnv.getMessager().printMessage(logEntryKind, logMessage, associatedElement);
  // Mirror the message into the in-memory debug log, tagged with its kind and
  // (when available) the element it relates to.
  String logMessageFull =
      associatedElement == null
          ? String.format("[%s]: %s", logEntryKind, logMessage)
          : String.format("[%s] at '%s': %s", logEntryKind, associatedElement, logMessage);
  logMessages.add(logMessageFull);
}
/**
 * Reports the specified log message (with no associated AST element); delegates to {@link
 * #log(Diagnostic.Kind, Element, String, Object...)}.
 *
 * @param logEntryKind the {@link Diagnostic.Kind} of log entry to add
 * @param messageFormat the log message format {@link String}
 * @param messageArguments the log message format arguments
 */
private void log(Diagnostic.Kind logEntryKind, String messageFormat, Object... messageArguments) {
  log(logEntryKind, null, messageFormat, messageArguments);
}
/**
 * Reports the specified log message as a {@link Diagnostic.Kind#NOTE}.
 *
 * @param associatedElement the Java AST {@link Element} that the log entry should be associated
 *     with, or <code>null</code>
 * @param messageFormat the log message format {@link String}
 * @param messageArguments the log message format arguments
 */
private void logNote(
    Element associatedElement, String messageFormat, Object... messageArguments) {
  log(Diagnostic.Kind.NOTE, associatedElement, messageFormat, messageArguments);
}
/**
 * Reports the specified log message as a {@link Diagnostic.Kind#NOTE}, with no associated AST
 * element.
 *
 * @param messageFormat the log message format {@link String}
 * @param messageArguments the log message format arguments
 */
private void logNote(String messageFormat, Object... messageArguments) {
  log(Diagnostic.Kind.NOTE, null, messageFormat, messageArguments);
}
/**
 * Writes out all of the messages in {@link #logMessages} to a log file in the
 * annotation-generated source directory. No-op unless {@code DEBUG} is enabled.
 *
 * @throws UncheckedIOException if the log resource cannot be created or written
 */
private void writeDebugLogMessages() {
  if (!DEBUG) return;
  try {
    FileObject logResource =
        processingEnv
            .getFiler()
            .createResource(
                StandardLocation.SOURCE_OUTPUT, "", this.getClass().getSimpleName() + "-log.txt");
    /*
     * Use try-with-resources so the writer is always closed; it was previously
     * only flushed, leaking the file handle (and buffered data on an exception).
     */
    try (Writer logWriter = logResource.openWriter()) {
      for (String logMessage : logMessages) {
        logWriter.write(logMessage);
        logWriter.write('\n');
      }
    }
  } catch (IOException e) {
    throw new UncheckedIOException(e);
  }
}
}
|
<filename>ui/frontend/src/components/namespaces/namespace.js
import React, {useCallback, useContext, useEffect, useMemo, useState} from 'react'
import '../../css/namespace.css'
import ReactPaginate from 'react-paginate'
import {useLocation, useParams} from 'react-router-dom'
import logoColor from "img/logo-color.png";
import {Alert, Container} from 'react-bootstrap'
import {XCircle} from 'react-bootstrap-icons'
import Row from 'react-bootstrap/Row'
import Col from 'react-bootstrap/Col'
import Fuse from 'fuse.js'
import NamespaceActions from 'components/namespaces/actions'
import WorkflowList from 'components/namespaces/workflows'
import {QueryParams} from 'util/params'
import ServerContext from 'components/app/context'
export default function Namespace(props) {
const [workflows, setWorkflows] = useState([])
const [permErr, setPermError] = useState("")
const [workflowErr, setWorkflowError] = useState("")
const [loader, setLoader] = useState(false)
const [pagination, setPagination] = useState({total: 0, offset: 0})
const [searchInfo, setSearchInfo] = useState({results: [], pattern: ""})
const context = useContext(ServerContext);
const fuse = useMemo(() => new Fuse(workflows, {
threshold: 0.4, distance: 50, keys: [
"id",
"description"
]
}), [workflows]);
let {namespace} = useParams()
let params = QueryParams(useLocation().search)
if (!params.p) {
params.p = 1
}
if (!params.q) {
params.q = ""
}
const clearError = () => {
setPermError("")
setWorkflowError("")
}
function updateWorkflows(newWorkflows) {
setWorkflows(newWorkflows)
}
const fetchWorkflows = useCallback(() => {
async function fetchWFs() {
try {
setLoader(true)
let resp = await context.Fetch(`/namespaces/${namespace}/workflows?offset=${pagination.offset}`, {})
if (!resp.ok) {
setLoader(false)
throw resp
} else {
let json = await resp.json()
updateWorkflows(json.workflows)
setPagination({...pagination, total: json.total})
}
} catch (e) {
setWorkflowError(e.message)
}
setLoader(false)
}
fetchWFs()
}, [context.Fetch, namespace, pagination.offset])
// Fetch data on mount
useEffect(() => {
fetchWorkflows()
}, [fetchWorkflows, context.Fetch, namespace])
useEffect(() => {
if (params.q === searchInfo.pattern) {
return
}
let results = []
fuse.search(params.q).forEach(res => results.push(res.item))
setSearchInfo({results: results, pattern: params.q})
}, [params, searchInfo.pattern, fuse])
function searchResults() {
if (searchInfo.pattern === "") {
return workflows
} else {
return searchInfo.results
}
}
return (<>
<Row style={{margin: "0px"}}>
<Col style={{marginBottom: "15px"}} id="namespace-header">
<div className="namespace-actions">
<div id="namespace-actions-title" className="namespace-actions-box">
<h4>
{namespace}
</h4>
</div>
<div id="namespace-actions-options" className="namespace-actions-box">
<NamespaceActions namespace={namespace} q={params.q} p={params.p}
onNew={() => {
fetchWorkflows() // fetchWorkflows again
}}
/>
</div>
</div>
<div className="padded-border"></div>
</Col>
<Col style={{marginBottom: "15px"}} xs={12}>
{workflowErr !== "" || permErr !== "" ?
<Row style={{margin: "0px"}}>
<Col style={{marginBottom: "15px"}} xs={12} md={12}>
<Alert variant="danger">
<Container>
<Row>
<Col sm={11}>
Fetching Workflows: {workflowErr !== "" ? workflowErr : permErr}
</Col>
<Col sm={1} style={{textAlign: "right", paddingRight: "0"}}>
<XCircle style={{cursor: "pointer", fontSize: "large"}} onClick={() => {
clearError()
}}/>
</Col>
</Row>
</Container>
</Alert>
</Col>
</Row>
:
<>
{loader ?
<div id="instances">
<div style={{
minHeight: "500px",
display: "flex",
alignItems: "center",
justifyContent: "center"
}}>
<img
alt="loading symbol"
src={logoColor}
height={200}
className="animate__animated animate__bounce animate__infinite"/>
</div>
</div>
:
<>
<div>
<h5>
Workflows
</h5>
<h4 style={{color: "#999999", fontSize: "1rem"}}>
{(searchInfo.pattern !== "")
? (<>{searchInfo.results.length} Results{' '}</>)
:
(<></>)}
</h4>
</div>
{searchResults().length > 0 ?
<WorkflowList namespace={namespace} workflows={searchResults()}
fetchWorkflows={fetchWorkflows}/>
:
<div className="workflows-list-item-no">
<div style={{alignSelf: "center"}}>
<div style={{padding: "5px"}}>
<span>No workflows are saved.</span>
</div>
</div>
</div>
}
</>
}
</>}
</Col>
{searchResults().length > 0 ?
<Col style={{marginTop: "30px"}}>
<ReactPaginate
breakClassName="page-item"
breakLinkClassName="page-link"
previousClassName="page-item"
previousLinkClassName="page-link"
nextClassName="page-item"
nextLinkClassName="page-link"
previousLabel="<"
containerClassName="pagination"
pageClassName="page-item"
pageLinkClassName="page-link"
nextLabel=">"
onPageChange={(num) => {
setPagination({...pagination, offset: num.selected * 10})
}}
pageCount={Math.ceil(pagination.total / 10)}
/>
</Col> : ""}
</Row>
</>
);
} |
<reponame>haroutboujakjian/Vuesalize
import Vue from "vue";
import StackedBarChart from "./StackedBarChart";
import BaseLegend from "./BaseLegend";
import LineChart from "./LineChart";
import GroupedBarChart from "./GroupedBarChart";
import LoaderSpinning from "./LoaderSpinning";
import Network from "./Network";
import HierarchicalEdgeBundling from "./HierarchicalEdgeBundling";
import AreaChart from "./AreaChart";
import ScatterPlot from "./ScatterPlot"
// Every chart/visualization component exported by the library, keyed by its
// global registration name.
const Components = {
    StackedBarChart,
    BaseLegend,
    LineChart,
    GroupedBarChart,
    LoaderSpinning,
    Network,
    HierarchicalEdgeBundling,
    AreaChart,
    ScatterPlot
}

// Register each component globally so consumers can use them without local imports.
for (const [componentName, component] of Object.entries(Components)) {
    Vue.component(componentName, component)
}

export default Components;
<filename>src/test/java/com/github/peacetrue/signature/SignerTest.java
package com.github.peacetrue.signature;
import com.github.peacetrue.CryptologyUtils;
import com.github.peacetrue.beans.signedbean.SignedBean;
import com.github.peacetrue.digest.HmacDigester;
import com.github.peacetrue.security.KeyPairGeneratorUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import java.security.KeyPair;
import static com.github.peacetrue.security.KeyPairGeneratorUtils.*;
/**
 * Exercises string-, bean-, and digest-based signing round-trips.
 *
 * @author peace
 **/
@Slf4j
class SignerTest {

  @Test
  void sign() {
    // Round-trip a random alphanumeric payload through an RSA string signer.
    KeyPair rsaKeyPair = KeyPairGeneratorUtils.generateRsaKeyPair(KEY_LENGTH_1024);
    StandardSigner rsaSigner = new StandardSigner(rsaKeyPair.getPublic(), rsaKeyPair.getPrivate());
    StringSigner rsaStringSigner = new StringSigner(rsaSigner);
    String payload = RandomStringUtils.randomAlphanumeric(10);
    String payloadSignature = rsaStringSigner.sign(payload);
    Assertions.assertTrue(rsaStringSigner.verify(payload, payloadSignature));

    // Sign and verify an entire bean with the same RSA signer.
    BeanSigner rsaBeanSigner = new SimpleBeanSigner(new StringSigner(rsaSigner));
    SignedBean rsaSignedBean = rsaBeanSigner.generate();
    Assertions.assertTrue(rsaBeanSigner.verify(rsaSignedBean));

    // Sign and verify a bean with an HMAC-SHA256 digest-backed signer.
    BeanSigner hmacBeanSigner =
        new SimpleBeanSigner(
            new StringSigner(
                new DigestSigner(HmacDigester.buildHmacSHA256(CryptologyUtils.randomBytes()))));
    SignedBean hmacSignedBean = hmacBeanSigner.generate();
    Assertions.assertTrue(hmacBeanSigner.verify(hmacSignedBean));
  }
}
|
#! /bin/bash
# Install every VS Code Insiders extension listed in ./vscode-extensions
# (one extension id per line), resolving the list relative to this script.

pushd "$(dirname "$0")" >/dev/null

# Feed the list straight to xargs; `cat file | xargs` was a useless use of cat.
xargs -L 1 code-insiders --install-extension < ./vscode-extensions

# Silence popd's directory-stack echo, matching the pushd above.
popd >/dev/null
require 'chronic'
require 'csv'
require 'eventmachine'
require 'fileutils'
require 'neo4j-core'
require 'rugged'
require 'date'
module Ginatra
class Repository
class MissingName < RuntimeError; end
class MissingPath < RuntimeError; end
class InvalidPath < RuntimeError; end
class MissingId < RuntimeError; end
class InvalidRepoId < RuntimeError; end
attr_accessor :id, :path, :name, :color, :origin_url, :rugged_repo, :head_branch
# Validates the supplied config hash before constructing the instance.
#
# NOTE(review): @id here is a *class-level* instance variable (we are inside a
# class method), stashed so the validation error messages can reference the
# repository id; it is distinct from the instance's @id. The ||= also means it
# keeps the first id it ever saw across multiple constructions — confirm that
# is intended.
def self.new(params)
  @id ||= params["id"]
  self.validate(params)
  super
end
# Builds a repository from a validated config hash.
#
# @param params [Hash] keys 'id', 'path', 'name' (required) and 'color' (optional)
def initialize(params)
  colors = Ginatra::Config.colors
  repos = Ginatra::Config.repositories
  @id = params['id'].strip
  @path = File.expand_path params['path'].strip
  @color = nil
  @name = params['name'].strip
  @rugged_repo = Rugged::Repository.new(File.expand_path(@path))
  # e.g. 'refs/heads/master' -> 'master'
  @head_branch = @rugged_repo.head.name.sub(/^refs\/heads\//, '')
  # Get default color if it is not defined
  if params['color'].nil?
    # Deterministic fallback: pick a color by the repo's position in the
    # configured repositories list, wrapping around the palette.
    @color = colors[repos.find_index { |k,_| k == @id } % colors.size]
  else
    @color = params['color']
  end
  # Find remote url
  @origin_url = nil
  @rugged_repo.remotes.each do |remote|
    @origin_url = remote.url if remote.name == 'origin'
  end
end
# Aggregates per-author statistics (commit count, additions, deletions) for
# the commits matching +params+.
def authors params = {}
  grouped = commits(params).group_by { |commit| commit.first[1]['author'] }
  grouped.map do |author_name, author_commits|
    {
      'name' => author_name,
      'commits' => author_commits.size,
      'additions' => Ginatra::Helper.get_additions(author_commits),
      'deletions' => Ginatra::Helper.get_deletions(author_commits)
    }
  end
end
# Fetches this repository's commits from the graph database via the shared
# query helper, scoped to this repo's id.
#
# NOTE(review): this mutates the caller's params hash (sets :in) — verify
# callers don't reuse the same hash across repositories.
def commits(params = {})
  params[:in] = [@id]
  Ginatra::Helper.query_commits(params).first[:commits]
end
# Net line delta across the matched commits: total additions minus total
# deletions, summed over every file change of every commit.
def lines params = {}
  commits(params).inject(0) { |line_count, commit|
    changes = commit.flatten[1]["changes"]
    # The inner inject's final value is this commit's net delta (additions -
    # deletions across its changes), which is accumulated into line_count.
    line_count += changes.inject(0) { |c_line_count, change|
      c_line_count -= change['deletions'].to_i
      c_line_count += change['additions'].to_i
    } unless changes.empty?
    line_count
  }
end
# def fetch
# # cred = Rugged::Credentials::SshKey.new({ username: 'git', publickey: 'id_rsa.pub', privatekey: 'id_rsa', passphrase: '' })
# @rugged_repo.remotes.each do |remote|
# remote.fetch({ credentials: @credentials })
# end
# end
# Rebase-pulls every remote-tracking branch in turn, then checks the default
# head branch back out.
def pull
  # Pull rebase on all remote branches
  @rugged_repo.branches.each do |branch|
    if (branch.target.class == Rugged::Commit && branch.head? == false)
      branch_name = branch.name.split('/')[1..-1].join('/')
      # Quote the path and branch name so repositories under paths containing
      # spaces (or branch names with shell metacharacters) don't break.
      `cd "#{@path}" && git checkout "#{branch_name}"`
      `cd "#{@path}" && git pull --rebase`
    end
  end
  # Checkout to default head branch again
  `cd "#{path}" && git checkout "#{@head_branch}"`
end
# def start_stream(channel, update_interval)
# EM.add_periodic_timer(update_interval) {
# if change_exists?
# refresh_data
# sid = channel.subscribe { |msg| p ["repo #{@id} subscribed"] }
# channel.push @id
# channel.unsubscribe(sid)
# end
# # hit Control + C to stop
# Signal.trap("INT") { EventMachine.stop }
# Signal.trap("TERM") { EventMachine.stop }
# }
# end
# Imports this repo's remote branches into the graph: each branch node is
# recreated and pointed at the commit node for its current tip.
#
# NOTE(review): repo/branch values are interpolated directly into Cypher;
# names containing single quotes would break the query — confirm config and
# branch names are trusted input.
def import_branch_graph
  session = Ginatra::Db.session
  # Create constraints
  # TODO: This is only required once during the database setup process.
  session.query('CREATE CONSTRAINT ON (r:Repository) ASSERT r.origin_url IS UNIQUE')
  # Create or update existing repo
  session.query("MERGE (r:Repository {origin_url: '#{@origin_url}', id: '#{@id}', name: '#{@name}'})")
  @rugged_repo.branches.each do |branch|
    # Only add remote branches. Since HEAD is pointed to the current
    # local working branch
    if (branch.target.class == Rugged::Commit && branch.head? == false)
      # Delete previous branch
      session.query("
        MATCH (r:Repository {origin_url: '#{@origin_url}'})-[:HAS_BRANCH]->(b:Branch {name: '#{branch.name}'}) DETACH
        DELETE b
      ")
      session.query("
        MATCH (r:Repository {origin_url: '#{origin_url}'})
        MATCH (c:Commit {hash: '#{branch.target.oid}'})
        CREATE (b:Branch {name: '#{branch.name}'})
        CREATE (r)-[:HAS_BRANCH]->(b)
        MERGE (b)-[:POINTS_TO]->(c)
      ")
    end
  end
end
# Walks the history reachable from every branch tip and writes one CSV row per
# unique commit (hash, message, author/committer info, timestamps, parents).
def create_commits_csv
  tips = []
  @rugged_repo.branches.each do |branch|
    tips << branch.target.oid if (branch.target.class == Rugged::Commit)
  end
  # Create a walker and let the starting points as the latest commit of each
  # branch.
  walker = Rugged::Walker.new(@rugged_repo)
  tips.uniq.each do |target|
    walker.push(target)
  end
  # Remove the old csv file before writing to the new one
  remove_commit_csv_file
  CSV.open(commit_csv_file, 'w') do |csv|
    # Write CSV headers
    csv << %w{ hash message author_email author_name author_time commit_time commit_timestamp parents }
    # Walk through the commit tree based on the defined start commit points.
    # The walk happens simultaneously through all branches. Commits that
    # have been processed will be ignored automatically by Rugged::Walker.
    walker.each do |commit|
      author = commit.author
      committor = commit.committer
      csv << [
        commit.oid,
        # Flatten newlines and normalize quotes/backslashes so the message
        # survives the CSV -> Cypher LOAD CSV round trip.
        commit.message.strip().gsub(/\n/, '').gsub(/"/, "'").gsub(/\\/, '\\\\\\'),
        author[:email],
        author[:name],
        author[:time],
        committor[:time],
        commit.epoch_time,
        commit.parent_ids.join(' ')
      ]
    end
  end
end
# Writes one CSV row per (commit, changed file) pair with addition/deletion
# counts, parsed from `git log --numstat` output.
def create_diff_csv
  # Using git command to get the line and file changes of each commit
  # since Rugged::Walker takes much longer to walk through the diff stat.
  # Record the line changes output to a string and then reformat it into
  # csv.
  delimiter = '[<ginatra_commit_start>]'
  stat_str = `cd #{path} && git log --numstat --format="#{delimiter}%H"`
  # Split string at delimiter, so each string represents a commit with its
  # commit hash and changes below it.
  stat_arr = stat_str.split(delimiter).map { |str|
    # Each raw_stat string has this format:
    # b3170bb1b7d73062d0807b8acd6474aadfaa83d9
    #
    # 1       3       Gemfile
    # 49      14      Gemfile.lock
    # 0       1       config.ru
    # 0       3       lib/ginatra.rb
    # 33      56      lib/ginatra/core/repository.rb
    # 5       1       lib/ginatra/web/api.rb
    # 0       1       lib/ginatra/web/front.rb
    # 0       1       lib/ginatra/web/websocket_server.rb
    raw_stat = str.split(/\n/)
    commit_hash = raw_stat[0]
    # Index 1 is the blank separator line; <= 2 lines means no file changes.
    if raw_stat.size <= 2
      changes = []
    else
      changes = raw_stat[3..-1].map{ |change_str|
        # numstat columns are tab-separated: additions, deletions, path.
        raw_change = change_str.split(/\t/)
        {
          file_path: raw_change[2],
          file_path_on_disk: [@path, raw_change[2]].join('/'),
          additions: raw_change[0].to_i,
          deletions: raw_change[1].to_i
        }
      }
    end
    { hash: commit_hash, changes: changes }
  }
  # Remove old diff csv file
  remove_diff_csv_file
  CSV.open(diff_csv_file, 'w') do |csv|
    # Write csv headers
    csv << %w{ hash additions deletions file_path file_path_on_disk }
    # Write rows
    stat_arr.each do |stat|
      stat[:changes].each do |change|
        csv << [
          stat[:hash],
          change[:additions],
          change[:deletions],
          change[:file_path],
          change[:file_path_on_disk],
        ]
      end
    end
  end
end
# Stamps the repository node with the timestamp of its earliest commit, so
# timeline queries know where this repo's history begins.
def set_repo_graph_start_time
  session = Ginatra::Db.session
  session.query("
    MATCH (r:Repository {origin_url: '#{@origin_url}'})-[:HAS_COMMIT]->(c:Commit)
    WITH r, c ORDER BY c.commit_timestamp LIMIT 1
    SET r.start_timestamp = c.commit_timestamp
  ")
  session.close
end
# Regenerates the commits CSV and bulk-loads it into the graph: one Commit
# node per row, linked to its repository, author (User node) and parents.
def import_commits_graph
  create_commits_csv
  session = Ginatra::Db.session
  # Establish contraints in indexes
  # TODO: This is only required once during the database setup process.
  session.query('CREATE CONSTRAINT ON (c:Commit) ASSERT c.hash IS UNIQUE')
  session.query('CREATE INDEX ON :Commit(commit_timestamp)')
  session.query('CREATE INDEX ON :Commit(message)')
  session.query('CREATE CONSTRAINT ON (u:User) ASSERT u.email IS UNIQUE')
  # Import CSV
  session.query("
    USING PERIODIC COMMIT 1000
    LOAD CSV WITH headers FROM 'file://#{commit_csv_file}' as line
    MATCH (r:Repository {id: '#{@id}'})
    MERGE (c:Commit {hash: line.hash}) ON CREATE SET
      c.message = line.message,
      c.author_time = line.author_time,
      c.commit_time = line.commit_time,
      c.commit_timestamp = toInt(line.commit_timestamp),
      c.parents = split(line.parents, ' ')
    MERGE (r)-[:HAS_COMMIT]->(c)
    MERGE (u:User:Author {email:line.author_email}) ON CREATE SET u.name = line.author_name
    MERGE (u)-[:AUTHORED]->(c)
    MERGE (c)-[:AUTHORED_BY]->(u)
    MERGE (u)-[:CONTRIBUTED_TO]->(r)
    WITH c,line
    WHERE line.parents <> ''
    FOREACH (parent_hash in split(line.parents, ' ') |
      MERGE (parent:Commit {hash: parent_hash})
      MERGE (c)-[:HAS_PARENT]->(parent))
  ")
  session.close
  # Set repo's start timestamp property based on first commit's timestamp
  set_repo_graph_start_time
end
# Regenerates the diff CSV and bulk-loads it: one File node per touched path,
# with a CHANGES relationship (additions/deletions) from each commit.
def import_diff_graph
  create_diff_csv
  session = Ginatra::Db.session
  # Establish contraints in indexes
  # TODO: This is only required once during the database setup process.
  session.query('CREATE CONSTRAINT ON (f:File) ASSERT f.path_on_disk IS UNIQUE')
  # Import CSV
  session.query("
    USING PERIODIC COMMIT 1000
    LOAD CSV WITH headers FROM 'file://#{diff_csv_file}' as line
    MATCH (c:Commit {hash: line.hash})
    MERGE (f:File {path_on_disk: line.file_path_on_disk}) ON CREATE SET
      f.path = line.file_path,
      f.ignored = 0
    MERGE (c)-[:CHANGES {additions: toInt(line.additions), deletions: toInt(line.deletions)}]->(f)
  ")
  session.close
end
# Writes one CSV row per file currently on disk in the working tree (excluding
# .git), recording its repo-relative path, absolute path, and gitignore status.
def create_current_files_csv
  files_tree = Dir["#{@path}/**/*"].reject{ |path| path == "#{@path}/.git" }.map{ |path|
    file_parts = path.split('/')
    {
      # Drop the leading repo-path segments to get the repo-relative path.
      relative_path: file_parts[@path.split('/').size..-1].join('/'),
      disk_path: path
    }
  }
  # Remove old files csv file
  remove_current_files_csv_file
  CSV.open(current_files_csv_file, 'w') do |csv|
    # Write csv headers
    csv << %w{ file_path file_path_on_disk ignored }
    # Write rows
    files_tree.each do |file|
      csv << [
        file[:relative_path],
        file[:disk_path],
        @rugged_repo.path_ignored?("#{file[:relative_path]}") ? 1 : 0
      ]
    end
  end
end
# Rebuilds the CurrentFileTree node's HAS_FILE relationships to match what is
# on disk right now, refreshing each file's ignored flag.
def import_current_files_graph
  create_current_files_csv
  session = Ginatra::Db.session
  # Establish contraints in indexes
  # TODO: This is only required once during the database setup process.
  session.query('CREATE CONSTRAINT ON (tr: CurrentFileTree) ASSERT tr.origin_url IS UNIQUE')
  # Create the repo's file tree node if it has not existed
  session.query("
    MATCH (r:Repository {origin_url: '#{@origin_url}'})
    WITH r
    MERGE (r)-[:HAS_FILE_TREE]->(:CurrentFileTree {origin_url: '#{@origin_url}'})
  ")
  # Remove all currently files relationship from the file tree in order
  # to construct the new one based on the current directory
  session.query("
    MATCH (:CurrentFileTree {origin_url: '#{@origin_url}'})-[r:HAS_FILE]->(:File)
    DELETE r
  ")
  # Import CSV
  session.query("
    USING PERIODIC COMMIT 1000
    LOAD CSV WITH headers FROM 'file://#{current_files_csv_file}' as line
    MATCH
      (f:File {path_on_disk: line.file_path_on_disk}),
      (tr:CurrentFileTree {origin_url: '#{@origin_url}'})
    MERGE (tr)-[:HAS_FILE]->(f)
    SET f.ignored = toInt(line.ignored)
  ")
  session.close
end
# Full (re)index of this repository into the graph database: repo node, then
# commits, branches, per-commit diffs, and the current file tree.
def import_git_graph
  logger = Ginatra::Log.new().logger
  logger.info("Started indexing repo #{@id}")
  start_time = Time.now
  session = Ginatra::Db.session
  # Create constraints
  session.query('CREATE CONSTRAINT ON (r:Repository) ASSERT r.origin_url IS UNIQUE')
  # Create or update existing repo
  session.query("MERGE (r:Repository {origin_url: '#{@origin_url}', id: '#{@id}', name: '#{@name}'})")
  session.close
  logger.info("Importing commits graph of #{@id}")
  import_commits_graph
  logger.info("Importing branch graph of #{@id}")
  import_branch_graph
  logger.info("Importing commit diff graph of #{@id}")
  import_diff_graph
  logger.info("Importing current files graph of #{@id}")
  import_current_files_graph
  logger.info("Setting start timestamp of #{@id}")
  # NOTE(review): import_commits_graph already calls this; the second call is
  # redundant but harmless (idempotent SET).
  set_repo_graph_start_time
  logger.info("Finished indexing repository #{id}. Duration: #{Time.now - start_time} seconds")
end
private
# Validates a repository config hash, defaulting a color when possible and
# raising a descriptive error for any missing/invalid required field.
#
# NOTE: @id here is the class-level ivar stashed by self.new, used purely for
# error messages.
def self.validate(params)
  colors = Ginatra::Config.colors
  repos = Ginatra::Config.repositories
  if params['color'].nil? and !params['id'].nil?
    begin
      params['color'] = colors[repos.find_index { |k,_| k == params["id"] } % colors.size]
    rescue NoMethodError
      raise InvalidRepoId, "#{self.current_path} repository's id is invalid"
    end
  end
  # MissingColor was historically referenced without ever being defined, which
  # turned a missing color into a NameError. Define it lazily (no-op when the
  # class declares it alongside the other error classes).
  const_set(:MissingColor, Class.new(RuntimeError)) unless const_defined?(:MissingColor)
  raise MissingName, "repository's name is missing for #{@id}. Check config.yml file, make sure your data is correct." unless params['name']
  raise MissingPath, "repository's path is missing for #{@id}. Check config.yml file, make sure your data is correct." unless params['path']
  raise MissingId, "repository's id is missing. Check config.yml file, make sure your repository data is correct." unless params['id']
  raise MissingColor, "repository's color missing for #{@id}. Check config.yml file, make sure your data is correct." unless params['color']
  raise InvalidPath, "repository's path is invalid for #{@id}. Check config.yml file, make sure your data is correct." unless self.is_repo_path?(params['path'])
end
# Truthy when +path+ is an existing directory in which `git status` reports a
# branch (i.e. a usable git working tree); falsey otherwise.
def self.is_repo_path?(path)
  expanded = File.expand_path(path)
  return false if expanded.nil? || !File.directory?(expanded)
  `git -C "#{expanded}" status`.match(/On branch/)
end
# Source-file label used in validation error messages.
def self.current_path
  'ginatra/repository.rb'
end
# Absolute path of this repo's commits CSV, creating the data directory first
# if it does not exist yet.
def commit_csv_file
  data_dir = Ginatra::Env.data || './data'
  FileUtils.mkdir_p data_dir unless File.directory?(data_dir)
  File.expand_path "#{@id}.csv", data_dir
end
# Deletes the stale commits CSV, if present. Uses File.exist? because
# File.exists? was deprecated and removed in Ruby 3.2.
def remove_commit_csv_file
  FileUtils.rm(commit_csv_file) if File.exist?(commit_csv_file)
end
# Absolute path of this repo's per-commit diff CSV, creating the data
# directory first if it does not exist yet.
def diff_csv_file
  data_dir = Ginatra::Env.data || './data'
  FileUtils.mkdir_p data_dir unless File.directory?(data_dir)
  File.expand_path "#{@id}_diff.csv", data_dir
end
# Deletes the stale diff CSV, if present. Uses File.exist? because
# File.exists? was deprecated and removed in Ruby 3.2.
def remove_diff_csv_file
  FileUtils.rm(diff_csv_file) if File.exist?(diff_csv_file)
end
# Absolute path of this repo's current-files CSV, creating the data directory
# first if it does not exist yet.
def current_files_csv_file
  data_dir = Ginatra::Env.data || './data'
  FileUtils.mkdir_p data_dir unless File.directory?(data_dir)
  File.expand_path "#{@id}_current_files.csv", data_dir
end
# Deletes the stale current-files CSV, if present. Uses File.exist? because
# File.exists? was deprecated and removed in Ruby 3.2.
def remove_current_files_csv_file
  FileUtils.rm(current_files_csv_file) if File.exist?(current_files_csv_file)
end
# Quietly rebase-pulls the currently checked-out branch.
def pull_latest_commits
  # Use POSIX redirection: backticks run /bin/sh, where bash's `&>` would be
  # parsed as "run in background, then redirect" instead of silencing output.
  # Also quote the path so directories with spaces work.
  `git -C "#{path}" pull --rebase > /dev/null 2>&1`
end
# Commits whose date falls within [from, til]; both bounds default to "all
# time". Non-Time bounds are parsed with Chronic (natural-language dates).
def commits_between from = nil, til = nil
  from ||= Time.new(0)
  til ||= Time.now
  date_range = [from, til].map { |time_stamp|
    if time_stamp.class.to_s != "Time"
      Chronic.parse time_stamp.to_s
    else
      time_stamp
    end
  }
  result = []
  unless commits.nil?
    commits.each do |commit|
      commit_date = Time.parse commit.flatten[1]['date']
      # NOTE(review): the early break assumes commits arrive ordered
      # newest-first; confirm the query's ordering before relying on this.
      break if commit_date < date_range[0]
      result << commit if commit_date >= date_range[0] &&
        commit_date <= date_range[1]
    end
  end
  return result
end
# Filters +comm+ down to commits authored by +author+; returns +comm+
# unchanged when no author is given.
def commits_by comm = @commits, author = nil
  return comm if author.nil?
  comm.select { |commit| commit.flatten[1]['author'] == author }
end
end
end
|
package facade.amazonaws.services
import scalajs._
import scalajs.js.annotation.JSImport
import scala.scalajs.js.|
import scala.concurrent.Future
import facade.amazonaws._
/**
 * Type aliases and extension methods for the AWS Managed Blockchain facade.
 *
 * The aliases mirror the primitive shapes used by the service's request and
 * response members, so generated code reads like the service API model.
 */
package object managedblockchain {
  type ArnString = String
  type AvailabilityZoneString = String
  type ClientRequestTokenString = String
  type DescriptionString = String
  type Enabled = Boolean
  type FrameworkVersionString = String
  type InputTagMap = js.Dictionary[TagValue]
  type InstanceTypeString = String
  type InvitationList = js.Array[Invitation]
  type InviteActionList = js.Array[InviteAction]
  type IsOwned = Boolean
  type MemberListMaxResults = Int
  type MemberSummaryList = js.Array[MemberSummary]
  type NameString = String
  type NetworkListMaxResults = Int
  type NetworkMemberNameString = String
  type NetworkSummaryList = js.Array[NetworkSummary]
  type NodeListMaxResults = Int
  type NodeSummaryList = js.Array[NodeSummary]
  type OutputTagMap = js.Dictionary[TagValue]
  type PaginationToken = String
  type PasswordString = String
  type PrincipalString = String
  type ProposalDurationInt = Int
  type ProposalListMaxResults = Int
  type ProposalSummaryList = js.Array[ProposalSummary]
  type ProposalVoteList = js.Array[VoteSummary]
  type RemoveActionList = js.Array[RemoveAction]
  type ResourceIdString = String
  type TagKey = String
  type TagKeyList = js.Array[TagKey]
  type TagValue = String
  type ThresholdPercentageInt = Int
  type Timestamp = js.Date
  type UsernameString = String
  type VoteCount = Int

  /**
   * Future-returning convenience wrappers over the SDK client's
   * promise-based operations, one per service API call.
   */
  implicit final class ManagedBlockchainOps(private val service: ManagedBlockchain) extends AnyVal {

    @inline def createMemberFuture(params: CreateMemberInput): Future[CreateMemberOutput] = service.createMember(params).promise().toFuture
    @inline def createNetworkFuture(params: CreateNetworkInput): Future[CreateNetworkOutput] = service.createNetwork(params).promise().toFuture
    @inline def createNodeFuture(params: CreateNodeInput): Future[CreateNodeOutput] = service.createNode(params).promise().toFuture
    @inline def createProposalFuture(params: CreateProposalInput): Future[CreateProposalOutput] = service.createProposal(params).promise().toFuture
    @inline def deleteMemberFuture(params: DeleteMemberInput): Future[DeleteMemberOutput] = service.deleteMember(params).promise().toFuture
    @inline def deleteNodeFuture(params: DeleteNodeInput): Future[DeleteNodeOutput] = service.deleteNode(params).promise().toFuture
    @inline def getMemberFuture(params: GetMemberInput): Future[GetMemberOutput] = service.getMember(params).promise().toFuture
    @inline def getNetworkFuture(params: GetNetworkInput): Future[GetNetworkOutput] = service.getNetwork(params).promise().toFuture
    @inline def getNodeFuture(params: GetNodeInput): Future[GetNodeOutput] = service.getNode(params).promise().toFuture
    @inline def getProposalFuture(params: GetProposalInput): Future[GetProposalOutput] = service.getProposal(params).promise().toFuture
    @inline def listInvitationsFuture(params: ListInvitationsInput): Future[ListInvitationsOutput] = service.listInvitations(params).promise().toFuture
    @inline def listMembersFuture(params: ListMembersInput): Future[ListMembersOutput] = service.listMembers(params).promise().toFuture
    @inline def listNetworksFuture(params: ListNetworksInput): Future[ListNetworksOutput] = service.listNetworks(params).promise().toFuture
    @inline def listNodesFuture(params: ListNodesInput): Future[ListNodesOutput] = service.listNodes(params).promise().toFuture
    @inline def listProposalVotesFuture(params: ListProposalVotesInput): Future[ListProposalVotesOutput] = service.listProposalVotes(params).promise().toFuture
    @inline def listProposalsFuture(params: ListProposalsInput): Future[ListProposalsOutput] = service.listProposals(params).promise().toFuture
    @inline def listTagsForResourceFuture(params: ListTagsForResourceRequest): Future[ListTagsForResourceResponse] = service.listTagsForResource(params).promise().toFuture
    @inline def rejectInvitationFuture(params: RejectInvitationInput): Future[RejectInvitationOutput] = service.rejectInvitation(params).promise().toFuture
    @inline def tagResourceFuture(params: TagResourceRequest): Future[TagResourceResponse] = service.tagResource(params).promise().toFuture
    @inline def untagResourceFuture(params: UntagResourceRequest): Future[UntagResourceResponse] = service.untagResource(params).promise().toFuture
    @inline def updateMemberFuture(params: UpdateMemberInput): Future[UpdateMemberOutput] = service.updateMember(params).promise().toFuture
    @inline def updateNodeFuture(params: UpdateNodeInput): Future[UpdateNodeOutput] = service.updateNode(params).promise().toFuture
    @inline def voteOnProposalFuture(params: VoteOnProposalInput): Future[VoteOnProposalOutput] = service.voteOnProposal(params).promise().toFuture
  }
}
package managedblockchain {
/** Native facade for the AWS ManagedBlockchain client ("aws-sdk/clients/managedblockchain").
* Each method issues the corresponding API call and returns the SDK's Request object;
* all bodies are implemented by the underlying JavaScript SDK (js.native).
*/
@js.native
@JSImport("aws-sdk/clients/managedblockchain", JSImport.Namespace, "AWS.ManagedBlockchain")
class ManagedBlockchain() extends js.Object {
// Secondary constructor accepting an AWSConfig; the config is consumed by the JS side.
def this(config: AWSConfig) = this()
def createMember(params: CreateMemberInput): Request[CreateMemberOutput] = js.native
def createNetwork(params: CreateNetworkInput): Request[CreateNetworkOutput] = js.native
def createNode(params: CreateNodeInput): Request[CreateNodeOutput] = js.native
def createProposal(params: CreateProposalInput): Request[CreateProposalOutput] = js.native
def deleteMember(params: DeleteMemberInput): Request[DeleteMemberOutput] = js.native
def deleteNode(params: DeleteNodeInput): Request[DeleteNodeOutput] = js.native
def getMember(params: GetMemberInput): Request[GetMemberOutput] = js.native
def getNetwork(params: GetNetworkInput): Request[GetNetworkOutput] = js.native
def getNode(params: GetNodeInput): Request[GetNodeOutput] = js.native
def getProposal(params: GetProposalInput): Request[GetProposalOutput] = js.native
def listInvitations(params: ListInvitationsInput): Request[ListInvitationsOutput] = js.native
def listMembers(params: ListMembersInput): Request[ListMembersOutput] = js.native
def listNetworks(params: ListNetworksInput): Request[ListNetworksOutput] = js.native
def listNodes(params: ListNodesInput): Request[ListNodesOutput] = js.native
def listProposalVotes(params: ListProposalVotesInput): Request[ListProposalVotesOutput] = js.native
def listProposals(params: ListProposalsInput): Request[ListProposalsOutput] = js.native
def listTagsForResource(params: ListTagsForResourceRequest): Request[ListTagsForResourceResponse] = js.native
def rejectInvitation(params: RejectInvitationInput): Request[RejectInvitationOutput] = js.native
def tagResource(params: TagResourceRequest): Request[TagResourceResponse] = js.native
def untagResource(params: UntagResourceRequest): Request[UntagResourceResponse] = js.native
def updateMember(params: UpdateMemberInput): Request[UpdateMemberOutput] = js.native
def updateNode(params: UpdateNodeInput): Request[UpdateNodeOutput] = js.native
def voteOnProposal(params: VoteOnProposalInput): Request[VoteOnProposalOutput] = js.native
}
/** A policy type that defines the voting rules for the network. The rules decide if a proposal is approved. Approval may be based on criteria such as the percentage of <code>YES</code> votes and the duration of the proposal. The policy applies to all proposals and is specified when the network is created. Applies only to Hyperledger Fabric.
*/
@js.native
trait ApprovalThresholdPolicy extends js.Object {
var ProposalDurationInHours: js.UndefOr[ProposalDurationInt]
var ThresholdComparator: js.UndefOr[ThresholdComparator]
var ThresholdPercentage: js.UndefOr[ThresholdPercentageInt]
}
/** Factory for [[ApprovalThresholdPolicy]]; fields left as js.undefined are omitted from the JS object. */
object ApprovalThresholdPolicy {
@inline
def apply(
ProposalDurationInHours: js.UndefOr[ProposalDurationInt] = js.undefined,
ThresholdComparator: js.UndefOr[ThresholdComparator] = js.undefined,
ThresholdPercentage: js.UndefOr[ThresholdPercentageInt] = js.undefined
): ApprovalThresholdPolicy = {
val __obj = js.Dynamic.literal()
ProposalDurationInHours.foreach(__v => __obj.updateDynamic("ProposalDurationInHours")(__v.asInstanceOf[js.Any]))
ThresholdComparator.foreach(__v => __obj.updateDynamic("ThresholdComparator")(__v.asInstanceOf[js.Any]))
ThresholdPercentage.foreach(__v => __obj.updateDynamic("ThresholdPercentage")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ApprovalThresholdPolicy]
}
}
/** Request shape for the CreateMember operation; all four fields are required. */
@js.native
trait CreateMemberInput extends js.Object {
var ClientRequestToken: ClientRequestTokenString
var InvitationId: ResourceIdString
var MemberConfiguration: MemberConfiguration
var NetworkId: ResourceIdString
}
/** Factory for [[CreateMemberInput]]. */
object CreateMemberInput {
@inline
def apply(
ClientRequestToken: ClientRequestTokenString,
InvitationId: ResourceIdString,
MemberConfiguration: MemberConfiguration,
NetworkId: ResourceIdString
): CreateMemberInput = {
val __obj = js.Dynamic.literal(
"ClientRequestToken" -> ClientRequestToken.asInstanceOf[js.Any],
"InvitationId" -> InvitationId.asInstanceOf[js.Any],
"MemberConfiguration" -> MemberConfiguration.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[CreateMemberInput]
}
}
/** Response shape for the CreateMember operation. */
@js.native
trait CreateMemberOutput extends js.Object {
var MemberId: js.UndefOr[ResourceIdString]
}
/** Factory for [[CreateMemberOutput]]; an undefined MemberId is omitted from the JS object. */
object CreateMemberOutput {
@inline
def apply(
MemberId: js.UndefOr[ResourceIdString] = js.undefined
): CreateMemberOutput = {
val __obj = js.Dynamic.literal()
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateMemberOutput]
}
}
/** Request shape for the CreateNetwork operation; Description, FrameworkConfiguration and Tags are optional. */
@js.native
trait CreateNetworkInput extends js.Object {
var ClientRequestToken: ClientRequestTokenString
var Framework: Framework
var FrameworkVersion: FrameworkVersionString
var MemberConfiguration: MemberConfiguration
var Name: NameString
var VotingPolicy: VotingPolicy
var Description: js.UndefOr[DescriptionString]
var FrameworkConfiguration: js.UndefOr[NetworkFrameworkConfiguration]
var Tags: js.UndefOr[InputTagMap]
}
/** Factory for [[CreateNetworkInput]]; optional fields left undefined are omitted. */
object CreateNetworkInput {
@inline
def apply(
ClientRequestToken: ClientRequestTokenString,
Framework: Framework,
FrameworkVersion: FrameworkVersionString,
MemberConfiguration: MemberConfiguration,
Name: NameString,
VotingPolicy: VotingPolicy,
Description: js.UndefOr[DescriptionString] = js.undefined,
FrameworkConfiguration: js.UndefOr[NetworkFrameworkConfiguration] = js.undefined,
Tags: js.UndefOr[InputTagMap] = js.undefined
): CreateNetworkInput = {
val __obj = js.Dynamic.literal(
"ClientRequestToken" -> ClientRequestToken.asInstanceOf[js.Any],
"Framework" -> Framework.asInstanceOf[js.Any],
"FrameworkVersion" -> FrameworkVersion.asInstanceOf[js.Any],
"MemberConfiguration" -> MemberConfiguration.asInstanceOf[js.Any],
"Name" -> Name.asInstanceOf[js.Any],
"VotingPolicy" -> VotingPolicy.asInstanceOf[js.Any]
)
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
FrameworkConfiguration.foreach(__v => __obj.updateDynamic("FrameworkConfiguration")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateNetworkInput]
}
}
/** Response shape for the CreateNetwork operation. */
@js.native
trait CreateNetworkOutput extends js.Object {
var MemberId: js.UndefOr[ResourceIdString]
var NetworkId: js.UndefOr[ResourceIdString]
}
/** Factory for [[CreateNetworkOutput]]; undefined fields are omitted from the JS object. */
object CreateNetworkOutput {
@inline
def apply(
MemberId: js.UndefOr[ResourceIdString] = js.undefined,
NetworkId: js.UndefOr[ResourceIdString] = js.undefined
): CreateNetworkOutput = {
val __obj = js.Dynamic.literal()
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
NetworkId.foreach(__v => __obj.updateDynamic("NetworkId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateNetworkOutput]
}
}
/** Request shape for the CreateNode operation; MemberId and Tags are optional. */
@js.native
trait CreateNodeInput extends js.Object {
var ClientRequestToken: ClientRequestTokenString
var NetworkId: ResourceIdString
var NodeConfiguration: NodeConfiguration
var MemberId: js.UndefOr[ResourceIdString]
var Tags: js.UndefOr[InputTagMap]
}
/** Factory for [[CreateNodeInput]]; optional fields left undefined are omitted. */
object CreateNodeInput {
@inline
def apply(
ClientRequestToken: ClientRequestTokenString,
NetworkId: ResourceIdString,
NodeConfiguration: NodeConfiguration,
MemberId: js.UndefOr[ResourceIdString] = js.undefined,
Tags: js.UndefOr[InputTagMap] = js.undefined
): CreateNodeInput = {
val __obj = js.Dynamic.literal(
"ClientRequestToken" -> ClientRequestToken.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"NodeConfiguration" -> NodeConfiguration.asInstanceOf[js.Any]
)
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateNodeInput]
}
}
/** Response shape for the CreateNode operation. */
@js.native
trait CreateNodeOutput extends js.Object {
var NodeId: js.UndefOr[ResourceIdString]
}
/** Factory for [[CreateNodeOutput]]; an undefined NodeId is omitted from the JS object. */
object CreateNodeOutput {
@inline
def apply(
NodeId: js.UndefOr[ResourceIdString] = js.undefined
): CreateNodeOutput = {
val __obj = js.Dynamic.literal()
NodeId.foreach(__v => __obj.updateDynamic("NodeId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateNodeOutput]
}
}
/** Request shape for the CreateProposal operation; Description and Tags are optional. */
@js.native
trait CreateProposalInput extends js.Object {
var Actions: ProposalActions
var ClientRequestToken: ClientRequestTokenString
var MemberId: ResourceIdString
var NetworkId: ResourceIdString
var Description: js.UndefOr[DescriptionString]
var Tags: js.UndefOr[InputTagMap]
}
/** Factory for [[CreateProposalInput]]; optional fields left undefined are omitted. */
object CreateProposalInput {
@inline
def apply(
Actions: ProposalActions,
ClientRequestToken: ClientRequestTokenString,
MemberId: ResourceIdString,
NetworkId: ResourceIdString,
Description: js.UndefOr[DescriptionString] = js.undefined,
Tags: js.UndefOr[InputTagMap] = js.undefined
): CreateProposalInput = {
val __obj = js.Dynamic.literal(
"Actions" -> Actions.asInstanceOf[js.Any],
"ClientRequestToken" -> ClientRequestToken.asInstanceOf[js.Any],
"MemberId" -> MemberId.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateProposalInput]
}
}
/** Response shape for the CreateProposal operation. */
@js.native
trait CreateProposalOutput extends js.Object {
var ProposalId: js.UndefOr[ResourceIdString]
}
/** Factory for [[CreateProposalOutput]]; an undefined ProposalId is omitted from the JS object. */
object CreateProposalOutput {
@inline
def apply(
ProposalId: js.UndefOr[ResourceIdString] = js.undefined
): CreateProposalOutput = {
val __obj = js.Dynamic.literal()
ProposalId.foreach(__v => __obj.updateDynamic("ProposalId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[CreateProposalOutput]
}
}
/** Request shape for the DeleteMember operation; both identifiers are required. */
@js.native
trait DeleteMemberInput extends js.Object {
var MemberId: ResourceIdString
var NetworkId: ResourceIdString
}
/** Factory for [[DeleteMemberInput]]. */
object DeleteMemberInput {
@inline
def apply(
MemberId: ResourceIdString,
NetworkId: ResourceIdString
): DeleteMemberInput = {
val __obj = js.Dynamic.literal(
"MemberId" -> MemberId.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[DeleteMemberInput]
}
}
/** Empty response shape for the DeleteMember operation. */
@js.native
trait DeleteMemberOutput extends js.Object
/** Factory producing an empty [[DeleteMemberOutput]]. */
object DeleteMemberOutput {
@inline
def apply(): DeleteMemberOutput = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[DeleteMemberOutput]
}
}
/** Request shape for the DeleteNode operation; MemberId is optional. */
@js.native
trait DeleteNodeInput extends js.Object {
var NetworkId: ResourceIdString
var NodeId: ResourceIdString
var MemberId: js.UndefOr[ResourceIdString]
}
/** Factory for [[DeleteNodeInput]]; an undefined MemberId is omitted. */
object DeleteNodeInput {
@inline
def apply(
NetworkId: ResourceIdString,
NodeId: ResourceIdString,
MemberId: js.UndefOr[ResourceIdString] = js.undefined
): DeleteNodeInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"NodeId" -> NodeId.asInstanceOf[js.Any]
)
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[DeleteNodeInput]
}
}
/** Empty response shape for the DeleteNode operation. */
@js.native
trait DeleteNodeOutput extends js.Object
/** Factory producing an empty [[DeleteNodeOutput]]. */
object DeleteNodeOutput {
@inline
def apply(): DeleteNodeOutput = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[DeleteNodeOutput]
}
}
/** String-enum of Managed Blockchain network editions. */
@js.native
sealed trait Edition extends js.Any
object Edition {
val STARTER = "STARTER".asInstanceOf[Edition]
val STANDARD = "STANDARD".asInstanceOf[Edition]
@inline def values = js.Array(STARTER, STANDARD)
}
/** String-enum of supported blockchain frameworks. */
@js.native
sealed trait Framework extends js.Any
object Framework {
val HYPERLEDGER_FABRIC = "HYPERLEDGER_FABRIC".asInstanceOf[Framework]
val ETHEREUM = "ETHEREUM".asInstanceOf[Framework]
@inline def values = js.Array(HYPERLEDGER_FABRIC, ETHEREUM)
}
/** Request shape for the GetMember operation; both identifiers are required. */
@js.native
trait GetMemberInput extends js.Object {
var MemberId: ResourceIdString
var NetworkId: ResourceIdString
}
/** Factory for [[GetMemberInput]]. */
object GetMemberInput {
@inline
def apply(
MemberId: ResourceIdString,
NetworkId: ResourceIdString
): GetMemberInput = {
val __obj = js.Dynamic.literal(
"MemberId" -> MemberId.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[GetMemberInput]
}
}
/** Response shape for the GetMember operation. */
@js.native
trait GetMemberOutput extends js.Object {
var Member: js.UndefOr[Member]
}
/** Factory for [[GetMemberOutput]]; an undefined Member is omitted from the JS object. */
object GetMemberOutput {
@inline
def apply(
Member: js.UndefOr[Member] = js.undefined
): GetMemberOutput = {
val __obj = js.Dynamic.literal()
Member.foreach(__v => __obj.updateDynamic("Member")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[GetMemberOutput]
}
}
/** Request shape for the GetNetwork operation. */
@js.native
trait GetNetworkInput extends js.Object {
var NetworkId: ResourceIdString
}
/** Factory for [[GetNetworkInput]]. */
object GetNetworkInput {
@inline
def apply(
NetworkId: ResourceIdString
): GetNetworkInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[GetNetworkInput]
}
}
/** Response shape for the GetNetwork operation. */
@js.native
trait GetNetworkOutput extends js.Object {
var Network: js.UndefOr[Network]
}
/** Factory for [[GetNetworkOutput]]; an undefined Network is omitted from the JS object. */
object GetNetworkOutput {
@inline
def apply(
Network: js.UndefOr[Network] = js.undefined
): GetNetworkOutput = {
val __obj = js.Dynamic.literal()
Network.foreach(__v => __obj.updateDynamic("Network")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[GetNetworkOutput]
}
}
/** Request shape for the GetNode operation; MemberId is optional. */
@js.native
trait GetNodeInput extends js.Object {
var NetworkId: ResourceIdString
var NodeId: ResourceIdString
var MemberId: js.UndefOr[ResourceIdString]
}
/** Factory for [[GetNodeInput]]; an undefined MemberId is omitted. */
object GetNodeInput {
@inline
def apply(
NetworkId: ResourceIdString,
NodeId: ResourceIdString,
MemberId: js.UndefOr[ResourceIdString] = js.undefined
): GetNodeInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"NodeId" -> NodeId.asInstanceOf[js.Any]
)
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[GetNodeInput]
}
}
/** Response shape for the GetNode operation. */
@js.native
trait GetNodeOutput extends js.Object {
var Node: js.UndefOr[Node]
}
/** Factory for [[GetNodeOutput]]; an undefined Node is omitted from the JS object. */
object GetNodeOutput {
@inline
def apply(
Node: js.UndefOr[Node] = js.undefined
): GetNodeOutput = {
val __obj = js.Dynamic.literal()
Node.foreach(__v => __obj.updateDynamic("Node")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[GetNodeOutput]
}
}
/** Request shape for the GetProposal operation; both identifiers are required. */
@js.native
trait GetProposalInput extends js.Object {
var NetworkId: ResourceIdString
var ProposalId: ResourceIdString
}
/** Factory for [[GetProposalInput]]. */
object GetProposalInput {
@inline
def apply(
NetworkId: ResourceIdString,
ProposalId: ResourceIdString
): GetProposalInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"ProposalId" -> ProposalId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[GetProposalInput]
}
}
/** Response shape for the GetProposal operation. */
@js.native
trait GetProposalOutput extends js.Object {
var Proposal: js.UndefOr[Proposal]
}
/** Factory for [[GetProposalOutput]]; an undefined Proposal is omitted from the JS object. */
object GetProposalOutput {
@inline
def apply(
Proposal: js.UndefOr[Proposal] = js.undefined
): GetProposalOutput = {
val __obj = js.Dynamic.literal()
Proposal.foreach(__v => __obj.updateDynamic("Proposal")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[GetProposalOutput]
}
}
/** An invitation to an AWS account to create a member and join the network. Applies only to Hyperledger Fabric.
*/
@js.native
trait Invitation extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var ExpirationDate: js.UndefOr[Timestamp]
var InvitationId: js.UndefOr[ResourceIdString]
var NetworkSummary: js.UndefOr[NetworkSummary]
var Status: js.UndefOr[InvitationStatus]
}
/** Factory for [[Invitation]]; fields left as js.undefined are omitted from the JS object. */
object Invitation {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
ExpirationDate: js.UndefOr[Timestamp] = js.undefined,
InvitationId: js.UndefOr[ResourceIdString] = js.undefined,
NetworkSummary: js.UndefOr[NetworkSummary] = js.undefined,
Status: js.UndefOr[InvitationStatus] = js.undefined
): Invitation = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
ExpirationDate.foreach(__v => __obj.updateDynamic("ExpirationDate")(__v.asInstanceOf[js.Any]))
InvitationId.foreach(__v => __obj.updateDynamic("InvitationId")(__v.asInstanceOf[js.Any]))
NetworkSummary.foreach(__v => __obj.updateDynamic("NetworkSummary")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Invitation]
}
}
/** String-enum of invitation lifecycle states. */
@js.native
sealed trait InvitationStatus extends js.Any
object InvitationStatus {
val PENDING = "PENDING".asInstanceOf[InvitationStatus]
val ACCEPTED = "ACCEPTED".asInstanceOf[InvitationStatus]
val ACCEPTING = "ACCEPTING".asInstanceOf[InvitationStatus]
val REJECTED = "REJECTED".asInstanceOf[InvitationStatus]
val EXPIRED = "EXPIRED".asInstanceOf[InvitationStatus]
@inline def values = js.Array(PENDING, ACCEPTED, ACCEPTING, REJECTED, EXPIRED)
}
/** An action to invite a specific AWS account to create a member and join the network. The <code>InviteAction</code> is carried out when a <code>Proposal</code> is <code>APPROVED</code>. Applies only to Hyperledger Fabric.
*/
@js.native
trait InviteAction extends js.Object {
var Principal: PrincipalString
}
/** Factory for [[InviteAction]]. */
object InviteAction {
@inline
def apply(
Principal: PrincipalString
): InviteAction = {
val __obj = js.Dynamic.literal(
"Principal" -> Principal.asInstanceOf[js.Any]
)
__obj.asInstanceOf[InviteAction]
}
}
/** Request shape for the ListInvitations operation; both pagination fields are optional. */
@js.native
trait ListInvitationsInput extends js.Object {
var MaxResults: js.UndefOr[ProposalListMaxResults]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListInvitationsInput]]; undefined fields are omitted. */
object ListInvitationsInput {
@inline
def apply(
MaxResults: js.UndefOr[ProposalListMaxResults] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListInvitationsInput = {
val __obj = js.Dynamic.literal()
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListInvitationsInput]
}
}
/** Response shape for the ListInvitations operation. */
@js.native
trait ListInvitationsOutput extends js.Object {
var Invitations: js.UndefOr[InvitationList]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListInvitationsOutput]]; undefined fields are omitted. */
object ListInvitationsOutput {
@inline
def apply(
Invitations: js.UndefOr[InvitationList] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListInvitationsOutput = {
val __obj = js.Dynamic.literal()
Invitations.foreach(__v => __obj.updateDynamic("Invitations")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListInvitationsOutput]
}
}
/** Request shape for the ListMembers operation; only NetworkId is required. */
@js.native
trait ListMembersInput extends js.Object {
var NetworkId: ResourceIdString
var IsOwned: js.UndefOr[IsOwned]
var MaxResults: js.UndefOr[MemberListMaxResults]
var Name: js.UndefOr[String]
var NextToken: js.UndefOr[PaginationToken]
var Status: js.UndefOr[MemberStatus]
}
/** Factory for [[ListMembersInput]]; optional filter fields left undefined are omitted. */
object ListMembersInput {
@inline
def apply(
NetworkId: ResourceIdString,
IsOwned: js.UndefOr[IsOwned] = js.undefined,
MaxResults: js.UndefOr[MemberListMaxResults] = js.undefined,
Name: js.UndefOr[String] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined,
Status: js.UndefOr[MemberStatus] = js.undefined
): ListMembersInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
IsOwned.foreach(__v => __obj.updateDynamic("IsOwned")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListMembersInput]
}
}
/** Response shape for the ListMembers operation. */
@js.native
trait ListMembersOutput extends js.Object {
var Members: js.UndefOr[MemberSummaryList]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListMembersOutput]]; undefined fields are omitted. */
object ListMembersOutput {
@inline
def apply(
Members: js.UndefOr[MemberSummaryList] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListMembersOutput = {
val __obj = js.Dynamic.literal()
Members.foreach(__v => __obj.updateDynamic("Members")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListMembersOutput]
}
}
/** Request shape for the ListNetworks operation; all filter fields are optional. */
@js.native
trait ListNetworksInput extends js.Object {
var Framework: js.UndefOr[Framework]
var MaxResults: js.UndefOr[NetworkListMaxResults]
var Name: js.UndefOr[String]
var NextToken: js.UndefOr[PaginationToken]
var Status: js.UndefOr[NetworkStatus]
}
/** Factory for [[ListNetworksInput]]; undefined fields are omitted. */
object ListNetworksInput {
@inline
def apply(
Framework: js.UndefOr[Framework] = js.undefined,
MaxResults: js.UndefOr[NetworkListMaxResults] = js.undefined,
Name: js.UndefOr[String] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined,
Status: js.UndefOr[NetworkStatus] = js.undefined
): ListNetworksInput = {
val __obj = js.Dynamic.literal()
Framework.foreach(__v => __obj.updateDynamic("Framework")(__v.asInstanceOf[js.Any]))
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListNetworksInput]
}
}
/** Response shape for the ListNetworks operation. */
@js.native
trait ListNetworksOutput extends js.Object {
var Networks: js.UndefOr[NetworkSummaryList]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListNetworksOutput]]; undefined fields are omitted. */
object ListNetworksOutput {
@inline
def apply(
Networks: js.UndefOr[NetworkSummaryList] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListNetworksOutput = {
val __obj = js.Dynamic.literal()
Networks.foreach(__v => __obj.updateDynamic("Networks")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListNetworksOutput]
}
}
/** Request shape for the ListNodes operation; only NetworkId is required. */
@js.native
trait ListNodesInput extends js.Object {
var NetworkId: ResourceIdString
var MaxResults: js.UndefOr[NodeListMaxResults]
var MemberId: js.UndefOr[ResourceIdString]
var NextToken: js.UndefOr[PaginationToken]
var Status: js.UndefOr[NodeStatus]
}
/** Factory for [[ListNodesInput]]; optional fields left undefined are omitted. */
object ListNodesInput {
@inline
def apply(
NetworkId: ResourceIdString,
MaxResults: js.UndefOr[NodeListMaxResults] = js.undefined,
MemberId: js.UndefOr[ResourceIdString] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined,
Status: js.UndefOr[NodeStatus] = js.undefined
): ListNodesInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListNodesInput]
}
}
/** Response shape for the ListNodes operation. */
@js.native
trait ListNodesOutput extends js.Object {
var NextToken: js.UndefOr[PaginationToken]
var Nodes: js.UndefOr[NodeSummaryList]
}
/** Factory for [[ListNodesOutput]]; undefined fields are omitted. */
object ListNodesOutput {
@inline
def apply(
NextToken: js.UndefOr[PaginationToken] = js.undefined,
Nodes: js.UndefOr[NodeSummaryList] = js.undefined
): ListNodesOutput = {
val __obj = js.Dynamic.literal()
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
Nodes.foreach(__v => __obj.updateDynamic("Nodes")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListNodesOutput]
}
}
/** Request shape for the ListProposalVotes operation; pagination fields are optional. */
@js.native
trait ListProposalVotesInput extends js.Object {
var NetworkId: ResourceIdString
var ProposalId: ResourceIdString
var MaxResults: js.UndefOr[ProposalListMaxResults]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListProposalVotesInput]]; undefined fields are omitted. */
object ListProposalVotesInput {
@inline
def apply(
NetworkId: ResourceIdString,
ProposalId: ResourceIdString,
MaxResults: js.UndefOr[ProposalListMaxResults] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListProposalVotesInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"ProposalId" -> ProposalId.asInstanceOf[js.Any]
)
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListProposalVotesInput]
}
}
/** Response shape for the ListProposalVotes operation. */
@js.native
trait ListProposalVotesOutput extends js.Object {
var NextToken: js.UndefOr[PaginationToken]
var ProposalVotes: js.UndefOr[ProposalVoteList]
}
/** Factory for [[ListProposalVotesOutput]]; undefined fields are omitted. */
object ListProposalVotesOutput {
@inline
def apply(
NextToken: js.UndefOr[PaginationToken] = js.undefined,
ProposalVotes: js.UndefOr[ProposalVoteList] = js.undefined
): ListProposalVotesOutput = {
val __obj = js.Dynamic.literal()
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
ProposalVotes.foreach(__v => __obj.updateDynamic("ProposalVotes")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListProposalVotesOutput]
}
}
/** Request shape for the ListProposals operation; pagination fields are optional. */
@js.native
trait ListProposalsInput extends js.Object {
var NetworkId: ResourceIdString
var MaxResults: js.UndefOr[ProposalListMaxResults]
var NextToken: js.UndefOr[PaginationToken]
}
/** Factory for [[ListProposalsInput]]; undefined fields are omitted. */
object ListProposalsInput {
@inline
def apply(
NetworkId: ResourceIdString,
MaxResults: js.UndefOr[ProposalListMaxResults] = js.undefined,
NextToken: js.UndefOr[PaginationToken] = js.undefined
): ListProposalsInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
MaxResults.foreach(__v => __obj.updateDynamic("MaxResults")(__v.asInstanceOf[js.Any]))
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListProposalsInput]
}
}
/** Response shape for the ListProposals operation. */
@js.native
trait ListProposalsOutput extends js.Object {
var NextToken: js.UndefOr[PaginationToken]
var Proposals: js.UndefOr[ProposalSummaryList]
}
/** Factory for [[ListProposalsOutput]]; undefined fields are omitted. */
object ListProposalsOutput {
@inline
def apply(
NextToken: js.UndefOr[PaginationToken] = js.undefined,
Proposals: js.UndefOr[ProposalSummaryList] = js.undefined
): ListProposalsOutput = {
val __obj = js.Dynamic.literal()
NextToken.foreach(__v => __obj.updateDynamic("NextToken")(__v.asInstanceOf[js.Any]))
Proposals.foreach(__v => __obj.updateDynamic("Proposals")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListProposalsOutput]
}
}
/** Request shape for the ListTagsForResource operation. */
@js.native
trait ListTagsForResourceRequest extends js.Object {
var ResourceArn: ArnString
}
/** Factory for [[ListTagsForResourceRequest]]. */
object ListTagsForResourceRequest {
@inline
def apply(
ResourceArn: ArnString
): ListTagsForResourceRequest = {
val __obj = js.Dynamic.literal(
"ResourceArn" -> ResourceArn.asInstanceOf[js.Any]
)
__obj.asInstanceOf[ListTagsForResourceRequest]
}
}
/** Response shape for the ListTagsForResource operation. */
@js.native
trait ListTagsForResourceResponse extends js.Object {
var Tags: js.UndefOr[OutputTagMap]
}
/** Factory for [[ListTagsForResourceResponse]]; an undefined Tags map is omitted. */
object ListTagsForResourceResponse {
@inline
def apply(
Tags: js.UndefOr[OutputTagMap] = js.undefined
): ListTagsForResourceResponse = {
val __obj = js.Dynamic.literal()
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ListTagsForResourceResponse]
}
}
/** A configuration for logging events.
*/
@js.native
trait LogConfiguration extends js.Object {
var Enabled: js.UndefOr[Enabled]
}
/** Factory for [[LogConfiguration]]; an undefined Enabled flag is omitted. */
object LogConfiguration {
@inline
def apply(
Enabled: js.UndefOr[Enabled] = js.undefined
): LogConfiguration = {
val __obj = js.Dynamic.literal()
Enabled.foreach(__v => __obj.updateDynamic("Enabled")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[LogConfiguration]
}
}
/** A collection of log configurations.
*/
@js.native
trait LogConfigurations extends js.Object {
var Cloudwatch: js.UndefOr[LogConfiguration]
}
/** Factory for [[LogConfigurations]]; an undefined Cloudwatch configuration is omitted. */
object LogConfigurations {
@inline
def apply(
Cloudwatch: js.UndefOr[LogConfiguration] = js.undefined
): LogConfigurations = {
val __obj = js.Dynamic.literal()
Cloudwatch.foreach(__v => __obj.updateDynamic("Cloudwatch")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[LogConfigurations]
}
}
/** Member configuration properties. Applies only to Hyperledger Fabric.
*/
@js.native
trait Member extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var FrameworkAttributes: js.UndefOr[MemberFrameworkAttributes]
var Id: js.UndefOr[ResourceIdString]
var LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration]
var Name: js.UndefOr[NetworkMemberNameString]
var NetworkId: js.UndefOr[ResourceIdString]
var Status: js.UndefOr[MemberStatus]
var Tags: js.UndefOr[OutputTagMap]
}
/** Factory for [[Member]]; every field is optional and undefined fields are omitted from the JS object. */
object Member {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
FrameworkAttributes: js.UndefOr[MemberFrameworkAttributes] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration] = js.undefined,
Name: js.UndefOr[NetworkMemberNameString] = js.undefined,
NetworkId: js.UndefOr[ResourceIdString] = js.undefined,
Status: js.UndefOr[MemberStatus] = js.undefined,
Tags: js.UndefOr[OutputTagMap] = js.undefined
): Member = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
FrameworkAttributes.foreach(__v => __obj.updateDynamic("FrameworkAttributes")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
NetworkId.foreach(__v => __obj.updateDynamic("NetworkId")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Member]
}
}
/** Configuration properties of the member. Applies only to Hyperledger Fabric.
*/
@js.native
trait MemberConfiguration extends js.Object {
var FrameworkConfiguration: MemberFrameworkConfiguration
var Name: NetworkMemberNameString
var Description: js.UndefOr[DescriptionString]
var LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration]
var Tags: js.UndefOr[InputTagMap]
}
/** Factory for [[MemberConfiguration]]; optional fields left undefined are omitted. */
object MemberConfiguration {
@inline
def apply(
FrameworkConfiguration: MemberFrameworkConfiguration,
Name: NetworkMemberNameString,
Description: js.UndefOr[DescriptionString] = js.undefined,
LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration] = js.undefined,
Tags: js.UndefOr[InputTagMap] = js.undefined
): MemberConfiguration = {
val __obj = js.Dynamic.literal(
"FrameworkConfiguration" -> FrameworkConfiguration.asInstanceOf[js.Any],
"Name" -> Name.asInstanceOf[js.Any]
)
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberConfiguration]
}
}
/** Attributes of Hyperledger Fabric for a member in a Managed Blockchain network using the Hyperledger Fabric framework.
*/
@js.native
trait MemberFabricAttributes extends js.Object {
var AdminUsername: js.UndefOr[UsernameString]
var CaEndpoint: js.UndefOr[String]
}
/** Factory for [[MemberFabricAttributes]]; undefined fields are omitted. */
object MemberFabricAttributes {
@inline
def apply(
AdminUsername: js.UndefOr[UsernameString] = js.undefined,
CaEndpoint: js.UndefOr[String] = js.undefined
): MemberFabricAttributes = {
val __obj = js.Dynamic.literal()
AdminUsername.foreach(__v => __obj.updateDynamic("AdminUsername")(__v.asInstanceOf[js.Any]))
CaEndpoint.foreach(__v => __obj.updateDynamic("CaEndpoint")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberFabricAttributes]
}
}
/** Configuration properties for Hyperledger Fabric for a member in a Managed Blockchain network using the Hyperledger Fabric framework.
*/
@js.native
trait MemberFabricConfiguration extends js.Object {
// NOTE(review): the source was corrupted by a "<PASSWORD>" redaction placeholder here;
// restored to the AWS ManagedBlockchain API type PasswordString, matching the sibling field pattern.
var AdminPassword: PasswordString
var AdminUsername: UsernameString
}
/** Factory for [[MemberFabricConfiguration]]; both admin credential fields are required. */
object MemberFabricConfiguration {
@inline
def apply(
AdminPassword: PasswordString,
AdminUsername: UsernameString
): MemberFabricConfiguration = {
val __obj = js.Dynamic.literal(
"AdminPassword" -> AdminPassword.asInstanceOf[js.Any],
"AdminUsername" -> AdminUsername.asInstanceOf[js.Any]
)
__obj.asInstanceOf[MemberFabricConfiguration]
}
}
/** Configuration properties for logging events associated with a member of a Managed Blockchain network using the Hyperledger Fabric framework.
*/
@js.native
trait MemberFabricLogPublishingConfiguration extends js.Object {
var CaLogs: js.UndefOr[LogConfigurations]
}
object MemberFabricLogPublishingConfiguration {
@inline
def apply(
CaLogs: js.UndefOr[LogConfigurations] = js.undefined
): MemberFabricLogPublishingConfiguration = {
val __obj = js.Dynamic.literal()
CaLogs.foreach(__v => __obj.updateDynamic("CaLogs")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberFabricLogPublishingConfiguration]
}
}
/** Attributes relevant to a member for the blockchain framework that the Managed Blockchain network uses.
*/
@js.native
trait MemberFrameworkAttributes extends js.Object {
var Fabric: js.UndefOr[MemberFabricAttributes]
}
object MemberFrameworkAttributes {
@inline
def apply(
Fabric: js.UndefOr[MemberFabricAttributes] = js.undefined
): MemberFrameworkAttributes = {
val __obj = js.Dynamic.literal()
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberFrameworkAttributes]
}
}
/** Configuration properties relevant to a member for the blockchain framework that the Managed Blockchain network uses.
*/
@js.native
trait MemberFrameworkConfiguration extends js.Object {
var Fabric: js.UndefOr[MemberFabricConfiguration]
}
object MemberFrameworkConfiguration {
@inline
def apply(
Fabric: js.UndefOr[MemberFabricConfiguration] = js.undefined
): MemberFrameworkConfiguration = {
val __obj = js.Dynamic.literal()
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberFrameworkConfiguration]
}
}
/** Configuration properties for logging events associated with a member of a Managed Blockchain network.
*/
@js.native
trait MemberLogPublishingConfiguration extends js.Object {
var Fabric: js.UndefOr[MemberFabricLogPublishingConfiguration]
}
object MemberLogPublishingConfiguration {
@inline
def apply(
Fabric: js.UndefOr[MemberFabricLogPublishingConfiguration] = js.undefined
): MemberLogPublishingConfiguration = {
val __obj = js.Dynamic.literal()
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberLogPublishingConfiguration]
}
}
  /** Lifecycle states of a network member. String-backed JS-facade enum: the literal
    * values and their order in `values` mirror the service API, so they must not change.
    */
  @js.native
  sealed trait MemberStatus extends js.Any
  object MemberStatus {
    val CREATING = "CREATING".asInstanceOf[MemberStatus]
    val AVAILABLE = "AVAILABLE".asInstanceOf[MemberStatus]
    val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[MemberStatus]
    val UPDATING = "UPDATING".asInstanceOf[MemberStatus]
    val DELETING = "DELETING".asInstanceOf[MemberStatus]
    val DELETED = "DELETED".asInstanceOf[MemberStatus]
    @inline def values = js.Array(CREATING, AVAILABLE, CREATE_FAILED, UPDATING, DELETING, DELETED)
  }
/** A summary of configuration properties for a member. Applies only to Hyperledger Fabric.
*/
@js.native
trait MemberSummary extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var Id: js.UndefOr[ResourceIdString]
var IsOwned: js.UndefOr[IsOwned]
var Name: js.UndefOr[NetworkMemberNameString]
var Status: js.UndefOr[MemberStatus]
}
object MemberSummary {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
IsOwned: js.UndefOr[IsOwned] = js.undefined,
Name: js.UndefOr[NetworkMemberNameString] = js.undefined,
Status: js.UndefOr[MemberStatus] = js.undefined
): MemberSummary = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
IsOwned.foreach(__v => __obj.updateDynamic("IsOwned")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[MemberSummary]
}
}
/** Network configuration properties.
*/
@js.native
trait Network extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var Framework: js.UndefOr[Framework]
var FrameworkAttributes: js.UndefOr[NetworkFrameworkAttributes]
var FrameworkVersion: js.UndefOr[FrameworkVersionString]
var Id: js.UndefOr[ResourceIdString]
var Name: js.UndefOr[NameString]
var Status: js.UndefOr[NetworkStatus]
var Tags: js.UndefOr[OutputTagMap]
var VotingPolicy: js.UndefOr[VotingPolicy]
var VpcEndpointServiceName: js.UndefOr[String]
}
object Network {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
Framework: js.UndefOr[Framework] = js.undefined,
FrameworkAttributes: js.UndefOr[NetworkFrameworkAttributes] = js.undefined,
FrameworkVersion: js.UndefOr[FrameworkVersionString] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
Name: js.UndefOr[NameString] = js.undefined,
Status: js.UndefOr[NetworkStatus] = js.undefined,
Tags: js.UndefOr[OutputTagMap] = js.undefined,
VotingPolicy: js.UndefOr[VotingPolicy] = js.undefined,
VpcEndpointServiceName: js.UndefOr[String] = js.undefined
): Network = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Framework.foreach(__v => __obj.updateDynamic("Framework")(__v.asInstanceOf[js.Any]))
FrameworkAttributes.foreach(__v => __obj.updateDynamic("FrameworkAttributes")(__v.asInstanceOf[js.Any]))
FrameworkVersion.foreach(__v => __obj.updateDynamic("FrameworkVersion")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
VotingPolicy.foreach(__v => __obj.updateDynamic("VotingPolicy")(__v.asInstanceOf[js.Any]))
VpcEndpointServiceName.foreach(__v => __obj.updateDynamic("VpcEndpointServiceName")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Network]
}
}
/** Attributes of Ethereum for a network.
*/
@js.native
trait NetworkEthereumAttributes extends js.Object {
var ChainId: js.UndefOr[String]
}
object NetworkEthereumAttributes {
@inline
def apply(
ChainId: js.UndefOr[String] = js.undefined
): NetworkEthereumAttributes = {
val __obj = js.Dynamic.literal()
ChainId.foreach(__v => __obj.updateDynamic("ChainId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NetworkEthereumAttributes]
}
}
/** Attributes of Hyperledger Fabric for a network.
*/
@js.native
trait NetworkFabricAttributes extends js.Object {
var Edition: js.UndefOr[Edition]
var OrderingServiceEndpoint: js.UndefOr[String]
}
object NetworkFabricAttributes {
@inline
def apply(
Edition: js.UndefOr[Edition] = js.undefined,
OrderingServiceEndpoint: js.UndefOr[String] = js.undefined
): NetworkFabricAttributes = {
val __obj = js.Dynamic.literal()
Edition.foreach(__v => __obj.updateDynamic("Edition")(__v.asInstanceOf[js.Any]))
OrderingServiceEndpoint.foreach(__v => __obj.updateDynamic("OrderingServiceEndpoint")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NetworkFabricAttributes]
}
}
/** Hyperledger Fabric configuration properties for the network.
*/
@js.native
trait NetworkFabricConfiguration extends js.Object {
var Edition: Edition
}
object NetworkFabricConfiguration {
@inline
def apply(
Edition: Edition
): NetworkFabricConfiguration = {
val __obj = js.Dynamic.literal(
"Edition" -> Edition.asInstanceOf[js.Any]
)
__obj.asInstanceOf[NetworkFabricConfiguration]
}
}
/** Attributes relevant to the network for the blockchain framework that the network uses.
*/
@js.native
trait NetworkFrameworkAttributes extends js.Object {
var Ethereum: js.UndefOr[NetworkEthereumAttributes]
var Fabric: js.UndefOr[NetworkFabricAttributes]
}
object NetworkFrameworkAttributes {
@inline
def apply(
Ethereum: js.UndefOr[NetworkEthereumAttributes] = js.undefined,
Fabric: js.UndefOr[NetworkFabricAttributes] = js.undefined
): NetworkFrameworkAttributes = {
val __obj = js.Dynamic.literal()
Ethereum.foreach(__v => __obj.updateDynamic("Ethereum")(__v.asInstanceOf[js.Any]))
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NetworkFrameworkAttributes]
}
}
/** Configuration properties relevant to the network for the blockchain framework that the network uses.
*/
@js.native
trait NetworkFrameworkConfiguration extends js.Object {
var Fabric: js.UndefOr[NetworkFabricConfiguration]
}
object NetworkFrameworkConfiguration {
@inline
def apply(
Fabric: js.UndefOr[NetworkFabricConfiguration] = js.undefined
): NetworkFrameworkConfiguration = {
val __obj = js.Dynamic.literal()
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NetworkFrameworkConfiguration]
}
}
  /** Lifecycle states of a network. String-backed JS-facade enum: literal values and the
    * order of `values` mirror the service API and must not change.
    */
  @js.native
  sealed trait NetworkStatus extends js.Any
  object NetworkStatus {
    val CREATING = "CREATING".asInstanceOf[NetworkStatus]
    val AVAILABLE = "AVAILABLE".asInstanceOf[NetworkStatus]
    val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[NetworkStatus]
    val DELETING = "DELETING".asInstanceOf[NetworkStatus]
    val DELETED = "DELETED".asInstanceOf[NetworkStatus]
    @inline def values = js.Array(CREATING, AVAILABLE, CREATE_FAILED, DELETING, DELETED)
  }
/** A summary of network configuration properties.
*/
@js.native
trait NetworkSummary extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var Framework: js.UndefOr[Framework]
var FrameworkVersion: js.UndefOr[FrameworkVersionString]
var Id: js.UndefOr[ResourceIdString]
var Name: js.UndefOr[NameString]
var Status: js.UndefOr[NetworkStatus]
}
object NetworkSummary {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
Framework: js.UndefOr[Framework] = js.undefined,
FrameworkVersion: js.UndefOr[FrameworkVersionString] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
Name: js.UndefOr[NameString] = js.undefined,
Status: js.UndefOr[NetworkStatus] = js.undefined
): NetworkSummary = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
Framework.foreach(__v => __obj.updateDynamic("Framework")(__v.asInstanceOf[js.Any]))
FrameworkVersion.foreach(__v => __obj.updateDynamic("FrameworkVersion")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
Name.foreach(__v => __obj.updateDynamic("Name")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NetworkSummary]
}
}
/** Configuration properties of a node.
*/
@js.native
trait Node extends js.Object {
var Arn: js.UndefOr[ArnString]
var AvailabilityZone: js.UndefOr[AvailabilityZoneString]
var CreationDate: js.UndefOr[Timestamp]
var FrameworkAttributes: js.UndefOr[NodeFrameworkAttributes]
var Id: js.UndefOr[ResourceIdString]
var InstanceType: js.UndefOr[InstanceTypeString]
var LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration]
var MemberId: js.UndefOr[ResourceIdString]
var NetworkId: js.UndefOr[ResourceIdString]
var StateDB: js.UndefOr[StateDBType]
var Status: js.UndefOr[NodeStatus]
var Tags: js.UndefOr[OutputTagMap]
}
object Node {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
AvailabilityZone: js.UndefOr[AvailabilityZoneString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
FrameworkAttributes: js.UndefOr[NodeFrameworkAttributes] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
InstanceType: js.UndefOr[InstanceTypeString] = js.undefined,
LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration] = js.undefined,
MemberId: js.UndefOr[ResourceIdString] = js.undefined,
NetworkId: js.UndefOr[ResourceIdString] = js.undefined,
StateDB: js.UndefOr[StateDBType] = js.undefined,
Status: js.UndefOr[NodeStatus] = js.undefined,
Tags: js.UndefOr[OutputTagMap] = js.undefined
): Node = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
AvailabilityZone.foreach(__v => __obj.updateDynamic("AvailabilityZone")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
FrameworkAttributes.foreach(__v => __obj.updateDynamic("FrameworkAttributes")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
InstanceType.foreach(__v => __obj.updateDynamic("InstanceType")(__v.asInstanceOf[js.Any]))
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
NetworkId.foreach(__v => __obj.updateDynamic("NetworkId")(__v.asInstanceOf[js.Any]))
StateDB.foreach(__v => __obj.updateDynamic("StateDB")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Node]
}
}
/** Configuration properties of a node.
*/
@js.native
trait NodeConfiguration extends js.Object {
var InstanceType: InstanceTypeString
var AvailabilityZone: js.UndefOr[AvailabilityZoneString]
var LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration]
var StateDB: js.UndefOr[StateDBType]
}
object NodeConfiguration {
@inline
def apply(
InstanceType: InstanceTypeString,
AvailabilityZone: js.UndefOr[AvailabilityZoneString] = js.undefined,
LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration] = js.undefined,
StateDB: js.UndefOr[StateDBType] = js.undefined
): NodeConfiguration = {
val __obj = js.Dynamic.literal(
"InstanceType" -> InstanceType.asInstanceOf[js.Any]
)
AvailabilityZone.foreach(__v => __obj.updateDynamic("AvailabilityZone")(__v.asInstanceOf[js.Any]))
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
StateDB.foreach(__v => __obj.updateDynamic("StateDB")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeConfiguration]
}
}
/** Attributes of an Ethereum node.
*/
@js.native
trait NodeEthereumAttributes extends js.Object {
var HttpEndpoint: js.UndefOr[String]
var WebSocketEndpoint: js.UndefOr[String]
}
object NodeEthereumAttributes {
@inline
def apply(
HttpEndpoint: js.UndefOr[String] = js.undefined,
WebSocketEndpoint: js.UndefOr[String] = js.undefined
): NodeEthereumAttributes = {
val __obj = js.Dynamic.literal()
HttpEndpoint.foreach(__v => __obj.updateDynamic("HttpEndpoint")(__v.asInstanceOf[js.Any]))
WebSocketEndpoint.foreach(__v => __obj.updateDynamic("WebSocketEndpoint")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeEthereumAttributes]
}
}
/** Attributes of Hyperledger Fabric for a peer node on a Hyperledger Fabric network on Managed Blockchain.
*/
@js.native
trait NodeFabricAttributes extends js.Object {
var PeerEndpoint: js.UndefOr[String]
var PeerEventEndpoint: js.UndefOr[String]
}
object NodeFabricAttributes {
@inline
def apply(
PeerEndpoint: js.UndefOr[String] = js.undefined,
PeerEventEndpoint: js.UndefOr[String] = js.undefined
): NodeFabricAttributes = {
val __obj = js.Dynamic.literal()
PeerEndpoint.foreach(__v => __obj.updateDynamic("PeerEndpoint")(__v.asInstanceOf[js.Any]))
PeerEventEndpoint.foreach(__v => __obj.updateDynamic("PeerEventEndpoint")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeFabricAttributes]
}
}
/** Configuration properties for logging events associated with a peer node owned by a member in a Managed Blockchain network.
*/
@js.native
trait NodeFabricLogPublishingConfiguration extends js.Object {
var ChaincodeLogs: js.UndefOr[LogConfigurations]
var PeerLogs: js.UndefOr[LogConfigurations]
}
object NodeFabricLogPublishingConfiguration {
@inline
def apply(
ChaincodeLogs: js.UndefOr[LogConfigurations] = js.undefined,
PeerLogs: js.UndefOr[LogConfigurations] = js.undefined
): NodeFabricLogPublishingConfiguration = {
val __obj = js.Dynamic.literal()
ChaincodeLogs.foreach(__v => __obj.updateDynamic("ChaincodeLogs")(__v.asInstanceOf[js.Any]))
PeerLogs.foreach(__v => __obj.updateDynamic("PeerLogs")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeFabricLogPublishingConfiguration]
}
}
/** Attributes relevant to a node on a Managed Blockchain network for the blockchain framework that the network uses.
*/
@js.native
trait NodeFrameworkAttributes extends js.Object {
var Ethereum: js.UndefOr[NodeEthereumAttributes]
var Fabric: js.UndefOr[NodeFabricAttributes]
}
object NodeFrameworkAttributes {
@inline
def apply(
Ethereum: js.UndefOr[NodeEthereumAttributes] = js.undefined,
Fabric: js.UndefOr[NodeFabricAttributes] = js.undefined
): NodeFrameworkAttributes = {
val __obj = js.Dynamic.literal()
Ethereum.foreach(__v => __obj.updateDynamic("Ethereum")(__v.asInstanceOf[js.Any]))
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeFrameworkAttributes]
}
}
/** Configuration properties for logging events associated with a peer node on a Hyperledger Fabric network on Managed Blockchain.
*/
@js.native
trait NodeLogPublishingConfiguration extends js.Object {
var Fabric: js.UndefOr[NodeFabricLogPublishingConfiguration]
}
object NodeLogPublishingConfiguration {
@inline
def apply(
Fabric: js.UndefOr[NodeFabricLogPublishingConfiguration] = js.undefined
): NodeLogPublishingConfiguration = {
val __obj = js.Dynamic.literal()
Fabric.foreach(__v => __obj.updateDynamic("Fabric")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeLogPublishingConfiguration]
}
}
  /** Lifecycle states of a node. String-backed JS-facade enum: literal values and the
    * order of `values` mirror the service API and must not change.
    */
  @js.native
  sealed trait NodeStatus extends js.Any
  object NodeStatus {
    val CREATING = "CREATING".asInstanceOf[NodeStatus]
    val AVAILABLE = "AVAILABLE".asInstanceOf[NodeStatus]
    val UNHEALTHY = "UNHEALTHY".asInstanceOf[NodeStatus]
    val CREATE_FAILED = "CREATE_FAILED".asInstanceOf[NodeStatus]
    val UPDATING = "UPDATING".asInstanceOf[NodeStatus]
    val DELETING = "DELETING".asInstanceOf[NodeStatus]
    val DELETED = "DELETED".asInstanceOf[NodeStatus]
    val FAILED = "FAILED".asInstanceOf[NodeStatus]
    @inline def values = js.Array(CREATING, AVAILABLE, UNHEALTHY, CREATE_FAILED, UPDATING, DELETING, DELETED, FAILED)
  }
/** A summary of configuration properties for a node.
*/
@js.native
trait NodeSummary extends js.Object {
var Arn: js.UndefOr[ArnString]
var AvailabilityZone: js.UndefOr[AvailabilityZoneString]
var CreationDate: js.UndefOr[Timestamp]
var Id: js.UndefOr[ResourceIdString]
var InstanceType: js.UndefOr[InstanceTypeString]
var Status: js.UndefOr[NodeStatus]
}
object NodeSummary {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
AvailabilityZone: js.UndefOr[AvailabilityZoneString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Id: js.UndefOr[ResourceIdString] = js.undefined,
InstanceType: js.UndefOr[InstanceTypeString] = js.undefined,
Status: js.UndefOr[NodeStatus] = js.undefined
): NodeSummary = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
AvailabilityZone.foreach(__v => __obj.updateDynamic("AvailabilityZone")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Id.foreach(__v => __obj.updateDynamic("Id")(__v.asInstanceOf[js.Any]))
InstanceType.foreach(__v => __obj.updateDynamic("InstanceType")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[NodeSummary]
}
}
/** Properties of a proposal on a Managed Blockchain network. Applies only to Hyperledger Fabric.
*/
@js.native
trait Proposal extends js.Object {
var Actions: js.UndefOr[ProposalActions]
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var ExpirationDate: js.UndefOr[Timestamp]
var NetworkId: js.UndefOr[ResourceIdString]
var NoVoteCount: js.UndefOr[VoteCount]
var OutstandingVoteCount: js.UndefOr[VoteCount]
var ProposalId: js.UndefOr[ResourceIdString]
var ProposedByMemberId: js.UndefOr[ResourceIdString]
var ProposedByMemberName: js.UndefOr[NetworkMemberNameString]
var Status: js.UndefOr[ProposalStatus]
var Tags: js.UndefOr[OutputTagMap]
var YesVoteCount: js.UndefOr[VoteCount]
}
object Proposal {
@inline
def apply(
Actions: js.UndefOr[ProposalActions] = js.undefined,
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
ExpirationDate: js.UndefOr[Timestamp] = js.undefined,
NetworkId: js.UndefOr[ResourceIdString] = js.undefined,
NoVoteCount: js.UndefOr[VoteCount] = js.undefined,
OutstandingVoteCount: js.UndefOr[VoteCount] = js.undefined,
ProposalId: js.UndefOr[ResourceIdString] = js.undefined,
ProposedByMemberId: js.UndefOr[ResourceIdString] = js.undefined,
ProposedByMemberName: js.UndefOr[NetworkMemberNameString] = js.undefined,
Status: js.UndefOr[ProposalStatus] = js.undefined,
Tags: js.UndefOr[OutputTagMap] = js.undefined,
YesVoteCount: js.UndefOr[VoteCount] = js.undefined
): Proposal = {
val __obj = js.Dynamic.literal()
Actions.foreach(__v => __obj.updateDynamic("Actions")(__v.asInstanceOf[js.Any]))
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
ExpirationDate.foreach(__v => __obj.updateDynamic("ExpirationDate")(__v.asInstanceOf[js.Any]))
NetworkId.foreach(__v => __obj.updateDynamic("NetworkId")(__v.asInstanceOf[js.Any]))
NoVoteCount.foreach(__v => __obj.updateDynamic("NoVoteCount")(__v.asInstanceOf[js.Any]))
OutstandingVoteCount.foreach(__v => __obj.updateDynamic("OutstandingVoteCount")(__v.asInstanceOf[js.Any]))
ProposalId.foreach(__v => __obj.updateDynamic("ProposalId")(__v.asInstanceOf[js.Any]))
ProposedByMemberId.foreach(__v => __obj.updateDynamic("ProposedByMemberId")(__v.asInstanceOf[js.Any]))
ProposedByMemberName.foreach(__v => __obj.updateDynamic("ProposedByMemberName")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
Tags.foreach(__v => __obj.updateDynamic("Tags")(__v.asInstanceOf[js.Any]))
YesVoteCount.foreach(__v => __obj.updateDynamic("YesVoteCount")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[Proposal]
}
}
/** The actions to carry out if a proposal is <code>APPROVED</code>. Applies only to Hyperledger Fabric.
*/
@js.native
trait ProposalActions extends js.Object {
var Invitations: js.UndefOr[InviteActionList]
var Removals: js.UndefOr[RemoveActionList]
}
object ProposalActions {
@inline
def apply(
Invitations: js.UndefOr[InviteActionList] = js.undefined,
Removals: js.UndefOr[RemoveActionList] = js.undefined
): ProposalActions = {
val __obj = js.Dynamic.literal()
Invitations.foreach(__v => __obj.updateDynamic("Invitations")(__v.asInstanceOf[js.Any]))
Removals.foreach(__v => __obj.updateDynamic("Removals")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ProposalActions]
}
}
  /** Lifecycle states of a proposal. String-backed JS-facade enum: literal values and
    * the order of `values` mirror the service API and must not change.
    */
  @js.native
  sealed trait ProposalStatus extends js.Any
  object ProposalStatus {
    val IN_PROGRESS = "IN_PROGRESS".asInstanceOf[ProposalStatus]
    val APPROVED = "APPROVED".asInstanceOf[ProposalStatus]
    val REJECTED = "REJECTED".asInstanceOf[ProposalStatus]
    val EXPIRED = "EXPIRED".asInstanceOf[ProposalStatus]
    val ACTION_FAILED = "ACTION_FAILED".asInstanceOf[ProposalStatus]
    @inline def values = js.Array(IN_PROGRESS, APPROVED, REJECTED, EXPIRED, ACTION_FAILED)
  }
/** Properties of a proposal. Applies only to Hyperledger Fabric.
*/
@js.native
trait ProposalSummary extends js.Object {
var Arn: js.UndefOr[ArnString]
var CreationDate: js.UndefOr[Timestamp]
var Description: js.UndefOr[DescriptionString]
var ExpirationDate: js.UndefOr[Timestamp]
var ProposalId: js.UndefOr[ResourceIdString]
var ProposedByMemberId: js.UndefOr[ResourceIdString]
var ProposedByMemberName: js.UndefOr[NetworkMemberNameString]
var Status: js.UndefOr[ProposalStatus]
}
object ProposalSummary {
@inline
def apply(
Arn: js.UndefOr[ArnString] = js.undefined,
CreationDate: js.UndefOr[Timestamp] = js.undefined,
Description: js.UndefOr[DescriptionString] = js.undefined,
ExpirationDate: js.UndefOr[Timestamp] = js.undefined,
ProposalId: js.UndefOr[ResourceIdString] = js.undefined,
ProposedByMemberId: js.UndefOr[ResourceIdString] = js.undefined,
ProposedByMemberName: js.UndefOr[NetworkMemberNameString] = js.undefined,
Status: js.UndefOr[ProposalStatus] = js.undefined
): ProposalSummary = {
val __obj = js.Dynamic.literal()
Arn.foreach(__v => __obj.updateDynamic("Arn")(__v.asInstanceOf[js.Any]))
CreationDate.foreach(__v => __obj.updateDynamic("CreationDate")(__v.asInstanceOf[js.Any]))
Description.foreach(__v => __obj.updateDynamic("Description")(__v.asInstanceOf[js.Any]))
ExpirationDate.foreach(__v => __obj.updateDynamic("ExpirationDate")(__v.asInstanceOf[js.Any]))
ProposalId.foreach(__v => __obj.updateDynamic("ProposalId")(__v.asInstanceOf[js.Any]))
ProposedByMemberId.foreach(__v => __obj.updateDynamic("ProposedByMemberId")(__v.asInstanceOf[js.Any]))
ProposedByMemberName.foreach(__v => __obj.updateDynamic("ProposedByMemberName")(__v.asInstanceOf[js.Any]))
Status.foreach(__v => __obj.updateDynamic("Status")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[ProposalSummary]
}
}
@js.native
trait RejectInvitationInput extends js.Object {
var InvitationId: ResourceIdString
}
object RejectInvitationInput {
@inline
def apply(
InvitationId: ResourceIdString
): RejectInvitationInput = {
val __obj = js.Dynamic.literal(
"InvitationId" -> InvitationId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[RejectInvitationInput]
}
}
@js.native
trait RejectInvitationOutput extends js.Object
object RejectInvitationOutput {
@inline
def apply(): RejectInvitationOutput = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[RejectInvitationOutput]
}
}
/** An action to remove a member from a Managed Blockchain network as the result of a removal proposal that is <code>APPROVED</code>. The member and all associated resources are deleted from the network. Applies only to Hyperledger Fabric.
*/
@js.native
trait RemoveAction extends js.Object {
var MemberId: ResourceIdString
}
object RemoveAction {
@inline
def apply(
MemberId: ResourceIdString
): RemoveAction = {
val __obj = js.Dynamic.literal(
"MemberId" -> MemberId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[RemoveAction]
}
}
  /** Fabric state-database engine choices. String-backed JS-facade enum: literal values
    * and the order of `values` mirror the service API and must not change.
    */
  @js.native
  sealed trait StateDBType extends js.Any
  object StateDBType {
    val LevelDB = "LevelDB".asInstanceOf[StateDBType]
    val CouchDB = "CouchDB".asInstanceOf[StateDBType]
    @inline def values = js.Array(LevelDB, CouchDB)
  }
@js.native
trait TagResourceRequest extends js.Object {
var ResourceArn: ArnString
var Tags: InputTagMap
}
object TagResourceRequest {
@inline
def apply(
ResourceArn: ArnString,
Tags: InputTagMap
): TagResourceRequest = {
val __obj = js.Dynamic.literal(
"ResourceArn" -> ResourceArn.asInstanceOf[js.Any],
"Tags" -> Tags.asInstanceOf[js.Any]
)
__obj.asInstanceOf[TagResourceRequest]
}
}
@js.native
trait TagResourceResponse extends js.Object
object TagResourceResponse {
@inline
def apply(): TagResourceResponse = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[TagResourceResponse]
}
}
  /** Comparators for an approval-threshold policy. String-backed JS-facade enum: literal
    * values and the order of `values` mirror the service API and must not change.
    */
  @js.native
  sealed trait ThresholdComparator extends js.Any
  object ThresholdComparator {
    val GREATER_THAN = "GREATER_THAN".asInstanceOf[ThresholdComparator]
    val GREATER_THAN_OR_EQUAL_TO = "GREATER_THAN_OR_EQUAL_TO".asInstanceOf[ThresholdComparator]
    @inline def values = js.Array(GREATER_THAN, GREATER_THAN_OR_EQUAL_TO)
  }
@js.native
trait UntagResourceRequest extends js.Object {
var ResourceArn: ArnString
var TagKeys: TagKeyList
}
object UntagResourceRequest {
@inline
def apply(
ResourceArn: ArnString,
TagKeys: TagKeyList
): UntagResourceRequest = {
val __obj = js.Dynamic.literal(
"ResourceArn" -> ResourceArn.asInstanceOf[js.Any],
"TagKeys" -> TagKeys.asInstanceOf[js.Any]
)
__obj.asInstanceOf[UntagResourceRequest]
}
}
@js.native
trait UntagResourceResponse extends js.Object
object UntagResourceResponse {
@inline
def apply(): UntagResourceResponse = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[UntagResourceResponse]
}
}
@js.native
trait UpdateMemberInput extends js.Object {
var MemberId: ResourceIdString
var NetworkId: ResourceIdString
var LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration]
}
object UpdateMemberInput {
@inline
def apply(
MemberId: ResourceIdString,
NetworkId: ResourceIdString,
LogPublishingConfiguration: js.UndefOr[MemberLogPublishingConfiguration] = js.undefined
): UpdateMemberInput = {
val __obj = js.Dynamic.literal(
"MemberId" -> MemberId.asInstanceOf[js.Any],
"NetworkId" -> NetworkId.asInstanceOf[js.Any]
)
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateMemberInput]
}
}
@js.native
trait UpdateMemberOutput extends js.Object
object UpdateMemberOutput {
@inline
def apply(): UpdateMemberOutput = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[UpdateMemberOutput]
}
}
@js.native
trait UpdateNodeInput extends js.Object {
var NetworkId: ResourceIdString
var NodeId: ResourceIdString
var LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration]
var MemberId: js.UndefOr[ResourceIdString]
}
object UpdateNodeInput {
@inline
def apply(
NetworkId: ResourceIdString,
NodeId: ResourceIdString,
LogPublishingConfiguration: js.UndefOr[NodeLogPublishingConfiguration] = js.undefined,
MemberId: js.UndefOr[ResourceIdString] = js.undefined
): UpdateNodeInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"NodeId" -> NodeId.asInstanceOf[js.Any]
)
LogPublishingConfiguration.foreach(__v => __obj.updateDynamic("LogPublishingConfiguration")(__v.asInstanceOf[js.Any]))
MemberId.foreach(__v => __obj.updateDynamic("MemberId")(__v.asInstanceOf[js.Any]))
__obj.asInstanceOf[UpdateNodeInput]
}
}
@js.native
trait UpdateNodeOutput extends js.Object
object UpdateNodeOutput {
@inline
def apply(): UpdateNodeOutput = {
val __obj = js.Dynamic.literal()
__obj.asInstanceOf[UpdateNodeOutput]
}
}
@js.native
trait VoteOnProposalInput extends js.Object {
var NetworkId: ResourceIdString
var ProposalId: ResourceIdString
var Vote: VoteValue
var VoterMemberId: ResourceIdString
}
object VoteOnProposalInput {
@inline
def apply(
NetworkId: ResourceIdString,
ProposalId: ResourceIdString,
Vote: VoteValue,
VoterMemberId: ResourceIdString
): VoteOnProposalInput = {
val __obj = js.Dynamic.literal(
"NetworkId" -> NetworkId.asInstanceOf[js.Any],
"ProposalId" -> ProposalId.asInstanceOf[js.Any],
"Vote" -> Vote.asInstanceOf[js.Any],
"VoterMemberId" -> VoterMemberId.asInstanceOf[js.Any]
)
__obj.asInstanceOf[VoteOnProposalInput]
}
}
@js.native
trait VoteOnProposalOutput extends js.Object

object VoteOnProposalOutput {
  /** Builds an empty VoteOnProposalOutput facade instance. */
  @inline
  def apply(): VoteOnProposalOutput =
    js.Dynamic.literal().asInstanceOf[VoteOnProposalOutput]
}
/** Properties of an individual vote that a member cast for a proposal. Applies only to Hyperledger Fabric.
  */
@js.native
trait VoteSummary extends js.Object {
  var MemberId: js.UndefOr[ResourceIdString]
  var MemberName: js.UndefOr[NetworkMemberNameString]
  var Vote: js.UndefOr[VoteValue]
}

object VoteSummary {
  /** Builds a VoteSummary, writing only the fields that are defined. */
  @inline
  def apply(
      MemberId: js.UndefOr[ResourceIdString] = js.undefined,
      MemberName: js.UndefOr[NetworkMemberNameString] = js.undefined,
      Vote: js.UndefOr[VoteValue] = js.undefined
  ): VoteSummary = {
    val dyn = js.Dynamic.literal()
    MemberId.foreach(v => dyn.updateDynamic("MemberId")(v.asInstanceOf[js.Any]))
    MemberName.foreach(v => dyn.updateDynamic("MemberName")(v.asInstanceOf[js.Any]))
    Vote.foreach(v => dyn.updateDynamic("Vote")(v.asInstanceOf[js.Any]))
    dyn.asInstanceOf[VoteSummary]
  }
}
@js.native
sealed trait VoteValue extends js.Any

object VoteValue {
  /** The two possible votes on a network proposal. */
  val YES: VoteValue = "YES".asInstanceOf[VoteValue]
  val NO: VoteValue = "NO".asInstanceOf[VoteValue]

  @inline def values: js.Array[VoteValue] = js.Array(YES, NO)
}
/** The voting rules for the network to decide if a proposal is accepted Applies only to Hyperledger Fabric.
  */
@js.native
trait VotingPolicy extends js.Object {
  var ApprovalThresholdPolicy: js.UndefOr[ApprovalThresholdPolicy]
}

object VotingPolicy {
  /** Builds a VotingPolicy; the threshold policy is written only when defined. */
  @inline
  def apply(
      ApprovalThresholdPolicy: js.UndefOr[ApprovalThresholdPolicy] = js.undefined
  ): VotingPolicy = {
    val dyn = js.Dynamic.literal()
    ApprovalThresholdPolicy.foreach(v => dyn.updateDynamic("ApprovalThresholdPolicy")(v.asInstanceOf[js.Any]))
    dyn.asInstanceOf[VotingPolicy]
  }
}
}
|
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import styles from './index.module.less';
export default class index extends Component {
constructor() {
super();
this.state = {};
}
render() {
const { username } = this.props;
return (
<div className={styles.header}>
<div className={styles.inner}>
<Link to="/"> 首页 </Link>
<div className={styles.right}>
{username ? <Link to="/"> 你好!{username || '-'} </Link> : <Link to="/login"> 登录 </Link>}
{username && <Link to="/login"> 退出登录 </Link>}
</div>
</div>
</div>
);
}
}
|
#!/bin/bash
# Copyright (c) 2018-2020, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Model-repository version: defaults to the container's Triton version but may
# be overridden by the first CLI argument.
REPO_VERSION=${NVIDIA_TRITON_SERVER_VERSION}
if [ "$#" -ge 1 ]; then
    REPO_VERSION=$1
fi
if [ -z "$REPO_VERSION" ]; then
    echo -e "Repository version must be specified"
    echo -e "\n***\n*** Test Failed\n***"
    exit 1
fi

# Pin the test to a single GPU for reproducibility.
export CUDA_VISIBLE_DEVICES=0

TEST_RESULT_FILE='test_results.txt'
CLIENT_LOG="./client.log"
CLIENT_TEST=client_test.py
EXPECTED_NUM_TESTS="4"

DATADIR=/data/inferenceserver/${REPO_VERSION}
SERVER=/opt/tritonserver/bin/tritonserver
SERVER_ARGS="--model-repository=$DATADIR/qa_model_repository"
SERVER_LOG="./inference_server.log"
# util.sh provides run_server (sets SERVER_PID) and check_test_results.
source ../common/util.sh

run_server
if (( $SERVER_PID == 0 )); then
    echo -e "\n***\n*** Failed to start $SERVER\n***"
    cat $SERVER_LOG
    exit 1
fi

RET=0

# Disable errexit while running the client so a failing test does not skip
# the log capture and server shutdown below.
set +e

# python unittest seems to swallow ImportError and still return 0 exit
# code. So need to explicitly check CLIENT_LOG to make sure we see
# some running tests
rm -f $CLIENT_LOG
python $CLIENT_TEST >$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
    cat $CLIENT_LOG
    echo -e "\n***\n*** Test Failed\n***"
    RET=1
else
    # Exit code 0 is not trusted (see note above): verify the expected number
    # of tests actually ran.
    check_test_results $TEST_RESULT_FILE $EXPECTED_NUM_TESTS
    if [ $? -ne 0 ]; then
        cat $CLIENT_LOG
        echo -e "\n***\n*** Test Result Verification Failed\n***"
        RET=1
    fi
fi
set -e

# Always stop the server before reporting the result.
kill $SERVER_PID
wait $SERVER_PID

if [ $RET -eq 0 ]; then
    echo -e "\n***\n*** Test Passed\n***"
fi

exit $RET
|
import csv
import argparse
from multiprocessing import Pool
def process_course_data(course):
    """Convert one raw CSV row into (code, title, enrollment) with an int count."""
    code, title, enrollment = course
    return code, title, int(enrollment)
def identify_top_classes(course_data, top_n):
    """Return the top_n courses ranked by enrollment count, highest first."""
    parsed = [process_course_data(row) for row in course_data]
    parsed.sort(key=lambda row: row[2], reverse=True)
    return parsed[:top_n]
def main():
    """CLI entry point: read a course CSV and print the top-N by enrollment.

    Fixes:
    - `Pool.apply` ran the whole computation serially inside a single child
      process — zero parallelism, plus pickling/IPC overhead. Replaced with a
      direct call.
    - The CSV file is opened with newline='' as the csv module requires.
    - `next(reader, None)` tolerates an empty file instead of raising
      StopIteration.
    """
    parser = argparse.ArgumentParser(description='Identify top N most popular classes at Yale University')
    parser.add_argument('csv_file', help='Path to the CSV file containing course data')
    parser.add_argument('top_n', type=int, help='Number of top classes to identify')
    args = parser.parse_args()

    with open(args.csv_file, 'r', newline='') as file:
        reader = csv.reader(file)
        next(reader, None)  # Skip header row (no-op on an empty file).
        course_data = [row for row in reader]

    top_classes = identify_top_classes(course_data, args.top_n)

    for course in top_classes:
        print(f"Course Code: {course[0]}, Title: {course[1]}, Enrollment Count: {course[2]}")
# Run the CLI only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
function setToArray(s) {
return Array.from(s.values());
}
const mySet = new Set(['foo', 'bar', 'baz']);
const a = setToArray(mySet);
console.log(a); // prints: [ 'foo', 'bar', 'baz' ] |
/*****************************************************************************
*
* PROJECT: Multi Theft Auto v1.0
* LICENSE: See LICENSE in the top level directory
* FILE: SharedUtil.ClassIdent.h
* PURPOSE:
*
* Multi Theft Auto is available from http://www.multitheftauto.com/
*
*****************************************************************************/
namespace SharedUtil
{
    // Macros to implement quick class identification
    // uint64 allows for a total of 64 classes
    typedef uint64 ClassBits;
    typedef uchar ClassId;

    // DECLARE_BASE_CLASS(cls): paste into the root of a hierarchy. Installs
    // GetClassId/IsA/GetClassName, a per-instance hierarchy bitmask, and
    // zero-initializing operator new overloads (so ClassInit runs on zeroed
    // memory). CLASS_<cls> must be an existing ClassId constant.
    // NOTE: comments cannot be added inside the macro body — every line ends
    // in a backslash continuation.
    #define DECLARE_BASE_CLASS(cls) \
    public: \
    static ClassId GetClassId ( void ) \
    { \
    return CLASS_##cls; \
    } \
    bool IsA( ClassId classId ) const \
    { \
    return ( ClassHierarchyBits & ( 1ULL << classId ) ) ? true : false; \
    } \
    const char* GetClassName ( void ) \
    { \
    return ClassName; \
    } \
    protected: \
    static const char* StaticGetClassName ( void ) \
    { \
    return #cls; \
    } \
    static ClassBits CalcClassHierarchyBits ( void ) \
    { \
    return ( 1ULL << GetClassId () ); \
    } \
    const char* ClassName; \
    ClassBits ClassHierarchyBits; \
    friend CAutoClassInit < cls >; \
    CAutoClassInit < cls > ClassInit; \
    public: \
    void* operator new ( size_t size ) { void* ptr = ::operator new(size); memset(ptr,0,size); return ptr; } \
    void* operator new ( size_t size, void* where ) { memset(where,0,size); return where; }

    // DECLARE_CLASS(cls,super): paste into each derived class. Its hierarchy
    // bits OR in the superclass's bits, so IsA() answers true for every
    // ancestor's ClassId as well as the class's own.
    #define DECLARE_CLASS(cls,super) \
    public: \
    static ClassId GetClassId ( void ) \
    { \
    return CLASS_##cls; \
    } \
    protected: \
    static const char* StaticGetClassName ( void ) \
    { \
    return #cls; \
    } \
    static ClassBits CalcClassHierarchyBits ( void ) \
    { \
    return ( 1ULL << GetClassId () ) | super::CalcClassHierarchyBits (); \
    } \
    friend CAutoClassInit < cls >; \
    CAutoClassInit < cls > ClassInit; \
    public: \
    typedef super Super; \
    void* operator new ( size_t size ) { void* ptr = ::operator new(size); memset(ptr,0,size); return ptr; } \
    void* operator new ( size_t size, void* where ) { memset(where,0,size); return where; }

    //
    // Auto init the class bit flags
    //
    // Declared as a member by the macros above; its constructor runs during
    // the owning object's construction and fills in ClassHierarchyBits and
    // ClassName. The assert guards against a ClassId >= 64, which would
    // silently shift out of the 64-bit mask.
    template <class T>
    class CAutoClassInit
    {
    public:
        CAutoClassInit(T* ptr)
        {
            assert(ptr->GetClassId() < sizeof(ClassBits) * 8);
            ptr->ClassHierarchyBits = ptr->CalcClassHierarchyBits();
            ptr->ClassName = ptr->StaticGetClassName();
        }
    };

    //
    // Dynamic cast to derived class
    //
    // Bitmask-based replacement for dynamic_cast: returns ptr as T* when the
    // object's hierarchy bits contain T's ClassId, otherwise NULL.
    // Null input yields NULL.
    template <class T, class U>
    T* DynamicCast(U* ptr)
    {
        if (ptr && ptr->IsA(T::GetClassId()))
            return static_cast<T*>(ptr);
        return NULL;
    }

#ifdef WIN32
    #pragma warning( disable : 4355 ) // warning C4355: 'this' : used in base member initializer list
#endif
}            // namespace SharedUtil
|
<gh_stars>10-100
package gldap
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// TestEntry_GetAttributes verifies Entry.GetAttributeValues for an entry with
// no attributes and for an attribute that is present.
func TestEntry_GetAttributes(t *testing.T) {
	testCases := []struct {
		name  string
		entry *Entry
		attr  string
		want  []string
	}{
		{
			name:  "empty",
			entry: &Entry{Attributes: []*EntryAttribute{}},
			want:  []string{},
		},
		{
			name: "found",
			entry: &Entry{
				Attributes: []*EntryAttribute{
					NewEntryAttribute("found", []string{"value1", "value2"}),
				},
			},
			attr: "found",
			want: []string{"value1", "value2"},
		},
	}
	for _, tt := range testCases {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			assert.Equal(t, tt.want, tt.entry.GetAttributeValues(tt.attr))
		})
	}
}
// TestEntryAttribute_AddValue verifies that AddValue appends values in order
// after the attribute's existing values.
func TestEntryAttribute_AddValue(t *testing.T) {
	testCases := []struct {
		name   string
		attr   *EntryAttribute
		values []string
		want   *EntryAttribute
	}{
		{
			name:   "simple",
			attr:   NewEntryAttribute("simple", []string{"v1"}),
			values: []string{"v2", "v3"},
			want:   NewEntryAttribute("simple", []string{"v1", "v2", "v3"}),
		},
	}
	for _, tt := range testCases {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			tt.attr.AddValue(tt.values...)
			assert.Equal(t, tt.want, tt.attr)
		})
	}
}
// TestEntry_PrettyPrint verifies PrettyPrint output when an explicit writer is
// supplied; the nil-writer case only checks that printing to stdout succeeds.
func TestEntry_PrettyPrint(t *testing.T) {
	testCases := []struct {
		name   string
		entry  *Entry
		writer *strings.Builder
		want   string
	}{
		{
			name: "with-writer",
			entry: &Entry{
				DN: "uid=alice",
				Attributes: []*EntryAttribute{
					NewEntryAttribute("cn", []string{"alice"}),
				},
			},
			writer: new(strings.Builder),
			want:   " DN: uid=alice\n cn: [alice]\n",
		},
		{
			name: "stdout",
			entry: &Entry{
				DN: "uid=alice",
				Attributes: []*EntryAttribute{
					NewEntryAttribute("cn", []string{"alice"}),
				},
			},
		},
	}
	for _, tt := range testCases {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			tt.entry.PrettyPrint(1, WithWriter(tt.writer))
			if !isNil(tt.writer) {
				assert.Equal(t, tt.want, tt.writer.String())
			}
		})
	}
}
|
#!/usr/bin/env sh
# This script downloads the pre-trained models.
#
# Fixes: abort on the first failed command (previously a failed wget still
# printed "Done."), quote the cd target, and loop over one base URL instead
# of repeating it seven times.
set -e

DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd "$DIR"

BASE_URL="https://github.com/kivantium/illustration2vec/releases/download/v2.1.0"

echo "Downloading pre-trained models..."
for f in \
    tag_list.json.gz \
    illust2vec_tag.prototxt \
    illust2vec.prototxt \
    illust2vec_tag_ver200.caffemodel \
    illust2vec_ver200.caffemodel \
    illust2vec_tag_ver200.onnx \
    illust2vec_ver200.onnx
do
    wget "${BASE_URL}/${f}"
done
gunzip tag_list.json.gz
echo "Done."
|
<filename>packages/reader/src/assert.ts
/** Options carrying an optional custom error message for a failed assertion. */
export interface AssertMessage {
  message?: string;
}

/** Range constraint: the value must lie within [min, max] (inclusive). */
export interface AssertIntMinMax extends AssertMessage {
  min?: number;
  max?: number;
}

/** Enumeration constraint: the value must be one of `values`. */
export interface AssertIntValues extends AssertMessage {
  values: number[];
}
export function assertInt(value: number, options: AssertIntValues): void;
export function assertInt(value: number, options?: AssertIntMinMax): void;
/**
 * Assert that `value` is an integer satisfying the given constraint.
 * When `values` is non-empty it takes precedence over the [min, max] range.
 * Throws a TypeError with `options.message` (or a descriptive default)
 * when the check fails.
 */
export function assertInt(
  value: number,
  options: Partial<AssertIntMinMax & AssertIntValues> = {},
): void {
  const { min = -Infinity, max = Infinity, values = [] } = options;

  let ok = Number.isInteger(value);
  if (ok) {
    ok = values.length ? values.includes(value) : value >= min && value <= max;
  }
  if (ok) {
    return;
  }

  const fallback = values.length
    ? `expected integer in [${values.join(', ')}]`
    : `expected integer in range [${min}, ${max}]`;
  throw new TypeError(options.message !== undefined ? options.message : fallback);
}
|
#!/bin/bash
# Launch skepu_ann under gdb with the solver config from the given network
# directory. Second argument optionally selects the backend (default: cpu).
#
# Fixes: quote all variable expansions ([ -z $1 ] mis-parses when the argument
# contains spaces or is unset alongside other words), and exit non-zero on a
# usage error so callers can detect it.

if [ -z "$1" ]; then
    echo "How to use: $0 network_directory/"
    exit 1
fi

backend="cpu"
if [ -z "$2" ]; then
    echo "No backend provided. CPU will be used."
else
    echo "Backend $2 has been provided."
    backend="$2"
fi

gdb --args skepu_ann "$1/solver.prototxt" "$backend"
|
<gh_stars>0
/*
* Project: FullereneViewer
* Version: 1.0
* Copyright: (C) 2011-14 Dr.Sc.KAWAMOTO,Takuji (Ext)
*/
#ifndef __HOST_H__
#define __HOST_H__
#include <stdio.h>
#include "Object.h"
#include "MyString.h"
#include "List.h"
#include "ObjectInt2.h"
// A compute host with a fixed pool of process slots. Tracks how many of the
// slots are currently enabled and keeps a list of waiters (pid + requested
// count) to notify as slots become enabled.
class Host : public Object {
    // friend classes & functions

    // members
private:
    MyString p_host_name;                     // host identifier used by compare()
    int p_number_of_processes;                // total slots on this host
    int p_number_of_enabled_processes;        // currently enabled slots
    List<ObjectInt2> p_enable_waiters;        // (num, pid) pairs waiting for slots

    // private tools
private:
    // Notify waiters that can be satisfied now that `num` slots are enabled.
    void p_search_and_call_enable_waiters(int num);

    // constructors & the destructor
public:
    Host(const char* host_name, int number_of_processes);
    ~Host();

    // comparators
public:
    // Ordering/equality against another Host; see the .cxx for the key used.
    int compare(const Host* you) const;

    // I/O
public:
    void print(FILE* output) const;

    // member accessing methods
public:
    const char* get_host_name() const { return p_host_name; }
    int number_of_processes() const { return p_number_of_processes; }
    int number_of_enabled_processes() const { return p_number_of_enabled_processes; }
    // Enable `num` process slots; returns whether the request succeeded.
    bool enable_processes(int num);
    // Register pid as waiting until `num` slots are enabled.
    void enter_enabled_waiter(int num, int pid);
};
#endif /* __HOST_H__ */
/* Local Variables: */
/* mode: c++ */
/* End: */
|
"use strict";
exports.__esModule = true;
exports.default = exports.caretSet = void 0;
var _react = _interopRequireDefault(require("react"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _reactDom = require("react-dom");
var _Input = _interopRequireDefault(require("./Input"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; }
var caretSet = function caretSet(node, start, end) {
if (end === void 0) {
end = start;
}
try {
node.setSelectionRange(start, end);
} catch (e) {
/* not focused or not visible */
}
};
exports.caretSet = caretSet;
// Transpiled (Babel) class: the text input of a Combobox widget that supports
// inline "suggest" completion — after typing, the remainder of the matched
// value is selected so further typing replaces it.
var ComboboxInput =
/*#__PURE__*/
function (_React$Component) {
  _inheritsLoose(ComboboxInput, _React$Component);

  function ComboboxInput() {
    var _this;

    for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
      args[_key] = arguments[_key];
    }

    _this = _React$Component.call.apply(_React$Component, [this].concat(args)) || this;

    // Change handler: records the raw typed string in _last (used later to
    // compute the suggestion highlight) and forwards to props.onChange.
    _this.handleChange = function (e) {
      var _this$props = _this.props,
          placeholder = _this$props.placeholder,
          value = _this$props.value,
          onChange = _this$props.onChange;
      var stringValue = e.target.value;
      var hasPlaceholder = !!placeholder; // IE fires input events when setting/unsetting placeholders.
      // issue #112

      if (hasPlaceholder && !stringValue && stringValue === (value || '')) return;
      _this._last = stringValue;
      onChange(e, stringValue);
    };

    return _this;
  }

  var _proto = ComboboxInput.prototype;

  // After a suggested value is rendered, select the auto-completed tail so
  // the user's next keystroke overwrites it.
  // NOTE(review): assumes findDOMNode(this) resolves to the underlying
  // <input> element — confirm against the rendered Input component.
  _proto.componentDidUpdate = function componentDidUpdate() {
    var input = (0, _reactDom.findDOMNode)(this);
    var val = this.props.value;

    if (this.isSuggesting()) {
      var start = val.toLowerCase().indexOf(this._last.toLowerCase()) + this._last.length;
      var end = val.length - start;
      if (start >= 0 && end !== 0) {
        caretSet(input, start, start + end);
      }
    }
  };

  // Accept the current suggestion: clear the typed-prefix memory and
  // optionally collapse the selection to the end of the value.
  _proto.accept = function accept(clearSelection) {
    if (clearSelection === void 0) {
      clearSelection = false;
    }

    this._last = null;
    if (clearSelection) {
      var node = (0, _reactDom.findDOMNode)(this);
      caretSet(node, node.value.length);
    }
  };

  _proto.focus = function focus() {
    (0, _reactDom.findDOMNode)(this).focus();
  };

  // True while suggest mode is on and the current value still contains the
  // last string the user actually typed.
  _proto.isSuggesting = function isSuggesting() {
    var _this$props2 = this.props,
        value = _this$props2.value,
        suggest = _this$props2.suggest;
    if (!suggest) return false;
    return this._last != null && value.toLowerCase().indexOf(this._last.toLowerCase()) !== -1;
  };

  // Render the shared Input with the suggest-aware change handler; the
  // `suggest` prop is stripped so it never reaches the DOM element.
  _proto.render = function render() {
    var _this$props3 = this.props,
        onKeyDown = _this$props3.onKeyDown,
        props = _objectWithoutProperties(_this$props3, ["onKeyDown"]);

    delete props.suggest;
    return _react.default.createElement(_Input.default, _extends({}, props, {
      className: "rw-widget-input",
      onKeyDown: onKeyDown,
      onChange: this.handleChange
    }));
  };

  return ComboboxInput;
}(_react.default.Component);
// Default to an empty string so .toLowerCase()/.indexOf in the class never
// see undefined.
ComboboxInput.defaultProps = {
  value: ''
};

ComboboxInput.propTypes = {
  value: _propTypes.default.string,
  placeholder: _propTypes.default.string,
  suggest: _propTypes.default.bool,
  onChange: _propTypes.default.func.isRequired,
  onKeyDown: _propTypes.default.func
};

var _default = ComboboxInput;
exports.default = _default;
<reponame>DoubleGremlin181/RubiksCubeGym<gh_stars>10-100
from gym.envs.registration import register
# All puzzle environments share the same per-episode step cap.
_MAX_EPISODE_STEPS = 250

# (environment id, entry-point class path) for every registered puzzle.
_ENVIRONMENTS = [
    ('rubiks-cube-222-v0', 'rubiks_cube_gym.envs:RubiksCube222Env'),
    ('rubiks-cube-222-lbl-v0', 'rubiks_cube_gym.envs:RubiksCube222EnvLBL'),
    ('rubiks-cube-222-ortega-v0', 'rubiks_cube_gym.envs:RubiksCube222EnvOrtega'),
    ('pyraminx-wo-tips-v0', 'rubiks_cube_gym.envs:PyraminxWoTipsEnv'),
    ('pyraminx-wo-tips-lbl-v1', 'rubiks_cube_gym.envs:PyraminxWoTipsEnvLBL'),
    ('skewb-v0', 'rubiks_cube_gym.envs:SkewbEnv'),
    ('skewb-sarah-v1', 'rubiks_cube_gym.envs:SkewbEnvSarah'),
]

for _env_id, _entry_point in _ENVIRONMENTS:
    register(
        id=_env_id,
        entry_point=_entry_point,
        max_episode_steps=_MAX_EPISODE_STEPS,
    )
package mcjty.incontrol.rules;
import com.google.gson.JsonElement;
import mcjty.incontrol.InControl;
import mcjty.incontrol.compat.ModRuleCompatibilityLayer;
import mcjty.incontrol.rules.support.GenericRuleEvaluator;
import mcjty.tools.rules.IEventQuery;
import mcjty.tools.rules.IModRuleCompatibilityLayer;
import mcjty.tools.rules.RuleBase;
import mcjty.tools.typed.Attribute;
import mcjty.tools.typed.AttributeMap;
import mcjty.tools.typed.GenericAttributeMapFactory;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.DamageSource;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraftforge.event.entity.living.LivingExperienceDropEvent;
import net.minecraftforge.fml.common.eventhandler.Event;
import static mcjty.incontrol.rules.support.RuleKeys.*;
/**
 * A rule that conditionally modifies the XP dropped by a dying entity
 * (LivingExperienceDropEvent). The rule's conditions are evaluated by
 * GenericRuleEvaluator; the actions can set, add to, or multiply the XP,
 * and choose the event Result.
 */
public class ExperienceRule extends RuleBase<RuleBase.EventGetter> {

    // Adapter exposing the pieces of a LivingExperienceDropEvent that the
    // generic rule conditions need (world, position, entities, items).
    public static final IEventQuery<LivingExperienceDropEvent> EVENT_QUERY = new IEventQuery<LivingExperienceDropEvent>() {
        @Override
        public World getWorld(LivingExperienceDropEvent o) {
            return o.getEntity().getEntityWorld();
        }

        @Override
        public BlockPos getPos(LivingExperienceDropEvent o) {
            return o.getEntity().getPosition();
        }

        @Override
        public BlockPos getValidBlockPos(LivingExperienceDropEvent o) {
            // Block checks use the block underneath the entity.
            return o.getEntity().getPosition().down();
        }

        @Override
        public int getY(LivingExperienceDropEvent o) {
            return o.getEntity().getPosition().getY();
        }

        @Override
        public Entity getEntity(LivingExperienceDropEvent o) {
            return o.getEntity();
        }

        @Override
        public DamageSource getSource(LivingExperienceDropEvent o) {
            // The event does not carry a damage source.
            return null;
        }

        @Override
        public Entity getAttacker(LivingExperienceDropEvent o) {
            return o.getAttackingPlayer();
        }

        @Override
        public EntityPlayer getPlayer(LivingExperienceDropEvent o) {
            return o.getAttackingPlayer();
        }

        @Override
        public ItemStack getItem(LivingExperienceDropEvent o) {
            return ItemStack.EMPTY;
        }
    };

    // Declares every condition and action key this rule type accepts when
    // parsed from JSON.
    private static final GenericAttributeMapFactory FACTORY = new GenericAttributeMapFactory();

    static {
        FACTORY
                .attribute(Attribute.create(MINTIME))
                .attribute(Attribute.create(MAXTIME))
                .attribute(Attribute.create(MINLIGHT))
                .attribute(Attribute.create(MAXLIGHT))
                .attribute(Attribute.create(MINHEIGHT))
                .attribute(Attribute.create(MAXHEIGHT))
                .attribute(Attribute.create(MINDIFFICULTY))
                .attribute(Attribute.create(MAXDIFFICULTY))
                .attribute(Attribute.create(MINSPAWNDIST))
                .attribute(Attribute.create(MAXSPAWNDIST))
                .attribute(Attribute.create(RANDOM))
                .attribute(Attribute.create(INBUILDING))
                .attribute(Attribute.create(INCITY))
                .attribute(Attribute.create(INSTREET))
                .attribute(Attribute.create(INSPHERE))
                .attribute(Attribute.create(PASSIVE))
                .attribute(Attribute.create(HOSTILE))
                .attribute(Attribute.create(SEESKY))
                .attribute(Attribute.create(WEATHER))
                .attribute(Attribute.create(TEMPCATEGORY))
                .attribute(Attribute.create(DIFFICULTY))
                .attribute(Attribute.create(STRUCTURE))
                .attribute(Attribute.create(PLAYER))
                .attribute(Attribute.create(REALPLAYER))
                .attribute(Attribute.create(FAKEPLAYER))
                .attribute(Attribute.create(WINTER))
                .attribute(Attribute.create(SUMMER))
                .attribute(Attribute.create(SPRING))
                .attribute(Attribute.create(AUTUMN))
                .attribute(Attribute.create(STATE))
                .attribute(Attribute.create(PSTATE))
                .attribute(Attribute.createMulti(MOB))
                .attribute(Attribute.createMulti(MOD))
                .attribute(Attribute.createMulti(BLOCK))
                .attribute(Attribute.create(BLOCKOFFSET))
                .attribute(Attribute.createMulti(BIOME))
                .attribute(Attribute.createMulti(BIOMETYPE))
                .attribute(Attribute.createMulti(DIMENSION))
                .attribute(Attribute.createMulti(HELDITEM))
                .attribute(Attribute.createMulti(PLAYER_HELDITEM))
                .attribute(Attribute.createMulti(OFFHANDITEM))
                .attribute(Attribute.createMulti(BOTHHANDSITEM))
                .attribute(Attribute.create(ACTION_RESULT))
                .attribute(Attribute.create(ACTION_SETXP))
                .attribute(Attribute.create(ACTION_ADDXP))
                .attribute(Attribute.create(ACTION_MULTXP))
        ;
    }

    private final GenericRuleEvaluator ruleEvaluator;
    private Event.Result result;      // outcome to report for the event
    private Integer xp = null;        // absolute override (null = keep event value)
    private float multxp = 1.0f;      // multiplier applied after the override
    private float addxp = 0.0f;       // flat bonus applied last

    private ExperienceRule(AttributeMap map) {
        super(InControl.setup.getLogger());
        ruleEvaluator = new GenericRuleEvaluator(map);
        addActions(map, new ModRuleCompatibilityLayer());
    }

    /** Parse a rule from JSON; returns null when the element is absent. */
    public static ExperienceRule parse(JsonElement element) {
        if (element == null) {
            return null;
        } else {
            AttributeMap map = FACTORY.parse(element);
            return new ExperienceRule(map);
        }
    }

    /** Apply this rule's XP actions: optional override, then multiply, then add. */
    public int modifyXp(int xpIn) {
        if (xp != null) {
            xpIn = xp;
        }
        return (int) (xpIn * multxp + addxp);
    }

    @Override
    protected void addActions(AttributeMap map, IModRuleCompatibilityLayer layer) {
        super.addActions(map, layer);

        if (map.has(ACTION_RESULT)) {
            String br = map.get(ACTION_RESULT);
            // Accept "default"/"def…", "allow"/"true"; anything else denies.
            if ("default".equals(br) || br.startsWith("def")) {
                this.result = Event.Result.DEFAULT;
            } else if ("allow".equals(br) || "true".equals(br)) {
                this.result = Event.Result.ALLOW;
            } else {
                this.result = Event.Result.DENY;
            }
        } else {
            this.result = Event.Result.DEFAULT;
        }

        if (map.has(ACTION_SETXP)) {
            xp = map.get(ACTION_SETXP);
        }
        if (map.has(ACTION_ADDXP)) {
            addxp = map.get(ACTION_ADDXP);
        }
        if (map.has(ACTION_MULTXP)) {
            multxp = map.get(ACTION_MULTXP);
        }
    }

    /** True when all of this rule's conditions hold for the event. */
    public boolean match(LivingExperienceDropEvent event) {
        return ruleEvaluator.match(event, EVENT_QUERY);
    }

    public Event.Result getResult() {
        return result;
    }
}
|
<reponame>ChrisLMerrill/museide
package org.museautomation.ui.valuesource.parser;
import org.junit.jupiter.api.*;
import org.museautomation.parsing.valuesource.*;
import org.museautomation.builtins.value.*;
import org.museautomation.builtins.value.property.*;
import org.museautomation.core.*;
import org.museautomation.core.project.*;
import org.museautomation.core.values.*;
/**
* These tests are intended to verify that the parser (muse-parser) is correctly lexing and parsing
* expected input and finding/using the built-in ValueSourceStringExpressionSupport implementations.
*
* These are NOT intended to test whether the ValueSourceStringExpressionSupport implementations are
* doing the right thing in all cases - those tests should directly access the implementations wherever
* they reside (e.g. in the core Muse project). Instead, these test should just ensure the correct
* value source is created based on the parse result.
*
* @author <NAME> (see LICENSE.txt for license details)
*/
class BinaryOperatorParsingTests
{
@Test
void parsePropertySource() throws ExpressionParsingException
{
String target_value = "target";
String property_value = "property";
ValueSourceConfiguration configuration = new ValueSourceExpressionParser(_project).parse(String.format("\"%s\".\"%s\"", target_value, property_value));
Assertions.assertEquals(PropertySource.TYPE_ID, configuration.getType());
ValueSourceConfiguration target_source = configuration.getSource(PropertySource.TARGET_PARAM);
Assertions.assertEquals(StringValueSource.TYPE_ID, target_source.getType());
Assertions.assertEquals(target_value, target_source.getValue());
ValueSourceConfiguration property_source = configuration.getSource(PropertySource.NAME_PARAM);
Assertions.assertEquals(StringValueSource.TYPE_ID, property_source.getType());
Assertions.assertEquals(property_value, property_source.getValue());
}
private MuseProject _project = new SimpleProject();
} |
using System;
using System.Net.Sockets;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Xunit;
public class EmailServiceTests
{
    private readonly IEmailService _service;

    public EmailServiceTests()
    {
        _service = TestServiceProvider.Current.GetRequiredService<IEmailService>();
    }

    /// <summary>
    /// Sending mail in the test environment has no reachable SMTP endpoint,
    /// so SendAsync is expected to surface a SocketException.
    /// </summary>
    [Fact]
    public async Task SendAsyncTest()
    {
        const string recipient = "test@example.com";
        const string emailContent = "Test email content";

        await Assert.ThrowsAsync<SocketException>(
            () => _service.SendAsync(recipient, emailContent));
    }
}
# Class input output
# Console input/output helper: prints quiz results, prompts for answers,
# and loads question data from disk.
#
# Fixes: load_data used File.new/readlines/close, which leaks the file
# handle if readlines raises — replaced with the block form of File.open,
# which always closes. STDIN replaced with $stdin (Style/GlobalStdStream).
class ConsoleIo
  # Print the final score and its textual interpretation.
  def print_result(points, result)
    puts "\n Вы набрали #{points} баллов:"
    puts result
  end

  # Show the next question and keep prompting until the user enters 1..3.
  # Returns the chosen answer as an Integer.
  def ask_next_question(next_question)
    puts "\n#{next_question}"
    answer_to_question = 0
    until answer_to_question.between?(1, 3)
      puts 'Ваш ответ: 1 — да, 2 — иногда, 3 — нет'
      answer_to_question = $stdin.gets.to_i
    end
    answer_to_question
  end

  # Read the UTF-8 data file and return its lines; aborts when it is missing.
  def load_data(file_path)
    abort "Файл не найден: #{file_path}" unless File.exist?(file_path)
    File.open(file_path, 'r:UTF-8', &:readlines)
  end
end
|
#include <cstdlib>
#include <cstdio>
#include <cmath>
#include "..\include\FreeImage.h"
#include "ImageIO.h"
using namespace std;
/////////////////////////////////////////////////////////////////////////////
// Deallocate the memory allocated to (*imageData) returned by
// the function ReadImageFile().
// (*imageData) will be set to NULL.
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// Free the buffer returned by ReadImageFile() and null out the caller's
// pointer so it cannot be double-freed or used after release.
// Fix: guard against a NULL pointer-to-pointer; previously passing NULL
// dereferenced it and crashed. (free(NULL) itself is already a no-op.)
/////////////////////////////////////////////////////////////////////////////
void ImageIO::DeallocateImageData( uchar **imageData )
{
    if ( imageData == NULL ) return;
    free( *imageData );
    (*imageData) = NULL;
}
/////////////////////////////////////////////////////////////////////////////
// Read an image from the input filename.
// Returns 1 if successful or 0 if unsuccessful.
// The returned image data will be pointed to by (*imageData).
// The image width, image height, and number of components (color channels)
// per pixel will be returned in (*imageWidth), (*imageHeight),
// and (*numComponents).
// The value of (*numComponents) can be 1, 2, 3 or 4.
// The returned image data is always packed tightly with red, green, blue,
// and alpha arranged from lower to higher memory addresses.
// Each color channel take one byte.
// The first pixel (origin of the image) is at the bottom-left of the image.
/////////////////////////////////////////////////////////////////////////////
int ImageIO::ReadImageFile( const char *filename, uchar **imageData,
                            int *imageWidth, int *imageHeight, int *numComponents,
                            int flags, bool forceRGBA)
{
    // Determine image format.
    // First inspect the file's signature; fall back to the file extension.
    FREE_IMAGE_FORMAT fif = FreeImage_GetFileType( filename, 0 );
    if( fif == FIF_UNKNOWN ) fif = FreeImage_GetFIFFromFilename( filename );
    if( fif == FIF_UNKNOWN )
    {
        printf( "Error: Cannot determine image format of %s.\n", filename );
        return 0;
    }

    // Read image data from file.
    FIBITMAP *dib = NULL;
    if( FreeImage_FIFSupportsReading( fif ) )
        dib = FreeImage_Load( fif, filename, flags );
    if( !dib )
    {
        printf( "Error: Cannot read image file %s.\n", filename );
        return 0;
    }

    // Check image type.
    // Only standard 8-bits-per-channel bitmaps are accepted (no float/HDR).
    FREE_IMAGE_TYPE fit = FreeImage_GetImageType( dib );
    if ( fit != FIT_BITMAP )
    {
        FreeImage_Unload( dib );
        printf( "Error: Only 8-bits-per-component standard bitmap is supported.\n" );
        return 0;
    }

    // Check bits per pixel.
    int bits_per_pixel = FreeImage_GetBPP( dib );
    if ( bits_per_pixel != 8 && bits_per_pixel != 16 && bits_per_pixel != 24 && bits_per_pixel != 32 )
    {
        FreeImage_Unload( dib );
        printf( "Error: Only 8, 16, 24, 32 bits per pixel are supported.\n" );
        return 0;
    }

    // One byte per channel, so channel count == bytes per pixel.
    int _numComponents = bits_per_pixel / 8;
    int _imageWidth = FreeImage_GetWidth( dib );
    int _imageHeight = FreeImage_GetHeight( dib );
    // When forceRGBA is set the output buffer is always 4 channels wide,
    // regardless of the source channel count.
    int outputNumComponents = forceRGBA ? 4 : _numComponents;

    uchar *_imageData = (uchar *) malloc( _imageWidth * _imageHeight * outputNumComponents );
    if ( _imageData == NULL )
    {
        FreeImage_Unload( dib );
        printf( "Error: Not enough memory.\n" );
        return 0;
    }

    // Copy image in FIBITMAP to user image data.
    // FreeImage scanline 0 is the bottom row, which matches the promised
    // bottom-left origin, so rows are copied in scanline order.
    int imageDataCount = 0;

    if ( _numComponents == 1 )
    {
        // Grayscale: pad with opaque white channels when forcing RGBA.
        // NOTE(review): padding G/B with 255 treats the single channel as
        // red rather than replicating gray — confirm intended behavior.
        for( int y = 0; y < _imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[x];
                if (forceRGBA)
                {
                    _imageData[imageDataCount++] = 255;
                    _imageData[imageDataCount++] = 255;
                    _imageData[imageDataCount++] = 255;
                }
            }
        }
    }
    else if ( _numComponents == 2 )
    {
        // Two-channel image: copy both bytes, pad the rest when forcing RGBA.
        for( int y = 0; y < _imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[0];
                _imageData[imageDataCount++] = dibData[1];
                if (forceRGBA)
                {
                    _imageData[imageDataCount++] = 255;
                    _imageData[imageDataCount++] = 255;
                }
                dibData += _numComponents;
            }
        }
    }
    else if ( _numComponents == 3 )
    {
        // RGB: use FreeImage's channel-index macros so the code is correct on
        // both little-endian (BGR storage) and big-endian platforms.
        for( int y = 0; y < _imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[FI_RGBA_RED];
                _imageData[imageDataCount++] = dibData[FI_RGBA_GREEN];
                _imageData[imageDataCount++] = dibData[FI_RGBA_BLUE];
                if (forceRGBA) _imageData[imageDataCount++] = 255;
                dibData += _numComponents;
            }
        }
    }
    else if ( _numComponents == 4 )
    {
        // RGBA: already 4 channels, so forceRGBA needs no padding.
        for( int y = 0; y < _imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[FI_RGBA_RED];
                _imageData[imageDataCount++] = dibData[FI_RGBA_GREEN];
                _imageData[imageDataCount++] = dibData[FI_RGBA_BLUE];
                _imageData[imageDataCount++] = dibData[FI_RGBA_ALPHA];
                dibData += _numComponents;
            }
        }
    }

    FreeImage_Unload( dib );

    // Report results through the output parameters; caller must release the
    // buffer with DeallocateImageData().
    (*numComponents) = outputNumComponents;
    (*imageWidth) = _imageWidth;
    (*imageHeight) = _imageHeight;
    (*imageData) = _imageData;
    return 1;
}
/////////////////////////////////////////////////////////////////////////////
// Save an image to the output filename.
// Returns 1 if successful or 0 if unsuccessful.
// The input image data is pointed to by imageData.
// The image width, image height, and number of components (color channels)
// per pixel are provided in imageWidth, imageHeight, numComponents.
// The value of numComponents can be 1, 2, 3 or 4.
// Note that some numComponents cannot be supported by some image file formats.
// The input image data is assumed packed tightly with red, green, blue,
// and alpha arranged from lower to higher memory addresses.
// Each color channel takes one byte.
// The first pixel (origin of the image) is at the bottom-left of the image.
/////////////////////////////////////////////////////////////////////////////
int ImageIO::SaveImageFile( const char *filename, const uchar *imageData,
                            int imageWidth, int imageHeight, int numComponents,
                            int flags )
{
    // Guess the output image format from the file extension.
    FREE_IMAGE_FORMAT fif = FreeImage_GetFIFFromFilename( filename );
    if( fif == FIF_UNKNOWN )
    {
        printf( "Error: Cannot determine output image format of %s.\n", filename );
        return 0;
    }

    // Each channel occupies one byte, so bpp is numComponents * 8.
    int bits_per_pixel = numComponents * 8;
    if ( bits_per_pixel != 8 && bits_per_pixel != 16 && bits_per_pixel != 24 && bits_per_pixel != 32 )
    {
        printf( "Error: Only 8, 16, 24, 32 bits per pixel are supported.\n" );
        return 0;
    }

    // Check whether user image data can be supported by output image format.
    if( !( FreeImage_FIFSupportsWriting( fif ) &&
           FreeImage_FIFSupportsExportBPP( fif, bits_per_pixel ) ) )
    {
        printf( "Error: Output image format not supported.\n" );
        return 0;
    }

    // Create a FIBITMAP for staging the user image data before writing it out.
    FIBITMAP *dib = FreeImage_AllocateT( FIT_BITMAP, imageWidth, imageHeight, bits_per_pixel );
    if( !dib )
    {
        // BUGFIX: the original called FreeImage_Unload(dib) here even though
        // dib is known to be NULL when allocation fails.
        printf( "Error: Cannot allocate internal bitmap.\n" );
        return 0;
    }

    // Copy user image data into the FIBITMAP, one scanline at a time.
    // The FI_RGBA_* index macros map the logical R/G/B/A channels onto the
    // byte order FreeImage uses internally on this platform.
    int imageDataCount = 0;
    if ( numComponents == 1 )
    {
        for( int y = 0; y < imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < imageWidth; x++ )
            {
                dibData[x] = imageData[imageDataCount++];
            }
        }
    }
    else if ( numComponents == 2 )
    {
        for( int y = 0; y < imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < imageWidth; x++ )
            {
                dibData[0] = imageData[imageDataCount++];
                dibData[1] = imageData[imageDataCount++];
                dibData += numComponents;
            }
        }
    }
    else if ( numComponents == 3 )
    {
        for( int y = 0; y < imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < imageWidth; x++ )
            {
                dibData[FI_RGBA_RED]   = imageData[imageDataCount++];
                dibData[FI_RGBA_GREEN] = imageData[imageDataCount++];
                dibData[FI_RGBA_BLUE]  = imageData[imageDataCount++];
                dibData += numComponents;
            }
        }
    }
    else if ( numComponents == 4 )
    {
        for( int y = 0; y < imageHeight; y++ )
        {
            BYTE *dibData = FreeImage_GetScanLine( dib, y );
            for( int x = 0; x < imageWidth; x++ )
            {
                dibData[FI_RGBA_RED]   = imageData[imageDataCount++];
                dibData[FI_RGBA_GREEN] = imageData[imageDataCount++];
                dibData[FI_RGBA_BLUE]  = imageData[imageDataCount++];
                dibData[FI_RGBA_ALPHA] = imageData[imageDataCount++];
                dibData += numComponents;
            }
        }
    }

    // Write image in FIBITMAP to file.
    if ( !FreeImage_Save( fif, dib, filename, flags ) )
    {
        FreeImage_Unload( dib );
        printf( "Error: Cannot save image file %s.\n", filename );
        return 0;
    }
    FreeImage_Unload( dib );
    return 1;
}
// Read a floating-point image into a tightly packed float buffer
// (bottom-left origin). Returns 1 on success, 0 on failure; on success
// the caller owns *imageData and must free() it.
int ImageIO::ReadImageFile(const char * filename, float ** imageData, int * imageWidth, int * imageHeight, int * numComponents, int flags)
{
    // Determine image format, first from the file signature, then from the name.
    FREE_IMAGE_FORMAT fif = FreeImage_GetFileType(filename, 0);
    if (fif == FIF_UNKNOWN) fif = FreeImage_GetFIFFromFilename(filename);
    if (fif == FIF_UNKNOWN)
    {
        printf("Error: Cannot determine image format of %s.\n", filename);
        return 0;
    }
    else
    {
        printf("Type code: %d\n", fif);
    }

    // Read image data from file.
    FIBITMAP *dib = NULL;
    if (FreeImage_FIFSupportsReading(fif))
        dib = FreeImage_Load(fif, filename, flags);
    if (!dib)
    {
        printf("Error: Cannot read image file %s.\n", filename);
        return 0;
    }

    // Each channel is assumed to be a 32-bit float, so the channel count is
    // bpp / 32. BUGFIX: reject non-float bit depths — the original silently
    // produced 0 components (a zero-byte buffer) for e.g. 8/24-bpp images.
    int bits_per_pixel = FreeImage_GetBPP(dib);
    int _numComponents = bits_per_pixel / 32;
    if (_numComponents < 1 || _numComponents > 4)
    {
        FreeImage_Unload(dib);
        printf("Error: Unsupported bits per pixel (%d) for float image.\n", bits_per_pixel);
        return 0;
    }

    int _imageWidth = FreeImage_GetWidth(dib);
    int _imageHeight = FreeImage_GetHeight(dib);
    float *_imageData = (float *)malloc(_imageWidth * _imageHeight * _numComponents * sizeof(float));
    if (_imageData == NULL)
    {
        FreeImage_Unload(dib);
        printf("Error: Not enough memory.\n");
        return 0;
    }

    // Copy image in FIBITMAP to user image data.
    // NOTE(review): the FI_RGBA_* index macros are defined for byte-ordered
    // integer bitmaps; float bitmaps store channels in natural order. The
    // original channel ordering is preserved below — verify against real data.
    int imageDataCount = 0;
    if (_numComponents == 1)
    {
        for (int y = 0; y < _imageHeight; y++)
        {
            float *dibData = (float *)FreeImage_GetScanLine(dib, y);
            for (int x = 0; x < _imageWidth; x++)
            {
                // BUGFIX: the original read dibData[FI_RGBA_BLUE] without ever
                // advancing dibData, duplicating the first pixel across the row.
                _imageData[imageDataCount++] = dibData[x];
            }
        }
    }
    else if (_numComponents == 2)
    {
        for (int y = 0; y < _imageHeight; y++)
        {
            float *dibData = (float *)FreeImage_GetScanLine(dib, y);
            for (int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[FI_RGBA_GREEN];
                _imageData[imageDataCount++] = dibData[FI_RGBA_BLUE];
                dibData += _numComponents;
            }
        }
    }
    else if (_numComponents == 3)
    {
        for (int y = 0; y < _imageHeight; y++)
        {
            float *dibData = (float *)FreeImage_GetScanLine(dib, y);
            for (int x = 0; x < _imageWidth; x++)
            {
                _imageData[imageDataCount++] = dibData[FI_RGBA_RED];
                _imageData[imageDataCount++] = dibData[FI_RGBA_GREEN];
                _imageData[imageDataCount++] = dibData[FI_RGBA_BLUE];
                dibData += _numComponents;
            }
        }
    }
    else if (_numComponents == 4)
    {
        for (int y = 0; y < _imageHeight; y++)
        {
            float *dibData = (float *)FreeImage_GetScanLine(dib, y);
            for (int x = 0; x < _imageWidth; x++)
            {
                // Channels read in B,G,R,A order as in the original —
                // presumably to match a downstream consumer; TODO confirm.
                _imageData[imageDataCount++] = dibData[FI_RGBA_BLUE];
                _imageData[imageDataCount++] = dibData[FI_RGBA_GREEN];
                _imageData[imageDataCount++] = dibData[FI_RGBA_RED];
                _imageData[imageDataCount++] = dibData[FI_RGBA_ALPHA];
                dibData += _numComponents;
            }
        }
    }

    FreeImage_Unload(dib);
    (*numComponents) = _numComponents;
    (*imageWidth) = _imageWidth;
    (*imageHeight) = _imageHeight;
    (*imageData) = _imageData;
    return 1;
}
|
require 'helper'
# Tests for Fluent::TailExInput: an in_tail variant whose `path` supports
# strftime placeholders and recursive globs, refreshed periodically.
class TailExInputTest < Test::Unit::TestCase
  def setup
    Fluent::Test.setup
  end

  # Base configuration: two path patterns (one strftime-based, one glob).
  CONFIG = %[
tag tail_ex
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
pos_file test-pos-file
refresh_interval 30
  ]

  # Fixture files expected to match CONFIG's patterns at the stubbed time.
  PATHS = [
    'test/plugin/data/2010/01/20100102-030405.log',
    'test/plugin/data/log/foo/bar.log',
    'test/plugin/data/log/test.log'
  ]

  def create_driver(conf = CONFIG)
    Fluent::Test::InputTestDriver.new(Fluent::TailExInput).configure(conf)
  end

  def test_configure
    assert_nothing_raised { create_driver }
  end

  # start should spawn the refresh thread and build a PositionFile from pos_file.
  def test_posfile_creation
    flexstub(Thread) do |threadclass|
      threadclass.should_receive(:new).once.and_return do
        flexmock('Thread') {|t| t.should_receive(:join).once }
      end
      threadclass.should_receive(:new).once
      plugin = create_driver.instance
      plugin.start
      pf = nil
      plugin.instance_eval do
        pf = @pf
      end
      assert_instance_of Fluent::TailInput::PositionFile, pf
    end
  end

  # expand_paths should resolve strftime placeholders against the current time
  # and expand globs, yielding exactly the fixture paths.
  def test_expand_paths
    plugin = create_driver.instance
    flexstub(Time) do |timeclass|
      timeclass.should_receive(:now).with_no_args.and_return(
        Time.new(2010, 1, 2, 3, 4, 5))
      assert_equal PATHS, plugin.expand_paths.sort
    end
  end

  # Without pos_file, watchers must be created with a nil position-entry class.
  def test_start_watch_without_pos_file
    config = %[
tag tail_ex
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
refresh_interval 30
    ]
    plugin = create_driver(config).instance
    flexstub(Fluent::TailExInput::TailExWatcher) do |watcherclass|
      PATHS.each do |path|
        watcherclass.should_receive(:new).with(path, 5, nil, any).once.and_return do
          flexmock('TailExWatcher') {|watcher| watcher.should_receive(:attach).once}
        end
      end
      plugin.start_watch(PATHS)
    end
  end

  # refresh_watchers should add watchers for newly matching paths and close
  # watchers whose paths no longer match (time advances one second per call).
  def test_refresh_watchers
    plugin = create_driver.instance
    sio = StringIO.new
    plugin.instance_eval do
      @pf = Fluent::TailInput::PositionFile.parse(sio)
    end
    flexstub(Time) do |timeclass|
      timeclass.should_receive(:now).with_no_args.and_return(
        Time.new(2010, 1, 2, 3, 4, 5), Time.new(2010, 1, 2, 3, 4, 6),
        Time.new(2010, 1, 2, 3, 4, 7))
      # First refresh: all three fixture paths get a watcher.
      flexstub(Fluent::TailExInput::TailExWatcher) do |watcherclass|
        PATHS.each do |path|
          watcherclass.should_receive(:new).with(path, 5, Fluent::TailInput::FilePositionEntry, any).once.and_return do
            flexmock('TailExWatcher') {|watcher| watcher.should_receive(:attach).once}
          end
        end
        plugin.refresh_watchers
      end
      # Second refresh: the time-based path rolls over, so the old watcher
      # is closed and a new one is created for the next second's filename.
      plugin.instance_eval do
        @watchers['test/plugin/data/2010/01/20100102-030405.log'].should_receive(:close).once
      end
      flexstub(Fluent::TailExInput::TailExWatcher) do |watcherclass|
        watcherclass.should_receive(:new).with('test/plugin/data/2010/01/20100102-030406.log', 5, Fluent::TailInput::FilePositionEntry, any).once.and_return do
          flexmock('TailExWatcher') do |watcher|
            watcher.should_receive(:attach).once
            watcher.should_receive(:close).once
          end
        end
        plugin.refresh_watchers
      end
      # Third refresh: nothing changed, so no new watcher may be constructed.
      flexstub(Fluent::TailExInput::TailExWatcher) do |watcherclass|
        watcherclass.should_receive(:new).never
        plugin.refresh_watchers
      end
    end
  end

  # receive_lines should expand `*` in the configured tag with the
  # dot-separated file path (prefix/suffix variants covered below).
  def test_receive_lines
    plugin = create_driver.instance
    flexstub(Fluent::Engine) do |engineclass|
      engineclass.should_receive(:emit_stream).with('tail_ex', any).once
      plugin.receive_lines(['foo', 'bar'], 'foo.bar.log')
    end
    config = %[
tag pre.*
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
    ]
    plugin = create_driver(config).instance
    flexstub(Fluent::Engine) do |engineclass|
      engineclass.should_receive(:emit_stream).with('pre.foo.bar.log', any).once
      plugin.receive_lines(['foo', 'bar'], 'foo.bar.log')
    end
    config = %[
tag *.post
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
    ]
    plugin = create_driver(config).instance
    flexstub(Fluent::Engine) do |engineclass|
      engineclass.should_receive(:emit_stream).with('foo.bar.log.post', any).once
      plugin.receive_lines(['foo', 'bar'], 'foo.bar.log')
    end
    config = %[
tag pre.*.post
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
    ]
    plugin = create_driver(config).instance
    flexstub(Fluent::Engine) do |engineclass|
      engineclass.should_receive(:emit_stream).with('pre.foo.bar.log.post', any).once
      plugin.receive_lines(['foo', 'bar'], 'foo.bar.log')
    end
    # Only the first `*` is expanded; extra wildcard text is dropped.
    config = %[
tag pre.*.post*ignore
path test/plugin/*/%Y/%m/%Y%m%d-%H%M%S.log,test/plugin/data/log/**/*.log
format /^(?<message>.*)$/
    ]
    plugin = create_driver(config).instance
    flexstub(Fluent::Engine) do |engineclass|
      engineclass.should_receive(:emit_stream).with('pre.foo.bar.log.post', any).once
      plugin.receive_lines(['foo', 'bar'], 'foo.bar.log')
    end
  end
end
# Tests for TailExWatcher tag derivation and line delivery.
class TailExWatcherTest < Test::Unit::TestCase
  def setup
    @tag = nil
    @lines = nil
    # The doubled slash in the path exercises normalisation when the watched
    # path is turned into a tag ('/var/tmp//foo.log' -> 'var.tmp.foo.log').
    @watcher = Fluent::TailExInput::TailExWatcher.new('/var/tmp//foo.log', 5, nil, &method(:callback))
  end

  # Captures what the watcher hands back so the test can inspect it.
  def callback(lines, tag)
    @tag = tag
    @lines = lines
  end

  def test_receive_lines
    # Invoke the watcher's internal receive hook directly.
    @watcher.instance_eval { @receive_lines.call(['l1', 'l2']) }
    assert_equal 'var.tmp.foo.log', @tag
    assert_equal ['l1', 'l2'], @lines
  end
end
|
<filename>apps/_demo/index.js
const { Keystone } = require('@keystonejs/keystone')
const { PasswordAuthStrategy } = require('@keystonejs/auth-password')
const { GraphQLApp } = require('@keystonejs/app-graphql')
const { AdminUIApp } = require('@keystonejs/app-admin-ui')
const { StaticApp } = require('@keystonejs/app-static')
const { NextApp } = require('@keystonejs/app-next')
const { createItems } = require('@keystonejs/server-side-graphql-client')
const conf = require('@core/config')
const access = require('@core/keystone/access')
const { areWeRunningTests } = require('@core/keystone/test.utils')
const { EmptyApp } = require('@core/keystone/test.utils')
const { prepareDefaultKeystoneConfig } = require('@core/keystone/setup.utils')
const { registerSchemas } = require('@core/keystone/schema')
// Keystone instance built from the shared default config; onConnect seeds
// demo data in development only.
const keystone = new Keystone({
    ...prepareDefaultKeystoneConfig(conf),
    onConnect: async () => {
        // Initialise some data
        if (conf.NODE_ENV !== 'development') return // Just for dev env purposes!
        // This function can be called before tables are created! (we just ignore this)
        try {
            const users = await keystone.lists.User.adapter.findAll()
            // Seed only once: skip when any user already exists.
            if (!users.length) {
                const initialData = require('./initialData')
                for (let { listKey, items } of initialData) {
                    console.log(`🗿 createItems(${listKey}) -> ${items.length}`)
                    await createItems({
                        keystone,
                        listKey,
                        items,
                    })
                }
            }
        } catch (e) {
            // Best-effort seeding: log and continue so boot isn't blocked.
            console.warn('onConnectError:', e)
        }
    },
})

// Register all list/schema definitions with this Keystone instance.
registerSchemas(keystone, [
    require('./schema/User'),
    require('./schema/Organization'),
    require('./schema/Auth'),
    require('./schema/Test'),
])

// Username/password auth backed by the User list.
const authStrategy = keystone.createAuthStrategy({
    type: PasswordAuthStrategy,
    list: 'User',
})

module.exports = {
    configureExpress: app => {
        // The express application variable trust proxy must be set to support reverse proxying
        app.set('trust proxy', true)
    },
    keystone,
    apps: [
        new GraphQLApp({
            apollo: {
                debug: conf.NODE_ENV === 'development' || conf.NODE_ENV === 'test',
            },
        }),
        new StaticApp({ path: conf.MEDIA_URL, src: conf.MEDIA_ROOT }),
        new AdminUIApp({
            adminPath: '/admin',
            // enableDefaultRoute: true,
            isAccessAllowed: access.userIsAdmin,
            authStrategy,
        }),
        // The Next.js frontend is replaced by a stub while tests run.
        !areWeRunningTests() ? new NextApp({ dir: '.' }) : new EmptyApp(),
    ],
}
|
package com.waflo.cooltimediaplattform.backend.beans;
import com.waflo.cooltimediaplattform.backend.model.Category;
import org.springframework.stereotype.Component;
import javax.faces.component.UIComponent;
import javax.faces.component.UISelectItems;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import java.util.List;
import java.util.function.Predicate;
/**
 * JSF converter that maps {@link Category} entities to and from their
 * display name within select components.
 */
@FacesConverter(forClass = Category.class)
@Component
public class CategoryConverter implements Converter {

    /**
     * Resolves the submitted string back to the matching {@link Category}
     * by searching the component's {@link UISelectItems} children.
     *
     * @return the matching Category, or {@code null} when the value is
     *         empty or no item matches
     */
    @Override
    public Object getAsObject(FacesContext context, UIComponent component, String value) {
        if (value == null || value.isEmpty()) {
            return null;
        }
        return getSelectedItemAsEntity(component, value);
    }

    /** Finds the select item whose name equals {@code value}. */
    private Object getSelectedItemAsEntity(UIComponent component, String value) {
        for (UIComponent child : component.getChildren()) {
            if (child instanceof UISelectItems) {
                @SuppressWarnings("unchecked")
                List<Category> selectItems = (List<Category>) ((UISelectItems) child).getValue();
                if (selectItems != null && !selectItems.isEmpty()) {
                    Category item = selectItems.stream()
                            .filter(i -> i.getName().equals(value))
                            .findFirst()
                            .orElse(null);
                    if (item != null) {
                        // Stop at the first match instead of scanning the
                        // remaining children (the original kept iterating).
                        return item;
                    }
                }
            }
        }
        return null;
    }

    /** Serialises a Category as its name; anything else becomes "". */
    @Override
    public String getAsString(FacesContext context, UIComponent component, Object value) {
        if (value instanceof Category) {
            return ((Category) value).getName();
        }
        return "";
    }
}
|
#!/usr/bin/env bash

# Install or update curlie from its latest GitHub release.
# Clean up the temporary working directory on any exit.
trap 'rm -rf "${WORKDIR}"' EXIT

# Ensure WORKDIR is a private temp dir under /tmp and remember where we started.
[[ -z "${WORKDIR}" || "${WORKDIR}" != "/tmp/"* || ! -d "${WORKDIR}" ]] && WORKDIR="$(mktemp -d)"
[[ -z "${CURRENT_DIR}" || ! -d "${CURRENT_DIR}" ]] && CURRENT_DIR=$(pwd)

# Load custom functions
if type 'colorEcho' 2>/dev/null | grep -q 'function'; then
    :
else
    if [[ -s "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh" ]]; then
        source "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh"
    else
        echo "${MY_SHELL_SCRIPTS:-$HOME/.dotfiles}/custom_functions.sh does not exist!"
        exit 0
    fi
fi

[[ -z "${CURL_CHECK_OPTS[*]}" ]] && Get_Installer_CURL_Options

# Curlie: The power of curl, the ease of use of httpie
# https://github.com/rs/curlie
APP_INSTALL_NAME="curlie"
GITHUB_REPO_NAME="rs/curlie"

ARCHIVE_EXT="tar.gz"
ARCHIVE_EXEC_DIR=""
ARCHIVE_EXEC_NAME="curlie"

EXEC_INSTALL_PATH="/usr/local/bin"
EXEC_INSTALL_NAME="curlie"

[[ -z "${ARCHIVE_EXEC_NAME}" ]] && ARCHIVE_EXEC_NAME="${EXEC_INSTALL_NAME}"

DOWNLOAD_FILENAME="${WORKDIR}/${EXEC_INSTALL_NAME}"
[[ -n "${ARCHIVE_EXT}" ]] && DOWNLOAD_FILENAME="${DOWNLOAD_FILENAME}.${ARCHIVE_EXT}"

REMOTE_SUFFIX=""
REMOTE_FILENAME=""

IS_INSTALL="yes"
IS_UPDATE="no"

CURRENT_VERSION="0.0.0"
VERSION_FILENAME="${EXEC_INSTALL_PATH}/${EXEC_INSTALL_NAME}.version"

# If curlie is already on PATH, switch to update mode and read the
# previously recorded version, if any.
if [[ -x "$(command -v ${EXEC_INSTALL_NAME})" ]]; then
    IS_UPDATE="yes"
    [[ -s "${VERSION_FILENAME}" ]] && CURRENT_VERSION=$(head -n1 "${VERSION_FILENAME}")
else
    [[ "${IS_UPDATE_ONLY}" == "yes" ]] && IS_INSTALL="no"
fi

# Query GitHub for the latest release tag; skip install when not newer.
if [[ "${IS_INSTALL}" == "yes" ]]; then
    colorEcho "${BLUE}Checking latest version for ${FUCHSIA}${APP_INSTALL_NAME}${BLUE}..."

    CHECK_URL="https://api.github.com/repos/${GITHUB_REPO_NAME}/releases/latest"
    REMOTE_VERSION=$(curl "${CURL_CHECK_OPTS[@]}" "${CHECK_URL}" | jq -r '.tag_name//empty' 2>/dev/null | cut -d'v' -f2)
    if version_le "${REMOTE_VERSION}" "${CURRENT_VERSION}"; then
        IS_INSTALL="no"
    fi
fi

# Compose the release asset name for this OS/architecture.
if [[ "${IS_INSTALL}" == "yes" ]]; then
    [[ -z "${OS_INFO_TYPE}" ]] && get_os_type
    [[ -z "${OS_INFO_ARCH}" ]] && get_arch

    REMOTE_FILENAME="${EXEC_INSTALL_NAME}_${REMOTE_VERSION}_${OS_INFO_TYPE}_${OS_INFO_ARCH}.${ARCHIVE_EXT}"
    [[ -z "${REMOTE_FILENAME}" ]] && IS_INSTALL="no"
fi

if [[ "${IS_INSTALL}" == "yes" ]]; then
    colorEcho "${BLUE} Installing ${FUCHSIA}${APP_INSTALL_NAME} ${YELLOW}${REMOTE_VERSION}${BLUE}..."

    # Download file
    DOWNLOAD_URL="${GITHUB_DOWNLOAD_URL:-https://github.com}/${GITHUB_REPO_NAME}/releases/download/v${REMOTE_VERSION}/${REMOTE_FILENAME}"
    colorEcho "${BLUE} From ${ORANGE}${DOWNLOAD_URL}"
    curl "${CURL_DOWNLOAD_OPTS[@]}" -o "${DOWNLOAD_FILENAME}" "${DOWNLOAD_URL}"

    curl_download_status=$?
    # If a download mirror was configured and failed, retry via github.com.
    if [[ ${curl_download_status} -gt 0 && -n "${GITHUB_DOWNLOAD_URL}" ]]; then
        DOWNLOAD_URL="${DOWNLOAD_URL//${GITHUB_DOWNLOAD_URL}/https://github.com}"
        colorEcho "${BLUE} From ${ORANGE}${DOWNLOAD_URL}"
        curl "${CURL_DOWNLOAD_OPTS[@]}" -o "${DOWNLOAD_FILENAME}" "${DOWNLOAD_URL}"
        curl_download_status=$?
    fi

    if [[ ${curl_download_status} -eq 0 ]]; then
        # Extract file
        case "${ARCHIVE_EXT}" in
            "zip")
                unzip -qo "${DOWNLOAD_FILENAME}" -d "${WORKDIR}"
                ;;
            "tar.bz2")
                tar -xjf "${DOWNLOAD_FILENAME}" -C "${WORKDIR}"
                ;;
            "tar.gz")
                tar -xzf "${DOWNLOAD_FILENAME}" -C "${WORKDIR}"
                ;;
            "tar.xz")
                tar -xJf "${DOWNLOAD_FILENAME}" -C "${WORKDIR}"
                ;;
            "gz")
                cd "${WORKDIR}" && gzip -df "${DOWNLOAD_FILENAME}"
                ;;
            "bz")
                cd "${WORKDIR}" && bzip2 -df "${DOWNLOAD_FILENAME}"
                ;;
            "7z")
                7z e "${DOWNLOAD_FILENAME}" -o"${WORKDIR}"
                ;;
        esac

        # Install: locate the executable inside the extracted tree, copy it
        # into place, and record the installed version.
        [[ -n "${ARCHIVE_EXEC_DIR}" ]] && ARCHIVE_EXEC_DIR=$(find "${WORKDIR}" -type d -name "${ARCHIVE_EXEC_DIR}")
        [[ -z "${ARCHIVE_EXEC_DIR}" || ! -d "${ARCHIVE_EXEC_DIR}" ]] && ARCHIVE_EXEC_DIR=${WORKDIR}

        if [[ -s "${ARCHIVE_EXEC_DIR}/${ARCHIVE_EXEC_NAME}" ]]; then
            sudo cp -f "${ARCHIVE_EXEC_DIR}/${ARCHIVE_EXEC_NAME}" "${EXEC_INSTALL_PATH}/${EXEC_INSTALL_NAME}" && \
                sudo chmod +x "${EXEC_INSTALL_PATH}/${EXEC_INSTALL_NAME}" && \
                [[ -n "${VERSION_FILENAME}" ]] && echo "${REMOTE_VERSION}" | sudo tee "${VERSION_FILENAME}" >/dev/null || true
        fi
    fi
fi

cd "${CURRENT_DIR}" || exit
<filename>sqlalchemy_jsonapi/unittests/test_serializer_get_relationship.py
"""Test for serializer's get_relationship."""
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi import __version__
class GetRelationship(testcases.SqlalchemyJsonapiTestCase):
    """Tests for serializer.get_relationship."""

    def test_get_relationship_on_to_many(self):
        """Get a relationship to many resources returns 200."""
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        blog_post = models.Post(
            title='This Is A Title', content='This is the content',
            author_id=user.id, author=user)
        self.session.add(blog_post)
        # Attach two comments so the to-many relationship has several entries.
        for x in range(2):
            comment = models.Comment(
                content='This is comment {0}'.format(x+1), author_id=user.id,
                post_id=blog_post.id, author=user, post=blog_post)
            self.session.add(comment)
        self.session.commit()
        response = models.serializer.get_relationship(
            self.session, {}, 'posts', blog_post.id, 'comments')
        # Resource linkage only: each related comment appears as {id, type}.
        expected = {
            'data': [{
                'id': 1,
                'type': 'comments'
            }, {
                'id': 2,
                'type': 'comments'
            }],
            'jsonapi': {
                'version': '1.0'
            },
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            }
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(200, response.status_code)

    def test_get_relationship_on_to_one(self):
        """Get a relationship of on to one returns 200."""
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        blog_post = models.Post(
            title='This Is A Title', content='This is the content',
            author_id=user.id, author=user)
        self.session.add(blog_post)
        self.session.commit()
        response = models.serializer.get_relationship(
            self.session, {}, 'posts', blog_post.id, 'author')
        # A to-one relationship returns a single linkage object, not a list.
        expected = {
            'data': {
                'id': 1,
                'type': 'users'
            },
            'jsonapi': {
                'version': '1.0'
            },
            'meta': {
                'sqlalchemy_jsonapi_version': __version__
            }
        }
        actual = response.data
        self.assertEqual(expected, actual)
        self.assertEqual(200, response.status_code)

    def test_get_relationship_with_unknown_relationship(self):
        """Get a resources relationship with an unknown relationship returns 404.

        A RelationshipNotFoundError is raised.
        """
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        blog_post = models.Post(
            title='This Is A Title', content='This is the content',
            author_id=user.id, author=user)
        self.session.add(blog_post)
        comment = models.Comment(
            content='This is a comment', author_id=user.id,
            post_id=blog_post.id, author=user, post=blog_post)
        self.session.add(comment)
        self.session.commit()
        with self.assertRaises(errors.RelationshipNotFoundError) as error:
            models.serializer.get_relationship(
                self.session, {}, 'posts',
                blog_post.id, 'invalid-relationship')
        self.assertEqual(error.exception.status_code, 404)
|
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { AuthModule } from './auth/auth.module';
import { UsersModule } from './users/users.module';
import { ReportsModule } from './reports/reports.module';
import { SignalTypesModule } from './signal-types/signal-types.module';
import { TypeOrmModule } from '@nestjs/typeorm';
import { FirebaseModule } from './firebase/firebase.module';
import { ConfigModule } from '@nestjs/config';
@Module({
imports: [
//Nest modules
ConfigModule.forRoot({
isGlobal: true,
}),
TypeOrmModule.forRoot({
type: 'mysql',
host: process.env.DB_HOST,
port: Number(process.env.DB_PORT),
username: process.env.DB_USER,
password: <PASSWORD>,
database: process.env.DB_NAME,
entities: ['dist/**/*.entity{.ts,.js}'],
autoLoadEntities: true,
synchronize: true,
}),
// App modules
AuthModule,
FirebaseModule,
UsersModule,
ReportsModule,
SignalTypesModule,
],
controllers: [AppController],
providers: [AppService],
})
export class AppModule { }
|
// Executes a program's source text through the engine pipeline:
// tokenize -> optimize -> interpret.
function engine(program) {
  // Parse the source into an abstract syntax tree.
  const syntaxTree = tokenize(program);
  // Reduce the tree so execution takes fewer steps.
  const reducedTree = optimize(syntaxTree);
  // Run the optimized tree.
  interpret(reducedTree);
}
#include "snmpdemo.h"
namespace {
    // Object identifiers rooted at mib-2 (.1.3.6.1.2.1) used by the demo.
    const auto mib_2_OID = QString( ".1.3.6.1.2.1" );
    const auto sysDescr_OID = mib_2_OID + ".1.1.0";
    const auto sysUpTimeInstance_OID = mib_2_OID + ".1.3.0";
    const auto sysName_OID = mib_2_OID + ".1.5.0";
    // Interface table (ifTable) and its per-row entry.
    const auto ifTable_OID = mib_2_OID + ".2.2";
    const auto ifEntry_OID = ifTable_OID + ".1";
    // Extended interface objects (ifXTable) carrying ifName.
    const auto ifMIBObjects_OID = mib_2_OID + ".31.1";
    const auto ifXTable = ifMIBObjects_OID + ".1";
    const auto ifXEntry_OID = ifXTable + ".1";
    const auto ifIndex_OID = ifEntry_OID + ".1";
    const auto ifName_OID = ifXEntry_OID + ".1";
    const auto ifDescr_OID = ifEntry_OID + ".2";
    const auto ifPhysAddress_OID = ifEntry_OID + ".6";
}
// Sets up the SNMP client against a fixed demo agent and starts a polling
// timer that drives makeRequest().
SnmpDemo::SnmpDemo(QObject *parent) : QObject(parent)
{
    // Hard-coded demo target. TODO(review): make the address configurable.
    m_address.setAddress( "192.168.1.163");
    m_snmp_client = new QtSnmpClient(this);
    m_snmp_client->setAgentAddress(m_address);
    m_snmp_client->setCommunity( "public" );
    connect( m_snmp_client, SIGNAL(responseReceived(qint32,QtSnmpDataList)), SLOT(onResponseReceived(qint32,QtSnmpDataList)) );
    connect( m_snmp_client, SIGNAL(requestFailed(qint32)),SLOT(onRequestFailed(qint32)) );
    // Poll every 0.5-1.5s; the random offset presumably staggers multiple
    // demo instances so they do not all fire at once.
    m_timer = new QTimer(this);
    connect( m_timer, SIGNAL(timeout()), SLOT(makeRequest()) );
    m_timer->start( 500 + ( rand() % 1000 ) );
}
// Prints each returned variable as "agent | OID : hex-encoded value".
void SnmpDemo::onResponseReceived(const qint32, const QtSnmpDataList& values )
{
    for( const auto& value : values ) {
        printf( "%s | %s : %s\n",
                qPrintable( m_address.toString() ),
                qPrintable( value.address() ),
                qPrintable( value.data().toHex() ) );
    }
}
// Logs the id of any request that the client reports as failed.
void SnmpDemo::onRequestFailed( const qint32 request_id )
{
    qDebug() << m_address << request_id;
    //assert( !request_id );
}
// Timer slot: queues a batch of system and interface-table queries, but only
// when the client has no outstanding work.
void SnmpDemo::makeRequest() {
    if( ! m_snmp_client->isBusy() ) {
        m_snmp_client->requestValue( sysDescr_OID );
        m_snmp_client->requestValues( QStringList() << sysUpTimeInstance_OID << sysName_OID );
        // Walk the interface columns (index, name, description, MAC address).
        m_snmp_client->requestSubValues( ifIndex_OID );
        m_snmp_client->requestSubValues( ifName_OID );
        m_snmp_client->requestSubValues( ifDescr_OID );
        m_snmp_client->requestSubValues( ifPhysAddress_OID );
    }
}
|
# Self-test for the minimal hard-coded assertion helpers used by dm_tools.
printf '%s' "${DIM}"
printf '%s\n' \
  'Here we are testing the actual testing tools for both success and failure '
printf '%s\n' \
  'cases. That means here you will see red error messages but those are only '
printf '%s\n' \
  'for validating the testing assertion functions by eye. As dm_tools is a '
printf '%s\n' \
  'fundamental tool for every further dm project, dm_test cannot be used here '
printf '%s\n' \
  'because it is based on dm_tools too. So dm_tools has to use a very minimal '
printf '%s\n' \
  'but also hard coded test suite. This initial tests somewhat make sure that '
printf '%s' \
  'this custom test suite is actually working as intended.'
printf '%s\n' "${RESET}"

#==============================================================================
# ASSERT EQUAL
#==============================================================================

# Here we are testing if the assertion function produces a success result if we
# compare two identical strings.
dm_tools__test__valid_case 'common - assert_equal success test'
dm_tools__test__assert_equal '42' '42'

# In this case we are forcing out an assertion failure by providing two
# different strings. The assertion should fail, and an appropriate error
# message should be printed out that will be captured by the tooling here. The
# assertion test is executed inside a subshell to capture its outputs and to be
# able to survive the exit call.
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='1'
dm_tools__test__error_case 'common - assert_equal failure printout test'
if result="$(dm_tools__test__assert_equal '42' '43')"
then
  _dm_tools__test__test_case_failed
  printf '%s\n' 'Test case was expected to fail..'
else
  _dm_tools__test__test_case_succeeded
  printf '%s\n' "$result" | sed "s/^/${DIM}captured output | ${RESET}/"
fi
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='0'

#==============================================================================
# ASSERT INVALID PARAMETER
#==============================================================================

# There is a dedicated assertion function that is checking if the tested tool
# has reported an invalid parameter error. If an error is reported, the
# assertion should print out the error message to validate it by eyes. If the
# tool isn't provided the required status code an appropriate error message
# should be printed out. In this case we are testing the valid use case when
# the theoretical tool validates the required status.
dm_tools__test__valid_case 'common - assert_invalid_parameter success test'
dm_tools__test__assert_invalid_parameters \
  "$DM_TOOLS__STATUS__INVALID_PARAMETERS" \
  "$( \
    printf '%s\n' 'A specific error status and an error message is expected.'; \
    printf '%s\n' 'This error message should be only visible if the assertion succeeds.' \
  )"

# In this case we are passing the wrong status code, hence the assertion needs
# to print out an error message and terminates the test suite. We are running
# this test inside a subshell to be able to capture the output and also to be
# able to survive the exit call.
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='1'
dm_tools__test__error_case 'common - assert_invalid_parameter failure printout test'
if result="$(dm_tools__test__assert_invalid_parameters '42' 'invisible')"
then
  _dm_tools__test__test_case_failed
  printf '%s\n' 'Test case was expected to fail..'
else
  _dm_tools__test__test_case_succeeded
  printf '%s\n' "$result" | sed "s/^/${DIM}captured output | ${RESET}/"
fi
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='0'

#==============================================================================
# TEST CASE FAILED
#==============================================================================

# This test case reporting function should be used if the tool exits an
# unexpected error status. In this case the test case will be marked as failed,
# and an appropriate error should be printed. As this function will make the
# execution to fail regardless of the input, we are only testing the error case
# here.
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='1'
dm_tools__test__error_case 'common - test_case_failed failure printout test'
if result="$(dm_tools__test__test_case_failed '42')"
then
  _dm_tools__test__test_case_failed
  printf '%s\n' 'Test case was expected to fail..'
else
  _dm_tools__test__test_case_succeeded
  printf '%s\n' "$result" | sed "s/^/${DIM}captured output | ${RESET}/"
fi

#==============================================================================
# FINISH
#==============================================================================

# After this test file we want to have the standard test result printout
# behavior, so we are setting back the global variable to its default value.
#shellcheck disable=SC2034
DM_TOOLS__TEST__SUPPRESS_RESULT_PRINTOUT='0'
|
// Ambient declaration: source-map-support may be injected globally by the
// test tooling; the typeof guard below keeps this safe when it is absent.
declare const sourceMapSupport: { install(): void };
if (typeof sourceMapSupport !== 'undefined') {
  sourceMapSupport.install();
}

/*
import { BrowserPlatform } from '@aurelia/platform-browser';
import { $setup } from './setup-shared.js';

const platform = new BrowserPlatform(window);
$setup(platform);
*/

console.log(`Browser test context initialized`);
|
import './set-public-path'
import Vue from 'vue'
import App from './App.vue'
import singleSpaVue from 'single-spa-vue'
// Silence the Vue "running in development mode" console hint.
Vue.config.productionTip = false

// Wrap the Vue app with single-spa lifecycle hooks; the app mounts into the
// #app3 container owned by the root config.
const vueLifecycles = singleSpaVue({
  Vue,
  appOptions: {
    el: '#app3',
    render: (h) => h(App)
  }
})

// Lifecycle exports required by the single-spa contract.
export const bootstrap = vueLifecycles.bootstrap
export const mount = vueLifecycles.mount
export const unmount = vueLifecycles.unmount
|
// DOM handles for the search form and history list.
var cityInput = document.getElementById("city-text");
var cityForm = document.getElementById("city-form")
var cityHistory = document.getElementById("city-history");
// OpenWeatherMap API key — "<KEY>" is a redacted placeholder; supply a real
// key before deploying. TODO confirm.
const myKey = "<KEY>";
var currentCityContainer = document.getElementById("currentContainer");
// Search history; persisted to localStorage by storeCities()/init().
var cities = [];
// Redraws the clickable list of previously searched cities from `cities`.
function renderHistory() {
    cityHistory.innerHTML = "";
    cities.forEach(function (cityName, index) {
        var entry = document.createElement("li");
        entry.textContent = cityName;
        // data-index lets click handlers map the element back to `cities`.
        entry.setAttribute("data-index", index);
        entry.classList.add("box", "button", "has-text-left");
        cityHistory.appendChild(entry);
    });
}
// Restores any previously saved city history, then draws it.
function init() {
    const saved = JSON.parse(localStorage.getItem("cities"));
    if (saved !== null) {
        cities = saved;
    }
    renderHistory();
}
// Persists the in-memory city history to localStorage as JSON.
function storeCities() {
    const serialized = JSON.stringify(cities);
    localStorage.setItem("cities", serialized);
}
var submitButton = document.getElementById("submit");
// Handle a search: fetch current conditions for the typed city, use its
// coordinates for the one-call forecast, render everything, and persist the
// city in the search history.
submitButton.addEventListener("click", function(event) {
    event.preventDefault();
    if (cityInput.value === "") {
        return;
    }
    // One request for current conditions feeds both the header and the
    // one-call lookup. (The original fired the same request twice per submit.)
    getCurrent(cityInput.value)
        .then(result => {
            let icon = result.weather[0].icon;
            document.getElementById("currentCityName").innerHTML = result.name;
            document.getElementById("currentIcon").src = "http://openweathermap.org/img/w/" + icon + ".png";
            return getOneCall(result.coord.lat, result.coord.lon);
        })
        .then(finalResult => {
            console.log(finalResult)
            populateCurrentCity(finalResult);
            populateForecast("day-one", finalResult, 1)
            populateForecast("day-two", finalResult, 2)
            populateForecast("day-three", finalResult, 3)
            populateForecast("day-four", finalResult, 4)
            populateForecast("day-five", finalResult, 5)
        })
    // History is updated optimistically, before the fetch resolves
    // (same as the original behavior).
    cities.push(cityInput.value);
    cityInput.value = "";
    storeCities();
    renderHistory();
});
init();
var clearButton = document.getElementById("clear");
// Clear the persisted history AND the in-memory list, then re-render so the
// sidebar reflects the cleared state. (Previously only localStorage was
// cleared, so the stale list stayed on screen and in `cities`, and would be
// re-saved on the next search.)
clearButton.addEventListener("click", function() {
    localStorage.clear();
    cities = [];
    renderHistory();
})
// Fill one forecast card (identified by the `day_desc` id prefix) with the
// daily entry at `index` from a one-call API response.
function populateForecast(day_desc, result, index){
    var day = result.daily[index];
    var byId = function (suffix) { return document.getElementById(day_desc + suffix); };
    byId("-date").innerHTML = convertUnixToDate(day.dt);
    byId("-icon").src = "http://openweathermap.org/img/w/" + day.weather[0].icon + ".png";
    byId("-temp").innerHTML = "Temp: " + day.temp.day + "°F";
    byId("-wind").innerHTML = "Wind: " + day.wind_speed + " mph";
    byId("-humidity").innerHTML = "Humidity: " + day.humidity + "%";
}
// Fetch current conditions for a city name (imperial units); resolves with
// the parsed JSON body.
async function getCurrent(input) {
    const url = `https://api.openweathermap.org/data/2.5/weather?q=${input}&units=imperial&appid=${myKey}`;
    const response = await fetch(url);
    return await response.json();
}
// Fetch the one-call payload (current + daily forecast) for coordinates;
// resolves with the parsed JSON body.
async function getOneCall(lat, lng) {
    const url = `https://api.openweathermap.org/data/2.5/onecall?lat=${lat}&lon=${lng}&units=imperial&appid=${myKey}`;
    const response = await fetch(url);
    return await response.json();
}
// Fetch the 3-hourly 5-day forecast for a city name; resolves with the
// parsed JSON body. (Currently unreferenced by the rest of this file.)
async function getFiveDayForeCast(city_name) {
    const url = `https://api.openweathermap.org/data/2.5/forecast?q=${city_name}&units=imperial&appid=${myKey}`;
    const response = await fetch(url);
    return await response.json();
}
// Convert a unix timestamp (seconds) into a localized M/D/YYYY string
// using the runtime's local time zone.
function convertUnixToDate(unix_time){
    const dateObject = new Date(unix_time * 1000);
    const options = {month:'numeric', day:'numeric', year:'numeric'};
    return dateObject.toLocaleString('en-US', options);
}
// Render the "current conditions" panel from a one-call API response.
function populateCurrentCity(currentCityValues) {
const humanDateFormat = convertUnixToDate(currentCityValues.current.dt)
let UVColor = currentCityValues.current.uvi
// Color-code the UV index badge (<=2 low/green, >=6 high/red, else
// moderate/yellow) and return the numeric index for display.
function changeUVColor() {
if (UVColor <= 2) {
document.getElementById("currentUVI").style.color = "white";
document.getElementById("currentUVI").style.backgroundColor = "green";
} else if (UVColor >= 6) {
document.getElementById("currentUVI").style.backgroundColor = "red";
} else {
document.getElementById("currentUVI").style.backgroundColor = "yellow";
}
return UVColor;
}
document.getElementById("currentDate").innerHTML = humanDateFormat;
document.getElementById("currentTemp").innerHTML = "Temp: " + currentCityValues.current.temp + "°F";
document.getElementById("currentWindSp").innerHTML = "Wind: " + currentCityValues.current.wind_speed + " mph";
document.getElementById("currentHumidity").innerHTML = "Humidity: " + currentCityValues.current.humidity + "%";
// Setting the text also applies the color coding as a side effect.
document.getElementById("currentUVI").innerHTML = "UV Index: " + changeUVColor();
}
// Re-run a saved search when its history entry is clicked, via event
// delegation on the container. The original handler attached a brand-new
// click listener to EVERY child node on EVERY container click, so listeners
// (and duplicate API requests) multiplied over time; its
// `innerHTML === null` guard was also dead code (innerHTML is never null).
cityHistory.addEventListener("click", function(event) {
    // Ignore clicks that don't land on a saved city entry.
    if (event.target === cityHistory) {
        return;
    }
    var cityName = event.target.textContent;
    console.log(cityName)
    if (!cityName) {
        return;
    }
    getCurrent(cityName)
        .then(result => {
            document.getElementById("currentCityName").innerHTML = result.name;
            return getOneCall(result.coord.lat, result.coord.lon);
        })
        .then(finalResult => {
            populateCurrentCity(finalResult);
            populateForecast("day-one", finalResult, 1)
            populateForecast("day-two", finalResult, 2)
            populateForecast("day-three", finalResult, 3)
            populateForecast("day-four", finalResult, 4)
            populateForecast("day-five", finalResult, 5)
        })
    // Rebuild the list, preserving the original post-click re-render.
    cityHistory.innerHTML = "";
    renderHistory()
});
|
#encoding=utf-8
# Smoke test: enqueue a word-count job on the "low" RQ queue and poll for its
# result.
from time import sleep
from test.test_funs import count_words_at_url
if __name__ == '__main__':
    from rq import Queue, use_connection
    from utils.worker import redis_connection
    use_connection(redis_connection)
    q = Queue("low")
    result = q.enqueue(count_words_at_url, "https://www.heroku.com/")
    # Busy-wait until the worker populates the job result.
    # NOTE(review): this loops forever if the job fails or returns a falsy
    # value — confirm whether a timeout / status check is needed.
    while not result.result:
        sleep(1)
    print(result.result)
|
/*
NitroHax -- Cheat tool for the Nintendo DS
Copyright (C) 2008 Michael "Chishm" Chisholm
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <nds.h>
#include <fat.h>
#include <nds/fifocommon.h>
#include <stdio.h>
#include <string.h>
#include <malloc.h>
#include <list>
#include "inifile.h"
#include "nds_card.h"
#include "launch_engine.h"
#include "crc.h"
// Hardware register definitions kept for reference (provided by libnds).
// #define REG_ROMCTRL (*(vu32*)0x40001A4)
// #define REG_SCFG_ROM (*(vu32*)0x4004000)
// #define REG_SCFG_CLK (*(vu32*)0x4004004)
// #define REG_SCFG_EXT (*(vu32*)0x4004008)
// #define REG_SCFG_MC (*(vu32*)0x4004010)
// Location of the TWiLight Menu settings file on the SD card.
const char* settingsinipath = "sd:/_nds/TWiLightMenu/settings.ini";
bool consoleOn = false;
// ARM9 entry point: read launcher settings from SD, coordinate card reset
// with the ARM7 over FIFO, then hand off to the launch engine.
int main() {
bool TWLMODE = false;
bool TWLCLK = false; // false == NTR, true == TWL
bool TWLVRAM = false;
bool runCardEngine = false;
bool EnableSD = false;
int language = -1;
// If slot is powered off, tell Arm7 slot power on is required.
if(REG_SCFG_MC == 0x11) { fifoSendValue32(FIFO_USER_02, 1); }
if(REG_SCFG_MC == 0x10) { fifoSendValue32(FIFO_USER_02, 1); }
// Buffer for the 512-byte cartridge header (0x80 * 4 bytes).
u32 ndsHeader[0x80];
char gameid[4];
if (fatInitDefault()) {
CIniFile settingsini( settingsinipath );
TWLCLK = settingsini.GetInt("NDS-BOOTSTRAP","BOOST_CPU",0);
TWLVRAM = settingsini.GetInt("NDS-BOOTSTRAP","BOOST_VRAM",0);
runCardEngine = settingsini.GetInt("SRLOADER","SLOT1_CARDENGINE",1);
//if(settingsini.GetInt("SRLOADER","DEBUG",0) == 1) {
//	consoleOn = true;
//	consoleDemoInit();
//}
if( TWLCLK == false ) {
//if(settingsini.GetInt("TWL-MODE","DEBUG",0) == 1) {
//	printf("TWL_CLOCK ON\n");
//}
fifoSendValue32(FIFO_USER_04, 1);
// Disabled for now. Doesn't result in correct SCFG_CLK configuration during testing. Will go back to old method.
// setCpuClock(false);
// Drop the CPU back to NTR (67 MHz) clock speed.
REG_SCFG_CLK = 0x80;
swiWaitForVBlank();
}
if(settingsini.GetInt("SRLOADER","SLOT1_ENABLESD",0) == 1) {
//if(settingsini.GetInt("SRLOADER","DEBUG",0) == 1) {
//	printf("SD access ON\n");
//}
EnableSD = true;
}
TWLMODE = settingsini.GetInt("SRLOADER","SLOT1_TWLMODE",0);
if(settingsini.GetInt("SRLOADER","RESET_SLOT1",1) == 1) {
fifoSendValue32(FIFO_USER_02, 1);
fifoSendValue32(FIFO_USER_07, 1);
}
language = settingsini.GetInt("NDS-BOOTSTRAP", "LANGUAGE", -1);
} else {
// SD init failed: fall back to requesting a slot reset with defaults.
fifoSendValue32(FIFO_USER_02, 1);
fifoSendValue32(FIFO_USER_07, 1);
}
// Tell Arm7 it's ready for card reset (if card reset is necessary)
fifoSendValue32(FIFO_USER_01, 1);
// Waits for Arm7 to finish card reset (if necessary)
fifoWaitValue32(FIFO_USER_03);
// Wait for card to stablize before continuing
for (int i = 0; i < 30; i++) { swiWaitForVBlank(); }
sysSetCardOwner (BUS_OWNER_ARM9);
getHeader (ndsHeader);
for (int i = 0; i < 30; i++) swiIntrWait(0, 1);
// Game code lives at byte offset 12 of the cartridge header.
memcpy (gameid, ((const char*)ndsHeader) + 12, 4);
// Keep launching until the slot reports the powered state 0x11.
while(1) {
if(REG_SCFG_MC == 0x11) {
break; } else {
runLaunchEngine (EnableSD, language, TWLMODE, TWLCLK, TWLVRAM, runCardEngine);
}
}
return 0;
}
|
<reponame>Epic-Deno/UmiAdmin<filename>src/layouts/baseLayout/header/index.tsx
/*
* @Description: 头部组件
* @Author: Pony
* @Date: 2021-08-09 21:53:03
* @LastEditors: Pony
* @LastEditTime: 2021-08-09 22:54:19
*/
import UserSetting from './userSetting';
// Base-layout header: site title on the left, user settings dropdown beside it.
export default () => (
  <>
    <div style={{ fontSize: 18, color: '#fff' }}>博客管理台</div>
    <UserSetting />
  </>
);
#!/bin/bash
# Batch-export SVG icons to PNG at the resolutions in `res`, renaming them
# per the oldname,newname pairs in names.csv.
function saveInk()
{
# $1 = input svg, $2 = output png, $3 = pixel width.
inkscape --file=$1 --export-area-page --export-width=$3 --export-png=$2
}
mypath=$(dirname $(readlink -f $0))
#declare -a res=(22 48 128)
declare -a res=(48)
INPUT=names.csv
OLDIFS=$IFS
# Comma-separated fields in names.csv.
IFS=,
echo "Select theme"
echo "1) black"
echo "2) white"
echo "3) solid black"
echo "4) solid white"
read selection
case $selection in
1)
source_path="./../dist/icons/black/svg/" ;;
2)
source_path="./../dist/icons/white/svg/" ;;
3)
source_path="./../dist/icons/solid-black/svg/" ;;
4)
source_path="./../dist/icons/solid-white/svg/" ;;
*)
echo "selection is not valid!" ;
exit 99 ;;
esac
for the_res in "${res[@]}"
do
# NOTE(review): fails if the directory already exists — mkdir -p may be
# intended here.
mkdir $the_res
[ ! -f $INPUT ] && { echo "$INPUT file not found"; exit 99; }
while read oname nname
do
echo "== $oname.svg =="
saveInk "$source_path$oname.svg" "$mypath/$the_res/$nname" $the_res
done < $INPUT
done
IFS=$OLDIFS
#COMMENT1
|
<reponame>BuildForSDG/team-177-frontend<gh_stars>0
import React, { Component } from 'react'
import { Link } from 'react-router-dom'
// Contact form. The change/submit handlers are bound and ready, but the form
// does not wire them up (no onSubmit/onChange attributes) —
// NOTE(review): confirm whether that wiring was intended; the alert text also
// mentions password reset rather than a contact message.
class ContactUs extends Component{
    constructor(props) {
        super(props);
        this.state = {value: 'Phone number or email address'
        };
        this.handleChange = this.handleChange.bind(this);
        this.handleSubmit = this.handleSubmit.bind(this);
    }
    // Keep component state in sync with the edited field.
    handleChange(event) {
        this.setState({value: event.target.value});
    }
    // Acknowledge submission and prevent the default page reload.
    handleSubmit(event) {
        alert('Password reset succesfully: ' + this.state.value);
        event.preventDefault();
    }
    render() {
        return (
            <div id="box-contactus">
                <h3>Get in touch with us.</h3>
                <p>
                    Leave a comment or query on your experience with this
                    platform and may be a suggestion on how to improve it.
                </p>
                <form>
                    <div>
                        <label>Your name:
                            <input type="text" name="name" placeholder="Your name" required />
                        </label>
                        <label>Input your phone number or email address:
                            <input type="number" min="02000000" name="user_name" placeholder="Phone number or email address" required />
                        </label>
                        <label>Message:
                            <input type="textarea" name="message" placeholder="Message" />
                        </label>
                        {/* `className` (was `class`, which is invalid in JSX
                            and triggers a React warning). */}
                        <input className="button" type="submit" name="submit" value="Submit" />
                    </div>
                </form>
                <p>
                    We'll get back to you as soon as possible.
                </p>
                <p>
                    Do you want to <Link to="report">report a case</Link> of abuse of rights?
                </p>
            </div>
        );
    }
};
export default ContactUs
// Fetch the image list from the API and parse the JSON body. Failures are
// propagated as a rejected promise; the previous .catch referenced an
// undefined `reject`, turning any network error into a ReferenceError.
export function loadImages() {
    return fetch("/api/images")
        .then(res => res.json())
        .catch(err => Promise.reject(err));
}
// Fetch a single image record by id and parse the JSON body. Failures are
// propagated as a rejected promise; the previous .catch referenced an
// undefined `reject`, turning any network error into a ReferenceError.
export function loadImage(id) {
    return fetch("/api/images/" + id)
        .then(res => res.json())
        .catch(err => Promise.reject(err));
}
|
import re
def countFontFamilies(css):
    """Count how many ``font-family`` declarations mention each family.

    Each declaration's value is split on commas; entries are stripped of
    surrounding whitespace and quotes (single or double) before tallying.

    Args:
        css: CSS source text.

    Returns:
        dict mapping family name -> number of declarations using it.
    """
    font_families = {}
    # Match up to ';' or '}' so the final declaration of a rule is counted
    # even when it omits the trailing semicolon; CSS property names are
    # case-insensitive, and whitespace may precede the colon.
    pattern = r'font-family\s*:\s*([^;}]+)'
    matches = re.findall(pattern, css, flags=re.IGNORECASE)
    for match in matches:
        # Strip both single- and double-quoted family names (the original
        # only handled double quotes).
        families = [f.strip().strip('\'"') for f in match.split(',')]
        for family in families:
            font_families[family] = font_families.get(family, 0) + 1
    return font_families
#!/bin/bash
# Interactive menu for cloning, updating, and (un)injecting the Powercord
# Discord client mod.
dialog --title "Powercord Setup" --infobox "Welcome. \n\nIf this is your first time running this script, download the Powercord repo by selecting option 1 first.\n\nAfterwards, select a function by typing the number that co-relates with the specific action you want to do." 10 70;sleep 5
clear
# Work from the user's home directory.
cd
PS3='Powercord Setup: '
options=("Git Clone" "Update" "Install" "Uninstall" "Quit")
# `select` loops until the user picks Quit.
select choice in "${options[@]}"; do
case $choice in
"Git Clone")
git clone https://github.com/powercord-org/powercord
# Downloads latest version of Powercord
;;
"Update")
cd powercord && git pull
cd
# Updates Powercord to latest version
;;
"Install")
cd powercord && sudo npm run plug
cd
# Injects mod into Canary
;;
"Uninstall")
cd powercord && sudo npm run unplug
cd
# Uninjects mod
;;
"Quit")
echo "User requested exit"
exit
;;
*) echo "invalid option $REPLY";;
esac
done
|
<html>
<head>
<title>Country Flags</title>
<style>
table, th, td {
border: 1px solid black;
}
</style>
</head>
<body>
<h1>Country Flags</h1>
<!-- Static lookup table of country names and their flag emoji. -->
<table>
<tr>
<th>Country</th>
<th>Flag</th>
</tr>
<tr>
<td>USA</td>
<td>🇺🇸</td>
</tr>
<tr>
<td>India</td>
<td>🇮🇳</td>
</tr>
<tr>
<td>UK</td>
<td>🇬🇧</td>
</tr>
<tr>
<td>France</td>
<td>🇫🇷</td>
</tr>
<tr>
<td>Japan</td>
<td>🇯🇵</td>
</tr>
</table>
</body>
</html>
# Chef node attributes for building the sickle read trimmer from source.
default['sickle']['version'] = 'master'                            # git ref to check out
default['sickle']['install_dir'] = '/usr/local/sickle'             # source tree location (was a pointless 'a' + 'b' concat)
default['sickle']['src_repo'] = 'https://github.com/najoshi/sickle' # upstream repository
default['sickle']['bin_path'] = '/usr/local/bin'                   # where the binary is installed
|
#!/bin/bash
##Run LMBench test suite
TEST_ROOT=$(readlink -f `dirname $0`)
# Provides usage, get_sysinfo, prepare_test, echo_process, drop_caches,
# clean_up, and the MB/FILE/FILE_SIZE/full_percent machinery.
source ${TEST_ROOT}/scripts/utility.sh
#getopts
# `b` takes the lmbench binary path and `c` the iteration count, so BOTH need
# a ':' in the optstring. (The original spec `hbc:` declared -b without an
# argument, leaving $OPTARG empty in its handler.)
while getopts hb:c: arg
do
case $arg in
b) LMBENCH_BIN_PATH=$OPTARG;;
c) full_percent=$OPTARG;;
h) usage; exit;;
*) echo "Invalid option: $arg"; usage; exit;;
esac
done
#verify the count number
invalid_char=`echo $full_percent | sed 's/[0-9]//g'`
if test X"$invalid_char" != "X"; then
usage;exit
fi
#if not set the full_percent in the command line, use the default 5
if test X"$full_percent" == "X"; then
full_percent=5
fi
## root privilege is required to drop memory caches for some cases.
[ "$UID" -ne 0 ] && { echo "Root privilege is required."; exit 1; }
#### ENV setup ####
#Where is lmbench binaries? Default path is /usr/bin
LMBENCH_BIN_PATH=${LMBENCH_BIN_PATH:-/usr/bin}
[ -d "$LMBENCH_BIN_PATH" ] || { echo "No such directory: ${LMBENCH_BIN_PATH}"; usage; exit 1; }
export PATH=${LMBENCH_BIN_PATH}:${PATH}
#Specify directory to save test logs, default is ${TEST_ROOT}/logs/xxx
LOGDIR="${TEST_ROOT}/logs/lmbench-$(date +%Y-%m-%d-%H-%M-%S)"
mkdir -p $LOGDIR || { echo "Error! Can't create log dir: $LOGDIR"; exit 1; }
DATE=$(which date)
## Figure out memory size (in MB size) will be used for testing
#+ Several benchmarks operate on a range of memory.
#+ The bigger the range, the more accurate the results, but larger sizes
#+ take somewhat longer to run the benchmark.
#+ It should be no more than 80% of your physical memory.
#+ prepare_test function will set it to half of the available memory
#+ (but no more than 512MB) if it is kept empty
MB=""
## We need a place to store a ${MB}m Mbyte file as well as create and delete a
#+ large number of small files. We default to /usr/tmp. If /usr/tmp is a
#+ memory resident file system (i.e., tmpfs), pick a different place.
#+ Please specify a directory that has enough space and is a local file
#+ system.
FSDIR=""
##Get system info
get_sysinfo
##Prepare temp files
prepare_test
#############
##Run tests ...
#############
##==fs test cases==
# Each case loops until echo_process (utility.sh) returns 1, i.e. until
# full_percent iterations have been logged. done_percent/current_percent are
# globals that echo_process reads and advances.
echo "get fs create/delete latency"
echo "10k file size"
done_percent=0
current_percent=0
log_file="$LOGDIR/10k_file_cd"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
# awk converts lat_fs's creations/deletions-per-second into microseconds.
lat_fs -s 10k 2>&1 | awk '{print 1000000/$3 " " 1000000/$4}' >> $log_file 2>&1
done
echo "100k file size"
done_percent=0
current_percent=0
log_file="$LOGDIR/100k_file_cd"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_fs -s 100k 2>&1 | awk '{print 1000000/$3 " " 1000000/$4}' >> $log_file 2>&1;
done
#==Process creation===
echo "Process creation (us)"
echo "Process fork+exit"
done_percent=0
current_percent=0
log_file="$LOGDIR/proc_fork_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_proc fork >> $log_file 2>&1
done
echo "fork+execve"
done_percent=0
current_percent=0
log_file="$LOGDIR/proc_exec_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_proc exec >> $log_file 2>&1
done
echo "fork+/bin/sh"
done_percent=0
current_percent=0
log_file="$LOGDIR/proc_shell_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_proc shell >> $log_file 2>&1
done
##==pipe/unix latency/bandwidth==
## Pipe latency
echo "Pipe latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/pipe_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_pipe >> $log_file 2>&1
done
## AF_UNIX latency
echo "AF_UNIX sock stream latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/unix_sock_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_unix >> $log_file 2>&1
done
## Pipe Bandwidth
echo "Bandwidth pipe -P 1 -M 100m"
done_percent=0
current_percent=0
log_file="$LOGDIR/pipe_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_pipe -P 1 -M 100m >> $log_file 2>&1
done
echo "Bandwidth unix -P 1 -M 100m"
done_percent=0
current_percent=0
log_file="$LOGDIR/unix_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_unix -P 1 -M 100m >> $log_file 2>&1
done
#==context switch test==
# 8 processes, 64 KiB working set each.
echo "ctx latency -P 1 -s 64k 8"
done_percent=0
current_percent=0
log_file="$LOGDIR/ctx_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_ctx -P 1 -s 64k 8 >> $log_file 2>&1
done
#==signal tests==
echo "singnal handler "
# Parallelism for lat_sig/lat_syscall runs.
SYNC_MAX=1
log_file="$LOGDIR/sig_hand_latency"
#case 1 of 3
echo "signal install"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_sig -P $SYNC_MAX install >> $log_file 2>&1
done
#case 2 of 3
echo "signal catch"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_sig -P $SYNC_MAX catch >> $log_file 2>&1
done
#case 3 of 3
echo "protection fault"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
# `prot` needs a file to mmap; the lat_sig binary itself is used.
lat_sig -P $SYNC_MAX prot ${LMBENCH_BIN_PATH}/lat_sig >> $log_file 2>&1
done
#==syscall tests==
echo "System call overhead"
SYNC_MAX=1
log_file="$LOGDIR/syscall_overhead"
#case 1 of 6
echo "syscall: null"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX null >> $log_file 2>&1
done
#case 2 of 6
echo "syscall: read"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX read >> $log_file 2>&1
done
#case 3 of 6
echo "syscall: write"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX write >> $log_file 2>&1
done
#case 4 of 6
# $STAT is the target file prepared by prepare_test (utility.sh).
echo "syscall: stat"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX stat $STAT >> $log_file 2>&1
done
#case 5 of 6
echo "syscall: fstat"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX fstat $STAT >> $log_file 2>&1
done
#case 6 of 6
echo "syscall: open/close"
done_percent=0
current_percent=0
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_syscall -P $SYNC_MAX open $STAT >> $log_file 2>&1
done
##==select latency
echo "select latency"
SYNC_MAX=1
# Number of file descriptors handed to select().
FDS=100
#case 1 of 2
echo "select: file"
done_percent=0
current_percent=0
log_file="$LOGDIR/select_file_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_select -n $FDS -P $SYNC_MAX file >> $log_file 2>&1
done
#case 2 of 2
echo "select: tcp"
done_percent=0
current_percent=0
log_file="$LOGDIR/select_tcp_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_select -n $FDS -P $SYNC_MAX tcp >> $log_file 2>&1
done
##==tcp/udp tests==
# Each benchmark starts its own server (-s) before the loop and shuts it
# down (-S) afterwards; clients talk to localhost.
echo "tcp bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/tcp_bandwidth"
bw_tcp -s
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_tcp -m 10m localhost >> $log_file 2>&1
done
bw_tcp -S localhost
echo "tcp latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/tcp_latency"
lat_tcp -s
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_tcp -m 10k localhost >> $log_file 2>&1
done
lat_tcp -S localhost
echo "udp latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/udp_latency"
lat_udp -s
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_udp -m 10k localhost >> $log_file 2>&1
done
lat_udp -S localhost
echo "tcp connection latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/tcp_connect_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
# The server is restarted each iteration; if shutdown fails (ports still
# draining), wait and retry once without consuming an iteration.
lat_connect -s
lat_connect localhost >> $log_file 2>&1
lat_connect -S localhost
[ $? -ne 0 ] && {
sleep 60
current_percent=$(($current_percent - 1))
lat_connect -S localhost
}
done
echo "rpc latency"
SYNC_MAX=1
done_percent=0
current_percent=0
log_file="$LOGDIR/rpc_latency"
lat_rpc -s
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_rpc -P $SYNC_MAX -p udp localhost >> $log_file 2>&1
lat_rpc -P $SYNC_MAX -p tcp localhost >> $log_file 2>&1
done
lat_rpc -S localhost
##== Arithmetic latency ==
##i.e. bit add mul div mod ...
echo "Arithmetic operations latency"
#case 1 of 2
echo "ops latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/ops_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_ops >> $log_file 2>&1
done
#case 2 of 2
#echo "parallel ops latency"
#full_percent=20
#done_percent=0
#current_percent=0
#log_file="$LOGDIR/par_ops_latency"
#while :
#do
#	echo_process
#	par_ops >> $log_file 2>&1
#done
##== Pagefaults latency ==
# $FILE and $FILE_SIZE come from prepare_test; drop_caches forces faults to
# hit the disk on each run.
echo "Pagefaults latency"
SYNC_MAX=1
done_percent=0
current_percent=0
log_file="$LOGDIR/pagefaults_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_pagefault -P $SYNC_MAX $FILE >> $log_file 2>&1
drop_caches
done
##==Memory related test cases==
# bcopy touches source + destination, so use half of the test range ($MB).
echo "bcopy libc"
SYNC_MAX=1
HALF=`expr $MB / 2`
done_percent=0
current_percent=0
log_file="$LOGDIR/memory_bcopy_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_mem -P $SYNC_MAX ${HALF}m bcopy >> $log_file 2>&1
done
echo "Mmap latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/mmap_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
lat_mmap -P $SYNC_MAX ${MB}m $FILE >> $log_file 2>&1
drop_caches
done
echo "Mmap read bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/mmap_read_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_mmap_rd -P $SYNC_MAX ${FILE_SIZE}m mmap_only $FILE >> $log_file 2>&1
drop_caches
done
echo "Mmap read open2close bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/mmap_open2close_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_mmap_rd -P $SYNC_MAX ${FILE_SIZE}m open2close $FILE >> $log_file 2>&1
drop_caches
done
echo "Memory read bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/memory_read_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_mem -P $SYNC_MAX ${MB}m frd >> $log_file 2>&1
done
echo "Memory write bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/memory_write_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_mem -P $SYNC_MAX ${MB}m fwr >> $log_file 2>&1
done
echo "Memory load latency"
done_percent=0
current_percent=0
log_file="$LOGDIR/memory_load_latency"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
# 128-byte stride over the full test range.
lat_mem_rd -P $SYNC_MAX ${MB}m 128 >> $log_file 2>&1
done
##== File bandwidth==
echo "File read bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/file_read_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_file_rd -P $SYNC_MAX ${FILE_SIZE}m io_only $FILE >> $log_file 2>&1
drop_caches
done
echo "File read open2close bandwidth"
done_percent=0
current_percent=0
log_file="$LOGDIR/file_open2close_bandwidth"
while :
do
echo_process
if [ $? -eq 1 ]
then
break
fi
bw_file_rd -P $SYNC_MAX ${FILE_SIZE}m open2close $FILE >> $log_file 2>&1
drop_caches
done
##Test end
##Clean up
clean_up
|
#!/bin/sh
# Travis CI deploy step: render the bcbioRNASeq example reports and publish
# them to the gh-pages branch of the example-output repository.
render_templates() {
pwd
cd ..
git clone https://github.com/bcbio/bcbio_rnaseq_output_example.git
cd bcbio_rnaseq_output_example
Rscript -e 'devtools::install_local("../bcbioRNASeq")'
Rscript -e 'testthat::test_file("test_reports.R")'
cd report
# Tag each rendered report with the branch being built.
mv de.html de-${TRAVIS_BRANCH}.html
mv qc.html qc-${TRAVIS_BRANCH}.html
mv fa.html fa-${TRAVIS_BRANCH}.html
cd ..
}
# Identify commits as coming from the CI bot.
setup_git() {
git config --global user.email "travis@travis-ci.org"
git config --global user.name "Travis CI"
}
commit_website_files() {
git fetch origin gh-pages
git checkout gh-pages
git pull
cp report/*.html .
git add *.html
git commit --message "Travis build: $TRAVIS_BUILD_NUMBER"
}
upload_files() {
# NOTE(review): env var is spelled GITHUB_TOKE (no N) — confirm whether the
# CI settings really define that name or this should be GITHUB_TOKEN.
git remote add origin-pages https://${GITHUB_TOKE}@github.com/bcbio/bcbio_rnaseq_output_example.git > /dev/null 2>&1
git push --force --quiet --set-upstream origin-pages gh-pages
}
render_templates
setup_git
commit_website_files
upload_files
#!/bin/bash
# Run the ACKTR baseline on InvertedDoublePendulum for six seeds (0-5),
# capturing each run's output in its own log file.
experimentName="baselines"
pyName="run_pybullet.py"
cd ../../$experimentName/acktr/
for i in $(seq 0 5); do
    # Subshell keeps the redirection scoped to this single run.
    ( python $pyName --env InvertedDoublePendulumBulletEnv-v0 --seed $i &> InvertedDoublePendulum_"$i".out)
    echo "Complete the process $i"
done
// Nuxt/Vuex root store: UI toggles plus the authenticated user's identity.
export const state = () => ({
sidebar: false,
nsfw: false,
user: false,
extended: false,
username: null,
token: null,
show: false
})
// Simple setters/togglers; each mutation touches exactly one state field.
export const mutations = {
show (state, show) {
state.show = show
},
toggleSidebar (state) {
state.sidebar = !state.sidebar
},
toggleNsfw (state) {
state.nsfw = !state.nsfw
},
username (state, username) {
state.username = username
},
token (state, token) {
state.token = token
},
user (state, user) {
state.user = user
},
extended (state, extended) {
state.extended = extended
}
}
export const actions = {
// Placeholder server-side init hook; currently only logs.
async nuxtServerInit ({
commit
}, {
req
}) {
console.log('server init')
}
}
|
<reponame>atharrison/ruby-adventofcode2019
# Advent of Code 2019, day 1: fuel requirements for module masses.
class Day01
  def initialize
    @data = []
  end

  # Read one integer mass per line from the day's input file.
  def load
    f = File.open("data/day01/day01_input.txt")
    while (line = f.gets)
      @data << line.to_i unless line.nil?
    end
  end

  # Part 1: fuel for a module is floor(mass / 3) - 2, summed over all modules.
  def run_part1
    total = @data.sum { |mass| mass / 3 - 2 }
    puts "Total Part 1: #{total}"
  end

  # Part 2: the fuel itself needs fuel; keep adding until the increment
  # drops to zero or below.
  def run_part2
    total = @data.sum do |mass|
      module_total = 0
      fuel = mass / 3 - 2
      while fuel > 0
        module_total += fuel
        fuel = fuel / 3 - 2
      end
      module_total
    end
    puts "Total Part 2: #{total}"
  end
end
#! python3
# getOpenWeather.py - Prints the weather for a location from the command line.
# https://openweathermap.org/current
# OpenWeatherMap API key; must be filled in before use.
APPID = 'Replace me with your APPID'
import json, requests, sys
from pprint import pprint
from datetime import datetime, timedelta
# Compute location from command line arguments.
if len(sys.argv) < 2:
    print('Usage: getOpenWeather.py city_name')
    sys.exit()
location = ' '.join(sys.argv[1:])
# Download the JSON data from OpenWeatherMap.org's API.
url = f'https://api.openweathermap.org/data/2.5/weather?q={location}&appid={APPID}'
response = requests.get(url)
try:
    response.raise_for_status()
except requests.exceptions.HTTPError:
    print('City not found')
    sys.exit()
# Uncomment to see the raw JSON text:
#pprint(response.text)
# Load JSON data into a Python variable.
weatherData = json.loads(response.text)
# Print weather descriptions.
w = weatherData['weather']
m = weatherData['main']
# The API reports the city's UTC offset in seconds; convert server UTC time
# to the city's local time.
utcOffset = timedelta(seconds=weatherData['timezone'])
utcTime = datetime.utcnow()
localTime = utcTime + utcOffset
print('Current weather in %s, %s:' % (location, weatherData['sys']['country']))
print(w[0]['main'], '-', w[0]['description'])
# Temperatures arrive in Kelvin; convert to Celsius for display.
print('Temperature is ' + str(round(m['temp'] - 273.15, 1)) + '°C')
print('Feels like ' + str(round(m['feels_like'] - 273.15, 1)) + '°C')
print('Humidity is ' + str(round(m['humidity'])) + '%')
print(f"Time at {location}: " + localTime.strftime("%B %d, %H:%M"))
|
#!/bin/bash
# Travis CI install step: configure git identity and install the Python test
# toolchain. Fail fast (-e) and echo commands (-x) for debuggable CI logs.
set -e
set -x
git config --global user.email "alice+travis@gothcandy.com"
git config --global user.name "Travis: Marrow"
pip install --upgrade setuptools pytest
pip install tox
pip install python-coveralls
pip install pytest-cov
pip install pytest-flakes
#python code/nbt_768.py woz config/woz_stat_update_bert.cfg
# Launch the WOZ belief tracker under the pudb debugger.
python -m pudb code/nbt_768.py woz config/woz_stat_update_bert.cfg
<reponame>tpetillon/roofmapper-client
'use strict';
var $ = require('expose?$!expose?jQuery!jquery');
var L = require('leaflet');
var keyboardJS = require('keyboardjs');
var defined = require('./defined');
var OsmApi = require('./osmapi.js');
var Session = require('./session.js');
var Building = require('./building.js');
var Session = require('./session.js');
var BuildingService = require('./buildingservice.js');
var LoadingStatus = require('./loadingstatus.js');
var Localizer = require('./localizer.js');
var enMessages = require('./messages/en.json');
var frMessages = require('./messages/fr.json');
require("jquery-fullscreen-plugin");
require('leaflet-bing-layer');
require('leaflet-easybutton');
require('leaflet/dist/leaflet.css');
require('leaflet/dist/images/marker-icon.png');
require('leaflet/dist/images/marker-icon-2x.png');
require('leaflet/dist/images/marker-shadow.png');
require('leaflet-easybutton/src/easy-button.css');
require('./style.css');
require('bootstrap');
require("bootstrap-webpack");
require("font-awesome-webpack");
// since leaflet is bundled into the browserify package it won't be able to detect where the images
// solution is to point it to where you host the the leaflet images yourself
L.Icon.Default.imagePath = 'http://cdn.leafletjs.com/leaflet-0.7.3/images';
// Inject the static HTML fragments (main UI + dialogs) into the page.
$("body").append(require('html!./main.html'));
$("body").append(require('html!./helppopup.html'));
$("body").append(require('html!./aboutpopup.html'));
$("body").append(require('html!./messagepopup.html'));
$("body").append(require('html!./roofmaterialpopup.html'));
$("body").append(require('html!./invaliditypopup.html'));
// Map OSM roof:material values to localization keys.
var roofMaterialL10nKeys = {
"roof_tiles" : "tiles",
"slate" : "slate",
"metal" : "metal",
"copper" : "copper",
"concrete" : "concrete",
"glass" : "glass",
"tar_paper" : "tar-paper",
"eternit" : "eternit",
"gravel" : "gravel",
"grass" : "grass",
"plants" : "plants",
"stone" : "stone",
"thatch" : "thatch"
};
// Map invalidity-reason identifiers to localization keys.
var invalidityReasonL10nKeys = {
"mark_as_invalid" : "mark-as-invalid",
"multiple_materials" : "multiple-materials",
"multiple_buildings" : "multiple-buildings",
"building_fraction" : "building-fraction",
"not_a_building" : "not-a-building",
};
// Module-wide singletons: localization, map widgets, OSM API client,
// tagging session, and the shared loading indicator.
var _localizer = new Localizer(document, [ enMessages, frMessages ]);
var _map = undefined;
var _recenterButton = undefined;
var _outlineButton = undefined;
var _buildingPolygon = undefined;
var _api = new OsmApi();
var _session = new Session();
var _loadingStatus = new LoadingStatus();
// Tear down the current tagging session (if one is open), drop the OSM
// credentials, and refresh the header/controls to the signed-out state.
function logout() {
    var hasOpenSession = _session.open;
    if (hasOpenSession) {
        BuildingService.closeSession(_session.id);
        _session.id = undefined;
    }
    _api.logout();
    console.log("logged out");
    updateConnectionStatusDisplay();
    updateUi();
}
// beforeunload handler: ask for confirmation so unsaved tags aren't lost.
function confirmQuit(evt) {
    var message = _localizer.getText("close-roofmapper-confirm");
    evt.returnValue = message;
    return message;
}
// Sync every control's enabled/checked/visible state with the current
// loading, authentication, and session state. Called after any state change.
function updateUi() {
var loading = _loadingStatus.isLoading;
$("#authenticate").prop('disabled', loading || _api.authenticated);
$("#logout").prop('disabled', loading || !_api.authenticated)
$("#building-buttons").find("button").prop('disabled', loading);
// Navigation: can't go before the first or past the last loadable building.
if (_session.currentIndex <= 0) {
$("#previous-building").prop('disabled', true);
}
if (!_session.open ||
(_session.currentIndex == _session.buildingCount - 1 && (_session.full || _session.changesetIsFull))) {
$("#next-building").prop('disabled', true);
}
if (defined(_session.currentBuilding)) {
_recenterButton.enable();
_outlineButton.enable();
} else {
_recenterButton.disable();
_outlineButton.disable();
}
$(".tag-buttons").find("input").prop('disabled', loading || _session.currentIndex < 0);
$("#invalidity-buttons").find("input").prop('disabled', loading || _session.currentIndex < 0);
$("#upload-changes").prop(
"disabled",
loading || !_session.open || (_session.taggedBuildingCount <= 0 && _session.invalidatedBuildingCount <= 0));
// Reset all radio groups, then re-check the ones matching the current
// building's stored tag/invalidity below.
$(".tag-buttons").find("input")
.prop("checked", false)
.parent().removeClass("active");
$("#invalidity-buttons").find("input")
.prop("checked", false)
.parent().removeClass("active");
$("#tag-other-detail-span").hide();
$("#tag-invalid-detail-span").hide();
if (!loading && defined(_session.currentBuilding)) {
var invalidityReason = _session.currentBuilding.invalidityReason;
if (defined(invalidityReason)) {
$("#tag-invalid")
.prop("checked", true)
.parent().addClass("active");
$("#invalidity-" + invalidityReason)
.prop("checked", true)
.parent().addClass("active");
$("#tag-invalid-detail-text").attr("l10n", invalidityReasonL10nKeys[invalidityReason]);
$("#tag-invalid-detail-span").show();
} else {
var roofMaterial = _session.currentBuilding.roofMaterial;
$("#tag-" + roofMaterial)
.prop("checked", true)
.parent().addClass("active");
// Less-common materials live under the "other" drop-down.
if (roofMaterial === "glass" || roofMaterial === "grass" ||
roofMaterial === "plants" || roofMaterial === "stone" ||
roofMaterial === "tar_paper" || roofMaterial === "thatch") {
$("#tag-other")
.prop("checked", true)
.parent().addClass("active");
$("#tag-other-detail-text").attr("l10n", roofMaterialL10nKeys[roofMaterial]);
$("#tag-other-detail-span").show();
}
}
$(".tag-buttons").find("input")
.parent().removeClass("disabled");
} else {
$(".tag-buttons").find("input")
.parent().addClass("disabled");
}
$("#tagged-building-count")
.attr('l10n', 'n-buildings-tagged')
.attr('l10n-params', JSON.stringify({ count: _session.taggedBuildingCount }));
$("#uploaded-building-count")
.attr('l10n', 'n-buildings-uploaded')
.attr('l10n-params', JSON.stringify({ count: _session.uploadedBuildingCount }));
// Only warn on page close while there is unsent work.
if (_session.taggedBuildingCount > 0 || _session.invalidatedBuildingCount > 0) {
window.addEventListener('beforeunload', confirmQuit);
} else {
window.removeEventListener('beforeunload', confirmQuit)
}
}
// Reflect the OSM API authentication state in the header: either the
// login button, or the user menu with name and profile link.
function updateConnectionStatusDisplay() {
    if (!_api.authenticated) {
        $("#authenticate-button").show();
        $("#user-menu").hide();
        $("#username").attr("l10n", "username");
    } else {
        $("#authenticate-button").hide();
        $("#user-menu").show();
        $("#username").removeAttr("l10n").text(_api.username);
        $("#user-profile-link").attr("href", _api.url + "/user/" + _api.username);
    }
}
// On page load: if an OAuth token is already held, fetch the user details
// and open a tagging session straight away.
if (_api.authenticated) {
    $("#connection-status").text("Authenticated, retrieving username...");
    _loadingStatus.addSystem('connection');
    _api.connect(function(error) {
        _loadingStatus.removeSystem('connection');
        if (defined(error)) {
            showMessage("could-not-connect", error.responseText);
        } else {
            console.log("connected as " + _api.username + " (" + _api.userId + ")");
            openSession();
        }
        updateConnectionStatusDisplay();
        updateUi();
    });
}
// Wire the login button: run the OAuth flow, then open a session on success.
document.getElementById('authenticate-button').onclick = function() {
    _loadingStatus.addSystem('authentication');
    _api.authenticate(function(error) {
        _loadingStatus.removeSystem('authentication');
        if (!defined(error)) {
            console.log("connected as " + _api.username + " (" + _api.userId + ")");
            openSession();
        } else {
            showMessage("could-not-authenticate", error.responseText);
        }
        updateConnectionStatusDisplay();
        updateUi();
    });
};
document.getElementById('logout-button').onclick = logout;
// Open a tagging session on the building service. Requires authentication
// and is a no-op when a session is already open.
function openSession() {
    if (!_api.authenticated || _session.open) {
        return;
    }
    _loadingStatus.addSystem('open-session');
    BuildingService.openSession(_api.userId, function(error, sessionId) {
        _loadingStatus.removeSystem('open-session');
        if (!defined(error)) {
            console.log("session " + sessionId + " opened");
            _session.id = sessionId;
            displayNextBuilding();
        } else {
            console.log("could not open session: " + error.responseText);
            showMessage("could-not-open-session", error.responseText);
        }
        updateUi();
    });
}
// Remove the currently displayed building outline from the map, if any.
function destroyBuildingPolygon() {
    if (!defined(_buildingPolygon)) {
        return;
    }
    _map.removeLayer(_buildingPolygon);
    _buildingPolygon = undefined;
}
// Show `building`'s outline on the map (replacing any previous outline)
// and zoom the map to fit it. Passing undefined just clears the outline.
function displayBuildingPolygon(building) {
    destroyBuildingPolygon();
    if (!defined(building)) {
        return;
    }
    var outlineStyle = {
        weight : 2,
        color : '#FFFF00',
        opacity : 0.8,
        fill : false,
        dashArray : "5,5",
        clickable : false
    };
    building.polygon.setStyle(outlineStyle);
    building.polygon.addTo(_map);
    _map.fitBounds(building.polygon.getBounds());
    _buildingPolygon = building.polygon;
}
// Reserve a new building from the building service, download its full
// geometry from the OSM API, and display it. Buildings whose OSM version
// changed since indexing are invalidated and skipped; buildings that
// already carry a roof material are skipped.
function loadAndDisplayNewBuilding() {
    // Nothing can be fetched when the session or changeset is full.
    if (_session.full || _session.changesetIsFull) {
        return;
    }
    _loadingStatus.addSystem('load-building');
    BuildingService.getBuilding(_session.id, function(error, building) {
        if (defined(error)) {
            console.error("Could not get building from building service: " + error.responseText);
            showMessage("could-not-get-building-from-building-service", error.responseText);
            // Bug fix: this error path previously never released the
            // 'load-building' system, leaving the UI stuck in loading state.
            _loadingStatus.removeSystem('load-building');
        } else {
            // Fetch the way/relation with all its member nodes.
            _api.request('/api/0.6/' + building.type + '/' + building.id + '/full', 'GET', function(error, response) {
                if (defined(error)) {
                    console.error("Download error: " + error.responseText);
                    showMessage("download-error", error.responseText);
                    _loadingStatus.removeSystem('load-building');
                } else {
                    var $data = $(response);
                    var version = Number($data.children("osm").children(building.type).attr("version"));
                    if (version !== building.version) {
                        // Building changed on the server since the service
                        // indexed it: mark it outdated and try another one.
                        console.log("Building " + building.type + "/" + building.id + " is at version " + version +
                            ", was expecting version " + building.version + ". Skipping.");
                        _loadingStatus.removeSystem('load-building');
                        invalidateBuilding(building.type, building.id, 'outdated');
                        loadAndDisplayNewBuilding();
                    } else {
                        building.setData($data);
                        if (defined(building.roofMaterial)) {
                            // Already tagged by someone else: nothing to do here.
                            console.log("Building " + building.type + "/" + building.id +
                                " already has its roof material defined. Skipping.");
                            _loadingStatus.removeSystem('load-building');
                            loadAndDisplayNewBuilding();
                        } else {
                            console.log("Displaying building " + building.type + "/" + building.id);
                            _session.addBuilding(building, true);
                            displayBuildingPolygon(building);
                            updateUi();
                            _loadingStatus.removeSystem('load-building');
                        }
                    }
                }
            });
        }
    });
}
// Step back to the previous building in the session, if there is one.
function displayPreviousBuilding() {
    if (_session.currentIndex <= 0) {
        return;
    }
    _session.currentIndex -= 1;
    displayBuildingPolygon(_session.getCurrentBuilding());
    updateUi();
}
// Step forward to the next building already in the session, or fetch a
// fresh one when we are at the end of the list.
function displayNextBuilding() {
    var lastIndex = _session.buildingCount - 1;
    if (_session.currentIndex >= lastIndex) {
        loadAndDisplayNewBuilding();
        return;
    }
    _session.currentIndex += 1;
    displayBuildingPolygon(_session.getCurrentBuilding());
    updateUi();
}
// Navigation buttons step through the buildings of the current session.
document.getElementById('previous-building').onclick = displayPreviousBuilding;
document.getElementById('next-building').onclick = displayNextBuilding;
// Create a new OSM changeset tagged with this editor's metadata, store its
// id in the session, then invoke `callback` on success.
function createChangeset(callback) {
    var changesetData =
        '<osm>' +
        '<changeset>' +
        '<tag k="created_by" v="RoofMapper ' + ROOFMAPPER_VERSION + '"/>' +
        '<tag k="comment" v="Add building roof:material data from imagery"/>' +
        '<tag k="source" v="Bing"/>' +
        '</changeset>' +
        '</osm>';
    _loadingStatus.addSystem('changeset-creation');
    _api.requestWithData('/api/0.6/changeset/create', 'PUT', changesetData, function(error, response) {
        if (!defined(error)) {
            // The API answers with the bare changeset id.
            var id = Number(response);
            console.log("Changeset " + id + " created");
            _session.changesetId = id;
            _loadingStatus.removeSystem('changeset-creation');
            if (callback) {
                callback();
            }
        } else {
            console.error("Changeset creation error: " + error.responseText);
            showMessage("changeset-creation-error", error.responseText);
            _loadingStatus.removeSystem('changeset-creation');
        }
    });
}
// Upload pending roof-material tags to the OSM server, then record them on
// the building service. Retries with a fresh changeset when the current one
// was closed, and drops the conflicting building on a version mismatch.
// Invokes `callback` once nothing (more) needs uploading.
function uploadTags(callback) {
    if (_session.taggedBuildingCount === 0) {
        if (callback) {
            callback();
        }
        return;
    }
    // Lazily create a changeset, then re-enter with it in place.
    if (!defined(_session.changesetId)) {
        createChangeset(function () { uploadTags(callback); });
        return;
    }
    _loadingStatus.addSystem('changes-upload');
    var changeData = _session.toOsmChange();
    var url = '/api/0.6/changeset/' + _session.changesetId + '/upload';
    _api.requestWithData(url, 'POST', changeData, function(error, response) {
        if (defined(error)) {
            console.error("Changes upload error: " + error.responseText);
            var isConflict = error.statusText === "Conflict";
            if (isConflict && error.responseText.match(/was closed/)) {
                // "The changeset #id was closed at #closed_at."
                console.log("The changeset " + _session.changesetId +
                    " has been closed, creating a new one.");
                // Release before retrying; the retry re-acquires the system,
                // keeping add/remove calls balanced.
                _loadingStatus.removeSystem('changes-upload');
                createChangeset(function() { uploadTags(callback); });
            } else if (isConflict && error.responseText.match(/Version mismatch/)) {
                // "Version mismatch: Provided #ver_client, server had: #ver_serv of [Way|Relation] #id"
                removeBuildingInConflict(error.responseText);
                _loadingStatus.removeSystem('changes-upload');
            } else {
                // Bug fix: unrecognized errors previously never released the
                // 'changes-upload' system, freezing the UI in loading state.
                _loadingStatus.removeSystem('changes-upload');
            }
        } else {
            console.log("Changes uploaded to OSM server");
            BuildingService.tagBuildings(_session.id, _session.toTagData(), function(error) {
                _loadingStatus.removeSystem('changes-upload');
                if (defined(error)) {
                    console.error("Could not upload tags to building service: " + error.responseText);
                } else {
                    console.log("Tags uploaded to building service");
                }
                _session.clearTaggedBuildings();
                destroyBuildingPolygon();
                if (callback) {
                    callback();
                }
            });
        }
    });
}
// Upload the pending invalidity reasons to the building service, then clear
// them from the session. Invokes `callback` in every case.
function uploadInvalidationData(callback) {
    var done = function() {
        if (defined(callback)) {
            callback();
        }
    };
    if (_session.invalidatedBuildingCount === 0) {
        done();
        return;
    }
    _loadingStatus.addSystem('invalidation-data-upload');
    BuildingService.invalidateBuildings(_session.id, _session.toInvalidationData(), function(error) {
        _loadingStatus.removeSystem('invalidation-data-upload');
        if (!defined(error)) {
            console.log("Invalidity reasons uploaded to building service");
            showMessage("changes-uploaded-to-osm");
        } else {
            console.error("Could not upload invalidity reasons to building service: " + error.responseText);
        }
        _session.clearInvalidatedBuildings();
        destroyBuildingPolygon();
        done();
    });
}
// Push all pending work (tags first, then invalidations) and fetch the next
// building afterwards.
function uploadChanges() {
    var finish = function() {
        uploadInvalidationData(loadAndDisplayNewBuilding);
    };
    if (_session.taggedBuildingCount > 0) {
        uploadTags(finish);
    } else {
        finish();
    }
}
// Handle an OSM version-mismatch conflict: drop the stale building from the
// session, show another building, and mark the stale one outdated on the
// building service.
function removeBuildingInConflict(errorString) {
    // The error text ends with e.g. "... of Way 1234" or "... of Relation 1234".
    var matches = errorString.match(/(Way|Relation) (\d+)/);
    if (matches == null) {
        return;
    }
    var type = matches[1].toLowerCase();
    var id = Number(matches[2]);
    console.log("Removing building " + type + "/" + id + " from session");
    _session.removeBuilding(type, id);
    // If a building remains at the current index, display it; otherwise
    // fetch a fresh one.
    if (_session.currentIndex >= 0 && _session.currentIndex < _session.buildingCount) {
        var building = _session.getCurrentBuilding();
        displayBuildingPolygon(building);
        updateUi();
    } else {
        loadAndDisplayNewBuilding();
    }
    invalidateBuilding(type, id, 'outdated');
}
// Report a single building as invalid to the building service with the
// given reason, then invoke `callback` if provided.
function invalidateBuilding(buildingType, buildingId, reason, callback) {
    var invalidationData = [
        {
            type : buildingType,
            id : buildingId,
            invalidation_reason : reason
        }
    ];
    BuildingService.invalidate(_session.id, invalidationData, function() {
        console.log("building " + buildingType + "/" + buildingId + " invalidated because of \"" + reason + "\"");
        // Bug fix: the `callback` parameter was accepted but never invoked.
        if (defined(callback)) {
            callback();
        }
    });
}
// Give a reserved building back to the building service and drop it from
// the local session; `callback` runs once the release is confirmed.
function releaseBuilding(buildingType, buildingId, callback) {
    var onReleased = function() {
        _session.removeBuilding(buildingType, buildingId);
        console.log("building " + buildingType + "/" + buildingId + " released");
        if (defined(callback)) {
            callback();
        }
    };
    BuildingService.releaseBuilding(_session.id, buildingType, buildingId, onReleased);
}
// The upload button pushes all pending changes; uploadChanges ignores the
// click event, so it can be wired directly as the handler.
document.getElementById('upload-changes').onclick = uploadChanges;
// Show a localized message in the modal popup. Any arguments after the
// message key are passed to the localizer as substitution parameters.
function showMessage(messageKey) {
    var parameters = Array.prototype.slice.call(arguments, 1);
    var message = _localizer.getText(messageKey, parameters);
    $("#message-popup").find('#message').text(message);
    $("#message-popup").modal('show');
}
// Zoom the map back onto the displayed building outline, if there is one.
function recenterMapOnBuilding() {
    if (!defined(_buildingPolygon)) {
        return;
    }
    _map.fitBounds(_buildingPolygon.getBounds());
}
// Toggle the visibility of the current building outline on the map.
function toggleBuildingOutline() {
    if (!defined(_buildingPolygon)) {
        return;
    }
    if (_map.hasLayer(_buildingPolygon)) {
        _map.removeLayer(_buildingPolygon);
    } else {
        _map.addLayer(_buildingPolygon);
    }
}
// Bind `action` to `key`, but only fire it when every predicate in
// `conditions` holds; otherwise let the key event propagate normally.
function addKeyboardShortcut(key, conditions, action) {
    keyboardJS.bind(key, function(e) {
        var allMet = conditions.every(function(condition) {
            return condition();
        });
        if (!allMet) {
            return true;
        }
        action(e);
        return false;
    });
}
// One-time application setup: Leaflet map and controls, UI event wiring,
// and keyboard shortcuts. Called once at the bottom of this file.
function init() {
    console.log('RoofMapper ' + ROOFMAPPER_VERSION + ', server: "' + OSM_SERVER_URL + '", auth method: ' + OSM_AUTH_METHOD);
    $('#roofmapper-version').text(ROOFMAPPER_VERSION);
    // Leaflet map with Bing imagery, initially centered on France.
    _map = L.map('map');
    var bingKey = '<KEY>';
    L.tileLayer.bing(bingKey).addTo(_map);
    _map.setView([46.935, 2.780], 7);
    // Map helper buttons; both stay disabled until a building is displayed.
    _recenterButton = L.easyButton(
        'fa-crosshairs fa-lg',
        recenterMapOnBuilding,
        '', // title
        'recenter-button' // id
    );
    _recenterButton.addTo(_map);
    _recenterButton.disable();
    _outlineButton = L.easyButton(
        'fa-square-o fa-lg',
        toggleBuildingOutline,
        '', // title
        'outline-button' // id
    );
    _outlineButton.addTo(_map);
    _outlineButton.disable();
    // Set the title here and not in the button constructor because when set by
    // the constructor, the title is only displayable when the button is active.
    // With this alternative way it's always displayable.
    $("#recenter-button").closest(".leaflet-control").attr("l10n-attr-title", "recenter-on-building");
    $("#outline-button").closest(".leaflet-control").attr("l10n-attr-title", "toggle-building-outline");
    _loadingStatus.addListener(updateUi);
    // Roof material radio buttons; "invalid" and "other" open detail popups
    // instead of setting a value directly.
    $('input[name=tag-selection]').change(function() {
        if (defined(_session.currentBuilding)) {
            if (this.value === 'invalid') {
                $("#invalidity-popup").modal('show');
            } else if (this.value === 'other') {
                $("#roof-material-popup").modal('show');
            } else if (this.checked) {
                var value = this.value === 'undefined' ? undefined : this.value;
                _session.setBuildingRoofMaterial(_session.currentBuilding, value);
                $("#roof-material-popup").modal('hide');
            }
            updateUi();
        }
    });
    $('input[type=radio][name=invalidity-selection]').change(function() {
        if (defined(_session.currentBuilding)) {
            _session.setBuildingInvalidityReason(_session.currentBuilding, this.value);
            updateUi();
        }
        $("#invalidity-popup").modal('hide');
    });
    // Predicates shared by the keyboard shortcuts below.
    var sessionOpened = function() { return _session.open; };
    var isNotLoading = function() { return !_loadingStatus.isLoading; };
    var buildingDisplayed = function() { return defined(_session.currentBuilding); };
    var isNotAtFirstBuilding = function() { return _session.currentIndex > 0; };
    var nextBuildingIsAvailable = function() { return !(_session.currentIndex == _session.buildingCount - 1 && (_session.full || _session.changesetIsFull)); };
    var roofMaterialPopupIsShown = function() { return $('#roof-material-popup').hasClass('in') };
    var roofMaterialPopupIsHidden = function() { return !$('#roof-material-popup').hasClass('in') };
    var invalidityPopupIsShown = function() { return $('#invalidity-popup').hasClass('in') };
    var invalidityPopupIsHidden = function() { return !$('#invalidity-popup').hasClass('in') };
    addKeyboardShortcut('backspace', [ isNotLoading, isNotAtFirstBuilding ], displayPreviousBuilding);
    addKeyboardShortcut('space', [ isNotLoading, sessionOpened, nextBuildingIsAvailable ], displayNextBuilding);
    addKeyboardShortcut('c', [ buildingDisplayed ], recenterMapOnBuilding);
    addKeyboardShortcut('b', [ buildingDisplayed ], toggleBuildingOutline);
    // Numpad keys select the common roof materials directly.
    var addRoofMaterialKeyboardShortcut = function(key, material) {
        addKeyboardShortcut(
            key,
            [ isNotLoading, buildingDisplayed, roofMaterialPopupIsHidden, invalidityPopupIsHidden ],
            function() { $("#tag-" + material).prop("checked", true).trigger('change'); });
    };
    addRoofMaterialKeyboardShortcut('numzero', 'undefined');
    addRoofMaterialKeyboardShortcut('numone', 'roof_tiles');
    addRoofMaterialKeyboardShortcut('numtwo', 'slate');
    addRoofMaterialKeyboardShortcut('numthree', 'metal');
    addRoofMaterialKeyboardShortcut('numfour', 'copper');
    addRoofMaterialKeyboardShortcut('numfive', 'concrete');
    addRoofMaterialKeyboardShortcut('numsix', 'eternit');
    addRoofMaterialKeyboardShortcut('numseven', 'gravel');
    // numeight / numnine open the "other material" and "invalid" popups.
    addKeyboardShortcut(
        'numeight',
        [ isNotLoading, buildingDisplayed, roofMaterialPopupIsHidden, invalidityPopupIsHidden ],
        function() { $("#roof-material-popup").modal('show'); }
    );
    addKeyboardShortcut(
        'numnine',
        [ isNotLoading, buildingDisplayed, roofMaterialPopupIsHidden, invalidityPopupIsHidden ],
        function() { $("#invalidity-popup").modal('show'); }
    );
    addKeyboardShortcut(
        'numzero',
        [ isNotLoading, buildingDisplayed, roofMaterialPopupIsShown ],
        function() { $("#roof-material-popup").modal('hide') });
    // While the "other material" popup is open, numpad keys pick the material.
    var addAdditionalRoofMaterialKeyboardShortcut = function(key, invalidityReason) {
        addKeyboardShortcut(
            key,
            [ isNotLoading, buildingDisplayed, roofMaterialPopupIsShown ],
            function() { $("#tag-" + invalidityReason).prop("checked", true).trigger('change'); });
    };
    addAdditionalRoofMaterialKeyboardShortcut('numone', 'glass');
    addAdditionalRoofMaterialKeyboardShortcut('numtwo', 'grass');
    addAdditionalRoofMaterialKeyboardShortcut('numthree', 'plants');
    addAdditionalRoofMaterialKeyboardShortcut('numfour', 'stone');
    addAdditionalRoofMaterialKeyboardShortcut('numfive', 'tar_paper');
    addAdditionalRoofMaterialKeyboardShortcut('numsix', 'thatch');
    addKeyboardShortcut(
        'numzero',
        [ isNotLoading, buildingDisplayed, invalidityPopupIsShown ],
        function() { $("#invalidity-popup").modal('hide') });
    // While the invalidity popup is open, numpad keys pick the reason.
    var addInvalidityKeyboardShortcut = function(key, invalidityReason) {
        addKeyboardShortcut(
            key,
            [ isNotLoading, buildingDisplayed, invalidityPopupIsShown ],
            function() { $("#invalidity-" + invalidityReason).prop("checked", true).trigger('change'); });
    };
    addInvalidityKeyboardShortcut('numone', 'multiple_materials');
    addInvalidityKeyboardShortcut('numtwo', 'multiple_buildings');
    addInvalidityKeyboardShortcut('numthree', 'building_fraction');
    addInvalidityKeyboardShortcut('numfour', 'not_a_building');
    updateConnectionStatusDisplay();
    updateUi();
    // avoid a situation where the map is partially loaded
    setTimeout(function() { _map.invalidateSize() }, 1);
    $("#fullscreen-button").click(function() {
        $(document).toggleFullScreen();
    });
    // Language switchers.
    $('#language-en-button').click(function() {
        _localizer.language = 'en';
    })
    $('#language-fr-button').click(function() {
        _localizer.language = 'fr';
    })
    // Unauthenticated visitors see the help popup straight away.
    if (!_api.authenticated) {
        $("#help-popup").modal('show');
    }
    var toggleHelp = function() {
        $("#help-popup").modal('toggle');
    };
    addKeyboardShortcut('h', [], toggleHelp);
    addKeyboardShortcut('questionmark', [], toggleHelp);
    // hack for French keyboards, as KeyboardJS only support US layout
    addKeyboardShortcut('shift + comma', [], toggleHelp);
}
init(); |
/*
* Copyright 2014-2014 <NAME>
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.chenlichao.wmi4j;
import org.jinterop.dcom.core.JIVariant;
import org.jinterop.dcom.impls.automation.IJIDispatch;
import cn.chenlichao.wmi4j.consts.WbemAuthenticationLevelEnum;
import cn.chenlichao.wmi4j.consts.WbemImpersonationLevelEnum;
/**
* The SWbemSecurity object gets or sets security settings, such as privileges, COM impersonations, and authentication levels
* assigned to an object. The {@link SWbemLocator}, {@link SWbemServices}, {@link SWbemObject},
* {@link SWbemObjectSet}, {@link SWbemObjectPath}, {@link SWbemLastError},
* and {@link SWbemEventSource} objects have a security property, which is the SWbemSecurity object.
* When you retrieve an instance or view the WMI security log, you might need to set the properties of the SWbemSecurity object.
*
* Created by chenlichao on 14-7-23.
*/
public class SWbemSecurity extends AbstractScriptingObject {

    SWbemSecurity(IJIDispatch dispatch) {
        super(dispatch);
    }

    /**
     * Returns the COM authentication level assigned to this object. This
     * setting determines how information sent from WMI is protected.
     *
     * @return the authentication level currently in effect
     * @throws WMIException if reading the underlying property fails
     */
    public WbemAuthenticationLevelEnum getAuthenticationLevel() throws WMIException {
        Integer rawLevel = (Integer) getProperty(Integer.class, "AuthenticationLevel");
        return WbemAuthenticationLevelEnum.parse(rawLevel);
    }

    /**
     * Assigns a COM authentication level to this object.
     *
     * @param level the authentication level to apply
     * @throws WMIException if writing the underlying property fails
     */
    public void setAuthenticationLevel(WbemAuthenticationLevelEnum level) throws WMIException {
        putProperty("AuthenticationLevel", new JIVariant(level.getValue()));
    }

    /**
     * Returns the COM impersonation level assigned to this object. This
     * setting determines whether processes owned by WMI can detect or use
     * your security credentials when calling into other processes.
     *
     * @return the impersonation level currently in effect
     * @throws WMIException if reading the underlying property fails
     */
    public WbemImpersonationLevelEnum getImpersonationLevel() throws WMIException {
        Integer rawLevel = (Integer) getProperty(Integer.class, "ImpersonationLevel");
        return WbemImpersonationLevelEnum.parse(rawLevel);
    }

    /**
     * Assigns a COM impersonation level to this object.
     *
     * @param level the impersonation level to apply
     * @throws WMIException if writing the underlying property fails
     */
    public void setImpersonationLevel(WbemImpersonationLevelEnum level) throws WMIException {
        putProperty("ImpersonationLevel", new JIVariant(level.getValue()));
    }

    /**
     * Returns the set of Windows privileges attached to this security object.
     * Individual privileges can be granted or revoked through the returned
     * {@link SWbemPrivilegeSet}; see
     * {@link cn.chenlichao.wmi4j.consts.WbemPrivilegeEnum} for the applicable
     * values.
     *
     * @return the {@link SWbemPrivilegeSet} backing the Privileges property
     * @throws WMIException if reading the underlying property fails
     */
    public SWbemPrivilegeSet getPrivileges() throws WMIException {
        SWbemPrivilegeSet privileges = getProperty(SWbemPrivilegeSet.class, "Privileges");
        return privileges;
    }
}
|
use std::collections::HashSet;
use std::fs;
use std::path::Path;
/// Recursively collect the set of distinct, lower-cased file extensions
/// found under `directory_path`.
///
/// I/O errors (unreadable directories or entries) and non-UTF-8 extensions
/// are silently skipped, so the result is best-effort rather than exhaustive.
fn find_unique_file_extensions(directory_path: &str) -> HashSet<String> {
    let mut extensions = HashSet::new();
    if let Ok(entries) = fs::read_dir(directory_path) {
        // `flatten()` skips entries whose read failed instead of aborting.
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                // Recurse into subdirectories and merge their extensions.
                extensions.extend(find_unique_file_extensions(&path.to_string_lossy()));
            } else if path.is_file() {
                // `extension()` is None for dotless files; `to_str()` drops
                // extensions that are not valid UTF-8.
                if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
                    extensions.insert(ext.to_lowercase());
                }
            }
        }
    }
    extensions
}
fn main() {
let directory_path = "path_to_directory"; // Replace with the actual directory path
let unique_extensions = find_unique_file_extensions(directory_path);
println!("Unique file extensions: {:?}", unique_extensions);
} |
#!/bin/bash
# Container entrypoint: applies environment-driven configuration to nginx,
# PHP-FPM and the Laravel app, then starts supervisord as the foreground
# process.
# UPDATE THE WEBROOT IF REQUIRED.
if [[ ! -z "${WEBROOT}" ]] && [[ ! -z "${WEBROOT_PUBLIC}" ]]; then
    sed -i "s#root /var/www/public;#root ${WEBROOT_PUBLIC};#g" /etc/nginx/sites-available/default.conf
else
    # Fall back to the image defaults when the variables are unset.
    export WEBROOT=/var/www
    export WEBROOT_PUBLIC=/var/www/public
fi
# UPDATE COMPOSER PACKAGES ON BUILD.
## 💡 THIS MAY MAKE THE BUILD SLOWER BECAUSE IT HAS TO FETCH PACKAGES.
if [[ "${COMPOSER_UPDATE_ON_BUILD}" == "1" ]]; then
    composer update && composer dump-autoload -o
fi
# RUN LARAVEL MIGRATIONS ON BUILD.
if [[ "${RUN_LARAVEL_MIGRATIONS_ON_BUILD}" == "1" ]]; then
    cd ${WEBROOT}
    php artisan migrate
fi
# LARAVEL SCHEDULER
# Install a crontab entry that fires the scheduler every minute, then start crond.
if [[ "${RUN_LARAVEL_SCHEDULER}" == "1" ]]; then
    echo '* * * * * cd /var/www && php artisan schedule:run >> /dev/null 2>&1' > /etc/crontabs/root
    crond
fi
# SYMLINK CONFIGURATION FILES.
ln -s /etc/php7/php.ini /etc/php7/conf.d/php.ini
ln -s /etc/nginx/sites-available/default.conf /etc/nginx/sites-enabled/default.conf
# PRODUCTION LEVEL CONFIGURATION.
# Production: quieter logs, hide PHP errors, strip inherited env from FPM pools.
if [[ "${PRODUCTION}" == "1" ]]; then
    sed -i -e "s/;log_level = notice/log_level = warning/g" /etc/php7/php-fpm.conf
    sed -i -e "s/clear_env = no/clear_env = yes/g" /etc/php7/php-fpm.d/www.conf
    sed -i -e "s/display_errors = On/display_errors = Off/g" /etc/php7/php.ini
else
    sed -i -e "s/;log_level = notice/log_level = notice/g" /etc/php7/php-fpm.conf
    sed -i -e "s/;daemonize\s*=\s*yes/daemonize = no/g" /etc/php7/php-fpm.conf
fi
# PHP & SERVER CONFIGURATIONS.
# Each *_M variable overrides the corresponding php.ini default (in megabytes).
if [[ ! -z "${PHP_MEMORY_LIMIT}" ]]; then
    sed -i "s/memory_limit = 128M/memory_limit = ${PHP_MEMORY_LIMIT}M/g" /etc/php7/conf.d/php.ini
fi
if [ ! -z "${PHP_POST_MAX_SIZE}" ]; then
    sed -i "s/post_max_size = 50M/post_max_size = ${PHP_POST_MAX_SIZE}M/g" /etc/php7/conf.d/php.ini
fi
if [ ! -z "${PHP_UPLOAD_MAX_FILESIZE}" ]; then
    sed -i "s/upload_max_filesize = 10M/upload_max_filesize = ${PHP_UPLOAD_MAX_FILESIZE}M/g" /etc/php7/conf.d/php.ini
fi
# Normalize '#' comments to ';' comments in all php ini fragments.
find /etc/php7/conf.d/ -name "*.ini" -exec sed -i -re 's/^(\s*)#(.*)/\1;\2/g' {} \;
# START SUPERVISOR.
exec /usr/bin/supervisord -n -c /etc/supervisord.conf
<reponame>Fabidione/FabienneDione_6_20072021
const mongoose = require('mongoose');
const uniqueValidator = require('mongoose-unique-validator');
const sanitizerPlugin = require('mongoose-sanitizer-plugin');
const validator = require('validator');
// User account schema: a unique, format-validated email address plus a password.
const userSchema = mongoose.Schema({
    email: { type: String,
        required: [ true, "Please enter your email address"],
        unique: true ,
        // validator.isEmail checks the address format; the second array element
        // is the error payload reported when validation fails.
        validate: [validator.isEmail, { error: 'Adresse mail non valide' }]
    },
    password: { type: String,
        required: [true, "Please choose a password"]
    }
});
// Surface duplicate values on `unique` fields as mongoose validation errors
// instead of raw MongoDB duplicate-key errors.
userSchema.plugin(uniqueValidator);
// Sanitizes field values before save — see mongoose-sanitizer-plugin docs.
userSchema.plugin(sanitizerPlugin);
module.exports = mongoose.model('User', userSchema);
package cn.stylefeng.roses.kernel.scanner.api.util;
import cn.hutool.core.util.IdUtil;
import cn.hutool.core.util.StrUtil;
import cn.stylefeng.roses.kernel.scanner.api.context.MetadataContext;
import cn.stylefeng.roses.kernel.scanner.api.enums.FieldMetadataTypeEnum;
import cn.stylefeng.roses.kernel.scanner.api.enums.FieldTypeEnum;
import cn.stylefeng.roses.kernel.scanner.api.enums.ParamTypeEnum;
import cn.stylefeng.roses.kernel.scanner.api.pojo.resource.FieldMetadata;
import java.lang.reflect.ParameterizedType;
/**
* 类的元数据描述
*
* @author fengshuonan
* @date 2022/1/14 10:59
*/
public class ClassDescriptionUtil {

    /**
     * Create the basic metadata description for a class
     *
     * @author fengshuonan
     * @date 2022/1/13 18:06
     */
    public static FieldMetadata createClassMetadata(Class<?> clazz, FieldTypeEnum fieldTypeEnum, String uuid) {
        FieldMetadata fieldMetadataItem = new FieldMetadata();
        // Assign a unique metadata id
        fieldMetadataItem.setMetadataId(IdUtil.fastSimpleUUID());
        // Set the human-readable (Chinese) name of the field
        fieldMetadataItem.setChineseName(clazz.getSimpleName());
        // Set the class type of the field
        fieldMetadataItem.setFieldClassType(clazz.getSimpleName());
        // Set the fully qualified class path
        fieldMetadataItem.setFieldClassPath(clazz.getName());
        // Look up the parameter name registered under this uuid
        String paramName = MetadataContext.getParamName(uuid);
        if (StrUtil.isNotBlank(paramName)) {
            fieldMetadataItem.setFieldName(paramName);
        }
        // Mark whether the field carries generic type information
        fieldMetadataItem.setGenericFieldMetadataType(FieldMetadataTypeEnum.FIELD.getCode());
        // Set the field kind: primitive, array, or object
        fieldMetadataItem.setFieldType(fieldTypeEnum.getCode());
        // Set the request parameter type recorded in the current context
        ParamTypeEnum paramTypeMetadata = MetadataContext.getParamTypeMetadata(uuid);
        if (paramTypeMetadata != null) {
            fieldMetadataItem.setRequestParamType(paramTypeMetadata.getCode());
        }
        // Return the populated field metadata
        return fieldMetadataItem;
    }

    /**
     * Create the basic metadata description for a parameterized (generic)
     * type by delegating to createClassMetadata with its raw type
     *
     * @author fengshuonan
     * @date 2022/1/13 18:06
     */
    public static FieldMetadata createParameterizedMetadata(ParameterizedType parameterizedType, FieldTypeEnum fieldTypeEnum, String uuid) {
        Class<?> rawType = (Class<?>) parameterizedType.getRawType();
        return createClassMetadata(rawType, fieldTypeEnum, uuid);
    }
}
|
#!/bin/bash
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generates an Eclipse project in heron
set -e
DIR=`dirname $0`
# Eclipse needs the generated jars; refuse to run before a bazel build.
if [ ! -d $DIR/../bazel-genfiles ]; then
  # Fixed typos in the user-facing messages ("exists" -> "exist",
  # "buid" -> "build").
  echo "Error: Directory $DIR/../bazel-genfiles does not exist."
  echo "Please build heron first."
  exit 1
fi
# generate .project file
# The project descriptor enables the PyDev, JDT and m2e builders/natures.
readonly project_file=$DIR/../.project
rm -rf $project_file
cat >> $project_file <<EOH
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>heron</name>
  <projects/>
  <buildSpec>
    <buildCommand>
      <name>org.python.pydev.PyDevBuilder</name>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.jdt.core.javabuilder</name>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.m2e.core.maven2Builder</name>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.python.pydev.pythonNature</nature>
    <nature>org.eclipse.jdt.core.javanature</nature>
    <nature>org.eclipse.m2e.core.maven2Nature</nature>
  </natures>
</projectDescription>
EOH
# generate .pydevproject file
readonly pydevproject_file=$DIR/../.pydevproject
rm -rf $pydevproject_file
cat >> $pydevproject_file <<EOH
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?><pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
EOH
# Append one <path> entry per python source directory listed in $py_dir_list.
function generate_py_source_dirs() {
    for pysrcdir in $py_dir_list; do
        cat >> $pydevproject_file << EOH
<path>/\${PROJECT_DIR_NAME}/$pysrcdir</path>
EOH
    done
}
# Collect src/python and tests/python directories (paths made repo-relative
# by the cut).
py_dir_list=`find $DIR/../heron -path "*/src/python" | cut -d '/' -f 4-`
generate_py_source_dirs
py_dir_list=`find $DIR/../heron -path "*/tests/python" | cut -d '/' -f 4-`
generate_py_source_dirs
cat >> $pydevproject_file << 'EOF'
</pydev_pathproperty>
</pydev_project>
EOF
# generate .classpath file
readonly classpath_file=$DIR/../.classpath
rm -rf $classpath_file
cat >> $classpath_file <<EOH
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
EOH
# Append one source-folder entry per java directory listed in $dir_list.
function generate_source_dirs() {
    for srcdir in $dir_list; do
        cat >> $classpath_file << EOH
  <classpathentry kind="src" output="bin/$srcdir" path="$srcdir">
    <attributes>
      <attribute name="optional" value="true"/>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
EOH
    done
}
# Java sources from heron, the integration tests, and the unit tests.
dir_list=`find $DIR/../heron -path "*/src/java" | cut -d '/' -f 4-`
generate_source_dirs
dir_list=`find $DIR/../integration_test -path "*/src/java" | cut -d '/' -f 4-`
generate_source_dirs
dir_list=`find $DIR/../heron -path "*/tests/java" | cut -d '/' -f 4-`
generate_source_dirs
#dir_list=`find $DIR/../heron -path "*/src/python" | cut -d '/' -f 4-`
#generate_source_dirs
#dir_list=`find $DIR/../heron -path "*/tests/python" | cut -d '/' -f 4-`
#generate_source_dirs
# Every jar produced by the bazel build becomes a library entry.
for jarfile in `find $DIR/../bazel-genfiles/ -name \*.jar | cut -d '/' -f 4-`; do
    cat >> $classpath_file << EOH
  <classpathentry kind="lib" path="$jarfile"/>
EOH
done
# Fixed trailer: JRE (with access to the com.sun APIs heron uses), maven
# container, JUnit 4 container and the output folder.
cat >> $classpath_file << 'EOF'
  <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
    <accessrules>
      <accessrule kind="accessible" pattern="com/sun/net/httpserver/**"/>
      <accessrule kind="accessible" pattern="com/sun/management/**"/>
    </accessrules>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
  <classpathentry kind="output" path="target/classes"/>
</classpath>
EOF
package io.swagger.api;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.time.OffsetDateTime;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import io.swagger.Auxiliary;
import io.swagger.Blockchain;
import io.swagger.InvalidTicketException;
import io.swagger.annotations.ApiParam;
import io.swagger.configuration.Properties;
import io.swagger.model.Ticket;
import io.swagger.model.ValidateTicketResponse;
import io.swagger.model.ApiRequest;
@javax.annotation.Generated(value = "io.swagger.codegen.languages.SpringCodegen", date = "2019-10-14T21:38:57.474Z")
@Controller
public class ValidateTicketApiController implements ValidateTicketApi {

    private static final Logger log = LoggerFactory.getLogger(ValidateTicketApiController.class);

    // Jackson mapper shared with the Spring context; configured once in the constructor.
    private final ObjectMapper objectMapper;

    // Current HTTP request; used for logging the URI and for Accept-header negotiation.
    private final HttpServletRequest request;

    // Blockchain backing store holding the per-ticket block history.
    @Autowired
    private Blockchain chain;

    @org.springframework.beans.factory.annotation.Autowired
    public ValidateTicketApiController(ObjectMapper objectMapper, HttpServletRequest request) {
        this.objectMapper = objectMapper;
        // Keep timestamps in the zone they were serialized with rather than the context zone.
        objectMapper.configure(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE, false);
        this.request = request;
    }

    /**
     * Validates a ticket against the blockchain.
     *
     * Recomputes the ticket hash from its fields plus the caller-supplied secret and,
     * on a match, inspects the latest block recorded for that ticket id:
     * "expired"  -> 200 with message "Ticket is expired";
     * "valid"    -> 200 "Valid" while inside Properties.TTL_TICKET, otherwise an
     *               "expired" block is appended and 200 "Expired" is returned;
     * "inactive" -> a "valid" block is appended and 201 "Valid" is returned;
     * anything else, or a hash mismatch, yields 403 FORBIDDEN.
     * Requests whose Accept header does not include application/json get 501;
     * missing fields (NPE from the getters) yield 400; any other failure 500.
     *
     * @param body request wrapper carrying the ticket to validate and the shared secret
     * @return response entity with an optional human-readable message and the status above
     */
    public ResponseEntity<ValidateTicketResponse> validateTicket(
            @ApiParam(value = "Information needed to create a ticket", required = true) @Valid @RequestBody ApiRequest body) {
        log.info("URL: " + request.getRequestURI());
        log.info(body.toString());
        String accept = request.getHeader("Accept");
        if (accept != null && accept.contains("application/json")) {
            log.info("Header conditions accepted.");
            Ticket ticket = body.getTicket();
            log.info(ticket.toString());
            try {
                // (Redundant objectMapper reconfigure removed here; it is done once in the
                // constructor and repeating it per request had no effect.)
                log.info("Trying to generate hash.");
                // Recompute the hash over the ticket fields plus the caller's secret; a
                // mismatch means the payload was tampered with or the secret is wrong.
                String hash = Auxiliary.computeHash(ticket.getTicketId(), ticket.getTimestamp().toString(), ticket.getDetails(),
                        ticket.getStatus(), ticket.getPreviousHash(), body.getSecret());
                log.info("Checking to see if the hash exists.");
                if (hash.equals(ticket.getHash())) {
                    log.info("Ticket is real.");
                    int ticketId = ticket.getTicketId();
                    // The most recent block for this ticket id determines its current status.
                    Ticket block = chain.getBlockReverse(ticketId);
                    ValidateTicketResponse response = new ValidateTicketResponse();
                    if (block.getStatus().equalsIgnoreCase("expired")) {
                        log.error("Ticket has expired");
                        response = response.message("Ticket is expired");
                        return new ResponseEntity<ValidateTicketResponse>(response, HttpStatus.OK);
                    }
                    else if (block.getStatus().equalsIgnoreCase("valid")) {
                        // Already validated once: check whether its TTL window has elapsed.
                        log.info("Time difference ticket > " + Auxiliary.difOffsetDateTime(block.getTimestamp(), org.threeten.bp.OffsetDateTime.now()));
                        if (Auxiliary.difOffsetDateTime(block.getTimestamp(), org.threeten.bp.OffsetDateTime.now()) > Properties.TTL_TICKET) {
                            log.error("Ticket has expired");
                            // Record the expiry as a new block appended to the chain.
                            Ticket expiredTicket = new Ticket(ticket.getDetails(), "expired", chain.getLatestBlockHash(), body.getSecret());
                            chain.addBlock(expiredTicket.ticketId(ticket.getTicketId()));
                            response = response.message("Expired");
                            return new ResponseEntity<ValidateTicketResponse>(response, HttpStatus.OK);
                        }
                        // Still inside the TTL window: the ticket remains valid.
                        else {
                            log.info("Ticket has already been validated.");
                            response = response.message("Valid");
                            return new ResponseEntity<ValidateTicketResponse>(response, HttpStatus.OK);
                        }
                    }
                    else if (block.getStatus().equalsIgnoreCase("inactive")) {
                        // First validation: append a "valid" block and report 201 Created.
                        log.info("Ticket has been validated.");
                        Ticket validatedTicket = new Ticket(ticket.getDetails(), "valid", chain.getLatestBlockHash(), body.getSecret());
                        chain.addBlock(validatedTicket.ticketId(ticket.getTicketId()));
                        response = response.message("Valid");
                        return new ResponseEntity<ValidateTicketResponse>(response, HttpStatus.CREATED);
                    }
                    else {
                        // Unrecognized status value stored on the chain.
                        throw new InvalidTicketException();
                    }
                }
                else {
                    // Hash mismatch: the ticket is forged or corrupted.
                    throw new InvalidTicketException();
                }
            } catch (InvalidTicketException e) {
                // FIX: pass the Throwable itself so SLF4J records it with a stack trace.
                // The previous calls passed e.toString() as a format argument with no {}
                // placeholder in the message, so the detail was silently dropped.
                log.error("Ticket is invalid", e);
                return new ResponseEntity<>(HttpStatus.FORBIDDEN);
            } catch (NullPointerException e) {
                // Missing fields in the request surface as NPEs from the getters above.
                log.error("Bad request", e);
                return new ResponseEntity<>(HttpStatus.BAD_REQUEST);
            } catch (Exception e) {
                log.error("Error occurred", e);
                return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
            }
        }
        // Client did not declare acceptance of application/json.
        return new ResponseEntity<>(HttpStatus.NOT_IMPLEMENTED);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.