text stringlengths 1 1.05M |
|---|
import React from 'react';
import Header from './Header';
import SubHeader from "./SubHeader";
import Menu from './Menu';
const App = () => (
<div className="app">
<Header></Header>
<SubHeader></SubHeader>
<Menu></Menu>
</div>
)
export default App; |
#!/bin/bash
# Suppress on-screen notification pop-ups: every time a message appears on the
# session bus for org.freedesktop.Notifications, kill notify-osd.
# NOTE(review): xargs is used purely as a per-line trigger here — the '{}'
# placeholder is never referenced by pkill.
dbus-monitor "interface='org.freedesktop.Notifications'" | xargs -I '{}' pkill notify-osd
|
package drivers
import (
"fmt"
"github.com/Xhofe/alist/conf"
"github.com/Xhofe/alist/model"
"github.com/Xhofe/alist/utils"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"path/filepath"
)
type GoogleDrive struct{}
// Config returns the static driver metadata for Google Drive.
// Downloads are always proxied through alist (OnlyProxy is forced on).
func (driver GoogleDrive) Config() DriverConfig {
	cfg := DriverConfig{
		Name:      "GoogleDrive",
		OnlyProxy: true,
	}
	return cfg
}
// Items declares the account fields the admin UI asks for when configuring
// a Google Drive account: an OAuth client, a refresh token, and an optional
// root folder id (defaults to "root" in Save).
func (driver GoogleDrive) Items() []Item {
	items := []Item{}
	items = append(items,
		Item{Name: "client_id", Label: "client id", Type: "string", Required: true},
		Item{Name: "client_secret", Label: "client secret", Type: "string", Required: true},
		Item{Name: "refresh_token", Label: "refresh token", Type: "string", Required: true},
		Item{Name: "root_folder", Label: "root folder file_id", Type: "string", Required: false},
	)
	return items
}
// Save validates and persists an account: it forces proxying, refreshes the
// OAuth token (recording the failure message in the account status on error),
// defaults the root folder to "root", and marks the account as working.
func (driver GoogleDrive) Save(account *model.Account, old *model.Account) error {
	account.Proxy = true
	if err := driver.RefreshToken(account); err != nil {
		account.Status = err.Error()
		_ = model.SaveAccount(account)
		return err
	}
	if account.RootFolder == "" {
		account.RootFolder = "root"
	}
	account.Status = "work"
	_ = model.SaveAccount(account)
	return nil
}
// File resolves a path to a single entry. The root path "/" maps to a
// synthetic folder backed by the account's configured root folder id;
// any other path is located by listing its parent directory.
func (driver GoogleDrive) File(path string, account *model.Account) (*model.File, error) {
	path = utils.ParsePath(path)
	if path == "/" {
		root := model.File{
			Id:        account.RootFolder,
			Name:      account.Name,
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    driver.Config().Name,
			UpdatedAt: account.UpdatedAt,
		}
		return &root, nil
	}
	dir, name := filepath.Split(path)
	files, err := driver.Files(dir, account)
	if err != nil {
		return nil, err
	}
	for i := range files {
		if files[i].Name == name {
			return &files[i], nil
		}
	}
	return nil, PathNotFound
}
// Files lists the children of a directory path, consulting the global cache
// (keyed by "<account name><path>") before hitting the remote API.
func (driver GoogleDrive) Files(path string, account *model.Account) ([]model.File, error) {
	path = utils.ParsePath(path)
	var rawFiles []GoogleFile
	// Cache hit: reuse the raw Google listing stored for this account+path.
	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
	if err == nil {
		rawFiles, _ = cache.([]GoogleFile)
	} else {
		// Cache miss: resolve the folder itself first, then fetch its children.
		file, err := driver.File(path, account)
		if err != nil {
			return nil, err
		}
		rawFiles, err = driver.GetFiles(file.Id, account)
		if err != nil {
			return nil, err
		}
		// Only non-empty listings are cached, so empty folders are re-queried
		// on every call.
		if len(rawFiles) > 0 {
			_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
		}
	}
	// Convert the raw Google entries into alist's generic file model.
	files := make([]model.File, 0)
	for _, file := range rawFiles {
		files = append(files, *driver.FormatFile(&file))
	}
	return files, nil
}
// Link builds a direct download URL for a file and probes it with the current
// access token so an expired token is detected (and refreshed once) here
// rather than surfacing later as a proxy failure.
//
// Fix: the refreshed access token is now persisted on *successful* refresh as
// well; previously SaveAccount was only called when the refresh failed, so a
// successfully refreshed token was lost on restart.
func (driver GoogleDrive) Link(path string, account *model.Account) (string, error) {
	file, err := driver.File(path, account)
	if err != nil {
		return "", err
	}
	if file.Type == conf.FOLDER {
		return "", NotFile
	}
	link := fmt.Sprintf("https://www.googleapis.com/drive/v3/files/%s?includeItemsFromAllDrives=true&supportsAllDrives=true", file.Id)
	var e GoogleError
	_, _ = googleClient.R().SetError(&e).
		SetHeader("Authorization", "Bearer "+account.AccessToken).
		Get(link)
	if e.Error.Code != 0 {
		if e.Error.Code == 401 {
			err = driver.RefreshToken(account)
			if err != nil {
				_ = model.SaveAccount(account)
				return "", err
			}
			// Persist the refreshed token before retrying.
			_ = model.SaveAccount(account)
			return driver.Link(path, account)
		}
		return "", fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
	}
	// alt=media turns the metadata URL into a content download.
	return link + "&alt=media", nil
}
// Path returns either the single file at path (file, nil, nil) or, for a
// folder, its children (nil, files, nil).
func (driver GoogleDrive) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	path = utils.ParsePath(path)
	log.Debugf("google path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type == conf.FOLDER {
		files, err := driver.Files(path, account)
		if err != nil {
			return nil, nil, err
		}
		return nil, files, nil
	}
	// Direct links are produced lazily by Link, not attached here.
	return file, nil, nil
}
// Proxy injects the account's bearer token into the proxied download request;
// Google Drive content requires authentication even when alist relays it.
func (driver GoogleDrive) Proxy(c *gin.Context, account *model.Account) {
	c.Request.Header.Add("Authorization", "Bearer "+account.AccessToken)
}
// Preview is not supported by this driver.
func (driver GoogleDrive) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}
var _ Driver = (*GoogleDrive)(nil) |
export ZTTY_FEATURES="MakeWithUUID:${ZTTY_FEATURES}"
# Require: https://github.com/syumai/uuidgenseeded
# Create, next to each given file, an empty file named "<uuid-prefix>-<name>",
# where the prefix is derived deterministically from the basename via
# uuidgenseeded. Fix: quote "$@" and "$b" so paths containing spaces are not
# word-split (mkuuiddir below already quoted "$b"; this makes them consistent).
function mkuuidfile() {
  if [[ $# -eq 0 ]] || [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]
  then
    echo "usage: $0 [files...]"
    return 1
  fi
  for p in "$@"
  do
    d="$(dirname "$p")"
    b="$(basename "$p")"
    u="$(uuidgenseeded -lower "$b" | cut -d'-' -f1 )"
    touch "${d}/${u}-${b}"
  done
}
########################################################
# for move
# Rename each given file/dir to "<uuid-prefix>-<name>" in place (mv), with the
# prefix derived deterministically from the basename. Fix: quote "$@" and "$b"
# so paths containing spaces are not word-split.
function convToUuid() {
  if [[ $# -eq 0 ]] || [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]
  then
    echo "usage: $0 [files|dir ...]"
    return 1
  fi
  for p in "$@"
  do
    d="$(dirname "$p")"
    b="$(basename "$p")"
    u="$(uuidgenseeded -lower "$b" | cut -d'-' -f1 )"
    mv "$p" "${d}/${u}-${b}"
  done
}
# Create, next to each given directory, a new directory named
# "<uuid-prefix>-<name>" with a deterministic prefix from the basename.
# Fix: quote "$@" so directory names containing spaces survive word splitting.
function mkuuiddir() {
  if [[ $# -eq 0 ]] || [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]
  then
    echo "usage: $0 [dir...]"
    return 1
  fi
  for p in "$@"
  do
    d="$(dirname "$p")"
    b="$(basename "$p")"
    u="$(uuidgenseeded -lower "$b" | cut -d'-' -f1 )"
    mkdir "${d}/${u}-${b}"
  done
}
|
<gh_stars>0
package io.miti.shortstop.model;
/**
 * HTTP request methods understood by the server, plus {@code UNKNOWN} as a
 * fallback for anything unrecognized on the wire.
 * Constant order is part of the contract (ordinals); do not reorder.
 */
public enum HttpOperation {
  POST, PUT, GET, DELETE, OPTIONS, TRACE, HEAD, PATCH, UNKNOWN
}
|
import { Label } from './Label';
import { Resolution } from '../utils/Resolution';
import { TimeRange } from '../utils/TimeValueRange';
import { useSelector } from '../states/store';
import React from 'react';
/**
 * Renders a <Label> marker for every label on the automaton, projected onto
 * the current time range and viewport resolution.
 */
const Labels = ( { range, size }: {
  range: TimeRange;
  size: Resolution;
} ): JSX.Element => {
  // Selector shape (object wrapper) kept as-is to preserve the component's
  // existing re-render behavior.
  const { labels } = useSelector( ( state ) => ( {
    labels: state.automaton.labels
  } ) );

  const labelNodes = Object.entries( labels ).map( ( [ name, time ] ) => (
    <Label
      key={ name }
      name={ name }
      time={ time }
      range={ range }
      size={ size }
    />
  ) );

  return <>{ labelNodes }</>;
};

export { Labels };
|
<filename>app/controllers/admin/jobs_controller.rb
# Admin CRUD controller for job vacancies. Records are looked up by slug and
# listed 50 per page.
class Admin::JobsController < Admin::ApplicationController
  before_action :set_job, only: %i[show edit update destroy]

  # GET /admin/jobs
  # GET /admin/jobs.json
  def index
    @jobs = JobVacancy.paginate(page: params[:page], per_page: 50)
  end

  # GET /admin/jobs/1
  # GET /admin/jobs/1.json
  def show
  end

  # GET /admin/jobs/new
  def new
    @job = JobVacancy.new
  end

  # GET /admin/jobs/1/edit
  def edit
  end

  # POST /admin/jobs
  # POST /admin/jobs.json
  def create
    @job = JobVacancy.new(job_params)

    respond_to do |format|
      if @job.save
        format.html { redirect_to admin_jobs_path, notice: 'Job was successfully created.' }
        format.json { render :show, status: :created, location: @job }
      else
        format.html { render :new }
        format.json { render json: @job.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /admin/jobs/1
  # PATCH/PUT /admin/jobs/1.json
  def update
    respond_to do |format|
      if @job.update(job_params)
        format.html { redirect_to admin_jobs_path, notice: 'Job was successfully updated.' }
        format.json { render :show, status: :ok, location: @job }
      else
        format.html { render :edit }
        format.json { render json: @job.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /admin/jobs/1
  # DELETE /admin/jobs/1.json
  def destroy
    @job.destroy
    respond_to do |format|
      format.html { redirect_to admin_jobs_url, notice: 'Job was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Shared record lookup for member actions.
  # NOTE(review): find_by_slug returns nil for unknown slugs, so member
  # actions would raise NoMethodError (500) rather than 404 — confirm intent.
  def set_job
    @job = JobVacancy.find_by_slug(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def job_params
    params.require(:job_vacancy).permit(:position, :description, :due_date, :visible, :company_name, :level, :qualification, :salary_range_min, :salary_range_max, :location, :job_type, :negotiable_salary, :about_company)
  end
end
|
<reponame>Julien75013/php-src
/*
+----------------------------------------------------------------------+
| This source file is subject to version 3.01 of the PHP license, |
| that is bundled with this package in the file LICENSE, and is |
| available through the world-wide-web at the following url: |
| http://www.php.net/license/3_01.txt |
| If you did not receive a copy of the PHP license and are unable to |
| obtain it through the world-wide-web, please send a note to |
| license@php.net so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
| Authors: <NAME> (<EMAIL>) |
+----------------------------------------------------------------------+
*/
#include "../intl_cppshims.h"
#include <unicode/dtptngen.h>
#include "../intl_convertcpp.h"
extern "C" {
#include "php_intl.h"
#define USE_DATETIMEPATTERNGENERATOR_POINTER 1
#include "datepatterngenerator_class.h"
#include <zend_exceptions.h>
#include <assert.h>
}
using icu::DateTimePatternGenerator;
using icu::Locale;
using icu::StringPiece;
/* Shared constructor body for IntlDatePatternGenerator::create() and
 * __construct(): parses the optional locale argument, creates the underlying
 * ICU DateTimePatternGenerator, and stores it on the wrapper object.
 * Returns FAILURE with an intl error recorded on any problem. */
static zend_result dtpg_ctor(INTERNAL_FUNCTION_PARAMETERS, zend_error_handling *error_handling, bool *error_handling_replaced)
{
	char *locale_str;
	size_t locale_len = 0;
	IntlDatePatternGenerator_object* dtpgo;
	intl_error_reset(NULL);

	/* return_value holds the object being initialized (set up by the caller). */
	zval *object = return_value;
	ZEND_PARSE_PARAMETERS_START(0, 1)
		Z_PARAM_OPTIONAL
		Z_PARAM_STRING_OR_NULL(locale_str, locale_len)
	ZEND_PARSE_PARAMETERS_END_EX(return FAILURE);

	/* __construct() passes an error-handling slot so failures throw
	 * IntlException; create() passes NULL and reports errors instead. */
	if (error_handling != NULL) {
		zend_replace_error_handling(EH_THROW, IntlException_ce_ptr, error_handling);
		*error_handling_replaced = 1;
	}

	DTPATTERNGEN_METHOD_FETCH_OBJECT_NO_CHECK;

	/* Guard against double initialization of the same wrapper. */
	if (dtpgo->dtpg != NULL) {
		intl_errors_set(DTPATTERNGEN_ERROR_P(dtpgo), U_ILLEGAL_ARGUMENT_ERROR, "Cannot call constructor twice", 0);
		return FAILURE;
	}

	INTL_CHECK_LOCALE_LEN_OR_FAILURE(locale_len);
	/* Empty/omitted locale falls back to the runtime default locale. */
	if (locale_len == 0) {
		locale_str = (char *) intl_locale_get_default();
	}

	Locale locale = Locale::createFromName(locale_str);

	dtpgo->dtpg = DateTimePatternGenerator::createInstance(
		locale,
		DTPATTERNGEN_ERROR_CODE(dtpgo));
	if (U_FAILURE(DTPATTERNGEN_ERROR_CODE(dtpgo))) {
		intl_error_set(NULL, DTPATTERNGEN_ERROR_CODE(dtpgo),
			"Error creating DateTimePatternGenerator",
			0);
		return FAILURE;
	}

	return SUCCESS;
}
/* IntlDatePatternGenerator::create(?string $locale = null).
 * Factory variant: on failure the half-built object is destroyed and NULL is
 * returned (no exception from here; dtpg_ctor records the intl error). */
U_CFUNC PHP_METHOD( IntlDatePatternGenerator, create )
{
	object_init_ex( return_value, IntlDatePatternGenerator_ce_ptr );
	if (dtpg_ctor(INTERNAL_FUNCTION_PARAM_PASSTHRU, NULL, NULL) == FAILURE) {
		zval_ptr_dtor(return_value);
		RETURN_NULL();
	}
}
/* IntlDatePatternGenerator::__construct(?string $locale = null).
 * Constructor variant: failures are surfaced as thrown IntlException. */
U_CFUNC PHP_METHOD( IntlDatePatternGenerator, __construct )
{
	zend_error_handling error_handling;
	bool error_handling_replaced = 0;

	/* return_value param is being changed, therefore we will always return
	 * NULL here */
	return_value = ZEND_THIS;
	if (dtpg_ctor(INTERNAL_FUNCTION_PARAM_PASSTHRU, &error_handling, &error_handling_replaced) == FAILURE) {
		/* If dtpg_ctor failed without already throwing, convert the recorded
		 * global intl error into an IntlException. */
		if (!EG(exception)) {
			zend_string *err = intl_error_get_message(NULL);
			zend_throw_exception(IntlException_ce_ptr, ZSTR_VAL(err), intl_error_get_code(NULL));
			zend_string_release_ex(err, 0);
		}
	}
	/* Only restore if dtpg_ctor actually replaced the error handling
	 * (it may bail out before doing so). */
	if (error_handling_replaced) {
		zend_restore_error_handling(&error_handling);
	}
}
/* IntlDatePatternGenerator::getBestPattern(string $skeleton).
 * Cleans the skeleton via ICU getSkeleton(), then asks the generator for the
 * best-fitting locale-specific date/time pattern and returns it as UTF-8. */
U_CFUNC PHP_METHOD( IntlDatePatternGenerator, getBestPattern )
{
	char *skeleton_str = NULL;
	size_t skeleton_len;
	UnicodeString skeleton_uncleaned;
	DTPATTERNGEN_METHOD_INIT_VARS;

	/* Parse parameters. */
	if( zend_parse_method_parameters( ZEND_NUM_ARGS(), getThis(), "Os",
		&object, IntlDatePatternGenerator_ce_ptr, &skeleton_str, &skeleton_len ) == FAILURE )
	{
		RETURN_THROWS();
	}

	DTPATTERNGEN_METHOD_FETCH_OBJECT;

	/* Convert the UTF-8 skeleton into an ICU UnicodeString. */
	intl_stringFromChar(skeleton_uncleaned, skeleton_str, skeleton_len, DTPATTERNGEN_ERROR_CODE_P(dtpgo));
	INTL_METHOD_CHECK_STATUS(dtpgo, "Skeleton is not a valid UTF-8 string");

	/* Normalize the skeleton before the pattern lookup. */
	UnicodeString skeleton = dtpgo->dtpg->getSkeleton(skeleton_uncleaned, DTPATTERNGEN_ERROR_CODE(dtpgo));
	INTL_METHOD_CHECK_STATUS(dtpgo, "Error getting cleaned skeleton");

	UnicodeString result = dtpgo->dtpg->getBestPattern(skeleton, DTPATTERNGEN_ERROR_CODE(dtpgo));
	INTL_METHOD_CHECK_STATUS(dtpgo, "Error retrieving pattern");

	/* Convert the resulting pattern back to UTF-8 for PHP. */
	zend_string *u8str = intl_charFromString(result, DTPATTERNGEN_ERROR_CODE_P(dtpgo));
	INTL_METHOD_CHECK_STATUS(dtpgo, "Error converting result to UTF-8");
	RETVAL_STR(u8str);
}
|
import sample from '../utils/sample'
// Picks a random weapon name via the shared sample() helper.
// The list is rebuilt per call, matching the original behavior — we cannot
// assume sample() leaves its argument untouched.
export default () => {
  const weapons = [
    'sword',
    'glaive',
    'shield',
    'spear',
    'scythe',
    'mace',
    'club',
    'staff',
    'rod',
    'orb',
    'axe',
    'halberd',
    'bow',
    'crossbow',
  ];
  return sample(weapons);
};
|
#!/bin/bash
# Build a character-level N-gram language model with SRILM (see help_message).
set -e;
export LC_NUMERIC=C;
# Resolve the directory containing this script; the script must be run from
# the parent of utils/.
SDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)";
[ "$(pwd)/utils" = "$SDIR" ] || {
echo "Run the script from \"$(dirname $SDIR)\"!" >&2 && exit 1;
}
[ ! -f "$(pwd)/utils/parse_options.inc.sh" ] && \
echo "Missing $(pwd)/utils/parse_options.inc.sh file!" >&2 && exit 1;
# Default option values, overridable via --order / --overwrite / --srilm_options.
order=10;
overwrite=false;
srilm_options="-wbdiscount -interpolate";
help_message="
Usage: ${0##*/} [options] tr_txt va_txt te_txt [ext_txt ...] output_dir
Description:
Build a character-level N-gram language model using SRILM. You can specify
the order and using the --order option. Other SRILM options can be modified
with --srilm_options.
Arguments:
tr_txt : Training character text data (assumes first column is ID).
va_txt : Validation character text data (assumes first column is ID).
te_txt : Test character text data (assumes first column is ID).
ext_txt : External character text data.
output_dir : Output directory where the language models and other
files will be written (e.g. \"decode/lm\").
Options:
--order : (type = integer, default = $order)
Order of the n-gram language model.
--overwrite : (type = boolean, default = $overwrite)
Overwrite previously created files.
--srilm_options : (type = string, default = \"$srilm_options\")
Use SRILM's ngram-count with these options.
";
source utils/parse_options.inc.sh || exit 1;
# NOTE(review): this check rejects any EVEN argument count, so the documented
# minimal call "tr va te output_dir" (4 args, no ext_txt) is refused — the
# parity test looks unintended; confirm before relying on it.
[ $# -lt 4 -o "$(echo "$# % 2" | bc)" -eq 0 ] &&
echo "$help_message" >&2 && exit 1;
# Read corpora data files from the arguments.
tr_txt="$1";
va_txt="$2";
te_txt="$3";
shift 3;
# Read external data files from the arguments.
external_txt=();
while [ $# -gt 1 ]; do
external_txt+=("$1");
shift 1;
done;
# Read output directory from the arguments.
odir="$1";
# Check input files
for f in "$tr_txt" "$va_txt" "$te_txt" "${external_txt[@]}"; do
[ ! -s "$f" ] && echo "ERROR: File \"$f\" does not exist!" >&2 && exit 1;
done;
# Check required tools (SRILM)
for f in ngram-count ngram compute-best-mix; do
which "$f" &> /dev/null ||
{ echo "ERROR: Program $f is not in your PATH!" >&2 && exit 1; }
done;
# Create output dir
mkdir -p "$odir";
# Check that $1 is not newer than $2...$#.
# Returns 1 as soon as any of $2..$# is older (-ot) than $1, else 0.
# Uses ${!i} indirection to walk the positional parameters.
function check_not_newer () {
for i in $(seq 2 $#); do [[ "${!i}" -ot "$1" ]] && return 1; done;
return 0;
}
# Check that $1 is not older than $2...$#
# Returns 1 as soon as any of $2..$# is newer (-nt) than $1, else 0.
function check_not_older () {
for i in $(seq 2 $#); do [[ "${!i}" -nt "$1" ]] && return 1; done;
return 0;
}
# Interpolate a list of .arpa.gz files.
# Interpolate a list of .arpa.gz files.
# Arguments: one or more .arpa.gz model paths. Uses $va_txt, $order, $odir and
# $overwrite from the enclosing script; writes interpolation-${order}gram.{mix,arpa.gz}.
# NOTE(review): "$@" is expanded unquoted in the first loop, so model paths
# with spaces would be split — confirm paths are always space-free.
function interpolate_arpa_files () {
# Compute detailed perplexity on the validation data
info_files=();
for arpa in $@; do
info="${arpa/.arpa.gz/.info}";
# Skip if a fresh .info already exists (unless --overwrite).
[[ "$overwrite" = false && -s "$info" && ( ! "$info" -ot "$arpa" ) ]] ||
gawk '{$1=""; print;}' "$va_txt" |
ngram -order "$order" -debug 2 -ppl - -lm <(zcat "$arpa") &> "$info" ||
{ echo "ERROR: Creating file \"$info\"!" >&2 && exit 1; }
info_files+=("$info");
done;
# Compute interpolation weights
mixf="$odir/interpolation-${order}gram.mix";
( [[ "$overwrite" = false && -s "$mixf" ]] &&
check_not_older "$mixf" "${info_files[@]}" ) ||
compute-best-mix "${info_files[@]}" &> "$mixf" ||
{ echo "ERROR: Creating file \"$mixf\"!" >&2 && exit 1; }
# Extract the per-model lambdas from compute-best-mix's "best lambda (...)" line.
lambdas=( $(grep "best lambda" "$mixf" | gawk -F\( '{print $2}' | tr -d \)) );
# Interpolate language models.
# SRILM's ngram takes -lm/-lambda for the first model, -mix-lm for the second,
# and -mix-lmN/-mix-lambdaN for the rest; models are decompressed to tmp files.
tmpfs=();
args=();
for i in $(seq 1 $#); do
tmpfs+=( "$(mktemp)" );
zcat "${!i}" > "${tmpfs[${#tmpfs[@]} - 1]}";
if [ $i -eq 1 ]; then
args+=( -lm "${tmpfs[${#tmpfs[@]} - 1]}" -lambda "${lambdas[i - 1]}" );
elif [ $i -eq 2 ]; then
args+=( -mix-lm "${tmpfs[${#tmpfs[@]} - 1]}" );
else
args+=( "-mix-lm$[i - 1]" "${tmpfs[${#tmpfs[@]} - 1]}" \
"-mix-lambda$[i - 1]" "${lambdas[i - 1]}" );
fi;
done;
outf="$odir/interpolation-${order}gram.arpa.gz";
[[ "$overwrite" = false && -s "$outf" && ( ! "$outf" -ot "$mixf" ) ]] ||
ngram -order "${order}" "${args[@]}" -write-lm - |
gzip -9 -c > "$outf" ||
{ echo "ERROR: Creating file \"$outf\"!" >&2 && exit 1; }
rm -f "${tmpfs[@]}";
return 0;
}
# Create vocabulary file.
# The vocabulary is every distinct token seen in train+valid+test, skipping
# column 1 (the utterance ID).
# NOTE(review): "$vocf" (mktemp) is never removed — consider a trap cleanup.
vocf="$(mktemp)";
cut -d\ -f2- "$tr_txt" "$va_txt" "$te_txt" | tr \ \\n | gawk 'NF > 0' |
sort | uniq > "$vocf";
# Train N-gram on the training partition
outf="$odir/$(basename "$tr_txt" .txt)-${order}gram.arpa.gz";
[[ "$overwrite" = false && -s "$outf" && ( ! "$outf" -ot "$tr_txt" ) ]] ||
gawk '{$1=""; print;}' "$tr_txt" |
ngram-count -order "$order" -vocab "$vocf" $srilm_options -text - -lm - |
gzip -9 -c > "$outf" ||
{ echo "ERROR: Failed creating file \"$outf\"!" >&2 && exit 1; }
arpa_files=( "$outf" );
# Train N-gram on each external corpus
for txtf in "${external_txt[@]}"; do
outf="$odir/$(basename "$txtf" .txt)-${order}gram.arpa.gz";
info="$odir/$(basename "$txtf" .txt)-${order}gram.info";
[[ "$overwrite" = false && -s "$outf" && ( ! "$outf" -ot "$txtf" ) ]] ||
ngram-count -order "$order" -vocab "$vocf" $srilm_options -text "$txtf" \
-lm - | gzip -9 -c > "$outf" ||
{ echo "ERROR: Failed creating file \"$outf\"!" >&2 && exit 1; }
arpa_files+=( "$outf" );
done;
# Interpolate all language models
if [ ${#arpa_files[@]} -gt 1 ]; then
interpolate_arpa_files "${arpa_files[@]}";
outf="$odir/interpolation-${order}gram.arpa.gz";
else
outf="${arpa_files[0]}";
fi;
# Compute detailed perplexity of the interpolated model
# For each partition: extract ppl from ngram's summary line, and count OOV
# tokens against the vocabulary file with gawk.
ppl=();
oov=();
oovp=();
for f in "$tr_txt" "$va_txt" "$te_txt"; do
ppl+=( $(gawk '{$1=""; print;}' "$f" |
ngram -order "$order" -ppl - -lm <(zcat "$outf") 2>&1 |
tail -n1 | sed -r 's|^.+\bppl= ([0-9.]+)\b.+$|\1|g' |
gawk '{printf("%.2f", $1);}') );
aux=( $(gawk -v VF="$vocf" '
BEGIN{ N=0; OOV=0; while((getline < VF) > 0) V[$1]=1; }
{ for (i=2;i<=NF;++i) { ++N; if (!($i in V)) { ++OOV; } } }
END{ print OOV, N; }' "$f") );
oov+=(${aux[0]});
oovp+=( $(echo "${aux[0]} ${aux[1]}" |
gawk '{ printf("%.2f", 100 * $1 / $2); }') );
done;
# Print statistics
cat <<EOF >&2
Char-level ${order}-gram:
Train: ppl = ${ppl[0]}, oov = ${oov[0]}, %oov = ${oovp[0]}
Valid: ppl = ${ppl[1]}, oov = ${oov[1]}, %oov = ${oovp[1]}
Test: ppl = ${ppl[2]}, oov = ${oov[2]}, %oov = ${oovp[2]}
EOF
exit 0;
|
import React, { useState, useEffect } from "react"
import TradingViewWidget from "react-tradingview-widget"
import Binance from "binance-api-node"
import "bootstrap/dist/css/bootstrap.min.css"
import "./middleGraphsSection.css"
import MenuCard from "./MenuCard"
// Public (unauthenticated) client — sufficient for market data like dailyStats.
const client = Binance()

// Authenticated client, can make signed calls
// Credentials are injected at build time via Gatsby env vars; never hard-code
// keys here. NOTE(review): client2 is not used in this file — confirm it is
// needed, since bundling secrets into a client-side build exposes them.
const client2 = Binance({
  apiKey: process.env.GATSBY_APIKEY,
  apiSecret: process.env.GATSBY_APISECRET,
})
const MiddleGraphsSection = () => {
const [symbol, setSymbol] = useState("BTCUSDT")
const [dailyStatsForSymbol, setDailyStatsForSymbol] = useState(0)
// getting symbol statistics
useEffect(() => {
client.dailyStats({ symbol: symbol }).then(stat => {
setDailyStatsForSymbol(stat)
})
}, [symbol])
const dailyHigh = parseFloat(dailyStatsForSymbol.highPrice).toFixed(2)
const dailyLow = parseFloat(dailyStatsForSymbol.lowPrice).toFixed(2)
const priceChangePercent = dailyStatsForSymbol.priceChangePercent
const priceChange = parseFloat(dailyStatsForSymbol.priceChange).toFixed(2)
const lastPrice = parseFloat(dailyStatsForSymbol.lastPrice).toFixed(2)
return (
<>
{/* bar graph for live market section */}
<div
style={{
flex: 10,
display: "flex",
flexDirection: "column",
background: "#3C4C5E",
}}
>
{/* top header */}
<TopHeaderSection
setSymbol={setSymbol}
symbol={symbol}
setDailyStatsForSymbol={setDailyStatsForSymbol}
dailyHigh={dailyHigh}
dailyLow={dailyLow}
priceChangePercent={priceChangePercent}
lastPrice={lastPrice}
priceChange={priceChange}
/>
{/* Chart */}
<ChartSection symbol={symbol} />
</div>
{/* bot forecast graph section */}
<div style={{ flex: 8 }}>
<MenuCard />
</div>
</>
)
}
// Top header section
// Header strip above the chart: symbol dropdown plus 24h stat read-outs.
// Gains are rendered green (#60BC3F) and losses red (#DB3E62), keyed off the
// sign of priceChangePercent.
// NOTE(review): <text> is not a standard HTML element in a web DOM; it is
// used consistently across this file, so it is kept as-is here.
const TopHeaderSection = ({
  setSymbol,
  symbol,
  setDailyStatsForSymbol,
  dailyHigh,
  dailyLow,
  priceChangePercent,
  lastPrice,
  priceChange,
}) => {
  // Propagate the dropdown selection to the parent's symbol state.
  function handleSelectChange(event) {
    setSymbol(event.target.value)
  }
  return (
    <div
      style={{
        flex: 3,
        display: "flex",
        flexDirection: "row",
        justifyContent: "space-evenly",
      }}
    >
      <div
        style={{
          display: "flex",
          flexDirection: "row",
          height: "100%",
          justifyContent: "center",
          alignItems: "center",
        }}
      >
        <select
          className="dropDown"
          value={symbol}
          onChange={handleSelectChange}
        >
          <option value="BTCUSDT">BTC/USD</option>
          <option value="ETHUSDT">ETH/USD</option>
          <option value="XRPUSDT">XRP/USD</option>
        </select>
      </div>
      <div className="dataContainer">
        <text className="greyText">Last Price</text>
        <text
          style={{
            fontSize: 18,
            color: priceChangePercent >= 0 ? "#60BC3F" : "#DB3E62",
          }}
        >
          {lastPrice}
        </text>
      </div>
      <div className="dataContainer">
        <text className="greyText">24h Change</text>
        <div style={{ display: "flex", flexDirection: "row" }}>
          <text
            style={{
              fontSize: 18,
              color: priceChangePercent >= 0 ? "#60BC3F" : "#DB3E62",
            }}
          >
            {priceChange}
          </text>
          <div>
            {/* Explicit "+" prefix only for non-negative percentage changes */}
            {priceChangePercent >= 0 && (
              <text style={{ color: "#60BC3F", marginLeft: 8 }}>+</text>
            )}
            <text
              style={{
                marginLeft: priceChangePercent >= 0 ? 0 : 8,
                color: priceChangePercent >= 0 ? "#60BC3F" : "#DB3E62",
              }}
            >
              {priceChangePercent}%
            </text>
          </div>
        </div>
      </div>
      <div className="dataContainer">
        <text className="greyText">24h High</text>
        <text className="text">{dailyHigh}</text>
      </div>
      <div className="dataContainer">
        <text className="greyText">24h Low</text>
        <text className="text">{dailyLow}</text>
      </div>
    </div>
  )
}
// chart section
const ChartSection = ({ symbol }) => {
return (
<div
style={{
flex: 10,
paddingLeft: "3%",
paddingRight: "3%",
paddingBottom: "3%",
}}
>
<TradingViewWidget
symbol={symbol}
autosize
theme="dark"
studies={["MACD@tv-basicstudies"]}
/>
</div>
)
}
export default MiddleGraphsSection
|
#!/bin/bash
set -e

# Print the absolute path of "$1" (directory resolved via cd+pwd,
# basename re-appended).
get_abs_filename() {
echo "$(cd "$(dirname "$1")" && pwd)/$(basename "$1")"
}
# CI smoke tests: run the bundled checks, then a series of public pipelines.
# NXF_CMD defaults to the repo's launch.sh; TRAVIS_PULL_REQUEST gates the
# credential-requiring tests further below.
export NXF_IGNORE_WARN_DSL2=true
export NXF_CMD=${NXF_CMD:-$(get_abs_filename ../launch.sh)}
export TRAVIS_PULL_REQUEST=${TRAVIS_PULL_REQUEST:=false}
#
# Tests
#
(
cd ../tests/checks;
bash run.sh
)
# disable ansi log to make log more readable
export NXF_ANSI_LOG=false
#
# Hello
#
# Each pipeline is run twice: a clean run and a -resume run to exercise caching.
git clone https://github.com/nextflow-io/hello
(
cd hello;
$NXF_CMD run .
$NXF_CMD run . -resume
)
# Pull requests have no access to CI secrets, so stop before the tests below.
if [[ $TRAVIS_PULL_REQUEST != false ]]; then
echo Skipping tests requiring secret vars
exit 0
fi
#
# AMPA-NF
#
git clone https://github.com/cbcrg/ampa-nf
docker pull cbcrg/ampa-nf
(
cd ampa-nf;
$NXF_CMD run . -with-docker
$NXF_CMD run . -with-docker -resume
)
#
# RNASEQ-NF
#
echo nextflow-io/rnaseq-nf
# Report to Tower only when an access token is available.
[[ $TOWER_ACCESS_TOKEN ]] && OPTS='-with-tower' || OPTS=''
$NXF_CMD run nextflow-io/rnaseq-nf -with-docker $OPTS
$NXF_CMD run nextflow-io/rnaseq-nf -with-docker $OPTS -resume
#
# AWS Batch tests
#
echo aws batch tests
bash awsbatch.sh
#
# Google Life Sciences
#
if [[ $GOOGLE_SECRET ]]; then
bash gls.sh
else
echo "Google Life Science test skipped because GOOGLE_SECRET env var is missing"
fi
|
package proc_server_test
import (
"context"
"strconv"
"testing"
"configcenter/src/common/mapstr"
params "configcenter/src/common/paraparse"
commonutil "configcenter/src/common/util"
"configcenter/src/test"
"configcenter/src/test/reporter"
"configcenter/src/test/util"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Shared API clients used across the proc-server specs.
var header = test.GetHeader()
var clientSet = test.GetClientSet()
var serviceClient = clientSet.ProcServer().Service()
var processClient = clientSet.ProcServer().Process()
var instClient = test.GetClientSet().TopoServer().Instance()
var hostServerClient = test.GetClientSet().HostServer()
var apiServerClient = test.GetClientSet().ApiServer()

// IDs of the fixtures created in BeforeSuite and reused by the specs.
var bizId, hostId1, hostId2, setId int64
// TestProcServer wires Ginkgo/Gomega into the standard `go test` runner and
// emits an HTML report for the suite.
func TestProcServer(t *testing.T) {
	RegisterFailHandler(util.Fail)
	reporters := []Reporter{
		reporter.NewHtmlReporter(test.GetReportDir()+"procserver.html", test.GetReportUrl(), true),
	}
	RunSpecsWithDefaultAndCustomReporters(t, "ProcServer Suite", reporters)
}
var _ = BeforeSuite(func() {
test.ClearDatabase()
Describe("test preparation", func() {
Describe("create biz", func() {
input := map[string]interface{}{
"life_cycle": "2",
"language": "1",
"bk_biz_maintainer": "admin",
"bk_biz_name": "cc_biz",
"time_zone": "Africa/Accra",
}
rsp, err := apiServerClient.CreateBiz(context.Background(), "0", header, input)
util.RegisterResponse(rsp)
Expect(err).NotTo(HaveOccurred())
Expect(rsp.Result).To(Equal(true))
bizId, err = commonutil.GetInt64ByInterface(rsp.Data["bk_biz_id"])
Expect(err).NotTo(HaveOccurred())
})
Describe("add host", func() {
input := map[string]interface{}{
"bk_biz_id": bizId,
"host_info": map[string]interface{}{
"1": map[string]interface{}{
"bk_host_innerip": "1.0.0.1",
"bk_asset_id": "addhost_api_asset_1",
"bk_cloud_id": 0,
},
"2": map[string]interface{}{
"bk_host_innerip": "1.0.0.2",
"bk_asset_id": "addhost_api_asset_2",
"bk_cloud_id": 0,
},
},
}
rsp, err := hostServerClient.AddHost(context.Background(), header, input)
util.RegisterResponse(rsp)
Expect(err).NotTo(HaveOccurred())
Expect(rsp.Result).To(Equal(true))
})
Describe("search host", func() {
input := ¶ms.HostCommonSearch{
AppID: int(bizId),
}
rsp, err := hostServerClient.SearchHost(context.Background(), header, input)
util.RegisterResponse(rsp)
Expect(err).NotTo(HaveOccurred())
Expect(rsp.Result).To(Equal(true))
Expect(rsp.Data.Count).To(Equal(2))
hostId1, err = commonutil.GetInt64ByInterface(rsp.Data.Info[0]["host"].(map[string]interface{})["bk_host_id"])
Expect(err).NotTo(HaveOccurred())
hostId2, err = commonutil.GetInt64ByInterface(rsp.Data.Info[1]["host"].(map[string]interface{})["bk_host_id"])
Expect(err).NotTo(HaveOccurred())
})
Describe("create set", func() {
input := mapstr.MapStr{
"bk_set_name": "test",
"bk_parent_id": bizId,
"bk_supplier_account": "0",
"bk_biz_id": bizId,
"bk_service_status": "1",
"bk_set_env": "3",
}
rsp, err := instClient.CreateSet(context.Background(), strconv.FormatInt(bizId, 10), header, input)
util.RegisterResponse(rsp)
Expect(err).NotTo(HaveOccurred())
Expect(rsp.Result).To(Equal(true))
Expect(rsp.Data["bk_set_name"].(string)).To(Equal("test"))
parentIdRes, err := commonutil.GetInt64ByInterface(rsp.Data["bk_parent_id"])
Expect(err).NotTo(HaveOccurred())
Expect(parentIdRes).To(Equal(bizId))
bizIdRes, err := commonutil.GetInt64ByInterface(rsp.Data["bk_biz_id"])
Expect(err).NotTo(HaveOccurred())
Expect(bizIdRes).To(Equal(bizId))
setId, err = commonutil.GetInt64ByInterface(rsp.Data["bk_set_id"])
Expect(err).NotTo(HaveOccurred())
})
})
})
|
package servlet;
import java.io.IOException;
import javax.servlet.ServletException;
/**
 * Front-controller command that displays the search page.
 */
public class SearchCommand extends FrontCommand {
    @Override
    public void process() throws ServletException, IOException {
        // No model preparation needed; go straight to the JSP view.
        forward("/search.jsp");
    }
}
|
#!/bin/bash
# Interactive-style helper that selects boot mode, target binary, SPI speed,
# SPI mode and flash size/map, then invokes make with the chosen parameters.
# NOTE: every "read input" below is commented out, so $input is always empty
# and each step silently falls through to its documented default
# (boot=new, app=1/user1.bin, 40MHz, QIO, map 2 = 1024KB 512+512).
echo "gen_misc.sh version 20150511"
echo "change by baojun 20151211"
echo "no input to read"
echo ""
echo "Please follow below steps(1-5) to generate specific bin(s):"
echo "STEP 1: choose boot version(0=boot_v1.1, 1=boot_v1.2+, 2=none)"
echo "enter(0/1/2, default 1):"
#read input
if [ -z "$input" ]; then
boot=new
elif [ $input == 0 ]; then
boot=old
elif [ $input == 1 ]; then
boot=new
else
boot=none
fi
echo "boot mode: $boot"
echo ""
echo "STEP 2: choose bin generate(0=eagle.flash.bin+eagle.irom0text.bin, 1=user1.bin, 2=user2.bin)"
echo "enter (0/1/2, default 1):"
#read input
# user1/user2 images require a bootloader; without one, fall back to the
# raw flash+irom images.
if [ -z "$input" ]; then
if [ $boot == none ]; then
app=0
echo "choose no boot before"
echo "generate bin: eagle.flash.bin+eagle.irom0text.bin"
else
app=1
echo "generate bin: user1.bin"
fi
elif [ $input == 1 ]; then
if [ $boot == none ]; then
app=0
echo "choose no boot before"
echo "generate bin: eagle.flash.bin+eagle.irom0text.bin"
else
app=1
echo "generate bin: user1.bin"
fi
elif [ $input == 2 ]; then
if [ $boot == none ]; then
app=0
echo "choose no boot before"
echo "generate bin: eagle.flash.bin+eagle.irom0text.bin"
else
app=2
echo "generate bin: user2.bin"
fi
else
if [ $boot != none ]; then
boot=none
echo "ignore boot"
fi
app=0
echo "generate bin: eagle.flash.bin+eagle.irom0text.bin"
fi
echo ""
echo "STEP 3: choose spi speed(0=20MHz, 1=26.7MHz, 2=40MHz, 3=80MHz)"
echo "enter (0/1/2/3, default 2):"
#read input
if [ -z "$input" ]; then
spi_speed=40
elif [ $input == 0 ]; then
spi_speed=20
elif [ $input == 1 ]; then
spi_speed=26.7
elif [ $input == 3 ]; then
spi_speed=80
else
spi_speed=40
fi
echo "spi speed: $spi_speed MHz"
echo ""
echo "STEP 4: choose spi mode(0=QIO, 1=QOUT, 2=DIO, 3=DOUT)"
echo "enter (0/1/2/3, default 0):"
#read input
if [ -z "$input" ]; then
spi_mode=QIO
elif [ $input == 1 ]; then
spi_mode=QOUT
elif [ $input == 2 ]; then
spi_mode=DIO
elif [ $input == 3 ]; then
spi_mode=DOUT
else
spi_mode=QIO
fi
echo "spi mode: $spi_mode"
echo ""
echo "STEP 5: choose spi size and map"
echo " 0= 512KB( 256KB+ 256KB)"
echo " 2=1024KB( 512KB+ 512KB)"
echo " 3=2048KB( 512KB+ 512KB)"
echo " 4=4096KB( 512KB+ 512KB)"
echo " 5=2048KB(1024KB+1024KB)"
echo " 6=4096KB(1024KB+1024KB)"
echo "enter (0/2/3/4/5/6, default 2):"
#read input
if [ -z "$input" ]; then
spi_size_map=2
echo "spi size: 1024KB"
echo "spi ota map: 512KB + 512KB"
elif [ $input == 2 ]; then
spi_size_map=2
echo "spi size: 1024KB"
echo "spi ota map: 512KB + 512KB"
elif [ $input == 3 ]; then
spi_size_map=3
echo "spi size: 2048KB"
echo "spi ota map: 512KB + 512KB"
elif [ $input == 4 ]; then
spi_size_map=4
echo "spi size: 4096KB"
echo "spi ota map: 512KB + 512KB"
elif [ $input == 5 ]; then
spi_size_map=5
echo "spi size: 2048KB"
echo "spi ota map: 1024KB + 1024KB"
elif [ $input == 6 ]; then
spi_size_map=6
echo "spi size: 4096KB"
echo "spi ota map: 1024KB + 1024KB"
else
spi_size_map=0
echo "spi size: 512KB"
echo "spi ota map: 256KB + 256KB"
fi
echo ""
# Touch user_main.c so make re-links with the freshly chosen parameters.
touch user/user_main.c
echo ""
echo "start..."
echo ""
make COMPILE=gcc BOOT=$boot APP=$app SPI_SPEED=$spi_speed SPI_MODE=$spi_mode SPI_SIZE_MAP=$spi_size_map
|
#!/bin/bash
set -Eeo pipefail

# Install the freshly built Dart SDK: copy every top-level directory of the
# release output into /usr, skipping the "util" directory.
# Fix: -mindepth/-maxdepth are global find options and must precede tests
# like -type; GNU find warns (and may misbehave) when they follow them.
cd "$YAK_WORKSPACE"/dart-sdk/sdk

while read -r path
do
  cp -rv "$path" /usr/
done < <(find out/ReleaseX64/dart-sdk/ \
  -mindepth 1 \
  -maxdepth 1 \
  -type d \
  -not -iname util)
|
#!/bin/bash
# Chart source overrides: use leifcr's auto-deploy-rails chart instead of the
# stock GitLab auto-deploy-app (both variables feed download_chart below).
AUTO_DEVOPS_CHART=gitlab/auto-deploy-rails
AUTO_DEVOPS_CHART_REPOSITORY=https://leifcr.gitlab.io/auto-deploy-rails
# Resolve and fetch the deployment chart into ./chart: a vendored ./chart
# directory wins; otherwise fetch $AUTO_DEVOPS_CHART (default
# gitlab/auto-deploy-app) from $AUTO_DEVOPS_CHART_REPOSITORY, then resolve
# its dependencies. Fix: quote all variable expansions so chart names/URLs
# containing spaces or glob characters are not mangled by the shell.
function download_chart() {
  if [[ ! -d chart ]]; then
    auto_chart=${AUTO_DEVOPS_CHART:-gitlab/auto-deploy-app}
    # Strip any archive suffix to get the chart's directory name.
    auto_chart_name=$(basename "$auto_chart")
    auto_chart_name=${auto_chart_name%.tgz}
    auto_chart_name=${auto_chart_name%.tar.gz}
  else
    auto_chart="chart"
    auto_chart_name="chart"
  fi

  helm init --client-only
  helm repo add gitlab "${AUTO_DEVOPS_CHART_REPOSITORY:-https://charts.gitlab.io}"
  if [[ ! -d "$auto_chart" ]]; then
    helm fetch "${auto_chart}" --untar
  fi
  # Normalize the fetched directory name to ./chart.
  if [ "$auto_chart_name" != "chart" ]; then
    mv "${auto_chart_name}" chart
  fi

  helm dependency update chart/
  helm dependency build chart/
}
# Fetch the chart, show/clean the repo registration, print the chart metadata
# for inspection, then remove the working copy.
download_chart
helm repo list
helm repo remove gitlab
cat chart/Chart.yaml
rm -rf chart
|
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.internal;
import org.apache.tapestry5.SymbolConstants;
import org.apache.tapestry5.ioc.IOCUtilities;
import org.apache.tapestry5.ioc.Registry;
import org.apache.tapestry5.ioc.RegistryBuilder;
import org.apache.tapestry5.ioc.def.ContributionDef;
import org.apache.tapestry5.ioc.def.ModuleDef;
import org.apache.tapestry5.ioc.internal.util.InternalUtils;
import org.apache.tapestry5.ioc.services.*;
import org.apache.tapestry5.modules.TapestryModule;
import org.slf4j.Logger;
import java.util.Formatter;
import java.util.List;
/**
 * This class is used to build the {@link Registry}. The Registry contains
 * {@link org.apache.tapestry5.ioc.modules.TapestryIOCModule} and {@link TapestryModule}, any
 * modules identified by {@link #addModules(Class[])}, plus the application module.
 *
 * The application module is optional.
 *
 * The application module is identified as <em>package</em>.services.<em>appName</em>Module, where
 * <em>package</em> and the <em>appName</em> are specified by the caller.
 */
@SuppressWarnings("rawtypes")
public class TapestryAppInitializer
{
    private final Logger logger;

    // Source of configuration symbols (normally backed by the ServletContext init parameters).
    private final SymbolProvider appProvider;

    private final String appName;

    // Timestamp (ms) captured at construction; used by announceStartup() to report timings.
    private final long startTime;

    private final RegistryBuilder builder = new RegistryBuilder();

    // Timestamp (ms) captured when createRegistry() runs.
    private long registryCreatedTime;

    private Registry registry;

    /**
     * @param logger
     *            logger for output confirmation
     * @param appPackage
     *            root package name to search for pages and components
     * @param appName
     *            the name of the application (i.e., the name of the application servlet)
     */
    public TapestryAppInitializer(Logger logger, String appPackage, String appName)
    {
        this(logger, new SingleKeySymbolProvider(InternalConstants.TAPESTRY_APP_PACKAGE_PARAM, appPackage), appName,
                null);
    }

    /**
     * @param logger
     *            logger for output confirmation
     * @param appProvider
     *            provides symbols for the application (normally, from the ServletContext init
     *            parameters), plus (as of 5.4) the value for symbol {@link SymbolConstants#CONTEXT_PATH}
     * @param appName
     *            the name of the application (i.e., the name of the application servlet)
     * @param executionModes
     *            an optional, comma-separated list of execution modes, each of which is used
     *            to find a list of additional module classes to load (key
     *            <code>tapestry.<em>name</em>-modules</code> in appProvider, i.e., the servlet
     *            context)
     */
    public TapestryAppInitializer(Logger logger, SymbolProvider appProvider, String appName, String executionModes)
    {
        this.logger = logger;
        this.appProvider = appProvider;
        String appPackage = appProvider.valueForSymbol(InternalConstants.TAPESTRY_APP_PACKAGE_PARAM);
        this.appName = appName;
        startTime = System.currentTimeMillis();
        // Default IoC modules can be suppressed via an init parameter.
        if (!Boolean.parseBoolean(appProvider.valueForSymbol(InternalConstants.DISABLE_DEFAULT_MODULES_PARAM)))
        {
            IOCUtilities.addDefaultModules(builder);
        }
        // This gets added automatically.
        addModules(TapestryModule.class);
        // Look for the optional application module: <package>.services.<AppName>Module
        String className = appPackage + ".services." + InternalUtils.capitalize(this.appName) + "Module";
        try
        {
            // This class is possibly loaded by a parent class loader of the application class
            // loader. The context class loader should have the appropriate view to the module
            // class,
            // if any.
            Class moduleClass = Thread.currentThread().getContextClassLoader().loadClass(className);
            builder.add(moduleClass);
        } catch (ClassNotFoundException ex)
        {
            // That's OK, not all applications will have a module class, even though any
            // non-trivial application will.
            logger.warn("Application Module class {} not found", className);
        }
        // Add a synthetic module that contributes symbol sources.
        addSyntheticSymbolSourceModule(appPackage);
        // Each execution mode may name additional module classes to load.
        for (String mode : TapestryInternalUtils.splitAtCommas(executionModes))
        {
            String key = String.format("tapestry.%s-modules", mode);
            String moduleList = appProvider.valueForSymbol(key);
            for (String moduleClassName : TapestryInternalUtils.splitAtCommas(moduleList))
            {
                builder.add(moduleClassName);
            }
        }
    }

    /**
     * Adds additional modules.
     *
     * @param moduleDefs
     */
    public void addModules(ModuleDef... moduleDefs)
    {
        for (ModuleDef def : moduleDefs)
            builder.add(def);
    }

    public void addModules(Class... moduleClasses)
    {
        builder.add(moduleClasses);
    }

    // Contributes the app package path, the servlet-context symbols, and the app name
    // as symbol sources, with explicit ordering constraints between them.
    private void addSyntheticSymbolSourceModule(String appPackage)
    {
        ContributionDef appPathContribution = new SyntheticSymbolSourceContributionDef("AppPath",
                new SingleKeySymbolProvider(InternalSymbols.APP_PACKAGE_PATH, appPackage.replace('.', '/')));
        ContributionDef symbolSourceContribution = new SyntheticSymbolSourceContributionDef("ServletContext",
                appProvider, "before:ApplicationDefaults", "after:EnvironmentVariables");
        ContributionDef appNameContribution = new SyntheticSymbolSourceContributionDef("AppName",
                new SingleKeySymbolProvider(InternalSymbols.APP_NAME, appName), "before:ServletContext");
        builder.add(new SyntheticModuleDef(symbolSourceContribution, appNameContribution, appPathContribution));
    }

    /**
     * Builds the {@link Registry} from all modules registered so far and records the
     * build timestamp (reported by {@link #announceStartup()}).
     */
    public Registry createRegistry()
    {
        registryCreatedTime = System.currentTimeMillis();
        registry = builder.build();
        return registry;
    }

    /**
     * Announce application startup, by logging (at INFO level) the names of all pages,
     * components, mixins and services.
     */
    public void announceStartup()
    {
        if (!logger.isInfoEnabled()) // if info logging is off we can stop now
        {
            return;
        }
        long toFinish = System.currentTimeMillis();
        SymbolSource source = registry.getService("SymbolSource", SymbolSource.class);
        StringBuilder buffer = new StringBuilder("Startup status:\n\nServices:\n\n");
        Formatter f = new Formatter(buffer);
        int unrealized = 0;
        ServiceActivityScoreboard scoreboard = registry.getService(ServiceActivityScoreboard.class);
        List<ServiceActivity> serviceActivity = scoreboard.getServiceActivity();
        int longest = 0;
        // One pass to find the longest name, and to count the unrealized services.
        for (ServiceActivity activity : serviceActivity)
        {
            Status status = activity.getStatus();
            longest = Math.max(longest, activity.getServiceId().length());
            if (status == Status.DEFINED || status == Status.VIRTUAL)
                unrealized++;
        }
        // Right-align service ids to the longest id's width.
        String formatString = "%" + longest + "s: %s\n";
        // A second pass to output all the services
        for (ServiceActivity activity : serviceActivity)
        {
            f.format(formatString, activity.getServiceId(), activity.getStatus().name());
        }
        f.format("\n%4.2f%% unrealized services (%d/%d)\n", 100. * unrealized / serviceActivity.size(), unrealized,
                serviceActivity.size());
        f.format("\nApplication '%s' (version %s) startup time: %,d ms to build IoC Registry, %,d ms overall.", appName,
                source.valueForSymbol(SymbolConstants.APPLICATION_VERSION),
                registryCreatedTime - startTime,
                toFinish - startTime);
        String version = source.valueForSymbol(SymbolConstants.TAPESTRY_VERSION);
        boolean productionMode = Boolean.parseBoolean(source.valueForSymbol(SymbolConstants.PRODUCTION_MODE));
        buffer.append("\n\n");
        buffer.append(" ______ __ ____\n");
        buffer.append("/_ __/__ ____ ___ ___ / /_______ __ / __/\n");
        buffer.append(" / / / _ `/ _ \\/ -_|_-</ __/ __/ // / /__ \\ \n");
        buffer.append("/_/ \\_,_/ .__/\\__/___/\\__/_/ \\_, / /____/\n");
        f.format (" /_/ /___/ %s%s\n\n",
                version, productionMode ? "" : " (development mode)");
        // log multi-line string with OS-specific line endings (TAP5-2294)
        logger.info(buffer.toString().replaceAll("\\n", System.getProperty("line.separator")));
    }
}
|
"""
@author: <NAME>
@contact: <EMAIL>
"""
import numpy as np
from PIL import Image
from torch.utils.data import Dataset
from .utils.data import transforms as T
from collections import OrderedDict
from glob import glob
import os.path as osp
import json
import torch
class CommonDataset ( Dataset ):
    """Multi-view dataset over a post-processed capture directory.

    Each item indexes one time instant and yields the person crops from
    every camera at that instant, plus bookkeeping (file names, person ids,
    camera ids). Per-camera-pair fundamental matrices are precomputed from
    the supplied camera parameters.
    """

    def __init__(self, post_processed_dir='/home/jiangwen/reid/CamStyle/data/Shelf/post_processed',
                 camera_parameter=None):
        # camera_parameter: dict with 'P', 'K' and 'RT' arrays (one entry per
        # camera) -- required in practice; None would fail below. TODO confirm
        # expected shapes against the caller.
        self.data_dir = post_processed_dir
        # Frozen hyper-parameters of the re-id model this dataset feeds.
        self.args = dict ( arch='resnet50', batch_size=128, camstyle=46, data_dir=self.data_dir,
                           dataset='market', dist_metric='euclidean', dropout=0.5, epochs=50, evaluate=False,
                           features=1024,
                           height=256, logs_dir='logs/market-ide-camstyle-re', lr=0.1, momentum=0.9,
                           output_feature='pool5',
                           print_freq=1, re=0.5, rerank=True, weight_decay=0.0005, width=128, workers=8,
                           resume='logs/market-ide-camstyle-re/checkpoint.pth.tar' )
        # Standard ImageNet normalization statistics.
        self.normalizer = T.Normalize ( mean=[0.485, 0.456, 0.406],
                                        std=[0.229, 0.224, 0.225] )
        self.test_transformer = T.Compose ( [
            T.Resize ( (self.args['height'], self.args['width']), interpolation=3 ),
            T.ToTensor (),
            self.normalizer,
        ] )
        # cam_id -> contents of that camera's pose_info.json. Every
        # sub-directory except 'parameter' is treated as a camera.
        self.info_dict = OrderedDict ()
        self.cam_names = sorted ( [osp.split ( i )[-1] for i in glob ( osp.join ( self.data_dir, '*' ) ) if
                                   osp.split ( i )[-1] != 'parameter'] )
        for cam_id in self.cam_names:
            with open ( osp.join ( self.data_dir, cam_id, 'pose_info.json' ), 'r' ) as f:
                pose_info = json.loads ( f.read () )
            self.info_dict[cam_id] = pose_info
        # dimGroup[t]: cumulative person counts per camera at instant t, i.e.
        # the boundaries of each camera's detections in the concatenated list.
        self.dimGroup = OrderedDict ()
        for img_id in self.info_dict[self.cam_names[0]]:
            # img_id is str
            cnt = 0
            this_dim = [0]
            for cam_id in self.cam_names:
                num_person = len ( self.info_dict[cam_id][img_id] ) - 1  # exclude key img_path
                cnt += num_person
                this_dim.append ( cnt )
            self.dimGroup[int ( img_id )] = torch.Tensor ( this_dim ).long ()
        # handle camera parameter
        self.P = camera_parameter['P'].astype ( np.float32 )
        self.K = camera_parameter['K'].astype ( np.float32 )
        self.RT = camera_parameter['RT'].astype ( np.float32 )
        # skew_op(x): 3x3 skew-symmetric (cross-product) matrix of vector x.
        self.skew_op = lambda x: torch.tensor ( [[0, -x[2], x[1]], [x[2], 0, -x[0]], [-x[1], x[0], 0]] )
        # Fundamental matrix between two cameras; one form takes separate
        # R/T, the other a combined [R|T] extrinsic matrix.
        self.fundamental_op = lambda K_0, R_0, T_0, K_1, R_1, T_1: torch.inverse ( K_0 ).t () @ (
                R_0 @ R_1.t ()) @ K_1.t () @ self.skew_op ( K_1 @ R_1 @ R_0.t () @ (T_0 - R_0 @ R_1.t () @ T_1) )
        self.fundamental_RT_op = lambda K_0, RT_0, K_1, RT_1: self.fundamental_op ( K_0, RT_0[:, :3], RT_0[:, 3], K_1,
                                                                                    RT_1[:, :3], RT_1[:, 3] )
        self.F = torch.zeros ( len ( self.cam_names ), len ( self.cam_names ), 3, 3 )  # NxNx3x3 matrix
        # TODO: optimize this stupid nested for loop
        for i in range ( len ( self.cam_names ) ):
            for j in range ( len ( self.cam_names ) ):
                self.F[i, j] += self.fundamental_RT_op ( torch.tensor ( self.K[i] ),
                                                         torch.tensor ( self.RT[i] ),
                                                         torch.tensor ( self.K[j] ), torch.tensor ( self.RT[j] ) )
                if self.F[i, j].sum() == 0:
                    self.F[i, j] += 1e-12  # to avoid nan
        # handle heatmap info
        self.heatmaps = None
        self.template = load_template ()

    def __getitem__(self, item):
        """
        Get a list of image in multi view at the same time
        :param item:
        :return: images, fnames, pid, cam_id
        """
        img_id = item
        data_by_cam = OrderedDict ()
        for cam_id in self.cam_names:
            data_by_cam[cam_id] = [v['img_path'] for k, v in self.info_dict[cam_id][str ( img_id )].items () if
                                   k != 'image_name']
        image = list ()
        fname = list ()
        pid = list ()
        cam_id = list ()
        for k, v in data_by_cam.items ():
            fname += v
            # Person id is taken from the file name: ..._<pid>.<ext>; the
            # camera id is the parent directory of the image path.
            pid += [osp.basename ( i ).split ( '_' )[-1].split ( '.' )[0] for i in v]
            cam_id += [osp.split ( i )[-2] for i in v]
            image += [self.test_transformer ( Image.open ( osp.join ( self.data_dir, i ) ) ) for i in v]
        image = torch.stack ( image )
        data_batch = (image, fname, pid, cam_id)
        return data_batch

    def __len__(self):
        # Number of time instants, taken from the first camera's pose info.
        if len ( self.info_dict ):
            return len ( self.info_dict[self.cam_names[0]] )
        else:
            return 0

    def get_unary(self, person, sub_imgid2cam, candidates, img_id):
        """Score 3D joint candidates by the product of the heatmap values of
        their 2D projections in every view listed in ``person``.

        Returns a (joint_num, point_num) array of log10 scores.
        """
        def get2Dfrom3D(x, P):
            """get the 2d joint from 3d joint"""
            x4d = np.append ( x, 1 )
            x2d = np.dot ( P, x4d )[0:2] / (np.dot ( P, x4d )[2] + 10e-6)  # to avoid np.dot(P, x4d)[2] = 0
            return x2d
        # get the unary of 3D candidates
        joint_num = len ( candidates )
        point_num = len ( candidates[0] )
        unary = np.ones ( (joint_num, point_num) )
        info_list = list ()  # This also occur in multi setimator
        for cam_id in self.cam_names:
            info_list += [i for _, i in self.info_dict[cam_id][str ( img_id )].items () if _ != 'image_name']
        # project the 3d point to each view to get the 2d points
        for i in person:
            Pi = self.P[sub_imgid2cam[i]]
            heatmap = np.load (
                osp.join ( self.data_dir, info_list[i]['heatmap_path'] ) )
            crop = np.array ( info_list[i]['heatmap_bbox'] )
            for j in range ( heatmap.shape[0] ):
                heatmap_j = heatmap[j]
                for k in range ( len ( candidates[j] ) ):
                    point_2d = get2Dfrom3D ( candidates[j][k], Pi )
                    # Shift into the heatmap's cropped coordinate frame.
                    point_2d_in_heatmap = point_2d - np.array ( [crop[0], crop[1]] )
                    # Projections falling outside the crop get a tiny score
                    # instead of zero so the log below stays finite.
                    if point_2d_in_heatmap[0] > heatmap_j.shape[1] or point_2d_in_heatmap[0] < 0 or point_2d_in_heatmap[
                        1] > heatmap_j.shape[0] or point_2d_in_heatmap[1] < 0:
                        unary_i = 10e-6
                    else:
                        unary_i = heatmap_j[int ( point_2d_in_heatmap[1] ), int ( point_2d_in_heatmap[0] )]
                    unary[j, k] = unary[j, k] * unary_i
        unary = np.log10 ( unary )
        return unary
def load_template(dataset='h36m'):
    """Return the hard-coded mean 3D body template for ``dataset``.

    :param dataset: one of ``'h36m'``, ``'Shelf'`` or ``'Campus'``
    :return: a (3, 13) numpy array of x/y/z coordinates for 13 joints
    :raises KeyError: if ``dataset`` is not one of the known names
    """
    h36m_template = np.array(
        [[0.0018327, 0.18507086, -0.17760321, 0.47678296, -0.46611124,
          0.71017444, -0.71153766, 0.11616346, -0.12763677, 0.11020779,
          -0.12279839, 0.12724847, -0.12452087],
         [-0.0827738, -0.07526917, -0.05761691, -0.09604145, -0.02306564,
          -0.18181808, -0.06122154, -0.12290852, -0.09051553, -0.08240831,
          -0.0523845, 0.03715071, 0.05312368],
         [1.70503833, 1.48879248, 1.4854071, 1.44106006, 1.42731128,
          1.42766638, 1.40946619, 0.97231879, 1.00533917, 0.50190244,
          0.53471307, 0.04910713, 0.07812376]])
    shelf_template = np.array(
        [[0.01273053, -0.09262084, -0.11961558, -0.07061234, -0.08761829,
          0.05067334, 0.0088842, 0.02459383, -0.08589214, 0.05839888,
          -0.08001912, -0.00395661, -0.14304384],
         [0.05546921, 0.22573541, -0.11484059, 0.25385895, -0.20887429,
          0.1862903, -0.16983723, 0.15736914, -0.06168539, 0.16666036,
          -0.06817156, 0.1914962, -0.09228449],
         [1.60827349, 1.28002543, 1.28858008, 1.00131741, 1.00584484,
          0.82851737, 0.7909359, 0.75035656, 0.73453197, 0.3672495,
          0.38460963, -0.04995751, -0.04118636]])
    campus_template = np.array(
        [[-0.52248502, -0.64536842, -0.37618539, -0.64643804, -0.28080107,
          -0.61725263, -0.39121596, -0.53340433, -0.42570307, -0.47950823,
          -0.33426481, -0.46441123, -0.45108205],
         [4.01057597, 3.88068601, 3.85644611, 3.88494234, 3.90516631,
          4.05613315, 4.02384458, 3.81515482, 3.85981597, 3.93538466,
          3.81045037, 3.89418933, 3.48824897],
         [1.95452321, 1.65249654, 1.63991337, 1.32163371, 1.27597037,
          1.30090807, 1.21906915, 1.04422362, 1.02544295, 0.57991175,
          0.58941852, 0.07508519, 0.30164174]])
    by_name = {'h36m': h36m_template,
               'Shelf': shelf_template,
               'Campus': campus_template}
    return by_name[dataset]
|
#!/usr/bin/env bash

# Name of the function used to play a sound; chosen by sound-setup.
playSound=

# Pick the first available audio backend.
# Fix over previous version: probe with `command -v` and discard its output;
# the old `which afplay` printed the binary's path to stdout on every setup.
sound-setup() {
  if command -v afplay >/dev/null 2>&1; then
    playSound='sound-afplay'
  elif command -v paplay >/dev/null 2>&1; then
    playSound='sound-paplay'
  elif command -v aplay >/dev/null 2>&1; then
    playSound='sound-aplay'
  else
    playSound='sound-beep'
  fi
}

sound-teardown() {
  true
}

# Play sound/<name>.mp3 with whichever backend sound-setup selected.
sound() {
  local sound=sound/$1.mp3
  $playSound "$sound"
}

sound-afplay() {
  afplay "$*" &
}

sound-paplay() {
  paplay "$*" &
}

sound-aplay() {
  aplay "$*" &
}

# Fallback: emit the terminal bell.
sound-beep() {
  echo -en '\a' &
}
#!/bin/ksh

# Per-user scratch directory.
TMPD=/tmp/$USER

# Cleanup
# Fix over previous version: `[ $CLEANUP ]` breaks (or misbehaves) when
# CLEANUP contains whitespace; test non-emptiness explicitly and quote.
if [ -n "$CLEANUP" ] ; then
    echo "Done"
else
    export GETFILES="*.tbl *.geneC* *.gb"
    export HOST=$HOSTNAME
    # mkdir -p avoids a failure if the directory appears between the -d
    # test and the mkdir (concurrent runs).
    if [ ! -d "$TMPD" ] ; then
        mkdir -p "$TMPD"
    fi
fi
|
import {Routes} from '@angular/router';
import {EventsComponent} from '@src/app/pages/events/events.component';
/** Routes for the events feature: the empty path renders EventsComponent. */
export const routes: Routes = [
  {
    path: '',
    component: EventsComponent,
  },
];
|
/**
 * Result of parsing an incoming request message.
 */
export interface ParsedRequest {
  /** The request text. */
  text: string;
  /** Item names extracted from the text. */
  items: string[];
  /** Character class referenced in the request, or null when none was found
   *  (presumably a Dofus class name — confirm against the parser). */
  dofusClass: string | null;
  /** Free-form tags attached to the request. */
  tags: string[];
}
|
/*
* Copyright 2022 QOS.<NAME> (Switzerland)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j.testUtil;
public class VersionUtil {

    /** Returned when the version string is missing or cannot be parsed. */
    static final int DEFAULT_GUESS = 8;

    /**
     * Determine the major version of the running JVM from the
     * {@code java.version} system property.
     */
    static public int getJavaMajorVersion() {
        String javaVersionString = System.getProperty("java.version");
        System.out.println("javaVersionString=" + javaVersionString);
        return getJavaMajorVersion(javaVersionString);
    }

    /**
     * Parse the major Java version out of a version string.
     *
     * Handles the legacy "1.x" scheme (Java 8 and earlier) and the modern
     * "x.y.z" scheme. Returns {@link #DEFAULT_GUESS} when the string is
     * null or unparsable.
     */
    static public int getJavaMajorVersion(String versionString) {
        if (versionString == null)
            return DEFAULT_GUESS;
        // Legacy scheme "1.x...": the major version is the digit after "1.".
        // Guard length and digit-ness explicitly -- the previous version
        // indexed charAt(2) unconditionally and threw on a bare "1." input.
        if (versionString.startsWith("1.") && versionString.length() > 2
                && Character.isDigit(versionString.charAt(2))) {
            return versionString.charAt(2) - '0';
        } else {
            String firstDigits = extractFirstDigits(versionString);
            try {
                return Integer.parseInt(firstDigits);
            } catch (NumberFormatException e) {
                return DEFAULT_GUESS;
            }
        }
    }

    /** Collect the leading run of digits ("11.0.2" -> "11"). */
    private static String extractFirstDigits(String versionString) {
        // StringBuilder: no synchronization needed for this local buffer.
        StringBuilder buf = new StringBuilder();
        for (char c : versionString.toCharArray()) {
            if (Character.isDigit(c))
                buf.append(c);
            else
                break;
        }
        return buf.toString();
    }
}
|
const { spawn, execSync } = require('child_process')
const { createHmac } = require('crypto')
const express = require('express')
const { readFileSync } = require('fs')
const { parse } = require('json5')
// Load ./config.json (parsed as JSON5).
// NOTE(review): on failure `config` stays undefined and later reads
// (config.cwd, config.secret) will throw — consider exiting here instead.
let config
try {
  config = parse(readFileSync('./config.json'))
} catch (err) {
  console.log(`Failed to load config due to ${err}`)
}
/**
 * Supervisor for the managed child app ("Frau"): spawns `node .` in
 * config.cwd, mirrors its output, and restarts it whenever it exits.
 */
function Frau() {
  let handle

  // Spawn the child. The promise resolves on the FIRST chunk of stdout OR
  // stderr output (i.e. the app produced something), and rejects only if
  // the process cannot be spawned at all.
  function launch() {
    return new Promise((resolve, reject) => {
      handle = spawn('node', ['.'], {
        cwd: config.cwd,
        stdio: 'pipe'
      })
      handle.on('error', err => {
        reject(err)
      })
      handle.stdout.setEncoding('utf8')
      handle.stdout.on('data', data => {
        // strip the trailing newline before relaying
        console.log(data.substring(0, data.length - 1))
        resolve()
      })
      handle.stderr.setEncoding('utf8')
      handle.stderr.on('data', data => {
        console.log(data.substring(0, data.length - 1))
        resolve()
      })
    })
  }

  // Start the child and keep it running: relaunch on exit or failed launch.
  // NOTE(review): a permanently broken child causes a tight restart loop —
  // there is no backoff.
  this.start = function() {
    launch()
      .then(() => {
        handle.once('exit', (code, signal) => {
          console.log(`Frau exited with code ${code}: ${signal}`)
          this.start()
        })
      })
      .catch(err => {
        console.log(`Failed to launch Frau due to ${err}`)
        this.start()
      })
  }

  // Stop the child WITHOUT triggering the auto-restart: remove the 'exit'
  // listener installed by start() before sending SIGINT.
  this.stop = function() {
    return new Promise((resolve, reject) => {
      handle.removeAllListeners('exit')
      handle.removeAllListeners('error')
      handle.once('exit', resolve)
      handle.once('error', reject)
      handle.kill('SIGINT')
    })
  }
}
const app = express()
const frau = new Frau()

// GitHub push webhook: verify the HMAC-SHA1 signature, stop the app,
// `git pull`, then restart it.
//
// Fix over the previous version: the HTTP response is now sent exactly
// once. Before, a failed stop/pull sent 500 from .catch() and the trailing
// .then() then sent 200 as well, crashing with ERR_HTTP_HEADERS_SENT.
app.post('/minami/github/push', (request, response) => {
  if (!request.header('X-Hub-Signature')) {
    response.sendStatus(403)
    return
  }
  const hmac = createHmac('sha1', config.secret)
  request.once('readable', () => {
    // NOTE(review): a single 'readable' event may not deliver the whole
    // body for large payloads — confirm, or accumulate until 'end'.
    hmac.update(request.read() || '')
    // NOTE(review): plain string comparison of digests is not timing-safe;
    // consider crypto.timingSafeEqual.
    if (hmac.digest('hex') !== request.header('X-Hub-Signature').slice(5)) {
      response.sendStatus(403)
      return
    }
    frau.stop()
      .then(() => {
        execSync('git pull', {
          cwd: config.cwd,
          stdio: 'inherit'
        })
        response.sendStatus(200)
      })
      .catch(err => {
        console.log(`Frau failed to update due to ${err}`)
        response.sendStatus(500)
      })
      .then(() => {
        // Always restart the app, whether or not the update succeeded.
        frau.start()
      })
  })
})

// Last-resort error handler: log and answer 500.
app.use(function (err, request, response, next) {
  console.log(`Failed to handle ${request.method} ${request.originalUrl} due to ${err}`)
  console.log(err.stack)
  response.sendStatus(500)
})

// Graceful shutdown on Ctrl-C: stop the child before exiting.
process.once('SIGINT', () => {
  frau.stop()
    .then(() => process.exit(0))
    .catch(err => {
      console.log(`Failed to stop Frau due to ${err}`)
      process.exit(1)
    })
})

app.listen(8080, () => frau.start())
#!/usr/bin/env bash
# Copyright The KubeDB Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Regenerate the CLI docs in-tree.
# Fix over previous version: abort immediately if any command fails —
# without `set -e`, a failed pushd ran `go run` in the wrong directory.
set -eou pipefail

pushd "$GOPATH/src/kubedb.dev/etcd/hack/gendocs"
go run main.go
popd
|
import json
import logging
import os
from models import Whitelist
from models import Vulnerability
_logger = logging.getLogger(__name__)
def read_whitelist(filename):
    """Attempt to populate and return a ```Whitelist```
    from the file pointed to by ```filename```.

    If ```filename``` is ```None``` then an empty ```Whitelist```
    is returned.

    If any kind of error occurs (unreadable file, invalid JSON, ...)
    ```None``` is returned.
    """
    if filename is None:
        return Whitelist({})
    # A single try covers both opening and parsing; the previous version
    # duplicated the identical log-and-return-None handler in a nested try.
    try:
        with open(filename) as fp:
            whitelist = Whitelist(json.load(fp))
    except Exception as ex:
        _logger.error("Could not read whitelist from '%s' - %s\n", filename, ex)
        return None
    # :TODO: validate whitelist with jsonschema or done in Whitelist class
    return whitelist
def read_vulnerabilities(directory):
    """Scan ``directory`` for scan-report JSON files and return the unique
    vulnerabilities found (deduplicated by CVE id, first occurrence wins).

    Returns ``None`` if the directory or any report file cannot be read.
    """
    by_cve_id = {}
    try:
        entries = os.listdir(directory)
    except Exception:
        _logger.error("Could not read vulnerabilities from directory '%s'", directory)
        return None
    for entry in entries:
        report_filename = os.path.join(directory, entry)
        try:
            with open(report_filename) as fp:
                report = json.load(fp)
            for feature in report.get('Layer', {}).get('Features', []):
                for raw_vulnerability in feature.get('Vulnerabilities', []):
                    vulnerability = Vulnerability(raw_vulnerability)
                    if vulnerability.cve_id not in by_cve_id:
                        by_cve_id[vulnerability.cve_id] = vulnerability
        except Exception:
            _logger.error("Could not read vulnerabilities from '%s'", report_filename)
            return None
    return by_cve_id.values()
|
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.*;
import java.util.*;
import java.util.regex.*;
/**
 * Reads tab-separated records ("id\ttext") from DB.txt, splits each text
 * into sentence-like spans, wraps every gene name from GENE_TERMS.txt found
 * in a span with &lt;Gene&gt; tags, and prints spans that mention at least
 * two genes (a gene co-occurrence).
 */
public class CooccurrenceMatrix {
    public static void main(String[] args)
    {
        //int r=0;
        String lineNumber = "";
        String filePath = "E:\\Sir\\Co-occurrence\\3co mat\\DB.txt";;
        String filePath1 = "E:\\Sir\\Co-occurrence\\\\3co mat\\GENE_TERMS.txt";
        BufferedReader br,br1;
        // String inputSearch = "Search";
        String line = "";
        String str= "";
        String gene="";
        int count=0;
        ArrayList<String> list = new ArrayList<String>();
        try {
            br = new BufferedReader(new FileReader(filePath));
            try {
                while((line = br.readLine()) != null)
                {
                    // Each record: "<id>\t<text>" (split at the first tab only).
                    String [] result =line.split("\t", 2);
                    String first = result[0];
                    String searchMe= result[1];
                    // Matches sentence-like spans ending in '.', '!' or '?'.
                    Pattern re = Pattern.compile("[^.!?\\s][^.!?]*(?:[.!?](?!['\"]?\\s|$)[^.!?]*)*[.!?]?['\"]?(?=\\s|$)", Pattern.MULTILINE | Pattern.COMMENTS);
                    Matcher reMatcher = re.matcher(searchMe);
                    while(reMatcher.find()) {
                        // NOTE(review): GENE_TERMS.txt is re-opened for every
                        // sentence, and br1 leaks if an exception is thrown
                        // before br1.close() below (no try-with-resources).
                        br1 = new BufferedReader(new FileReader(filePath1));
                        str= reMatcher.group();
                        count=0;
                        while((gene = br1.readLine()) != null)
                        {
                            String findMe=gene;
                            findMe="<Gene>"+gene+"</Gene>";
                            // Genes containing '+' need the '+' escaped in the regex.
                            if(gene.contains("+"))
                            {
                                String[] result1=gene.split("\\+",2);
                                String pat="(?i)\\b"+result1[0]+"\\+"+result1[1]+"\\b";
                                Pattern res = Pattern.compile(pat);
                                Matcher reMatchers = res.matcher(str);
                                if(reMatchers.find()) {
                                    count++;
                                    str= str.replaceAll(pat,findMe);
                                }
                            }
                            else
                            {
                                // NOTE(review): gene names containing regex
                                // metacharacters other than '+' are not escaped;
                                // Pattern.quote would be safer here.
                                String pat="(?i)\\b"+gene+"\\b";
                                Pattern res = Pattern.compile(pat);
                                Matcher reMatchers = res.matcher(str);
                                if(reMatchers.find()) {
                                    count++;
                                    str= str.replaceAll(pat,findMe);
                                }
                            }
                        }
                        // Print only sentences mentioning at least two genes.
                        if(count>=2)
                            System.out.println(first+"\t"+str);
                        br1.close();
                    }
                }
                br.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        } catch (FileNotFoundException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        // System.out.println("Times found at--"+count);
        //System.out.println("Word found at--"+lineNumber);
    }
}
package com.nextbreakpoint;
import org.junit.Test;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.function.Consumer;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Tests for {@code Try.and(Callable)}: the second callable runs only when
 * the preceding Try is a success (even with a null value), and a failure
 * anywhere in the chain suppresses later callables and success consumers.
 */
public class AndTest {
    @Test
    public void shouldNotCallSecondCallableWhenFailure() throws Exception {
        Callable<Object> callable = mock(Callable.class);
        when(callable.call()).thenReturn("X");
        Try.failure(new Exception()).and(callable).isFailure();
        verify(callable, times(0)).call();
    }

    @Test
    public void shouldCallSecondCallableWhenSuccessAndValueIsNull() throws Exception {
        Callable<Object> callable = mock(Callable.class);
        Try.success(null).and(callable).isPresent();
        verify(callable, times(1)).call();
    }

    @Test
    public void shouldCallSecondCallableWhenSuccessAndValueIsNotNull() throws Exception {
        Callable<String> callable = mock(Callable.class);
        Try.success("X").and(callable).isPresent();
        verify(callable, times(1)).call();
    }

    @Test
    public void shouldCallSecondCallableWhenFirstCallableReturnsValue() throws Exception {
        Callable<String> callable = mock(Callable.class);
        Try.of(() -> "X").and(callable).isPresent();
        verify(callable, times(1)).call();
    }

    @Test
    public void shouldReturnValueOfSecondCallableWhenFirstCallableReturnsValueAndSecondCallableReturnsValue() {
        assertEquals("Y", Try.of(() -> "X").and(() -> "Y").get());
    }

    @Test
    public void shouldNotCallSecondCallableWhenFirstCallableThrowsException() throws Exception {
        Callable<Object> callable = mock(Callable.class);
        Try.of(() -> { throw new Exception(); }).and(callable).isPresent();
        verify(callable, times(0)).call();
    }

    @Test
    public void shouldNotReturnValueWhenFirstCallableThrowsExceptionAndSecondCallableReturnsValue() {
        assertFalse(Try.of(() -> { throw new Exception(); }).and(() -> "Y").isPresent());
    }

    @Test
    public void shouldReturnFailureWhenAllCallablesThrowException() {
        assertTrue(Try.of(() -> { throw new Exception(); }).and(() -> { throw new Exception(); }).isFailure());
    }

    @Test
    public void shouldNotCallConsumersWhenFirstCallableThrowsException() {
        Consumer<Optional<Object>> consumer1 = mock(Consumer.class);
        Consumer<Optional<Object>> consumer2 = mock(Consumer.class);
        Try.of(() -> { throw new Exception(); }).onSuccess(consumer1).and(() -> "X").onSuccess(consumer2).isPresent();
        verify(consumer1, times(0)).accept(any(Optional.class));
        verify(consumer2, times(0)).accept(any(Optional.class));
    }

    @Test
    public void shouldNotCallConsumerWhenSecondCallableThrowsException() {
        Consumer<Optional<Object>> consumer = mock(Consumer.class);
        Try.of(() -> { throw new Exception(); }).and(() -> { throw new Exception(); }).onSuccess(consumer).isFailure();
        verify(consumer, times(0)).accept(any(Optional.class));
    }

    @Test
    public void shouldNotCallConsumersWhenFirstAndSecondCallableThrowException() {
        Consumer<Optional<Object>> consumer1 = mock(Consumer.class);
        Consumer<Optional<Object>> consumer2 = mock(Consumer.class);
        Try.of(() -> { throw new Exception(); }).onSuccess(consumer1)
                .and(() -> { throw new Exception(); }).onSuccess(consumer2).isFailure();
        verify(consumer1, times(0)).accept(any(Optional.class));
        verify(consumer2, times(0)).accept(any(Optional.class));
    }

    @Test
    public void shouldNotCallConsumerWhenFirstAndSecondCallablesThrowException() {
        Consumer<Optional<Object>> consumer = mock(Consumer.class);
        Try.of(() -> { throw new Exception(); }).onSuccess(consumer).and(() -> { throw new Exception(); }).isFailure();
        verify(consumer, times(0)).accept(any(Optional.class));
    }
}
|
package metrics
import "testing"
// TestStringPointerDeref checks that a nil pointer dereferences to the
// empty string and a non-nil pointer to its pointee.
func TestStringPointerDeref(t *testing.T) {
	sample := "test"
	cases := []struct {
		input *string
		want  string
	}{
		{input: nil, want: ""},
		{input: &sample, want: sample},
	}
	for _, c := range cases {
		if actual := stringPointerDeref(c.input); actual != c.want {
			t.Errorf("Got: %v, expected: %v", actual, c.want)
		}
	}
}
|
/*
 * Return the middle element of arr (length n, n >= 1 -- TODO confirm
 * callers never pass n == 0). For even n this is the truncated average of
 * the two middle elements.
 *
 * Fix over previous version: the sum of the two middle elements is widened
 * to long long so it cannot overflow int (undefined behavior) for large
 * values.
 */
int mid_element(int arr[], int n)
{
    if (n % 2 == 0)
        return (int)(((long long)arr[n/2] + arr[n/2 - 1]) / 2);
    else
        return arr[n/2];
}
def get_subsets(s):
    """Return the power set of ``s`` as a list of lists.

    Subsets are produced by progressively extending the collection with
    each element of ``s``, starting from the empty subset.
    """
    collected = [[]]
    for element in s:
        # The comprehension is fully evaluated before extending `collected`.
        collected += [existing + [element] for existing in collected]
    return collected
# Generated by Django 3.1.5 on 2021-02-04 12:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a nullable free-text ``avatar_url`` column to ``account``."""

    dependencies = [
        ('account', '0005_auto_20210203_1123'),
    ]

    operations = [
        migrations.AddField(
            model_name='account',
            name='avatar_url',
            field=models.TextField(blank=True, null=True),
        ),
    ]
|
/* jshint node: true */
'use strict';

// Re-export the library entry point so consumers can require the package root.
module.exports = require( './visualCaptcha.js' );
|
#!/usr/bin/env bash
# Based on:
# https://raw.githubusercontent.com/confluentinc/confluent-platform-security-tools/master/kafka-generate-ssl.sh

# Abort on the first failing command.
set -e

# Output artifact names/locations and certificate lifetime.
KEYSTORE_FILENAME="kafka.server.keystore.jks"
VALIDITY_IN_DAYS=3650
DEFAULT_TRUSTSTORE_FILENAME="kafka.server.truststore.jks"
TRUSTSTORE_WORKING_DIRECTORY="./testHelpers/certs/truststore"
KEYSTORE_WORKING_DIRECTORY="./testHelpers/certs/keystore"
CA_CERT_FILE="ca-cert"
# Intermediate files produced while signing the keystore certificate.
KEYSTORE_SIGN_REQUEST="./testHelpers/certs/cert-file"
KEYSTORE_SIGN_REQUEST_SRL="./testHelpers/certs/ca-cert.srl"
KEYSTORE_SIGNED_CERT="./testHelpers/certs/cert-signed"
# Plain-text password files consumed by the test helpers.
KEYSTORE_CRED_FILE="./testHelpers/certs/keystore_creds"
SSLKEY_CRED_FILE="./testHelpers/certs/sslkey_creds"
TRUSTSTORE_CRED_FILE="./testHelpers/certs/truststore_creds"
# Report that the given path must not exist and abort the script.
function file_exists_and_exit() {
  local offending_path=$1
  printf "%s\n" \
    "'$offending_path' cannot exist. Move or delete it before" \
    "re-running this script."
  exit 1
}
# Refuse to clobber output from a previous run.
if [ -e "$KEYSTORE_WORKING_DIRECTORY" ]; then
  file_exists_and_exit $KEYSTORE_WORKING_DIRECTORY
fi
if [ -e "$CA_CERT_FILE" ]; then
  file_exists_and_exit $CA_CERT_FILE
fi
if [ -e "$KEYSTORE_SIGN_REQUEST" ]; then
  file_exists_and_exit $KEYSTORE_SIGN_REQUEST
fi
if [ -e "$KEYSTORE_SIGN_REQUEST_SRL" ]; then
  file_exists_and_exit $KEYSTORE_SIGN_REQUEST_SRL
fi
if [ -e "$KEYSTORE_SIGNED_CERT" ]; then
  file_exists_and_exit $KEYSTORE_SIGNED_CERT
fi
echo
echo "Welcome to the Kafka SSL keystore and truststore generator script."
echo
echo "First, do you need to generate a trust store and associated private key,"
echo "or do you already have a trust store file and private key?"
echo
echo -n "Do you need to generate a trust store and associated private key? [yn] "
read generate_trust_store
trust_store_file=""
trust_store_private_key_file=""
# Either generate a fresh CA + trust store, or reuse existing files.
if [ "$generate_trust_store" == "y" ]; then
  if [ -e "$TRUSTSTORE_WORKING_DIRECTORY" ]; then
    file_exists_and_exit $TRUSTSTORE_WORKING_DIRECTORY
  fi
  mkdir $TRUSTSTORE_WORKING_DIRECTORY
  echo
  echo "OK, we'll generate a trust store and associated private key."
  echo
  echo "First, the private key."
  echo
  echo "You will be prompted for:"
  echo " - A password for the private key. Remember this."
  echo " - Information about you and your company."
  echo " - NOTE that the Common Name (CN) is currently not important."
  # Interactive: openssl prompts for the CA passphrase and subject fields.
  openssl req -new -x509 -keyout $TRUSTSTORE_WORKING_DIRECTORY/ca-key \
    -out $TRUSTSTORE_WORKING_DIRECTORY/ca-cert -days $VALIDITY_IN_DAYS
  trust_store_private_key_file="$TRUSTSTORE_WORKING_DIRECTORY/ca-key"
  echo
  echo "Two files were created:"
  echo " - $TRUSTSTORE_WORKING_DIRECTORY/ca-key -- the private key used later to"
  echo "   sign certificates"
  echo " - $TRUSTSTORE_WORKING_DIRECTORY/ca-cert -- the certificate that will be"
  echo "   stored in the trust store in a moment and serve as the certificate"
  echo "   authority (CA). Once this certificate has been stored in the trust"
  echo "   store, it will be deleted. It can be retrieved from the trust store via:"
  echo "   $ keytool -keystore <trust-store-file> -export -alias CARoot -rfc"
  echo
  echo "Now the trust store will be generated from the certificate."
  echo
  echo "You will be prompted for:"
  echo " - the trust store's password (labeled 'keystore'). Remember this"
  echo " - a confirmation that you want to import the certificate"
  keytool -keystore $TRUSTSTORE_WORKING_DIRECTORY/$DEFAULT_TRUSTSTORE_FILENAME \
    -alias CARoot -import -file $TRUSTSTORE_WORKING_DIRECTORY/ca-cert
  trust_store_file="$TRUSTSTORE_WORKING_DIRECTORY/$DEFAULT_TRUSTSTORE_FILENAME"
  echo
  echo "$TRUSTSTORE_WORKING_DIRECTORY/$DEFAULT_TRUSTSTORE_FILENAME was created."
  # don't need the cert because it's in the trust store.
  # rm $TRUSTSTORE_WORKING_DIRECTORY/$CA_CERT_FILE
else
  echo
  echo -n "Enter the path of the trust store file. "
  read -e trust_store_file
  if ! [ -f $trust_store_file ]; then
    echo "$trust_store_file isn't a file. Exiting."
    exit 1
  fi
  echo -n "Enter the path of the trust store's private key. "
  read -e trust_store_private_key_file
  if ! [ -f $trust_store_private_key_file ]; then
    echo "$trust_store_private_key_file isn't a file. Exiting."
    exit 1
  fi
fi
echo
echo "Continuing with:"
echo " - trust store file:        $trust_store_file"
echo " - trust store private key: $trust_store_private_key_file"
mkdir $KEYSTORE_WORKING_DIRECTORY
echo
echo "Now, a keystore will be generated. Each broker and logical client needs its own"
echo "keystore. This script will create only one keystore. Run this script multiple"
echo "times for multiple keystores."
echo
echo "You will be prompted for the following:"
echo " - A keystore password. Remember it."
echo " - Personal information, such as your name."
echo "     NOTE: currently in Kafka, the Common Name (CN) does not need to be the FQDN of"
echo "           this host. However, at some point, this may change. As such, make the CN"
echo "           the FQDN. Some operating systems call the CN prompt 'first / last name'"
echo " - A key password, for the key being generated within the keystore. Remember this."
# To learn more about CNs and FQDNs, read:
# https://docs.oracle.com/javase/7/docs/api/javax/net/ssl/X509ExtendedTrustManager.html
keytool -keystore $KEYSTORE_WORKING_DIRECTORY/$KEYSTORE_FILENAME \
  -alias localhost -validity $VALIDITY_IN_DAYS -genkey -keyalg RSA
echo
echo "'$KEYSTORE_WORKING_DIRECTORY/$KEYSTORE_FILENAME' now contains a key pair and a"
echo "self-signed certificate. Again, this keystore can only be used for one broker or"
echo "one logical client. Other brokers or clients need to generate their own keystores."
echo
echo "Fetching the certificate from the trust store and storing in $CA_CERT_FILE."
echo
echo "You will be prompted for the trust store's password (labeled 'keystore')"
keytool -keystore $trust_store_file -export -alias CARoot -rfc -file $CA_CERT_FILE
echo
echo "Now a certificate signing request will be made to the keystore."
echo
echo "You will be prompted for the keystore's password."
keytool -keystore $KEYSTORE_WORKING_DIRECTORY/$KEYSTORE_FILENAME -alias localhost \
  -certreq -file $KEYSTORE_SIGN_REQUEST
echo
echo "Now the trust store's private key (CA) will sign the keystore's certificate."
echo
echo "You will be prompted for the trust store's private key password."
openssl x509 -req -CA $CA_CERT_FILE -CAkey $trust_store_private_key_file \
  -in $KEYSTORE_SIGN_REQUEST -out $KEYSTORE_SIGNED_CERT \
  -days $VALIDITY_IN_DAYS -CAcreateserial
# creates $KEYSTORE_SIGN_REQUEST_SRL which is never used or needed.
echo
echo "Now the CA will be imported into the keystore."
echo
echo "You will be prompted for the keystore's password and a confirmation that you want to"
echo "import the certificate."
keytool -keystore $KEYSTORE_WORKING_DIRECTORY/$KEYSTORE_FILENAME -alias CARoot \
  -import -file $CA_CERT_FILE
rm $CA_CERT_FILE # delete the trust store cert because it's stored in the trust store.
echo
echo "Now the keystore's signed certificate will be imported back into the keystore."
echo
echo "You will be prompted for the keystore's password."
keytool -keystore $KEYSTORE_WORKING_DIRECTORY/$KEYSTORE_FILENAME -alias localhost -import \
  -file $KEYSTORE_SIGNED_CERT
echo
echo "Generating credential files"
echo
# Fixed passwords used by the test helpers that consume these stores.
echo "testtest" > $KEYSTORE_CRED_FILE
echo "testtest" > $SSLKEY_CRED_FILE
echo "testtest" > $TRUSTSTORE_CRED_FILE
echo
echo "All done!"
echo
echo "Delete intermediate files? They are:"
echo " - '$KEYSTORE_SIGN_REQUEST_SRL': CA serial number"
echo " - '$KEYSTORE_SIGN_REQUEST': the keystore's certificate signing request"
echo "   (that was fulfilled)"
echo " - '$KEYSTORE_SIGNED_CERT': the keystore's certificate, signed by the CA, and stored back"
echo "    into the keystore"
echo -n "Delete? [yn] "
read delete_intermediate_files
if [ "$delete_intermediate_files" == "y" ]; then
  rm $KEYSTORE_SIGN_REQUEST_SRL
  rm $KEYSTORE_SIGN_REQUEST
  rm $KEYSTORE_SIGNED_CERT
fi
|
<gh_stars>0
/* - Coeus web framework -------------------------
*
* Licensed under the Apache License, Version 2.0.
*
* Author: <NAME>
*/
package com.tzavellas.coeus.bind.converter
import java.util.{Date, TimeZone, Locale}
import java.text.{DateFormat, SimpleDateFormat}
import com.tzavellas.coeus.bind.Converter
/**
 * Converts between `String` and `java.util.Date` values.
 *
 * @param patternOrStyle either a `SimpleDateFormat` pattern (Left) or a
 *                       `DateFormat` style constant such as
 *                       `DateFormat.SHORT` (Right); defaults to "yyyy/MM/dd"
 * @param lenient        whether parsing tolerates out-of-range fields
 * @param timeZone       optional time zone applied to the format; when None
 *                       the formatter's (locale-dependent) default is used
 */
class DateConverter(
  patternOrStyle: Either[String, Int] = Left("yyyy/MM/dd"),
  lenient: Boolean = false,
  timeZone: Option[TimeZone] = None)
    extends AbstractConverter[Date] {

  // Empty input maps to null via filterEmpty (defined in AbstractConverter).
  def parse(text: String, locale: Locale) =
    filterEmpty(text, null, dateFormat(locale).parse(_))

  def format(value: Date, locale: Locale) = dateFormat(locale).format(value)

  // A fresh DateFormat per call: SimpleDateFormat is not thread-safe, so
  // instances must not be cached and shared.
  private def dateFormat(locale: Locale) = {
    val fmt = patternOrStyle match {
      case Left(pattern) => new SimpleDateFormat(pattern, locale)
      case Right(style)  => DateFormat.getDateInstance(style, locale)
    }
    for (tz <- timeZone) fmt.setTimeZone(tz)
    fmt.setLenient(lenient)
    fmt
  }
}
|
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import CountVectorizer  # unused; kept for compatibility
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import svm
from sklearn.metrics import confusion_matrix

# Importing the dataset
dataset = pd.read_csv('dataset.csv', encoding="ISO-8859-1")

# Cleaning the texts
import re
import nltk
nltk.download('stopwords')
from nltk.corpus import stopwords
from nltk.stem.porter import PorterStemmer

# Hoist loop-invariant work: previously a new PorterStemmer was built per
# document and set(stopwords.words('english')) was rebuilt per *word*,
# making the cleaning loop accidentally quadratic.
ps = PorterStemmer()
stop_words = set(stopwords.words('english'))

corpus = []
for i in range(0, len(dataset)):
    # Keep letters only, lowercase, split, stem, and drop English stop words.
    review = re.sub('[^a-zA-Z]', ' ', dataset['Text'][i])
    review = review.lower().split()
    review = [ps.stem(word) for word in review if word not in stop_words]
    corpus.append(' '.join(review))

# Creating the TF-IDF feature matrix (note: despite the original "Bag of
# Words" label, TfidfVectorizer applies inverse-document-frequency weighting).
cv = TfidfVectorizer(max_features=1500)
X = cv.fit_transform(corpus).toarray()
y = dataset.iloc[:, -1].values  # assumes the label is the last column — TODO confirm

# Splitting the dataset into the Training set and Test set
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20, random_state=0)

# Fitting a linear SVM to the Training set
clf = svm.LinearSVC()
clf.fit(X_train, y_train)

# Making predictions on the Test set
y_pred = clf.predict(X_test)

# Making the Confusion Matrix
cm = confusion_matrix(y_test, y_pred)
print(cm)
#!/bin/bash
# Remove generated output artifacts from previous runs.
echo '[ INFO ] Cleaning up ...'
for pattern in bbs_output_bytes full_ones jacobi_output_bytes; do
  rm -vf "$pattern".*
done
echo '[ INFO ] Done.'
|
<filename>xhlibrary/src/main/java/com/wxh/sdk/ui/base/BaseActivity.java
package com.wxh.sdk.ui.base;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.AnimationDrawable;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.wxh.sdk.R;
import com.wxh.sdk.android.XHActivityManager;
import com.wxh.sdk.http.HttpModel;
import com.wxh.sdk.http.HttpStatus;
import com.wxh.sdk.util.StringUtil;
import com.wxh.sdk.util.ViewUtils;
import com.wxh.sdk.view.LoadingDialog;
/**
 * Base class for all Activities: wires up the shared title bar, the inline
 * loading ("progress") view with tap-to-retry support, a modal loading
 * dialog, and the Activity transition animations. Subclasses implement
 * {@link #initView}, {@link #intentData}, {@link #initUI} and {@link #loadData}.
 */
public abstract class BaseActivity extends AppCompatActivity {

    /** Context handle; set to {@code this} in onCreate. */
    protected Context context;

    /** Back icon in the title bar. */
    ImageView iv_back;

    /** Title text in the title bar. */
    TextView tv_tlable;

    /** Container view shown while content is loading. */
    protected View ps_bview;

    /** Animated spinner image inside the loading view. */
    protected ImageView iv_ps;

    /** Text shown under the spinner. */
    protected TextView tv_pmsg;

    /** Modal progress dialog, created lazily by showLoading. */
    public LoadingDialog mProgressDialog;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        context = this;
        initView(savedInstanceState);
        // Inject annotated views once the content view has been set.
        ViewUtils.inject(this, getWindow().getDecorView());
        XHActivityManager.getActivityManager().addActivity(this);
        // Only build the UI when the required Intent extras are present.
        if (intentData()) {
            initUI();
        }
    }

    /**
     * Set the Activity's content view.
     */
    public abstract void initView(Bundle savedInstanceState);

    /**
     * Check that the parameters this Activity requires were passed in.
     * Defaults to returning true; override where validation is needed.
     * - returns: true when the requirements are satisfied
     */
    protected abstract boolean intentData();

    /**
     * Initialize the UI.
     */
    protected abstract void initUI();

    /**
     * Request the initial data; called manually after initUI, and again by
     * the tap-to-retry handler in setLoadProgress.
     */
    protected abstract void loadData();

    /**
     * Show a short toast message; no-op for empty messages.
     *
     * @param msg the text to display
     */
    protected void showToast(String msg) {
        if (StringUtil.isEmpty(msg)) return;
        Toast.makeText(this, msg, Toast.LENGTH_SHORT).show();
    }

    @Override
    public void startActivity(Intent intent) {// Activity launch animation
        super.startActivity(intent);
        overridePendingTransition(R.anim.base_slide_right_in, R.anim.base_slide_remain);
    }

    // Press the back button in mobile phone
    @Override
    public void onBackPressed() {// Activity close animation
        super.onBackPressed();
        overridePendingTransition(0, R.anim.base_slide_right_out);
    }

    /**
     * Initialize the title bar: set the title text and make the back icon
     * finish this Activity (with the close animation).
     *
     * @param lable the title text
     */
    public void initTitle(String lable) {
        ((TextView) findViewById(R.id.tv_tlable)).setText(lable);
        ((ImageView) findViewById(R.id.iv_back)).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                finish();
                overridePendingTransition(0, R.anim.base_slide_right_out);
            }
        });
    }

    /**
     * Show the loading view with the default retry text.
     * true = normal loading state; false = tap-to-retry state that calls
     * loadData again when tapped.
     *
     * @author:wh
     * @create:2016/4/1
     */
    protected void setLoadProgress(boolean load) {
        setLoadProgress(load, "轻触重新加载");
    }

    /**
     * Show the loading view with a custom message.
     * true = normal loading state; false = tap-to-retry state that calls
     * loadData again when tapped.
     *
     * @author:wh
     * @create:2016/4/1
     */
    protected void setLoadProgress(boolean load, String lable) {
        // Lazily look up and cache the loading views on first use.
        if (null == ps_bview) {
            ps_bview = findViewById(R.id.ps_bview);
            iv_ps = (ImageView) findViewById(R.id.iv_ps);
            AnimationDrawable animationDrawable = (AnimationDrawable) iv_ps.getDrawable();
            animationDrawable.start();
            tv_pmsg = (TextView) findViewById(R.id.tv_pmsg);
        }
        ps_bview.setVisibility(View.VISIBLE);
        iv_ps.setVisibility(View.VISIBLE);
        if (load) {// normal loading state
            tv_pmsg.setText("加载中...");
            ps_bview.setOnClickListener(null);// remove any click handler
        } else {
            iv_ps.setVisibility(View.GONE);// hide the spinner
            tv_pmsg.setText(lable);
            ps_bview.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    setLoadProgress(true);// show the loading view again
                    loadData();// retry loading the data
                }
            });
        }
    }

    /**
     * Hide the loading view.
     *
     * @author:wxh
     * @create:2015/4/1
     */
    protected void setProgressDisMiss() {
        if (null != ps_bview) {
            ps_bview.setVisibility(View.GONE);
        }
    }

    /**
     * Show the modal progress dialog with the default message.
     */
    public void showLoading(Context context) {
        showLoading(context, "");
    }

    // Show the modal progress dialog with a custom message; non-cancelable.
    public void showLoading(Context con, String content) {
        if (null == content || "".equals(content))
            content = "请稍候";
        if (mProgressDialog == null) {
            mProgressDialog = new LoadingDialog(con);
        }
        if (!mProgressDialog.isShowing()) {
            mProgressDialog.setMessage(content);
            mProgressDialog.setCanceledOnTouchOutside(false);
            mProgressDialog.setCancelable(false);
            mProgressDialog.show();
        }
    }

    // Dismiss and release the modal progress dialog.
    public void cancelLoading() {
        if (mProgressDialog != null) {
            mProgressDialog.cancel();
            mProgressDialog = null;
        }
    }

    /**
     * Check an HTTP response wrapper; returns true when the call succeeded.
     * On failure, optionally switches the loading view into its tap-to-retry
     * state, and (for non-OK responses) toasts the server-provided message.
     *
     * @param model response wrapper (null means the request itself failed)
     * @param ishow whether to show the retry view on failure
     */
    public boolean chkStatus(HttpModel model, boolean ishow) {
        if (model == null) {
            if(ishow){
                setLoadProgress(false);
            }
            return false;
        } else if (model.code != HttpStatus.OK) {
            if(ishow){
                setLoadProgress(false);
            }
            showToast(model.info);
            return false;
        }
        setProgressDisMiss();
        return true;
    }

    // Convenience overload: show the retry view on failure.
    public boolean chkStatus(HttpModel model){
        return chkStatus(model,true);
    }
}
|
# Find the index of the target element.
# NOTE(review): `arr` and `target` must be defined earlier; Array#index
# returns nil when `target` is not present — TODO confirm callers handle that.
target_index = arr.index(target)
# Print the index.
puts target_index
# Output: 2
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part1.sh
# Description: OpenWrt DIY script part 1 (Before Update feeds)
#
# Uncomment a feed source
# sed -i 's/^#\(.*helloworld\)/\1/' feeds.conf.default
# Add a feed source
#sed -i '$a src-git lienol https://github.com/Lienol/openwrt-package' feeds.conf.default
|
<filename>src/utils/index.js
const fs = require('fs');
const path = require('path');
const shell = require('child-process-promise');
const { v4: uuid } = require('uuid');
/**
 * Create a unique directory under `<base>/runners`, using a UUID
 * (optionally prefixed) as the directory's name.
 *
 * @param {Object} [options]
 * @param {string} [options.prefix] - optional prefix for the directory name
 * @returns {Promise<string>} absolute path of the created directory
 */
async function udir({ prefix = '' } = {}) {
  // Fix: calling udir() with no argument used to throw while destructuring.
  // process.env.PWD is not set on every platform (e.g. Windows), so fall
  // back to the real working directory.
  const base = process.env.PWD ?? process.cwd();
  const folder = path.join(base, 'runners', `${prefix}-${uuid()}`);
  await fs.promises.mkdir(folder, { recursive: true });
  return folder;
}
/**
 * Execute cmd and compare its stdout to the expected output.
 *
 * @param {*} cmd the shell command to execute (killed after 3s via SIGKILL)
 * @param {*} input the content that should reach the program's stdin;
 *   NOTE(review): it is only echoed back in the result here, never piped to
 *   the child process — confirm the caller embeds it in `cmd`.
 * @param {*} expected the content expected on stdout (compared after trim)
 * @returns {Promise<Object>} result with elapsedTime, passed, input,
 *   expected, and (on failure only) the actual output
 */
async function execute(cmd, input, expected) {
  const startedAt = Date.now();
  // NOTE(review): `cmd` is interpolated into a double-quoted shell string,
  // so untrusted input here would allow command injection — keep callers trusted.
  const { stdout: actual } = await shell.exec(
    `timeout -s 9 3 bash -c "${cmd}"`
  );
  const _expected = expected.trim();
  const _actual = actual.trim();
  // Fix: both sides were trimmed twice; trim is idempotent, compare directly.
  const passed = _expected === _actual;
  return {
    elapsedTime: Date.now() - startedAt,
    passed,
    input: input.trim(),
    expected: _expected,
    actual: passed ? undefined : _actual,
  };
}
/**
 * Summarize a list of test-case results.
 *
 * @param {*} results list of results after running all test cases; each item
 *   carries a boolean `passed` and a numeric `elapsedTime`
 * @returns {Object} totals, failure indexes (or undefined when none failed),
 *   cumulative elapsed time, and the original results
 */
function analyze(results) {
  // Indexes of the failing cases, preserving order.
  const failedIndexes = results
    .map((result, index) => (result.passed ? -1 : index))
    .filter((index) => index !== -1);

  const totalElapsedTime = results.reduce(
    (sum, { elapsedTime }) => sum + elapsedTime,
    0
  );

  const passed = results.length - failedIndexes.length;

  return {
    total: results.length,
    passed,
    failed: failedIndexes.length,
    failedIndexes: failedIndexes.length !== 0 ? failedIndexes : undefined,
    totalElapsedTime,
    results,
  };
}
module.exports = {
udir,
execute,
analyze,
};
|
<gh_stars>1-10
/*
* //******************************************************************
* //
* // Copyright 2016 Samsung Electronics All Rights Reserved.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
* //
* // Licensed under the Apache License, Version 2.0 (the "License");
* // you may not use this file except in compliance with the License.
* // You may obtain a copy of the License at
* //
* // http://www.apache.org/licenses/LICENSE-2.0
* //
* // Unless required by applicable law or agreed to in writing, software
* // distributed under the License is distributed on an "AS IS" BASIS,
* // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* // See the License for the specific language governing permissions and
* // limitations under the License.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
*/
package org.iotivity.cloud.base.device;
import org.iotivity.cloud.base.protocols.IResponse;
import io.netty.channel.ChannelHandlerContext;
/**
 * Device connected over HTTP. Apart from construction, every operation is an
 * unimplemented stub left by the original authors: sendResponse is a no-op
 * and the identity accessors return null.
 */
public class HttpDevice extends Device {

    public HttpDevice(ChannelHandlerContext ctx) {
        super(ctx);
    }

    @Override
    public void sendResponse(IResponse response) {
        // This message must converted to HttpResponse
        // NOTE(review): currently a no-op — the HTTP conversion is not implemented.
    }

    @Override
    public void onConnected() {
        // TODO Auto-generated method stub
    }

    @Override
    public void onDisconnected() {
        // TODO Auto-generated method stub
    }

    @Override
    public String getDeviceId() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getUserId() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public String getAccessToken() {
        // TODO Auto-generated method stub
        return null;
    }
}
|
package com.iovation.launchkey.sdk.client;
import com.iovation.launchkey.sdk.domain.service.AuthorizationResponse;
import com.iovation.launchkey.sdk.domain.webhook.AuthorizationResponseWebhookPackage;
import com.iovation.launchkey.sdk.domain.webhook.ServiceUserSessionEndWebhookPackage;
import com.iovation.launchkey.sdk.domain.webhook.WebhookPackage;
import com.iovation.launchkey.sdk.error.InvalidRequestException;
import com.iovation.launchkey.sdk.transport.Transport;
import com.iovation.launchkey.sdk.transport.domain.*;
import junit.framework.TestCase;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.mockito.ArgumentMatchers;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(Enclosed.class)
public class BasicServiceClientGetAuthorizationResponseTest extends TestCase {
/**
 * Verifies that getAuthorizationResponse forwards the correct service entity
 * and auth request id to the transport, and maps each field of the transport
 * response onto the returned AuthorizationResponse.
 */
@RunWith(MockitoJUnitRunner.class)
public static class StandardTests {
    @Mock
    public Transport transport;

    private final static UUID serviceId = UUID.randomUUID();
    private final static UUID authRequestId = UUID.randomUUID();

    private BasicServiceClient client;
    private ServiceV3AuthsGetResponse authResponse;

    @Before
    public void setUp() throws Exception {
        client = new BasicServiceClient(serviceId, transport);
        // Canonical transport response returned by the mocked transport.
        authResponse = new ServiceV3AuthsGetResponse(
                new EntityIdentifier(EntityIdentifier.EntityType.SERVICE, serviceId),
                UUID.randomUUID(),
                "service-user-hash",
                "org-user-hash",
                "user-push-id",
                authRequestId,
                true,
                "device-id",
                new String[]{"service", "pins"},
                "type",
                "reason",
                "denial-reason"
        );
        when(transport.serviceV3AuthsGet(any(UUID.class), any(EntityIdentifier.class))).thenReturn(authResponse);
    }

    @Test
    public void sendsExpectedServiceEntity() throws Exception {
        EntityIdentifier expected = new EntityIdentifier(EntityIdentifier.EntityType.SERVICE, serviceId);
        client.getAuthorizationResponse(authRequestId.toString());
        verify(transport).serviceV3AuthsGet(any(UUID.class), eq(expected));
    }

    @Test
    public void sendsExpectedAuthRequestId() throws Exception {
        client.getAuthorizationResponse(authRequestId.toString());
        verify(transport).serviceV3AuthsGet(eq(authRequestId), any(EntityIdentifier.class));
    }

    @Test
    public void usesResponseAuthorized() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertTrue(response.isAuthorized());
    }

    @Test
    public void usesProvidedAuthRequestId() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals(authResponse.getAuthorizationRequestId().toString(), response.getAuthorizationRequestId());
    }

    @Test
    public void usesServiceUserHash() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals("service-user-hash", response.getServiceUserHash());
    }

    @Test
    public void usesOrganizationUserHash() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals("org-user-hash", response.getOrganizationUserHash());
    }

    @Test
    public void usesUserPushId() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals("user-push-id", response.getUserPushId());
    }

    @Test
    public void usesDeviceId() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals("device-id", response.getDeviceId());
    }

    @Test
    public void usesServicePins() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals(Arrays.asList("service", "pins"), response.getServicePins());
    }

    @Test
    public void usesDenialreason() throws Exception {
        AuthorizationResponse response = client.getAuthorizationResponse(authRequestId.toString());
        assertEquals("denial-reason", response.getDenialReason());
    }
}
/**
 * Parameterized check that each transport "type" string maps to the expected
 * AuthorizationResponse.Type (unknown strings map to OTHER).
 */
@RunWith(Parameterized.class)
public static class ResponseTypeTests {
    private final String input;
    private final AuthorizationResponse.Type expectedOutput;

    @Parameterized.Parameters()
    public static Iterable<Object[]> data() {
        return Arrays.asList(new Object[][] {
                { "AUTHORIZED", AuthorizationResponse.Type.AUTHORIZED },
                { "DENIED", AuthorizationResponse.Type.DENIED },
                { "FAILED", AuthorizationResponse.Type.FAILED },
                { "UNKNOWN", AuthorizationResponse.Type.OTHER },
        });
    }

    public ResponseTypeTests(String input, AuthorizationResponse.Type expectedOutput) {
        this.input = input;
        this.expectedOutput = expectedOutput;
    }

    @Test
    public void properlyMapsType() throws Exception {
        Transport transport = mock(Transport.class);
        // Transport response whose "type" field is the parameterized input.
        when(transport.serviceV3AuthsGet(any(UUID.class), any(EntityIdentifier.class))).thenReturn(new ServiceV3AuthsGetResponse(
                new EntityIdentifier(EntityIdentifier.EntityType.SERVICE, UUID.randomUUID()),
                UUID.randomUUID(),
                "service-user-hash",
                "org-user-hash",
                "user-push-id",
                UUID.randomUUID(),
                true,
                "device-id",
                new String[]{"service", "pins"},
                this.input,
                "reason",
                "denial-reason"
        ));
        ServiceClient client = new BasicServiceClient(UUID.randomUUID(), transport);
        AuthorizationResponse response = client.getAuthorizationResponse(UUID.randomUUID().toString());
        assertEquals(this.expectedOutput, response.getType());
    }
}
/**
 * Parameterized check that each transport "reason" string maps to the
 * expected AuthorizationResponse.Reason (unknown strings map to OTHER).
 */
@RunWith(Parameterized.class)
public static class ResponseReasonTests {
    private final String input;
    private final AuthorizationResponse.Reason expectedOutput;

    @Parameterized.Parameters()
    public static Iterable<Object[]> data() {
        return Arrays.asList(new Object[][] {
                { "APPROVED", AuthorizationResponse.Reason.APPROVED },
                { "DISAPPROVED", AuthorizationResponse.Reason.DISAPPROVED },
                { "FRAUDULENT", AuthorizationResponse.Reason.FRAUDULENT },
                { "POLICY", AuthorizationResponse.Reason.POLICY },
                { "PERMISSION", AuthorizationResponse.Reason.PERMISSION },
                { "AUTHENTICATION", AuthorizationResponse.Reason.AUTHENTICATION },
                { "CONFIGURATION", AuthorizationResponse.Reason.CONFIGURATION },
                { "UNKNOWN", AuthorizationResponse.Reason.OTHER },
        });
    }

    public ResponseReasonTests(String input, AuthorizationResponse.Reason expectedOutput) {
        this.input = input;
        this.expectedOutput = expectedOutput;
    }

    @Test
    public void properlyMapsReason() throws Exception {
        Transport transport = mock(Transport.class);
        // Transport response whose "reason" field is the parameterized input.
        when(transport.serviceV3AuthsGet(any(UUID.class), any(EntityIdentifier.class))).thenReturn(new ServiceV3AuthsGetResponse(
                new EntityIdentifier(EntityIdentifier.EntityType.SERVICE, UUID.randomUUID()),
                UUID.randomUUID(),
                "service-user-hash",
                "org-user-hash",
                "user-push-id",
                UUID.randomUUID(),
                true,
                "device-id",
                new String[]{"service", "pins"},
                "type",
                this.input,
                "denial-reason"
        ));
        ServiceClient client = new BasicServiceClient(UUID.randomUUID(), transport);
        AuthorizationResponse response = client.getAuthorizationResponse(UUID.randomUUID().toString());
        assertEquals(this.expectedOutput, response.getReason());
    }
}
/**
 * Parameterized check that isFraud() is true exactly when the transport
 * "reason" field is FRAUDULENT.
 */
@RunWith(Parameterized.class)
public static class FraudTests {
    private final String input;
    private final Boolean expectedOutput;

    @Parameterized.Parameters()
    public static Iterable<Object[]> data() {
        return Arrays.asList(new Object[][] {
                { "APPROVED", false },
                { "DISAPPROVED", false },
                { "FRAUDULENT", true },
                { "POLICY", false },
                { "PERMISSION", false },
                { "AUTHENTICATION", false },
                { "CONFIGURATION", false },
                { "UNKNOWN", false },
        });
    }

    public FraudTests(String input, boolean expectedOutput) {
        this.input = input;
        this.expectedOutput = expectedOutput;
    }

    @Test
    public void properlyMapsFraud() throws Exception {
        Transport transport = mock(Transport.class);
        // Transport response whose "reason" field is the parameterized input.
        when(transport.serviceV3AuthsGet(any(UUID.class), any(EntityIdentifier.class))).thenReturn(new ServiceV3AuthsGetResponse(
                new EntityIdentifier(EntityIdentifier.EntityType.SERVICE, UUID.randomUUID()),
                UUID.randomUUID(),
                "service-user-hash",
                "org-user-hash",
                "user-push-id",
                UUID.randomUUID(),
                true,
                "device-id",
                new String[]{"service", "pins"},
                "type",
                this.input,
                "denial-reason"
        ));
        ServiceClient client = new BasicServiceClient(UUID.randomUUID(), transport);
        AuthorizationResponse response = client.getAuthorizationResponse(UUID.randomUUID().toString());
        assertEquals(this.expectedOutput, response.isFraud());
    }
}
} |
package org.sonatype.nexus.repository.protop.internal.search;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import javax.annotation.Nullable;
import java.util.List;
/**
 * Response for a protop V1 search request. A plain Jackson-deserialized DTO;
 * every field may be absent in the JSON payload, hence nullable.
 *
 * @since 3.7
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ProtopSearchResponse {

    /** Matched package entries. */
    @Nullable
    private List<ProtopSearchResponseObject> objects;

    /** Total number of matches reported by the server. */
    @Nullable
    private Integer total;

    /** Server-reported timestamp of the search. */
    @Nullable
    private String time;

    @Nullable
    public List<ProtopSearchResponseObject> getObjects() {
        return objects;
    }

    public void setObjects(@Nullable final List<ProtopSearchResponseObject> objects) {
        this.objects = objects;
    }

    @Nullable
    public Integer getTotal() {
        return total;
    }

    public void setTotal(@Nullable final Integer total) {
        this.total = total;
    }

    @Nullable
    public String getTime() {
        return time;
    }

    public void setTime(@Nullable final String time) {
        this.time = time;
    }
}
|
#!/usr/bin/env bash
# Initialize a project workspace: back up any existing code dir, recreate it,
# and populate it according to the requested project type (typo3|neos|git).

set -o pipefail  # trace ERR through pipes
set -o errtrace  # trace ERR through 'time command' and other functions
set -o nounset   ## set -u : exit the script if you try to use an uninitialised variable
set -o errexit   ## set -e : exit the script if any statement returns a non-true return value

source "$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.config.sh"

if [ "$#" -lt 1 ]; then
  echo "No project type defined (either typo3, neos or git)"
  exit 1
fi

# Validate the project type up front. Previously an unknown type fell through
# the case silently — after the existing code dir had already been backed up
# and replaced by an empty one.
case "$1" in
  "typo3"|"neos"|"git") ;;
  *)
    echo "Unknown project type '$1' (either typo3, neos or git)"
    exit 1
    ;;
esac

# If the app dir exists, back it up with a timestamp suffix.
[ ! -d "$CODE_DIR" ] || mv "$CODE_DIR" "$CODE_DIR".$(date +%Y%m%d%H%M%S);
mkdir -p -- "$CODE_DIR/"
chmod 777 "$CODE_DIR/"  # NOTE(review): world-writable by design? confirm this is intended.

case "$1" in

  ###################################
  ## TYPO3 CMS
  ###################################
  "typo3")
    execInDir "$CODE_DIR" "docker run --rm --env COMPOSER_CACHE_DIR=/tmp --user $(id -u):$(id -g) -v \$(pwd):/app composer:latest create-project typo3/cms-base-distribution /app"
    execInDir "$CODE_DIR" "touch web/FIRST_INSTALL"
    ;;

  ###################################
  ## TYPO3 NEOS
  ###################################
  "neos")
    execInDir "$CODE_DIR" "docker run --rm --env COMPOSER_CACHE_DIR=/tmp --user $(id -u):$(id -g) -v \$(pwd):/app composer:latest create-project neos/neos-base-distribution /app"
    # Fix: plain `echo` does not interpret \n — use -e so the leading
    # newline actually prints.
    echo -e "\nNOTE: You probably want to change the WEB_DOCUMENT_ROOT env in your etc/environment.yml to '/app/Web/' and run 'docker-composer up -d app' to populate the change."
    ;;

  ###################################
  ## GIT
  ###################################
  "git")
    if [ "$#" -lt 2 ]; then
      echo "Missing git url"
      exit 1
    fi
    git clone --recursive "$2" "$CODE_DIR"
    ;;
esac

touch -- "$CODE_DIR/.gitkeep"
|
export default t;
/**
 * Declaration for the module's default export.
 * NOTE(review): the implementation is elsewhere — presumably runs an async
 * task against the given directory; confirm against the source module.
 *
 * @param options.directory - directory the task operates on
 * @returns promise resolving when the task completes
 */
declare function t({ directory }: {
    directory: any;
}): Promise<void>;
|
#!/bin/sh
# Container entry script for magma: first run installs against MySQL and
# drops a lock file; later runs just refresh the stored hostname.

export PATH="$PATH:/srv/magma/bin"

if [ "$BASE_DIR" = "" ]; then
    export BASE_DIR=/magma
fi

LOCK_FILE="$BASE_DIR/install.lock"

# Invoked via the dispatch at the bottom, e.g. `entrypoint.sh run`.
run() {
    /sbin/service haveged start
    if [ ! -f "$LOCK_FILE" ]; then
        # First start: run the installer, then mark installation done.
        /scripts/install.sh "$MYSQL_HOST" "$MYSQL_USER" "$MYSQL_PASSWORD" "$MYSQL_SCHEMA"
        touch "$LOCK_FILE"
    else
        HOSTNAME=$(hostname)
        # NOTE(review): the password on the command line is visible in the
        # process list; consider MYSQL_PWD or a defaults file.
        mysql -h "$MYSQL_HOST" -u "$MYSQL_USER" -p"$MYSQL_PASSWORD" -e "UPDATE Hosts SET hostname = '$HOSTNAME' WHERE hostnum = 1;" "$MYSQL_SCHEMA"
    fi
    magmad "$BASE_DIR/etc/magma.config"
}

# Dispatch: run the function named by the script's arguments.
# Fix: quote "$@" so arguments with spaces are preserved.
"$@"
|
#!/bin/bash
# Build a Debian (buster) image via ./build.sh with a fixed configuration.
# Must run as root.

# Fix: quote the inner "$0" so a script path containing spaces works.
pushd "$(dirname "$0")" >/dev/null

if [[ "$(whoami)" != "root" ]]; then
  echo "must be root"
  exit 1
fi

IMG_NAME="machine" \
USE_QCOW2="1" \
RELEASE="buster" \
TARGET_HOSTNAME="machine" \
FIRST_USER_NAME="user" \
FIRST_USER_PASS="password" \
PUBKEY_SSH_FIRST_USER="$(cat /home/user/.ssh/id_rsa.pub)" \
ENABLE_SSH="1" \
PUBKEY_ONLY_SSH="1" \
STAGE_LIST="stage0 stage1 stage2 stage6 stage99" \
\
./build.sh

popd >/dev/null
|
<filename>src/decorators/cache_test.ts
// deno-lint-ignore-file require-await
import { assert, assertEquals, delay } from "../../test_deps.ts";
import { Cache } from "./cache.ts";
// Verifies that @Cache(ttl) memoizes per-argument: concurrent calls with the
// same argument share one promise, caches on different methods are
// independent, and entries expire after the TTL.
Deno.test("cache hit", async () => {
  const callStacks: number[] = [];
  class A {
    @Cache(200)
    async method(id: number) {
      callStacks.push(id);
      await delay(10);
      return id;
    }
    @Cache(100)
    async method2(id: number) {
      callStacks.push(id);
      await delay(10);
      return id;
    }
  }
  const a = new A();
  const p1 = a.method(1);
  const p2 = a.method(1);
  const p3 = a.method(2);
  // Only one underlying call per distinct argument.
  assertEquals(callStacks, [1, 2]);
  assert(p1 === p2);
  const p4 = a.method2(1);
  const p5 = a.method2(1);
  // method2 has its own cache, so the same id triggers a fresh call.
  assertEquals(callStacks, [1, 2, 1]);
  assert(p4 === p5);
  assertEquals(await p1, 1);
  assertEquals(await p2, 1);
  assertEquals(await p3, 2);
  assertEquals(await p4, 1);
  assertEquals(await p5, 1);
  // After the 200ms TTL, the entry is evicted and the method runs again.
  await delay(200);
  callStacks.length = 0;
  const p6 = a.method(1);
  assertEquals(callStacks, [1]);
  await p6;
  await delay(200);
});

// Verifies the custom key function: methods whose key functions produce the
// same key share cache entries, different key functions do not.
Deno.test("self key", async () => {
  const callStacks: number[] = [];
  class A {
    @Cache(200, (id) => `${id + 123}`)
    method(id: number) {
      callStacks.push(1);
      return id;
    }
    @Cache(200, (id) => `${id + 123}`)
    method2(id: number) {
      callStacks.push(2);
      return id;
    }
    @Cache(100, (id) => `${id + "a"}`)
    async method3(id: number) {
      callStacks.push(3);
      return id;
    }
    @Cache(100, (id) => `${id + "a"}`)
    async method4(id: number) {
      callStacks.push(4);
      return id;
    }
  }
  const a = new A();
  const p1 = a.method(1);
  const p2 = a.method(1);
  assert(p1 === p2);
  assertEquals(callStacks, [1]);
  const p3 = a.method2(1);
  // Same key function output -> method2 reuses method's cached value.
  assert(p3 === p2);
  assertEquals(callStacks, [1, 2]);
  const p4 = a.method3(1);
  const p5 = a.method3(1);
  assert(p4 === p5);
  assertEquals(callStacks, [1, 2, 3]);
  const p6 = a.method4(1);
  // NOTE(review): method4 shares method3's key output yet gets a distinct
  // promise — presumably async caches are scoped per method; confirm in cache.ts.
  assert(p6 !== p4);
  assertEquals(callStacks, [1, 2, 3, 4]);
  assertEquals(await p4, await p6);
  await delay(200);
});
|
#!/bin/sh

# -----------------------------------------
# ZSH OPTIONS
# -----------------------------------------
setopt auto_cd            # a bare directory name cd's into it
setopt auto_pushd         # cd pushes the previous directory onto the stack
setopt pushd_ignore_dups  # keep the directory stack free of duplicates
setopt pushd_minus        # swap the meaning of `cd +n` and `cd -n`

# -----------------------------------------
# ALIASES
# -----------------------------------------
# Quick parent-directory navigation.
alias -g ...='../..'
alias -g ....='../../..'
alias -g .....='../../../..'
alias -g ......='../../../../..'

# Jump to the Nth most recent directory on the stack (see `d` below).
alias 1='cd -'
alias 2='cd -2'
alias 3='cd -3'
alias 4='cd -4'
alias 5='cd -5'
alias 6='cd -6'
alias 7='cd -7'
alias 8='cd -8'
alias 9='cd -9'

alias md='mkdir -p'
alias rd=rmdir
alias d='dirs -v | head -10'  # list the 10 most recent stack entries

# List directory contents (exa with icons)
EXA_ICON_SPACING=2
alias l='exa -l --icons'
alias ll='exa -l -a -g --git --icons'
alias lt='exa -T --icons'
alias llt='exa -T -a --icons'
|
"use strict";

// Icon definition for Material "donut large" (two-tone theme): a plain
// SVG-node description (viewBox + path children) consumed by an icon renderer.
// NOTE(review): presumably machine-generated by an icon build pipeline —
// avoid hand-editing the path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_donut_large_twotone = void 0;
var ic_donut_large_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0V0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M13 5.08c3.06.44 5.48 2.86 5.92 5.92h3.03c-.47-4.72-4.23-8.48-8.95-8.95v3.03zM18.92 13c-.44 3.06-2.86 5.48-5.92 5.92v3.03c4.72-.47 8.48-4.23 8.95-8.95h-3.03zM11 18.92c-3.39-.49-6-3.4-6-6.92s2.61-6.43 6-6.92V2.05c-5.05.5-9 4.76-9 9.95 0 5.19 3.95 9.45 9 9.95v-3.03z"
    },
    "children": []
  }]
};
exports.ic_donut_large_twotone = ic_donut_large_twotone;
<reponame>joonhocho/sanivali
import type { ISanivaliDef } from '_src/types';
export type ValidParam = boolean | undefined;

export type ValidRuleItem = 'valid' | ['valid', ValidParam?];

// Sanivali definition for the "valid" rule: the produced validator always
// succeeds, unless the rule was explicitly configured with `false`, in which
// case it always fails.
export const validDef: ISanivaliDef = {
  validator: (param?: ValidParam) => {
    const outcome = param !== false;
    return () => outcome;
  },
  // Run even when the value being validated is null/undefined.
  runOnNil: true,
};
|
using System;
using System.Collections.Generic;
/// <summary>
/// A single travel booking record: who is travelling, where, and when.
/// </summary>
public class Booking
{
    public string CustomerName { get; set; }
    public string Destination { get; set; }
    public DateTime DateOfTravel { get; set; }
}
/// <summary>
/// Maintains an in-memory collection of travel bookings.
/// </summary>
public class TravelAgency
{
    private List<Booking> bookings = new List<Booking>();

    /// <summary>Records a new booking with the given details.</summary>
    public void AddBooking(string customerName, string destination, DateTime dateOfTravel)
    {
        var booking = new Booking();
        booking.CustomerName = customerName;
        booking.Destination = destination;
        booking.DateOfTravel = dateOfTravel;
        bookings.Add(booking);
    }

    /// <summary>
    /// Removes the first booking matching all three fields; does nothing
    /// when no booking matches.
    /// </summary>
    public void RemoveBooking(string customerName, string destination, DateTime dateOfTravel)
    {
        for (int i = 0; i < bookings.Count; i++)
        {
            Booking candidate = bookings[i];
            bool matches = candidate.CustomerName == customerName
                && candidate.Destination == destination
                && candidate.DateOfTravel == dateOfTravel;
            if (matches)
            {
                bookings.RemoveAt(i);
                return;
            }
        }
    }

    /// <summary>Returns every booking for the given destination, in insertion order.</summary>
    public List<Booking> GetBookingsForDestination(string destination)
    {
        var matches = new List<Booking>();
        foreach (Booking booking in bookings)
        {
            if (booking.Destination == destination)
            {
                matches.Add(booking);
            }
        }
        return matches;
    }
}
#!/usr/bin/env bash
# Launch startup.sh detached from the terminal, discarding all output.
# Fix: quote "$1" so an argument containing spaces survives word splitting.
nohup ./startup.sh "$1" >/dev/null 2>&1 &
#!/bin/sh
# Build the cadastral-data-disposal image from the Dockerfile in this
# directory, then print the command for running it.
docker build . -t cadastraldatadisposal
echo
echo
echo "To run the docker container execute:"
echo "    $ docker run -p 8080:8080 cadastraldatadisposal"
|
<reponame>benoom/NewsAppP6P7<gh_stars>0
package com.example.android.newsappp6p7;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
/**
* An {@link NewsAdapter} knows how to create a list item layout for each news article
* in the data source (a list of {@link News} objects).
*
* These list item layouts will be provided to an adapter view like ListView
* to be displayed to the user.
*/
public class NewsAdapter extends ArrayAdapter<News> {
/**
* Tag for the log messages
*/
private static final String LOG_TAG = NewsAdapter.class.getSimpleName();
/**
* Constructs a new {@link NewsAdapter}.
*
* @param context of the app
* @param news is the list of news articles, which is the data source of the adapter
*/
public NewsAdapter(Context context, List<News> news) {
super(context, 0, news);
}
/**
* Returns a list item view that displays information about the news article at the given
* position in the list of news articles.
*/
@Override
public View getView(int position, View convertView, ViewGroup parent) {
// Check if there is an existing list item view (called convertView) that we can reuse,
// otherwise, if convertView is null, then inflate a new list item layout.
View listItemView = convertView;
if (listItemView == null) {
listItemView = LayoutInflater.from(getContext()).inflate(
R.layout.news_list_item, parent, false);
}
// Find the news article at the given position in the list of news articles
News currentNews = getItem(position);
// Find the TextView with the view ID section_id
TextView sectionView = listItemView.findViewById(R.id.section_id);
// Display the current news article section in that TextView
sectionView.setText(currentNews.getNameofSection());
// Find the TextView with the view ID section_id
TextView contributorView = listItemView.findViewById(R.id.contributor);
// Display the current news article section in that TextView
contributorView.setText(currentNews.getContributor());
// Find the TextView with view ID article_title
TextView titleView = listItemView.findViewById(R.id.article_title);
// Display the current news article title in that TextView
titleView.setText(currentNews.getArticleTitle());
// Create a new Date object from the time of the news article
String dateString = new String(currentNews.getTimePublished());
// Find the TextView with view ID date
TextView dateView = listItemView.findViewById(R.id.date);
// Format the date string (i.e. "Mar 3, 1984")
String formattedDate = formatDate(dateString);
// Display the date of the current news article in that TextView
dateView.setText(formattedDate);
// Find the TextView with view ID time
TextView timeView = listItemView.findViewById(R.id.time);
// Format the time string (i.e. "4:30PM")
String formattedTime = formatTime(dateString);
// Display the time of the current news article in that TextView
timeView.setText(formattedTime);
// Return the list item view that is now showing the appropriate data
return listItemView;
}
/**
* Return the formatted date string (i.e. "Mar 3, 1984")
*/
private String formatDate(String input) {
SimpleDateFormat formatInput =
new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault());
SimpleDateFormat formatOutput =
new SimpleDateFormat("MM-dd-yyyy", Locale.getDefault());
String output = null;
try {
Date dt = formatInput.parse(input);
output = formatOutput.format(dt);
} catch (ParseException e) {
Log.e(LOG_TAG, "Error during formatting date", e);
}
return output;
}
/**
* Return the formatted date string (i.e. "Mar 3, 1984")
*/
private String formatTime(String input) {
SimpleDateFormat formatInput =
new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.getDefault());
SimpleDateFormat formatOutput =
new SimpleDateFormat("HH:mm", Locale.getDefault());
String output = null;
try {
Date dt = formatInput.parse(input);
output = formatOutput.format(dt);
} catch (ParseException e) {
Log.e(LOG_TAG, "Error during formatting time", e);
}
return output;
}
}
|
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
  Schema = mongoose.Schema;
/**
 * Value Schema
 *
 * A named numeric quantity. `type` selects the representation (crisp,
 * interval, fuzzy or z-number) and the numeric payload is stored in the
 * `value` array so all representations share one field.
 */
var ValueSchema = new Schema({
  name: {
    type: String,
    default: '',
    required: 'Please fill Value name',
    trim: true
  },
  abbreviation: {
    type: String,
    default: '',
    required: 'Please fill abbreviation name',
    trim: true
  },
  type: {
    type: String,
    default: '',
    enum: ['crisp', 'interval', 'fuzzy', 'z-number'],
    required: 'Please select value type'
  },
  // Numeric payload; length/meaning depends on `type` above.
  value: {
    type: [Number]
    //required: 'Please insert the value'
  },
  // NOTE(review): min/max are required for EVERY type, including 'crisp' —
  // confirm that is intended rather than interval-only.
  minValue:{
    type: Number,
    required: 'Enter minimum value'
  },
  maxValue:{
    type: Number,
    required: 'Enter maximum value'
  },
  created: {
    type: Date,
    default: Date.now
  },
  // Owning user reference
  user: {
    type: Schema.ObjectId,
    ref: 'User'
  }
});
mongoose.model('Value', ValueSchema);
|
<reponame>mdblp/viz
/*
* == BSD2 LICENSE ==
* Copyright (c) 2017, Tidepool Project
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the associated License, which is identical to the BSD 2-Clause
* License as published by the Open Source Initiative at opensource.org.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the License for more details.
*
* You should have received a copy of the License along with this program; if
* not, you can obtain one from Tidepool Project at tidepool.org.
* == BSD2 LICENSE ==
*/
/* the logic here (and the tests) are a port of tideline's
js/plot/util/commonbolus.js */
import _ from 'lodash';
import { formatDecimalNumber, formatPercentage } from './format';
/**
 * fixFloatingPoint
 * @param {Number} n - numeric value (typically a sum of insulin amounts)
 *
 * @return {Number} numeric value rounded to 3 decimal places, removing
 * binary floating-point artifacts such as 0.1 + 0.2 -> 0.30000000000000004
 */
function fixFloatingPoint(n) {
  return parseFloat(formatDecimalNumber(n, 3));
}
/**
 * getBolusFromInsulinEvent
 * @param {Object} insulinEvent - a Tidepool wizard or bolus object
 *
 * @return {Object} the embedded `bolus` sub-object when the event carries
 * one (wizard events do), otherwise the event itself, which is already a
 * bolus object
 */
export function getBolusFromInsulinEvent(insulinEvent) {
  return insulinEvent.bolus || insulinEvent;
}
/**
 * getCarbs
 * @param {Object} insulinEvent - a Tidepool wizard or bolus object
 *
 * @return {Number} grams of carbs input into bolus calculator
 * NaN if bolus calculator not used; null if no carbInput
 */
export function getCarbs(insulinEvent) {
  // only wizard events carry calculator inputs; a plain bolus has none
  if (insulinEvent.type !== 'wizard') {
    return NaN;
  }
  // _.get yields the null default only when carbInput is absent/undefined
  return _.get(insulinEvent, 'carbInput', null);
}
/**
 * getProgrammed
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} value of insulin programmed for delivery in the given
 * insulinEvent; for an interrupted bolus this is the originally intended
 * amount (the `expected*` fields), not what was actually delivered
 */
export function getProgrammed(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
    // _.inRange(v, Infinity) is true only for finite v in [0, Infinity), so
    // this rejects wizards whose embedded bolus has neither a usable
    // `normal` nor a usable `extended` amount (undefined/NaN/negative)
    if (!_.inRange(bolus.normal, Infinity) && !_.inRange(bolus.extended, Infinity)) {
      return NaN;
    }
  }
  if (bolus.extended != null && bolus.expectedExtended != null) {
    if (bolus.normal != null) {
      if (bolus.expectedNormal != null) {
        // interrupted during the normal portion: both fall back to expected
        return fixFloatingPoint(bolus.expectedNormal + bolus.expectedExtended);
      }
      // normal portion completed; extended portion was cut short
      return fixFloatingPoint(bolus.normal + bolus.expectedExtended);
    }
    return bolus.expectedExtended;
  } else if (bolus.extended != null) {
    if (bolus.normal != null) {
      if (bolus.expectedNormal != null) {
        // this situation should not exist!
        throw new Error(
          'Combo bolus found with a cancelled `normal` portion and non-cancelled `extended`!'
        );
      }
      // uninterrupted combo bolus: programmed = delivered
      return fixFloatingPoint(bolus.normal + bolus.extended);
    }
    return bolus.extended;
  }
  return bolus.expectedNormal || bolus.normal;
}
/**
 * getRecommended
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} total recommended insulin dose: the device's `net`
 * recommendation when present, otherwise carb + correction components;
 * NaN when the event has no recommendation at all
 */
export function getRecommended(insulinEvent) {
  // a simple manual/"quick" bolus won't have a `recommended` field
  if (!insulinEvent.recommended) {
    return NaN;
  }
  // prefer the device-computed net recommendation when it exists
  const netRecommendation = _.get(insulinEvent, ['recommended', 'net'], null);
  if (netRecommendation !== null) {
    return netRecommendation;
  }
  // otherwise sum the individual components, treating missing ones as 0
  let rec = 0;
  rec += _.get(insulinEvent, ['recommended', 'carb'], 0);
  rec += _.get(insulinEvent, ['recommended', 'correction'], 0);
  return fixFloatingPoint(rec);
}
/**
 * getDelivered
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} units of insulin actually delivered in this insulinEvent
 * (normal + extended portions); NaN for a wizard with no usable bolus
 */
export function getDelivered(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
    // _.inRange(v, Infinity) doubles as a "finite non-negative number" check
    if (!_.inRange(bolus.normal, Infinity) && !_.inRange(bolus.extended, Infinity)) {
      return NaN;
    }
  }
  if (bolus.extended != null) {
    if (bolus.normal != null) {
      // combo bolus: delivered = immediate + extended portions
      return fixFloatingPoint(bolus.extended + bolus.normal);
    }
    return bolus.extended;
  }
  return bolus.normal;
}
/**
 * getDuration
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} actual extended-delivery duration in milliseconds;
 * NaN when the bolus has no valid duration
 */
export function getDuration(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
  }
  // don't want truthiness here because want to return duration
  // from a bolus interrupted immediately (duration = 0)
  if (!_.inRange(bolus.duration, Infinity)) {
    return NaN;
  }
  return bolus.duration;
}
/**
 * getExtended
 * @param {Object} insulinEvent - a Tidepool wizard or bolus object
 *
 * @return {Number} units of insulin delivered over an extended duration;
 * NaN when the bolus has no valid `extended` value
 */
export function getExtended(insulinEvent) {
  const bolus = getBolusFromInsulinEvent(insulinEvent);
  // don't want truthiness here because want to return expectedExtended
  // from a bolus interrupted immediately (extended = 0)
  if (!_.inRange(bolus.extended, Infinity)) {
    return NaN;
  }
  return bolus.extended;
}
/**
 * getExtendedPercentage
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {String} formatted percentage of the combo bolus that was
 * programmed for later (extended) delivery; NaN when the event is not a
 * combo bolus (needs both a normal and an extended portion)
 */
export function getExtendedPercentage(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
  }
  // not a combo bolus unless both portions exist (truthiness intended:
  // an all-zero portion is not a real portion)
  if (!bolus.normal || !(bolus.extended || bolus.expectedExtended)) {
    return NaN;
  }
  // use the originally programmed amount for an interrupted bolus
  const extended = bolus.expectedExtended || bolus.extended;
  const programmed = getProgrammed(bolus);
  return formatPercentage(extended / programmed);
}
/**
 * getMaxDuration
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} programmed (expected) duration in milliseconds when the
 * bolus was interrupted, otherwise the actual duration; NaN when invalid
 */
export function getMaxDuration(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
  }
  // don't want truthiness here because want to return expectedDuration
  // from a bolus interrupted immediately (duration = 0)
  if (!_.inRange(bolus.duration, Infinity)) {
    return NaN;
  }
  return bolus.expectedDuration || bolus.duration;
}
/**
 * getMaxValue
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Number} the larger of the programmed and recommended doses —
 * used e.g. to scale bolus rendering; NaN for a wizard without a usable bolus
 */
export function getMaxValue(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
    if (!bolus.normal && !bolus.extended) {
      return NaN;
    }
  }
  const programmed = getProgrammed(bolus);
  // a missing recommendation (NaN) is treated as 0 via `|| 0`
  const recommended = getRecommended(insulinEvent) || 0;
  return (recommended > programmed) ? recommended : programmed;
}
/**
 * getNormalPercentage
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {String} formatted percentage of the combo bolus programmed for
 * immediate (normal) delivery; NaN when the event is not a combo bolus
 */
export function getNormalPercentage(insulinEvent) {
  let bolus = insulinEvent;
  if (_.get(insulinEvent, 'type') === 'wizard') {
    bolus = getBolusFromInsulinEvent(insulinEvent);
  }
  // requires both a normal and an extended portion (programmed or expected)
  if (!(bolus.normal || bolus.expectedNormal) || !(bolus.extended || bolus.expectedExtended)) {
    return NaN;
  }
  // use the originally programmed amount for an interrupted bolus
  const normal = bolus.expectedNormal || bolus.normal;
  const programmed = getProgrammed(bolus);
  return formatPercentage(normal / programmed);
}
/**
 * getTotalBolus
 * @param {Array} insulinEvents - Array of Tidepool bolus or wizard objects
 *
 * @return {Number} total bolus insulin in units; note that a single event
 * for which getDelivered yields NaN makes the whole sum NaN
 */
export function getTotalBolus(insulinEvents) {
  return _.reduce(insulinEvents, (result, insulinEvent) => (
    result + getDelivered(insulinEvent)
  ), 0);
}
/**
 * hasExtended
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Boolean} whether the bolus has an extended delivery portion
 */
export function hasExtended(insulinEvent) {
  const bolus = getBolusFromInsulinEvent(insulinEvent);
  // NB: intentionally invoking truthiness here
  // a bolus with `extended` value 0 and `expectedExtended` value 0 is pointless to render
  // (the original appended `|| false`, which is redundant: Boolean() already
  // returns a primitive boolean)
  return Boolean(bolus.extended || bolus.expectedExtended);
}
/**
 * isInterruptedBolus
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Boolean} whether the bolus was interrupted or not — i.e. an
 * `expected*` amount exists and differs from what was actually delivered
 */
export function isInterruptedBolus(insulinEvent) {
  const bolus = getBolusFromInsulinEvent(insulinEvent);
  // `!= null` keeps 0-unit deliveries in play; `expected*` uses truthiness
  // because an expected amount of 0 carries no interruption information
  const cancelledDuringNormal = Boolean(
    bolus.normal != null &&
    bolus.expectedNormal &&
    bolus.normal !== bolus.expectedNormal
  );
  const cancelledDuringExtended = Boolean(
    bolus.extended != null &&
    bolus.expectedExtended &&
    bolus.extended !== bolus.expectedExtended
  );
  // _.inRange(v, Infinity): true only for finite v >= 0, i.e. a real amount
  if (_.inRange(bolus.normal, Infinity)) {
    if (!bolus.extended) {
      return cancelledDuringNormal;
    }
    return cancelledDuringNormal || cancelledDuringExtended;
  }
  return cancelledDuringExtended;
}
/**
 * isOverride
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Boolean} whether the bolus programmed was larger than the
 * calculated recommendation (NaN comparisons yield false, so events
 * without a recommendation are never overrides)
 */
export function isOverride(insulinEvent) {
  return getRecommended(insulinEvent) < getProgrammed(insulinEvent);
}
/**
 * isUnderride
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @return {Boolean} whether the bolus programmed was smaller than the
 * calculated recommendation (NaN comparisons yield false, so events
 * without a recommendation are never underrides)
 */
export function isUnderride(insulinEvent) {
  return getRecommended(insulinEvent) > getProgrammed(insulinEvent);
}
/**
 * getAnnotations
 * @param {Object} insulinEvent - a Tidepool bolus or wizard object
 *
 * @returns {Array} array of annotations for the bolus or an empty array
 */
export function getAnnotations(insulinEvent) {
  const bolus = getBolusFromInsulinEvent(insulinEvent);
  const annotations = _.get(bolus, 'annotations', []);
  return annotations;
}
|
/// Table-view cell rendering an order that has not yet been paid.
class NotPayOrderTableViewCell: UITableViewCell {
    // ... (other properties and methods)

    /// The order to render; reassigning immediately refreshes the labels.
    var order: Order! {
        didSet {
            upDataUi()
        }
    }

    /// Pushes the current `order` values into the cell's labels.
    /// NOTE(review): relies on `orderIdLabel`, `customerNameLabel`,
    /// `orderDateLabel` and `totalAmountLabel` outlets declared outside
    /// this excerpt — confirm they exist on the cell.
    func upDataUi() {
        // Assuming there are outlets for labels in the cell for displaying order information
        orderIdLabel.text = "Order ID: \(order.orderId)"
        customerNameLabel.text = "Customer: \(order.customerName)"
        // Format the order date as e.g. "Mar 03, 2024 16:30"
        let dateFormatter = DateFormatter()
        dateFormatter.dateFormat = "MMM dd, yyyy HH:mm"
        let formattedDate = dateFormatter.string(from: order.orderDate)
        orderDateLabel.text = "Order Date: \(formattedDate)"
        // Currency is hard-coded to USD with two decimals
        let formattedAmount = String(format: "$%.2f", order.totalAmount)
        totalAmountLabel.text = "Total Amount: \(formattedAmount)"
    }
}
<gh_stars>100-1000
import React from 'react';
import { TabNavigator as ReactTabNavigator, createNavigationContainer, createNavigator } from 'react-navigation';
import TabView from 'react-navigation/src/views/TabView/TabView';
import NavigatorTypes from 'react-navigation/src/navigators/NavigatorTypes';
import TabRouter from '../routers/TabRouter';
// Drop-in replacement for react-navigation's TabNavigator that swaps in a
// custom TabRouter while reusing the stock TabView presentation layer.
const TabNavigator = (
  routeConfigs,
  config = {}
) => {
  // Use the look native to the platform by default
  const mergedConfig = { ...TabNavigator.Presets.Default, ...config };
  // Split presentation props (consumed by TabView) from the routing config
  // that is forwarded to TabRouter
  const {
    tabBarComponent,
    tabBarPosition,
    tabBarOptions,
    swipeEnabled,
    animationEnabled,
    lazy,
    ...tabsConfig
  } = mergedConfig;
  const router = TabRouter(routeConfigs, tabsConfig);
  // NOTE(review): NavigatorTypes.STACK is passed for a *tab* navigator —
  // presumably deliberate in this fork, but verify against NavigatorTypes.TABS.
  const navigator = createNavigator(
    router,
    routeConfigs,
    config,
    NavigatorTypes.STACK
  )((props) => (
    <TabView
      {...props}
      tabBarComponent={tabBarComponent}
      tabBarPosition={tabBarPosition}
      tabBarOptions={tabBarOptions}
      swipeEnabled={swipeEnabled}
      animationEnabled={animationEnabled}
      lazy={lazy}
    />
  ));
  return createNavigationContainer(navigator, tabsConfig.containerOptions);
};
// Re-export the stock presets so callers can use Presets.Default etc.
TabNavigator.Presets = ReactTabNavigator.Presets;
export default TabNavigator;
<filename>src/main/java/org/vertx/java/core/DeferredAction.java
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vertx.java.core;
/**
* <p>DeferredAction is useful when you want to create your own Deferred actions.</p>
*
* <p>Normally, instances of Deferred are returned from vert.x modules to represent operations such as getting a key from
* a Redis server, or copying a file. However if you wish to create your own instances you can do this by subclassing this
* class and implementing the {@link #run} method.</p>
*
* <p>When the operation is complete be sure to call {@link #setResult} or {@link #setException}</p>
*
* @author <a href="http://tfox.org"><NAME></a>
*/
public abstract class DeferredAction<T> extends SimpleFuture<T> implements Deferred<T> {

  /** Set once {@link #run} has been invoked; guards against double execution. */
  protected boolean executed;

  /**
   * Triggers the deferred operation. The underlying action runs at most
   * once; subsequent calls are no-ops that simply return this instance.
   */
  public Deferred<T> execute() {
    if (executed) {
      return this;
    }
    run();
    executed = true;
    return this;
  }

  /**
   * Registers the completion handler and returns this action for chaining.
   */
  public Deferred<T> handler(CompletionHandler<T> completionHandler) {
    super.handler(completionHandler);
    return this;
  }

  /**
   * Override this method to implement the deferred operation.
   * When the operation is complete be sure to call {@link #setResult} or {@link #setException}
   */
  protected abstract void run();
}
|
# frozen_string_literal: true
require "spec_helper"
# Verifies AnyCable::Middlewares::CheckVersion: the RPC handler must accept
# requests whose "protov" metadata lists a supported protocol version and
# raise for requests without a matching version (or with no metadata at all).
describe "version check spec" do
  include_context "rpc_command"
  let(:headers) do
    {
      "Cookie" => "username=john;"
    }
  end
  # Middleware chain under test, pinned to protocol version "test-v1"
  let(:middleware) do
    AnyCable::MiddlewareChain.new.tap do |chain|
      chain.use(AnyCable::Middlewares::CheckVersion.new("test-v1"))
    end
  end
  let(:request) { AnyCable::ConnectionRequest.new(env: env) }
  let(:meta) { {} }
  let(:handler) { AnyCable::RPC::Handler.new(middleware: middleware) }
  subject { handler.handle(:connect, request, meta) }
  it "passes with a single matching version in meta" do
    meta["protov"] = "test-v1"
    expect(subject).to be_success
  end
  # "protov" may be a comma-separated list; a single match is enough
  it "passes with multiple versions including matching" do
    meta["protov"] = "test-v0,test-v1"
    expect(subject).to be_success
  end
  # NB: "test-v01" deliberately does NOT match "test-v1" — no prefix matching
  it "fails without matching version" do
    meta["protov"] = "test-v0,test-v01"
    expect { subject }.to raise_error(
      %r{Client supported versions: test-v0,test-v01}
    )
  end
  it "fails without metadata" do
    expect { subject }.to raise_error(
      %r{Client supported versions: unknown}
    )
  end
end
|
const db = require('../../data/db-config');
// Data-access helpers for the "readingStatuses" table (knex query builder).
const findAll = async () => {
  return await db('readingStatuses');
};
// filter: object of column -> value equality constraints
const findBy = async (filter) => {
  return await db('readingStatuses').where(filter);
};
// Resolves to a single row object or undefined when not found.
const findById = async (id) => {
  return db('readingStatuses').where({ id }).first();
};
// Inserts and then re-reads the created row so callers get the stored record.
const create = async (readingStatus) => {
  const [id] = await db('readingStatuses')
    .insert(readingStatus)
    .returning('id');
  return findById(id);
};
// NOTE(review): `.first()` has no effect once the builder becomes an UPDATE,
// and `.returning('*')` resolves to an ARRAY of updated rows — confirm
// callers expect an array here, unlike the single object from findById.
const update = async (id, readingStatus) => {
  return db('readingStatuses')
    .where({ id: id })
    .first()
    .update(readingStatus)
    .returning('*');
};
// Resolves to the number of deleted rows.
const remove = async (id) => {
  return await db('readingStatuses').where({ id }).del();
};
module.exports = { findAll, findBy, findById, create, update, remove };
|
#include <cstdint>
#include <mutex>
#include <vector>
// Thread-safe byte FIFO used to buffer incoming Lego I2C data.
// write() appends under the rx mutex; read() pops the oldest byte or returns
// 0 when the buffer is empty (callers cannot distinguish a genuine 0x00 byte
// from "empty" — a known limitation of this interface).
// Requires <cstdint> for uint8_t, which the original relied on transitively.
class LegoI2C {
private:
  std::vector<uint8_t> m_rx_buff;  // pending bytes, oldest first
  std::mutex m_rx_mutex;           // guards m_rx_buff

public:
  // Append one received byte to the FIFO.
  void write(uint8_t data) {
    std::lock_guard<std::mutex> guard(m_rx_mutex);
    m_rx_buff.push_back(data);
  }

  // Pop and return the oldest byte; returns 0 when the FIFO is empty.
  // erase(begin()) is O(n) but fine for the small buffers used here.
  uint8_t read() {
    std::lock_guard<std::mutex> guard(m_rx_mutex);
    if (m_rx_buff.empty()) {
      // Handle empty buffer by returning a default value
      return 0;
    }
    uint8_t data = m_rx_buff.front();
    m_rx_buff.erase(m_rx_buff.begin());
    return data;
  }
};
def get_stock_prices(stock_list):
    """Scrape current intraday prices for the given ticker symbols from MarketWatch.

    Requires the third-party ``requests`` and ``beautifulsoup4`` packages and
    network access.

    Args:
        stock_list: iterable of ticker symbols, e.g. ``["aapl", "msft"]``.

    Returns:
        dict mapping each symbol to its price as a float. Symbols whose price
        element is missing or unparseable are skipped instead of raising
        (the original crashed on pages without the element and on formatted
        prices such as "$1,234.56", which float() cannot parse).
    """
    import re

    prices = {}
    for stock in stock_list:
        url = "https://www.marketwatch.com/investing/stock/" + stock
        html = requests.get(url)
        soup = BeautifulSoup(html.content, 'html.parser')
        price_data = soup.find("div", {"class": "intraday__price"})
        if price_data is None:
            # Page layout changed or symbol unknown -- skip this ticker.
            continue
        # Strip currency symbols and thousands separators before conversion.
        cleaned = re.sub(r"[^0-9.\-]", "", price_data.get_text(strip=True))
        try:
            prices[stock] = float(cleaned)
        except ValueError:
            continue
    return prices
// Table definition for "myTable" (single HASH key on "Name").
// AttributeDefinitions may only list attributes that appear in a key schema
// (table or index). The original also declared "Description" and "Status" —
// the latter with "BOOL", which is not a legal AttributeType there (only
// "S", "N" and "B" are) — causing CreateTable to fail with a
// ValidationException. Non-key attributes are schemaless in DynamoDB and
// need no declaration.
var params = {
  TableName: "myTable",
  KeySchema: [
    { AttributeName: "Name", KeyType: "HASH" }
  ],
  AttributeDefinitions: [
    { AttributeName: "Name", AttributeType: "S" }
  ],
  ProvisionedThroughput: {
    ReadCapacityUnits: 1,
    WriteCapacityUnits: 1
  }
};
dynamodb.createTable(params, function(err, data) {
  if (err) {
    console.error("Unable to create table. Error JSON:", JSON.stringify(err, null, 2));
  } else {
    console.log("Created table. Table description JSON:", JSON.stringify(data, null, 2));
  }
});
#!/bin/bash
# Replays a recorded GitHub "push" webhook against a locally running
# github-trigger plugin, mimicking GitHub Hookshot's request headers.
# The empty "Expect:" header suppresses curl's default "Expect: 100-continue".
# NOTE(review): X-Hub-Signature must be the HMAC-SHA1 of the payload under
# the endpoint's configured secret — regenerate it if the payload changes.
curl -XPOST \
-H"content-type: application/json" \
-H"Expect: " \
-H"User-Agent: GitHub-Hookshot/b6210f6" \
-H"X-GitHub-Delivery: 5529067a-14f1-11ea-8f35-75cb7053287b" \
-H "X-GitHub-Event: push" \
-H "X-Hub-Signature: sha1=f6b0ccbd7dbe39d2a807668670e60bd07dbd6b6a" \
-d @push-event-payload.json \
http://localhost:8080/plugins/github-trigger
<filename>api/src/main/java/org/ednovo/gooru/domain/cassandra/service/ContentProviderService.java
package org.ednovo.gooru.domain.cassandra.service;
import org.ednovo.gooru.cassandra.core.service.EntityCassandraService;
import org.ednovo.gooru.core.cassandra.model.ContentProviderCio;
/**
 * Cassandra-backed CRUD service for {@link ContentProviderCio} entities,
 * keyed by their String row key. All operations are inherited from
 * {@link EntityCassandraService}; this interface only binds the type
 * parameters for content providers.
 */
public interface ContentProviderService extends EntityCassandraService<String,ContentProviderCio> {
}
|
<gh_stars>10-100
package com.pearson.docussandra.persistence;
import com.datastax.driver.core.Session;
import com.pearson.docussandra.domain.ParsedQuery;
import com.pearson.docussandra.domain.objects.QueryResponseWrapper;
import com.pearson.docussandra.exception.IndexParseException;
/**
 * Repository contract for executing parsed queries against Cassandra.
 *
 * @author https://github.com/JeffreyDeYoung
 */
public interface QueryRepository
{
    /**
     * Exposes the underlying Cassandra driver session.
     *
     * @return the session
     */
    Session getSession();

    /**
     * Executes a query with no limit or offset applied.
     *
     * @param query ParsedQuery to execute.
     * @return A query response.
     * @throws IndexParseException If the query is not on a valid index.
     */
    QueryResponseWrapper query(ParsedQuery query) throws IndexParseException;

    /**
     * Executes a query returning at most {@code limit} records after
     * discarding the first {@code offset} matches.
     *
     * @param query ParsedQuery to execute.
     * @param limit Maximum number of results to return.
     * @param offset Number of records at the beginning of the results to discard.
     * @return A query response.
     * @throws IndexParseException If the query is not on a valid index.
     */
    QueryResponseWrapper query(ParsedQuery query, int limit, long offset) throws IndexParseException;
}
|
#!/usr/bin/env bash
# shellcheck disable=SC1090
source "$(dirname "${BASH_SOURCE[0]}")/os.bash"
# Renders a per-VM cloud-init YAML from the shared template: injects the VM's
# public SSH key, hostname and the local rsyslog endpoint.
# Expects VM_NAME, CONFIG_BASE_PATH, CLOUD_INIT_TEMPLATE and SSH_KEY_PATH to
# be set by the sourced os.bash / calling environment.
function create_cloud_init_config_from_template() {
  local SSH_KEY="id_rsa_${VM_NAME}"
  local CLOUD_INIT_FILE="$CONFIG_BASE_PATH/${VM_NAME}-cloud-init.yaml"
  cp "$CLOUD_INIT_TEMPLATE" "$CLOUD_INIT_FILE"
  #@ToDo: Optimize Edits
  #docker_sed "s,ssh-rsa.*$,$(cat "$SSH_KEY_PATH"/"${SSH_KEY}".pub),g" "/config/${VM_NAME}-cloud-init.yaml"
  #docker_sed "s,hostname:.*$,hostname:\ $VM_NAME,g" "/config/${VM_NAME}-cloud-init.yaml"
  # Replace the template's placeholder key, hostname and rsyslog target in place
  file_replace_text "ssh-rsa.*$" "$(cat "$SSH_KEY_PATH"/"${SSH_KEY}".pub)" "$CLOUD_INIT_FILE"
  file_replace_text "hostname:.*$" "hostname:\ $VM_NAME" "$CLOUD_INIT_FILE"
  file_replace_text "_rsyslog_ip_port_" "$(get_local_ip):5514" "$CLOUD_INIT_FILE"
  echo "$CLOUD_INIT_FILE Generated for $VM_NAME"
}
function run_main(){
  create_cloud_init_config_from_template
}
# Only run when executed directly (not when sourced for its functions)
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]
then
  if ! run_main "$@"
  then
    exit 1
  fi
fi
import {Merchant} from '../common/Merchant';
/** Slice of store state holding the fetched merchants collection. */
export interface MerchantsList
{
  merchants:Merchant[];   // loaded merchants (empty until a fetch completes)
  error:any;              // last fetch error, if any
  loading:boolean;        // true while a request is in flight
}
/** Slice of store state tracking a single-merchant operation. */
export interface MerchantData
{
  merchant:Merchant|null; // subject of the operation; null when none selected
  error:any;              // last operation error, if any
  loading:boolean;        // true while the operation is in flight
}
/** Root store shape for the merchants feature. */
export interface StoreState
{
  merchantsList:MerchantsList;
  newMerchant:MerchantData;       // merchant currently being created
  activeMerchant:MerchantData;    // merchant currently selected/edited
  deletedMerchant:MerchantData;   // merchant most recently deleted
}
#include <iostream>
// Linear search: return the index of the first occurrence of x within the
// first n elements of array, or -1 when x is not present.
int search(int array[], int n, int x)
{
    for (int i = 0; i < n; ++i) {
        if (array[i] == x) {
            return i;
        }
    }
    return -1;
}
// Demo driver: look for a fixed target value in a small array and report
// whether (and where) it was found.
int main(void)
{
    int values[] = { 2, 5, 8, 9, 10 };
    int target = 5;
    int count = sizeof(values) / sizeof(values[0]);

    int index = search(values, count, target);
    if (index == -1) {
        std::cout << "Element is not present in array";
    } else {
        std::cout << "Element is present at index " << index;
    }
    return 0;
}
<gh_stars>0
import styled from 'styled-components';
// Full-width banner with the brand blue background.
export const Container = styled.div`
background: #273FAD;
padding-top: 2rem;
`;
// Centers the banner's children inside the standard 1120px page column.
export const Content = styled.div`
max-width: 1120px;
margin: 0 auto;
color: #FFF;
padding: 1rem 1rem 2rem;
display: flex;
align-items: center;
justify-content: center;
`;
|
<filename>circle/impl/dao/im_circle_dao_test.go
/*
* Copyright © 2019 <NAME>.
*/
package dao_test
import (
"github.com/hedzr/voxr-lite/circle/impl/dao"
"github.com/hedzr/voxr-lite/circle/impl/models"
"github.com/hedzr/voxr-lite/internal/dbe"
"testing"
)
// TestAaa exercises the full CRUD cycle of CircleDao against a live database:
// migrate the schema, add a circle, attach an image, fetch it by id and by
// example, update it, list, and finally remove it.
func TestAaa(t *testing.T) {
	_, err := prepareDb()
	if err != nil {
		t.Fatalf("open failed: %v", err)
	}
	defer localClose()
	// gorm.DefaultTableNameHandler = func(db *gorm.DB, defaultTableName string) string {
	// return "t_" + defaultTableName
	// }
	// Use singular table names and make sure both tables exist.
	dbe.GormDb().SingularTable(true)
	if err = dbe.GormDb().AutoMigrate(&models.Circle{}, &models.CircleImage{}).Error; err != nil {
		t.Fatal(err)
	}
	dx := dao.NewCircleDao()
	// Create
	in := &models.Circle{Title: "hello", Header: "header", Footer: "footer", Content: "xxx world", Remarks: "remarks", UserId: 3, HeadUrl: "cover"}
	if err := dx.Add(in, 1); err != nil {
		t.Fatal(err)
	} else {
		t.Logf("ret = %v", in)
	}
	// Attach an image record to a circle
	if err := dx.SaveImage(&models.CircleImage{CircleId: 2, UserId: 3, BaseName: "vx.png", Mime: "image/png", Size: 3, LocalPath: "vx.png", Url: "vx.png"}); err != nil {
		t.Fatal(err)
	} else {
		//
	}
	// Read back by primary key ...
	if obj, err := dx.GetById(1); err != nil {
		t.Fatal(err)
	} else {
		t.Logf("obj = %v", obj)
	}
	// ... and by query-by-example
	if obj, err := dx.Get(&models.Circle{BaseModel: models.BaseModel{Id: 1}}); err != nil {
		t.Fatal(err)
	} else {
		t.Logf("obj = %v", obj)
	}
	// Update a field on the record created above
	in.Header = "hoho"
	if err := dx.Update(in); err != nil {
		t.Fatal(err)
	} else {
		t.Logf("in.updated = %v", in)
	}
	// List up to 20 records with no filter/order
	if ret, err := dx.List(20, 0, "", ""); err != nil {
		t.Fatal(err)
	} else {
		for ix, r := range ret {
			t.Logf("%5d. %v", ix, r)
		}
	}
	// Clean up the created record
	if err := dx.Remove(in); err != nil {
		t.Fatal(err)
	} else {
		t.Log("removed")
	}
}
|
#!/bin/bash
# Installs ffmpeg via apt-get; exits non-zero when installation fails.

##################### Functions definition
# Exit the script with status 1 when the previous command failed ($1 != 0).
# (The original also set RES=1 before exiting, but `exit 1` leaves the
# function immediately, so RES and the trailing `exit $RES` could only ever
# run with RES=0 — the variable was dead and has been removed.)
function check_result {
  if [ "$1" -gt "0" ] ; then
    exit 1
  fi
}

# Install ffmpeg
echo "######################################"
echo "# ffmpeg software installation #"
echo "######################################"
echo "Fetching needed packages ..."
apt-get install ffmpeg -qq
check_result $?
echo "######################################"
echo "# Done #"
echo "######################################"
exit 0
|
#!/bin/bash
# Apply power-limit, fan-speed and clock/memory overclock settings to an
# 8-GPU rig: GPUs 0-2 are 1070 Ti, GPU 3 is a 1080 Ti FTW3, GPUs 4-7 are
# 1080 Ti. (The original shebang was "#/bin/bash" — missing the "!" — so the
# script ran under whatever shell happened to invoke it.)
export DISPLAY=:0

FANSPEED_1070TI=50
FANSPEED_1080TI=50
FANSPEED_1080TI_FTW=50
# 1070 TI
POWER_1070TI=110
CLOCK_1070TI=200
MEMORY_1070TI=1000
# 1080 TI
POWER_1080TI=140
CLOCK_1080TI=150
MEMORY_1080TI=525
# 1080 TI FTW3 Edition
POWER_1080TI_FTW=140
CLOCK_1080TI_FTW=150
MEMORY_1080TI_FTW=600

# apply_gpu_settings <index> <power W> <fan %> <clock offset> <mem offset>
# Sets the power limit, locks the fan to a fixed speed and applies the
# graphics/memory clock offsets for one GPU. Replaces the original's eight
# hand-unrolled copies of the same four commands.
apply_gpu_settings() {
  local gpu=$1 power=$2 fan=$3 clock=$4 mem=$5
  nvidia-smi -i "$gpu" -pl "$power"
  nvidia-settings -a "[gpu:$gpu]/GPUFanControlState=1" -a "[fan:$gpu]/GPUTargetFanSpeed=$fan"
  nvidia-settings -c :0 -a "[gpu:$gpu]/GPUGraphicsClockOffset[3]=$clock"
  nvidia-settings -c :0 -a "[gpu:$gpu]/GPUMemoryTransferRateOffset[3]=$mem"
}

# Enable persistence so settings survive between driver client invocations
nvidia-smi -pm 1

# 1070 Ti cards
for gpu in 0 1 2; do
  apply_gpu_settings "$gpu" "$POWER_1070TI" "$FANSPEED_1070TI" "$CLOCK_1070TI" "$MEMORY_1070TI"
done
# 1080 Ti FTW3 Edition
apply_gpu_settings 3 "$POWER_1080TI_FTW" "$FANSPEED_1080TI_FTW" "$CLOCK_1080TI_FTW" "$MEMORY_1080TI_FTW"
# 1080 Ti cards
for gpu in 4 5 6 7; do
  apply_gpu_settings "$gpu" "$POWER_1080TI" "$FANSPEED_1080TI" "$CLOCK_1080TI" "$MEMORY_1080TI"
done
|
/*******************************************************************
*
* Tick List -- saves a lot of buggering about with checkboxes...
*
*/
#include "MFStdLib.h"
#include <windows.h>
#include "ticklist.h"
/*
* OEM Resource Ordinal Numbers
*/
#define OBM_CLOSE 32754
#define OBM_UPARROW 32753
#define OBM_DNARROW 32752
#define OBM_RGARROW 32751
#define OBM_LFARROW 32750
#define OBM_REDUCE 32749
#define OBM_ZOOM 32748
#define OBM_RESTORE 32747
#define OBM_REDUCED 32746
#define OBM_ZOOMD 32745
#define OBM_RESTORED 32744
#define OBM_UPARROWD 32743
#define OBM_DNARROWD 32742
#define OBM_RGARROWD 32741
#define OBM_LFARROWD 32740
#define OBM_MNARROW 32739
#define OBM_COMBO 32738
#define OBM_UPARROWI 32737
#define OBM_DNARROWI 32736
#define OBM_RGARROWI 32735
#define OBM_LFARROWI 32734
#define OBM_OLD_CLOSE 32767
#define OBM_SIZE 32766
#define OBM_OLD_UPARROW 32765
#define OBM_OLD_DNARROW 32764
#define OBM_OLD_RGARROW 32763
#define OBM_OLD_LFARROW 32762
#define OBM_BTSIZE 32761
#define OBM_CHECK 32760
#define OBM_CHECKBOXES 32759
#define OBM_BTNCORNERS 32758
#define OBM_OLD_REDUCE 32757
#define OBM_OLD_ZOOM 32756
#define OBM_OLD_RESTORE 32755
/* Populate list-box `id` with the strings in pc[] (the list is terminated by
 * a string whose first character is '!') and set each item's data word to 1
 * when the corresponding bit of `bitmask` is set (checked) or 0 (unchecked).
 * The control is then subclassed with ticklist_proc so space/clicks toggle
 * the checkbox state; the previous window proc is stashed in GWL_USERDATA.
 * NOTE(review): SetWindowLong/GWL_WNDPROC truncates pointers on 64-bit
 * Windows — SetWindowLongPtr/GWLP_WNDPROC would be required there. */
void ticklist_init(HWND hWnd, SLONG id, CBYTE *pc[], SLONG bitmask) {
    SLONG c0 = 1;
    HWND the_ctrl = GetDlgItem(hWnd,id);
    LPTSTR lbitem_str = pc[0];
    while(*lbitem_str!='!')
    {
        SendMessage(the_ctrl,LB_ADDSTRING,0,(LPARAM)lbitem_str);
        /* item data holds the tick state for the item just added (c0-1) */
        SendMessage(the_ctrl,LB_SETITEMDATA,c0-1,(bitmask & (1<<(c0-1))) ? 1 : 0);
        lbitem_str = pc[c0++];
    }
    WNDPROC previous = (WNDPROC) SetWindowLong(the_ctrl,
        GWL_WNDPROC,(long)ticklist_proc);
    SetWindowLong(the_ctrl,GWL_USERDATA,(long)previous);
}
/* Undo ticklist_init's subclassing: restore the list-box's original window
 * procedure that was saved in GWL_USERDATA. Must be called before the
 * dialog is destroyed to avoid leaving a dangling subclass proc. */
void ticklist_close(HWND hWnd, SLONG id) {
    HWND the_ctrl = GetDlgItem(hWnd,id);
    WNDPROC previous = (WNDPROC) GetWindowLong(the_ctrl, GWL_USERDATA);
    SetWindowLong(the_ctrl,GWL_WNDPROC,(long)previous);
}
/* Subclass procedure for the tick-list list box. Toggles an item's checked
 * state (stored in its item data) when the user presses space on the
 * selected item or clicks in the 16px checkbox column, then invalidates the
 * item's rectangle so ticklist_draw repaints it. Everything else is passed
 * through to the original window proc saved in GWL_USERDATA. */
BOOL CALLBACK ticklist_proc(HWND hWnd,UINT message,WPARAM wParam,LPARAM lParam) {
    WNDPROC previous = (WNDPROC) GetWindowLong(hWnd, GWL_USERDATA);
    switch(message) {
    case WM_CHAR:
        /* space bar toggles the currently selected item */
        if (wParam==32) {
            SLONG res,item;
            RECT rc;
            item=SendMessage(hWnd,LB_GETCURSEL,0,0);
            /* flip 0 <-> 1 */
            res=1-SendMessage(hWnd,LB_GETITEMDATA,item,0);
            SendMessage(hWnd,LB_SETITEMDATA,item,res);
            SendMessage(hWnd,LB_GETITEMRECT,item,(long)&rc);
            InvalidateRect(hWnd,&rc,0);
            return FALSE;
        }
        break;
    case WM_LBUTTONDOWN:
    case WM_LBUTTONDBLCLK:
        /* only clicks within the leftmost 16px (the checkbox column) toggle */
        if (LOWORD(lParam)<16) {
            SLONG res,item;
            RECT rc;
            res=SendMessage(hWnd,LB_ITEMFROMPOINT,0,lParam);
            item=LOWORD(res);
            res=1-SendMessage(hWnd,LB_GETITEMDATA,item,0);
            SendMessage(hWnd,LB_SETITEMDATA,item,res);
            SendMessage(hWnd,LB_GETITEMRECT,item,(long)&rc);
            InvalidateRect(hWnd,&rc,0);
            return FALSE;
        }
        break;
    }
    return CallWindowProc(previous, hWnd, message, wParam, lParam);
}
/* Collect the tick states of all items back into a bitmask: bit i is set
 * when item i is checked. Inverse of the bitmask expansion in ticklist_init.
 * NOTE(review): i/ctr are UBYTE, so lists longer than 255 items (and masks
 * wider than 32 bits) are not supported. */
SLONG ticklist_bitmask(HWND hWnd, SLONG id) {
    HWND ctl = GetDlgItem(hWnd,id);
    UBYTE i,ctr;
    SLONG mask=0;
    ctr=SendMessage(ctl,LB_GETCOUNT,0,0);
    for (i=0;i<ctr;i++) {
        mask|=(SendMessage(ctl,LB_GETITEMDATA,i,0)<<i);
    }
    return mask;
}
/* WM_MEASUREITEM handler for the owner-drawn list box: every item is the
 * full control width and a fixed 16px tall (matching the checkbox bitmap). */
BOOL ticklist_measure(HWND hWnd, WPARAM wParam, LPARAM lParam)
{
    LPMEASUREITEMSTRUCT item = (LPMEASUREITEMSTRUCT) lParam;
    RECT rc;
    GetWindowRect(GetDlgItem(hWnd,item->CtlID),&rc);
    item->itemWidth=rc.right-rc.left;
    item->itemHeight=16;
    return TRUE;
}
/* WM_DRAWITEM handler: paints one list item as [checkbox][label]. The
 * checkbox glyph is blitted from the system OBM_CHECKBOXES strip; itemData
 * (0 or 1) selects the unchecked/checked cell via the 13px horizontal
 * offset. The DC, bitmap and memory DC are all released before returning.
 * NOTE(review): the label buffer is a fixed 255 bytes — items longer than
 * that would overflow; confirm callers keep labels short. */
BOOL ticklist_draw(HWND hWnd, WPARAM wParam, LPARAM lParam)
{
    HWND ctl = GetDlgItem(hWnd,wParam);
    HDC memdc;
    LPDRAWITEMSTRUCT item = (LPDRAWITEMSTRUCT) lParam;
    HBITMAP bmp,obmp;
    CBYTE pc[255];
    FillRect(item->hDC, &item->rcItem, (HBRUSH) GetStockObject(WHITE_BRUSH));
    SendMessage(ctl,LB_GETTEXT,item->itemID,(long)pc);
    /* label starts after the 20px checkbox column */
    TextOut(item->hDC,item->rcItem.left+20,item->rcItem.top+2,pc,strlen(pc));
    memdc=CreateCompatibleDC(item->hDC);
    bmp=(HBITMAP)LoadImage(NULL, (LPCTSTR)OBM_CHECKBOXES, IMAGE_BITMAP, 0, 0, LR_DEFAULTCOLOR);
    obmp=(HBITMAP)SelectObject(memdc,bmp);
    /* 13 = cell width inside the OBM_CHECKBOXES strip */
    BitBlt(item->hDC,item->rcItem.left+2,item->rcItem.top+2,12,12,memdc,(13*item->itemData),0,SRCCOPY);
    SelectObject(memdc,obmp);
    DeleteDC(memdc);
    DeleteObject(bmp);
    if (item->itemState & ODS_FOCUS)
        DrawFocusRect(item->hDC,&item->rcItem);
    return TRUE;
}
|
package com.zuoxiaolong.dao;
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.zuoxiaolong.model.ViewMode;
import com.zuoxiaolong.orm.BaseDao;
import com.zuoxiaolong.orm.Operation;
import com.zuoxiaolong.orm.TransactionalOperation;
import com.zuoxiaolong.util.StringUtil;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author 左潇龙
* @since 2015年5月29日 上午1:04:31
*/
public class CategoryDao extends BaseDao {
public boolean delete(final int articleId) {
return execute((TransactionalOperation<Boolean>) connection -> {
try {
PreparedStatement statement = connection.prepareStatement("delete from article_category where article_id=?");
statement.setInt(1, articleId);
int result = statement.executeUpdate();
return result > 0;
} catch (SQLException e) {
error("delete article_category failed ..." , e);
}
return false;
});
}
    /**
     * Lists every row of the "categories" table.
     * Delegates to the generic table-level {@code getAll(String)} inherited
     * from {@code BaseDao}.
     */
    public List<Map<String, String>> getAll() {
        return getAll("categories");
    }
public List<Map<String, String>> getCategories(final int articleId) {
return execute(new Operation<List<Map<String, String>>>() {
@Override
public List<Map<String, String>> doInConnection(Connection connection) {
List<Map<String, String>> result = new ArrayList<Map<String,String>>();
try {
PreparedStatement statement = connection.prepareStatement("select * from categories where id in (select category_id from article_category where article_id=?)");
statement.setInt(1, articleId);
ResultSet resultSet = statement.executeQuery();
while (resultSet.next()) {
Map<String, String> category = new HashMap<String, String>();
category.put("id", resultSet.getString("id"));
category.put("category_name", resultSet.getString("category_name"));
result.add(category);
}
} catch (SQLException e) {
error("query article_category failed ..." , e);
}
return result;
}
});
}
public Integer save(final String categoryName) {
return execute(new TransactionalOperation<Integer>() {
@Override
public Integer doInConnection(Connection connection) {
try {
PreparedStatement statement = connection.prepareStatement("insert into categories (category_name) values (?)",Statement.RETURN_GENERATED_KEYS);
statement.setString(1, categoryName);
int result = statement.executeUpdate();
if (result > 0) {
ResultSet resultSet = statement.getGeneratedKeys();
if (resultSet.next()) {
return resultSet.getInt(1);
}
}
} catch (SQLException e) {
error("save categories failed ..." , e);
}
return null;
}
});
}
public Integer getId(final String categoryName) {
return execute(new Operation<Integer>() {
@Override
public Integer doInConnection(Connection connection) {
try {
PreparedStatement statement = connection.prepareStatement("select id from categories where category_name=?");
statement.setString(1, categoryName);
ResultSet resultSet = statement.executeQuery();
if (resultSet.next()) {
return resultSet.getInt(1);
}
} catch (SQLException e) {
error("query categories failed ..." , e);
}
return null;
}
});
}
@Override
public Map<String, String> transfer(ResultSet resultSet, ViewMode viewMode) {
Map<String, String> category = new HashMap<String, String>();
try {
category.put("id", resultSet.getString("id"));
String categoryName = resultSet.getString("category_name");
category.put("category_name", categoryName);
category.put("short_category_name", StringUtil.substring(categoryName, 4));
} catch (SQLException e) {
throw new RuntimeException(e);
}
return category;
}
}
|
const CustomError = require("../extensions/custom-error");
/**
 * Builds a "dream team" acronym: takes the uppercased first letter of every
 * string member (after trimming), sorts the letters, and joins them.
 *
 * Fixes over the original:
 * - `for (i of members)` created an implicit global; now `const member`.
 * - `members.constructor !== Array` fails for cross-realm arrays; use
 *   `Array.isArray`.
 * - a whitespace-only member made `trimmed[0]` undefined and crashed on
 *   `.toUpperCase()`; such members are now skipped like non-strings.
 *
 * @param {Array<*>} members - candidate member names (non-strings ignored)
 * @returns {string|false} sorted initials, or false for non-array input
 */
function createDreamTeam(members) {
  if (!Array.isArray(members)) {
    return false;
  }
  const initials = [];
  for (const member of members) {
    if (typeof member === 'string') {
      const trimmed = member.trim();
      if (trimmed.length > 0) {
        initials.push(trimmed[0].toUpperCase());
      }
    }
  }
  return initials.sort().join('');
}

// Guard keeps the file loadable outside CommonJS (e.g. isolated ESM testing).
if (typeof module !== 'undefined') {
  module.exports = createDreamTeam;
}
|
<gh_stars>0
import React from 'react';
import './PageHeader.css';
const PageHeader = ({header, subheader, img, imgStyle}) => {
const backgroundStyle = {
backgroundImage: `url(${img})`,
...imgStyle
}
return (
<div className="page-header-container" style={backgroundStyle}>
<h1>{header}</h1>
{subheader}
</div>
);
}
export default PageHeader;
|
<filename>modules/payments/client-react/stripe/subscription/containers/withStripeSubscription.tsx
import React, { ComponentType } from 'react';
import { Query } from 'react-apollo';
import SUBSCRIPTION_QUERY from '../graphql/SubscriptionQuery.graphql';
export const withStripeSubscription = (Component: ComponentType) => {
return (props: any) => {
return (
<Query query={SUBSCRIPTION_QUERY} fetchPolicy="network-only">
{({ loading, data: { stripeSubscription } }) => (
<Component loading={loading} stripeSubscription={stripeSubscription} {...props} />
)}
</Query>
);
};
};
|
<reponame>eengineergz/Lambda
/*
Wikipedia -> https://en.wikipedia.org/wiki/Edit_distance
Q. -> Given two strings `word1` and `word2`. You can perform these operations on any of the string to make both strings similar.
- Insert
- Remove
- Replace
Find the minimum operation cost required to make both same. Each operation cost is 1.
Algorithm details ->
time complexity - O(n*m)
space complexity - O(n*m)
*/
/**
 * Levenshtein edit distance between word1 and word2: the minimum number of
 * single-character inserts, deletes or replacements (cost 1 each) needed to
 * turn one into the other.
 *
 * dp[i][j] = distance between the first i characters of word2 and the first
 * j characters of word1. Time and space are O(n*m).
 *
 * @param {string} word1
 * @param {string} word2
 * @returns {number} minimum edit cost
 */
const minimumEditDistance = (word1, word2) => {
  const n = word1.length;
  const m = word2.length;
  const dp = Array.from({ length: m + 1 }, () => []);
  // Base row/column: distance against the empty string is just the length.
  for (let j = 0; j <= n; j++) {
    dp[0][j] = j;
  }
  for (let i = 0; i <= m; i++) {
    dp[i][0] = i;
  }
  // 1-based dp indices; the corresponding characters are at index - 1.
  for (let i = 1; i <= m; i++) {
    for (let j = 1; j <= n; j++) {
      if (word1[j - 1] === word2[i - 1]) {
        // Characters match: no extra cost.
        dp[i][j] = dp[i - 1][j - 1];
      } else {
        // Cheapest of delete / replace / insert, plus this operation.
        dp[i][j] = 1 + Math.min(dp[i - 1][j], dp[i - 1][j - 1], dp[i][j - 1]);
      }
    }
  }
  return dp[m][n];
};
/** Demo driver: prints the edit distance for a few sample word pairs. */
const main = () => {
  const samples = [
    ['horse', 'ros'],
    ['cat', 'cut'],
    ['', 'abc'],
    ['google', 'glgool'],
  ];
  for (const [first, second] of samples) {
    console.log(minimumEditDistance(first, second));
  }
};
main();
|
#!/bin/bash
# 00-automount.sh
# Ubuntu
#
# Created by Kanav Gupta on 16/01/21.
#
# Installs a systemd service that mounts every disk carrying a filesystem
# label under /mnt/<label> and enables it at boot.
#
# Fix: the original used Python-style `echo """..."""` quoting, which only
# works in bash by accident (an empty "" followed by a quoted string) and
# forces every inner quote and dollar sign to be escaped. Quoted heredocs
# write the same content verbatim and are the idiomatic form.

# Unit file that runs the mount script.
cat > /etc/systemd/system/automount.service <<'EOF'
[Unit]
Description=Automount disks according to disk labels
[Service]
ExecStart=/opt/sdslabs/automount.sh
[Install]
WantedBy=multi-user.target
EOF

mkdir -p /opt/sdslabs

# The mount script: one mount point per labelled disk, handed to the
# `default` user/group. The quoted 'EOF' keeps $disk/$label unexpanded here.
cat > /opt/sdslabs/automount.sh <<'EOF'
#!/bin/bash
for disk in /dev/disk/by-label/*; do
  label="$(basename $disk)"
  mkdir -p "/mnt/$label"
  mount "$disk" "/mnt/$label"
  chown default "/mnt/$label"
  chgrp default "/mnt/$label"
done
EOF

chmod +x /opt/sdslabs/automount.sh
systemctl enable automount.service
|
#!/bin/bash -e
#
# Create a standard tmux session for this repo.
#
# # Create an amp tmux session for $HOME/src/amp1
# > dev_scripts/tmux_amp.sh amp 1
#
# # Create a cmamp tmux session for $HOME/src/cmamp2
# > dev_scripts/tmux_amp.sh cmamp 2

echo "##> dev_scripts/tmux_amp.sh"

set -x

SERVER_NAME=$(uname -n)
echo "SERVER_NAME=$SERVER_NAME"

# Infer the user's home dir from the platform layout.
# Try macOS setup.
DIR_NAME="/Users/$USER"
if [[ -d $DIR_NAME ]]; then
  echo "Inferred macOS setup"
  HOME_DIR=$DIR_NAME
else
  # Try AWS setup.
  DIR_NAME="/data/$USER"
  if [[ -d $DIR_NAME ]]; then
    echo "Inferred AWS setup"
    HOME_DIR=$DIR_NAME
  fi;
fi;
if [[ -z $HOME_DIR ]]; then
  echo "ERROR: Can't infer where your home dir is located"
  exit 1
fi;
echo "HOME_DIR=$HOME_DIR"

# #############################################################################
# Parse command options.
# #############################################################################

DIR_PREFIX=$1
if [[ -z $DIR_PREFIX ]]; then
  echo "ERROR: You need to specify directory prefix, e.g. 'amp' or 'cmamp'"
  # Fix: the original only printed the error and kept going with an empty
  # prefix; bail out like the IDX check below does.
  exit 1
fi;
IDX=$2
if [[ -z $IDX ]]; then
  echo "ERROR: You need to specify IDX={1,2,3}"
  exit 1
fi;
AMP_DIR="${HOME_DIR}/src/${DIR_PREFIX}${IDX}"
echo "AMP_DIR=$AMP_DIR"

# #############################################################################
# Open the tmux session.
# #############################################################################

SETENV="dev_scripts/setenv_amp.sh"
# No `clear` since we want to see issues, if any.
#CMD="source ${SETENV} && reset && clear"
CMD="source ${SETENV}"
TMUX_NAME="${DIR_PREFIX}${IDX}"

tmux new-session -d -s $TMUX_NAME -n "---${TMUX_NAME}---"
# The first one window seems a problem.
tmux send-keys "white; cd ${AMP_DIR} && $CMD" C-m C-m
#
tmux new-window -n "dbash"
tmux send-keys "green; cd ${AMP_DIR} && $CMD" C-m C-m
#
tmux new-window -n "regr"
tmux send-keys "yellow; cd ${AMP_DIR} && $CMD" C-m C-m
#
tmux new-window -n "jupyter"
tmux send-keys "yellow; cd ${AMP_DIR} && $CMD" C-m C-m
# Go to the first tab.
tmux select-window -t $TMUX_NAME:0
tmux -2 attach-session -t $TMUX_NAME
|
#!/usr/bin/env bash
# Idempotently patches the eirini-ssh-extension BPM template so the job can
# run in-cluster without BPM.
set -o errexit -o nounset
# Template to patch, plus a marker file that records a successful run.
target="/var/vcap/all-releases/jobs-src/eirini/eirini-ssh-extension/templates/bpm.yml.erb"
sentinel="${target}.patch_sentinel"
# Idempotence guard: a previous run leaves the sentinel behind, so re-running
# this script (e.g. on container restart) is a no-op.
if [[ -f "${sentinel}" ]]; then
  echo "Patch already applied. Skipping"
  exit 0
fi
# Patch BPM, since we're actually running in-cluster without BPM
# The heredoc below is a unified-diff payload consumed verbatim by patch(1):
# the removed lines drop the explicit KUBERNETES_SERVICE_* env vars and the
# in-pod service-account volume mounts. Do not reformat or re-indent it.
patch --verbose "${target}" <<'EOT'
@@ -3,21 +3,8 @@ processes:
executable: /var/vcap/packages/eirini-ssh-extension/bin/eirini-ssh-extension
args: []
env:
- KUBERNETES_SERVICE_HOST: "<%= p("eirini-ssh-extension.kube_service_host") %>"
- KUBERNETES_SERVICE_PORT: "<%= p("eirini-ssh-extension.kube_service_port") %>"
EIRINI_EXTENSION_HOST: "<%= p("eirini-ssh-extension.operator_webhook_host") %>"
EIRINI_EXTENSION_PORT: "<%= p("eirini-ssh-extension.operator_webhook_port") %>"
EIRINI_EXTENSION_NAMESPACE: "<%= p("eirini-ssh-extension.namespace") %>"
OPERATOR_SERVICE_NAME: "<%= p("eirini-ssh-extension.operator_webhook_servicename") %>"
OPERATOR_WEBHOOK_NAMESPACE: "<%= p("eirini-ssh-extension.operator_webhook_namespace") %>"
- <% if properties.opi&.k8s&.host_url.nil? %>
- # The ServiceAccount admission controller has to be enabled.
- # https://kubernetes.io/docs/tasks/access-application-cluster/access-cluster/#accessing-the-api-from-a-pod
- additional_volumes:
- - path: /var/run/secrets/kubernetes.io/serviceaccount/token
- mount_only: true
- - path: /var/run/secrets/kubernetes.io/serviceaccount/ca.crt
- mount_only: true
- - path: /var/run/secrets/kubernetes.io/serviceaccount/namespace
- mount_only: true
- <% end %>
EOT
# Record success so subsequent runs skip the patch.
touch "${sentinel}"
|
<filename>src/index.ts
import { Config, CountryData, CountryModel, LinkedData } from './module';
import request from './request';
import matches from './matches';
import fs from 'fs';
import path from 'path';
import { URL } from 'url';
import { Pattern } from './patterns';
// Entry point: scrape country data starting at the configured root URL and
// dump the result to the configured output file as pretty-printed JSON.
const _config = readConfig();
read(_config)
  .then((countries: CountryModel[]) => {
    const json = JSON.stringify(countries, null, ' ');
    fs.writeFileSync(_config.output, json);
    console.log('done');
  })
  .catch((error: Error) => console.error(error));
/**
 * Loads ./config.json, makes sure the cache directory exists, and returns a
 * normalized configuration with absolute paths.
 */
function readConfig(): Config {
  const raw = fs.readFileSync(path.resolve('./config.json'), 'utf-8');
  const { rootURL, cachePath, output } = JSON.parse(raw) as Config;
  const safeCachePath = path.resolve(cachePath);
  if (!fs.existsSync(safeCachePath)) {
    fs.mkdirSync(safeCachePath);
  }
  return {
    rootURL,
    cachePath: safeCachePath,
    output: path.resolve(output),
  };
}
/**
 * Fetches the root page, walks every country entry it contains, and resolves
 * each one to a CountryModel (downloading its flag via `request`, which also
 * handles caching — see ./request).
 */
async function read(config: Config): Promise<CountryModel[]> {
  const rootHtml = await request(config.rootURL, config);
  const countries: CountryModel[] = [];
  for (const linked of iterator(rootHtml, config)) {
    // `data` is always populated by `iterator`, hence the non-null assertion.
    const data = linked.data!;
    countries.push({
      name: linked.name,
      code2: data.code2,
      code3: data.code3,
      code: data.code,
      flag: await request(data.flag, config),
    });
  }
  return countries;
}
function* iterator(html: string, config: Config): IterableIterator<LinkedData<CountryData>> {
for (const group of matches(Pattern, ['g', 'i', 's'], html)) {
yield {
name: group['name'].valueOf(),
url: '',
data: {
code2: group['code2'].valueOf(),
code3: group['code3'].valueOf(),
code: group['code'].valueOf(),
flag: new URL(config.rootURL).protocol + group['flag'].valueOf()
}
};
}
} |
/*
 * Build-stamp constants: the date/time, machine and user of the build.
 * bmTIME presumably packs the same date/time fields into a Time value —
 * confirm the Time constructor signature (year, month, day, hour, min, sec).
 */
#define bmYEAR 2015
#define bmMONTH 3
#define bmDAY 4
#define bmHOUR 17
#define bmMINUTE 34
#define bmSECOND 11
#define bmTIME Time(2015, 3, 4, 17, 34, 11)
#define bmMACHINE "RAUL-THINK"
#define bmUSER "Raul"
|
package arouter.dawn.zju.edu.module_pay.callback;
/**
 * Payment result callback.
 *
 * @author Dawn
 * @since 2018/11/22 22:01
 */
public interface PayCallback {

    /** Invoked when the payment completes successfully. */
    void paySuccess();

    /**
     * Invoked when the payment fails.
     *
     * @param msg human-readable failure reason
     */
    void payFailed(String msg);
}
|
/**
 * Redux-style `mapStateToProps` signature: derives the connected component's
 * state props from the store state and the component's own props.
 *
 * @template TState - shape of the store state
 * @template TStateProps - props computed from the state
 * @template TOwnProps - props passed directly to the component
 */
export type IMapStateToProps<TState, TStateProps, TOwnProps> = (
  state: TState,
  ownProps: TOwnProps
) => TStateProps;
|
<filename>src/server/ws/client.js
/**
 * One connected WebSocket client. Bridges incoming client events to the ODM
 * (user CRUD) and pushes responses back over the socket.
 */
var Client = module.exports = class Client {
  /**
   * @param {object} ws - the WebSocket connection for this client
   * @param {*} cid - client id (used only for logging)
   * @param {object} odm - data-access layer (addNewUser/listUsers/removeUser)
   */
  constructor (ws, cid, odm) {
    this.ws = ws;
    this.odm = odm;
    console.log(`New client, ID=${cid}`);
  }

  /** Serializes {event, data} as JSON and sends it, if the socket exists. */
  send (event, data) {
    if (this.ws) this.ws.send(JSON.stringify({ event: event, data: data }));
  }

  /**
   * Dispatches an incoming event to its handler; unknown events get a
   * generic "Unable to process" message back.
   *
   * @param {string} event
   * @param {*} data
   */
  received (event, data) {
    console.log(`received event ${event} with data ${data}`);
    const self = this;
    const odm = this.odm;
    const handlers = {
      newUser (d) {
        odm.addNewUser(d, function (err, user) {
          self.send(
            "message",
            err ? err : `User ${user.getFullName()} added!`
          );
        });
      },
      listUsers () {
        odm.listUsers(function (err, users) {
          self.send(
            err ? "message" : "displayUsers",
            err ? "Unable to display users: " + err : users
          );
        });
      },
      editUser (d) {
        // Editing reuses addNewUser (presumably an upsert in the ODM) and
        // refreshes the client's user list on success.
        odm.addNewUser(d, function (err, user) {
          self.send(
            "message",
            err ? err : `User ${user.getFullName()} changed!`
          );
          if (!err) handlers.listUsers();
        });
      },
      removeUser (id) {
        odm.removeUser(id, function (err) {
          self.send(
            "message",
            err ? "Unable to remove this user" : "User successfully removed!"
          );
          if (!err) handlers.listUsers();
        });
      }
    };
    const handler = handlers[event] || function () {
      self.send("message", `Unable to process ${event}.`);
    };
    handler(data);
  }
};
/** A 2-D point holding an x/y pair with simple accessors. */
class Point {
  /**
   * @param {number} x - horizontal coordinate
   * @param {number} y - vertical coordinate
   */
  constructor(x, y) {
    this.x = x;
    this.y = y;
  }

  /** @returns {number} the x coordinate */
  getX() {
    return this.x;
  }

  /** @returns {number} the y coordinate */
  getY() {
    return this.y;
  }
}
import React from 'react'
import { useStaticQuery, graphql } from 'gatsby'
import Img from 'gatsby-image'
import Title from '../Title'
import style from '../../css/about.module.css'
// Static GraphQL query for the about-section image. Gatsby extracts this
// template literal at build time, so the query text must stay inline here.
const getAbout = graphql`
query aboutImage {
aboutImage:file(relativePath:{eq:"defaultBcg.jpeg"}){
childImageSharp {
fluid(maxWidth:600){
...GatsbyImageSharpFluid
}
}
}
}
`
const About = () => {
const {aboutImage} = useStaticQuery(getAbout)
return (
<div className={style.about}>
<Title title="about" subtitle="us"/>
<div className={style.aboutCenter}>
<article className={style.aboutImg}>
<div className={style.imgContainer}>
{/* <img src={img} alt="About"/> */}
<Img fluid={aboutImage.childImageSharp.fluid} alt="Landscape"/>
</div>
</article>
<article className={style.aboutInfo}>
<h4>explore the difference</h4>
<p>
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Fusce maximus, lectus ac mattis convallis, diam nunc interdum nulla, vitae aliquam leo diam ut lacus.
Mauris sed tempus nunc. Curabitur elementum porta odio, et sagittis orci fermentum ac.
</p>
<button type="button" className="btn-primary">read more</button>
</article>
</div>
</div>
)
}
export default About
|
-- All columns for customers younger than 18, sorted youngest first.
SELECT *
FROM customers
WHERE age < 18
ORDER BY age ASC;
/**
 * Returns true when the user-agent string identifies one of the known mobile
 * platforms (webOS, iPhone/iPad/iPod, BlackBerry, Windows Phone).
 *
 * The original ran six separate case-insensitive `.match()` calls and had a
 * dead `else if` branch for Internet Explorer that returned the same `false`
 * as the final `else`; a single alternation regex is equivalent.
 *
 * @param {string} userAgent - navigator.userAgent (or equivalent) string
 * @returns {boolean} true for a recognized mobile platform
 */
function isMobileDevice(userAgent) {
  return /webOS|iPhone|iPad|iPod|BlackBerry|Windows Phone/i.test(userAgent);
}
/**
 * Strategy object for a JS "extract function" refactoring: supplies the
 * language-specific text fragments (terminators, prefixes, signature pieces)
 * used to assemble the extracted function.
 *
 * NOTE(review): every body below is an unimplemented stub with no return
 * statement; they must be filled in before this class is usable.
 */
class FunctionExtractor {
  /** Statement terminator (e.g. ";") appropriate for the given context. */
  getStatementTerminator(arg1: JSExtractFunctionHandler.ContextInfo): string {
    // Implementation to retrieve the statement terminator based on the context information
    // ...
  }

  /** Prefix text placed before a generated `return` statement. */
  getReturnStatementPrefix(): string {
    // Implementation to retrieve the prefix for a return statement
    // ...
  }

  /** Generator that builds the extracted function's signature. */
  getSignatureGenerator(): ExtractedFunctionSignatureGenerator {
    // Implementation to retrieve the signature generator for the extracted function
    // ...
  }

  /** Prefix text (e.g. "const ") for a generated variable declaration. */
  getVariableDeclarationPrefix(): string {
    // Implementation to retrieve the prefix for a variable declaration
    // ...
  }

  /** Combines a signature and a body into the full function text. */
  addBodyToSignature(arg1: string, arg2: string): string {
    // Implementation to add the body to the function signature and return the updated signature
    // ...
  }

  /** Finds the PSI element the extracted function should be anchored to. */
  static findFunctionAnchor(arg1: PsiElement, arg2: PsiElement): PsiElement {
    // Static method implementation to find the anchor element for the function within the given PsiElements
    // ...
  }
}
'use strict';
const util = require('./util');
const FileManager = require('./fileManager');
const async = require('async');
require('colors');
/**
 * Immutable finding record. Validates that the mandatory fields are present,
 * defaults `data` to an empty object, and namespaces the code with the
 * owning module's key (e.g. "mymodule-1").
 *
 * @param {string} module - owning module key
 * @param {object} options - {code, offender, description, mitigation[, data]}
 * @returns {object} the frozen options object
 */
const Result = function(module, options) {
  // Fail fast when a mandatory field is missing.
  util.enforceArgs(options, ['code', 'offender', 'description', 'mitigation']);
  options.data = util.defaultValue(options.data, {});
  options.code = `${module}-${options.code}`;
  return Object.freeze(options);
};
/**
 * Per-module findings collector. Exposes one recorder per severity
 * (critical/high/medium/low) plus access to the accumulated results.
 *
 * @param {object} module - the module whose findings are collected (uses .key)
 */
const ModuleResults = function(module) {
  const findings = {
    high: [],
    medium: [],
    low: [],
    critical: []
  };
  // Builds a severity-specific recorder that wraps the args in a Result.
  const record = function(severity) {
    return function(...args) {
      findings[severity].push(new Result(module.key, ...args));
    };
  };
  const self = {
    module: module,
    critical: record('critical'),
    high: record('high'),
    medium: record('medium'),
    low: record('low'),
    // Returns the internal findings object (callers share this reference).
    results: function() {
      return findings;
    }
  };
  return Object.freeze(self);
};
/**
 * Scan runner: resolves the set of modules to run against the target, runs
 * each applicable module in sequence, and returns the collected findings
 * filtered by the configured severity threshold.
 *
 * @param {Object} rc - run configuration (modules, threshold, logger, ...)
 */
module.exports = function Scan(rc) {
  rc = util.defaultValue(rc, {});
  // NOTE(review): `new require('./x')(...)` relies on subtle new-operator
  // precedence and on constructors that return objects; a plain
  // `require('./x')(...)` call would be clearer — confirm before changing.
  const logger = util.defaultValue(rc.logger, () => { return new require('./logger')(); });
  const moduleIndex = new require('./modules')(rc);
  const modules = moduleIndex.asArray;
  rc.target = moduleIndex.target;
  const fileManager = new FileManager(rc);
  logger.log('Target for scan:', rc.target);
  const self = {};
  // Maps each module's results to {module, results}, dropping severities
  // below rc.threshold. A 'low' (or absent) threshold keeps everything.
  const generateOutput = results => {
    return results.map(result => {
      // results() returns the collector's internal object, so the deletes
      // below mutate shared state and the second results() call further down
      // observes the filtered object. Works, but couples this function to
      // ModuleResults internals.
      var tmpresults = result.results();
      switch (rc.threshold) {
        case 'critical':
          delete tmpresults.high;
          delete tmpresults.medium;
          delete tmpresults.low;
          break;
        case 'high':
          delete tmpresults.medium;
          delete tmpresults.low;
          break;
        case 'medium':
          delete tmpresults.low;
          break;
        default:
          break;
      }
      return {
        module: result.module,
        results: result.results()
      };
    });
  };
  /**
   * Runs the configured modules (all enabled ones when rc.modules is
   * ['all']) one at a time and calls done(err, output) with the filtered
   * findings.
   */
  self.start = function(done) {
    util.enforceNotEmpty(modules, 'You must specify the modules to scan');
    const results = [];
    let modulesToRun = [];
    const whichModules = rc.modules;
    if(whichModules[0] === 'all') {
      modulesToRun = modules.filter(m => { return m.enabled; });
    } else {
      // Resolve each requested key; unknown keys are logged and skipped.
      whichModules.forEach(key => {
        const module = modules.find(m => { return m.key === key; });
        if(util.isEmpty(module)) {
          logger.warn('Unknown module:', key);
        } else {
          modulesToRun.push(module);
        }
      });
    }
    // Run modules strictly one after another; a module error is reported but
    // does not abort the remaining modules.
    async.forEachSeries(modulesToRun, (module, next) => {
      if(module.handles(fileManager)) {
        logger.log('Running module'.bold, module.name);
        // NOTE(review): ModuleResults is declared with a single `module`
        // parameter above, so the second `rc` argument is ignored here.
        const moduleResults = new ModuleResults(module,rc);
        results.push(moduleResults);
        module.run(moduleResults, err => {
          if(err) {
            logger.error(module.name, 'returned an error!');
            console.log('  ' + err.message);
          }
          next();
        });
      } else {
        logger.log('Not Handling', module.name);
        next();
      }
    }, err => {
      const output = generateOutput(results);
      return done(err, output);
    });
  };
  return Object.freeze(self);
};
|
<filename>src/scripts/node-wrappers/read-dir.js
/* Read Dir
*
* This function is a promise wrapper for the node.js fs.readdir function.
*/
const fs = require("fs");
/**
 * Promise wrapper around fs.readdir: resolves with the entry names of the
 * given directory, rejecting with the underlying error on failure.
 * Logs a short confirmation line on success.
 *
 * @param {string} directory - path of the directory to read
 * @returns {Promise<string[]>} entry names within the directory
 */
function readDir(directory) {
    "use strict";
    return new Promise((resolve, reject) => {
        fs.readdir(directory, (err, fileNames) => {
            if (err) {
                reject(err);
                return;
            }
            console.info(`${directory} read`);
            resolve(fileNames);
        });
    });
}
module.exports = readDir;
|
#!/bin/bash
# Installs a series of GHC binary releases under ${OPT}/ghc-<version>.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# common.inc supplies the helpers used below — at least `fetch` and ${OPT}.
. ${SCRIPT_DIR}/common.inc
# get_ghc VERSION: download, configure and install ghc-VERSION into
# ${OPT}/ghc-VERSION, skipping versions that appear already installed.
get_ghc() {
    local VER=$1
    local DIR=ghc-$VER
    # NOTE(review): this tests a *relative* ghc-$VER path, so the skip only
    # works when the script is run from the install prefix — confirm the
    # intended working directory.
    if [[ ! -d ${DIR} ]]; then
        pushd /tmp
        # The tarball unpacks into /tmp/ghc-$VER, which we then configure
        # with the final install prefix.
        fetch https://downloads.haskell.org/~ghc/${VER}/ghc-${VER}-x86_64-deb8-linux.tar.xz | tar Jxf -
        cd /tmp/ghc-${VER}
        ./configure --prefix=${OPT}/${DIR}
        make install
        popd
    fi
}
get_ghc 8.0.2
# Can't install ghc 8.2.1: https://ghc.haskell.org/trac/ghc/ticket/13945
# get_ghc 8.2.1
get_ghc 8.2.2
get_ghc 8.4.1
get_ghc 8.4.2
get_ghc 8.4.3
get_ghc 8.4.4
get_ghc 8.6.1
get_ghc 8.6.2
get_ghc 8.6.3
|
def search(x, y):
    """Return the index of the first element of y equal to x, or -1 if absent.

    Uses enumerate instead of the original range(len(y)) index loop, which
    also generalizes the function from sequences to arbitrary iterables
    (backward compatible: any input the old version accepted still works).

    Args:
        x: value to look for (compared with ==).
        y: iterable to scan.

    Returns:
        Zero-based index of the first match, or -1 when x does not occur.
    """
    for i, value in enumerate(y):
        if value == x:
            return i
    return -1


print(search(2, [1, 2, 3, 4]))  # outputs 1
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.