text stringlengths 1 1.05M |
|---|
<filename>modules/fabric/chuck_module.py
description = """
Adds Fabric support to your project and creates a default fabfile.
Fabric is a simple, Pythonic tool for remote execution and deployment.
For more information, visit:
http://pypi.python.org/pypi/Fabric
"""
|
#!/bin/bash
R="$(printf '\033[1;31m')"
G="$(printf '\033[1;32m')"
Y="$(printf '\033[1;33m')"
B="$(printf '\033[1;34m')"
C="$(printf '\033[1;36m')"
W="$(printf '\033[1;37m')"
# Clear the terminal and print the coloured ASCII-art "UBUNTU MOD" banner.
# The escape sequences are written inline (not via $R/$G/... above) because
# the banner uses the non-bold variants (\033[33m) of those colours.
banner() {
    clear
    printf "\033[33m _ _ ___ _ _ _ _ ___ _ _ _ _ ____ ___ \033[0m\n"
    printf "\033[36m | | |__] | | |\ | | | | |\/| | | | \ \033[0m\n"
    printf "\033[32m |__| |__] |__| | \| | |__| | | |__| |__/ \033[0m\n"
    printf "\033[0m\n"
    printf " \033[32mA modded gui version of ubuntu for Termux\033[0m\n"
    printf "\033[0m\n"
}
# Ensure the Termux packages needed by the installer (pulseaudio,
# proot-distro, wget) are present, installing any that are missing.
package() {
    echo -e "${R} [${W}-${R}]${C} Checking required packages...${W}"
    termux-setup-storage
    if command -v pulseaudio >/dev/null && command -v proot-distro >/dev/null && command -v wget >/dev/null; then
        echo -e "\n${R} [${W}-${R}]${G} Packages already installed.${W}"
    else
        # Refresh the package lists once, instead of once per missing
        # package as the original did inside the loop.
        apt update -y
        apt upgrade -y
        packs=(pulseaudio proot-distro wget)
        for pack in "${packs[@]}"; do
            # command -v (POSIX) replaces the bash-only `type -p`.
            command -v "$pack" >/dev/null || {
                echo -e "\n${R} [${W}-${R}]${G} Installing package : ${Y}${pack}${C}${W}"
                apt install "$pack" -y
            }
        done
    fi
}
# Install the Ubuntu rootfs via proot-distro unless it is already present,
# then verify the installation actually landed on disk.
distro() {
    echo -e "\n${R} [${W}-${R}]${C} Checking for Distro...${W}"
    termux-reload-settings
    if [[ -d "$PREFIX/var/lib/proot-distro/installed-rootfs/ubuntu" ]]; then
        echo -e "\n${R} [${W}-${R}]${G} Distro already installed.${W}"
        # NOTE(review): this stops the whole script, skipping sound() and
        # permission() on re-runs — confirm that is intended.
        exit 0
    else
        proot-distro install ubuntu
        termux-reload-settings
    fi
    if [[ -d "$PREFIX/var/lib/proot-distro/installed-rootfs/ubuntu" ]]; then
        echo -e "\n${R} [${W}-${R}]${G} Installed Successfully !!${W}"
    else
        echo -e "\n${R} [${W}-${R}]${G} Error Installing Distro !\n${W}"
        # Fixed: the original exited 0 here, reporting success on failure.
        exit 1
    fi
}
# Autostart PulseAudio with a TCP listener on localhost so the proot guest
# can reach the host audio server.
sound() {
    echo -e "\n${R} [${W}-${R}]${C} Fixing Sound Problem...${W}"
    [[ -e "$HOME/.bashrc" ]] || touch "$HOME/.bashrc"
    # Append only once: the original appended on every run, so repeated
    # installs stacked duplicate pulseaudio/pacmd lines in .bashrc.
    if ! grep -qF "pulseaudio --start --exit-idle-time=-1" "$HOME/.bashrc"; then
        echo "pulseaudio --start --exit-idle-time=-1" >> "$HOME/.bashrc"
        echo "pacmd load-module module-native-protocol-tcp auth-ip-acl=127.0.0.1 auth-anonymous=1" >> "$HOME/.bashrc"
    fi
}
# Drop the first-boot user.sh helper into the Ubuntu rootfs and create the
# `ubuntu` launcher command in Termux's bin directory.
permission() {
    banner
    echo -e "${R} [${W}-${R}]${C} Setting up Environment...${W}"
    local rootfs="$PREFIX/var/lib/proot-distro/installed-rootfs/ubuntu"
    if [[ -e "$rootfs/root/user.sh" ]]; then
        chmod +x "$rootfs/root/user.sh"
    else
        wget https://raw.githubusercontent.com/modded-ubuntu/modded-ubuntu/master/distro/user.sh
        mv -f user.sh "$rootfs/root/user.sh"
        chmod +x "$rootfs/root/user.sh"
    fi
    # The launcher simply logs into the proot distro.
    echo "proot-distro login ubuntu" > "$PREFIX/bin/ubuntu"
    if [[ -e "$PREFIX/bin/ubuntu" ]]; then
        chmod +x "$PREFIX/bin/ubuntu"
        termux-reload-settings
        banner
        echo -e "\n${R} [${W}-${R}]${G} Ubuntu-21.04(CLI) is now Installed on your Termux${W}"
        echo -e "\n${R} [${W}-${R}]${G} Restart your Termux to Prevent Some Issues.${W}"
        echo -e "\n${R} [${W}-${R}]${G} Type ${C}ubuntu${G} to run Ubuntu CLI.${W}"
        echo -e "\n${R} [${W}-${R}]${G} If you Want to Use UBUNTU in GUI MODE then ,${W}"
        echo -e "\n${R} [${W}-${R}]${G} Run ${C}ubuntu${G} first & then type ${C}bash user.sh ${W}"
        echo -e "\n"
        exit 0
    else
        echo -e "\n${R} [${W}-${R}]${G} Error Installing Distro !${W}"
        # Fixed: the original exited 0 here, reporting success on failure.
        exit 1
    fi
}
banner
package
distro
sound
permission
|
/**
 * Return a new array containing the elements of `arr` in reverse order.
 * The input array is left untouched.
 */
function reverseArray(arr) {
  // Copy first so reverse() (which works in place) never mutates the input.
  return arr.slice().reverse();
}
let arr = [1, 2, 3, 4, 5];
let reversedArr = reverseArray(arr);
console.log(reversedArr); // Output: [5, 4, 3, 2, 1] |
#!/bin/bash
# Run the Duktape scripting test from the project root, then return to the
# test_launch directory.
# NOTE(review): assumes the script is invoked from test_launch/ — the
# relative `cd ..` / `cd test_launch` pair breaks otherwise; verify callers.
cd ..
./Marathoner --script Script_Duktape --autorun-plugin Script_Duktape \
--autorun-script test/script/script.js
cd test_launch
|
create database tmspnn;
-- User
-- Registered users. Counter columns (articles_count, followers_count, ...)
-- are denormalized aggregates maintained by the application.
create table "user" (
    id serial primary key,
    mobile varchar(64) unique not null,
    password text not null,
    nickname varchar(256) unique not null,
    profile varchar(256) not null default '',
    -- 0: male, 1: female
    gender smallint not null default 0,
    -- 0: normal, 1: abuse_reported, -1: removed
    state smallint not null default 0,
    description varchar(256) not null default '',
    location varchar(128) not null default '',
    fame numeric not null default 1.0,
    email varchar(64) unique not null,
    identity_no varchar(64) unique not null,
    articles_count integer not null default 0,
    followings_count integer not null default 0,
    followers_count integer not null default 0,
    ratings_count integer not null default 0,
    inbox text [] not null default '{}',
    obj jsonb not null default '{}'::jsonb,
    ts_vector tsvector not null default to_tsvector(''),
    created_at timestamp with time zone not null default now(),
    updated_at timestamp with time zone not null default now()
);
-- The column-level UNIQUE on nickname already creates a unique index, so the
-- former explicit user_nickname_uniq_idx duplicated it and has been dropped.
create index user_fame_idx on "user" (fame);
create index user_followers_count_idx on "user" (followers_count);
create index user_obj_gin on "user" using gin (obj jsonb_path_ops);
create index user_search_idx on "user" using gin (ts_vector);
-- Article
-- One row per article. author/author_profile appear to be denormalized from
-- "user" so listings avoid a join -- TODO confirm against application code.
create table "article" (
    id serial primary key,
    created_by integer not null,
    rating numeric not null default 3.0,
    weight numeric not null default 1.0,
    fame numeric not null default 0.0,
    cover varchar(256) not null default '',
    title varchar(256) not null,
    author varchar(256) not null,
    author_profile varchar(256) not null default '',
    summary varchar(256) not null,
    wordcount smallint not null,
    pageview integer not null default 0,
    content text not null,
    -- 0: normal, 1: private, 2: comment_unavailable, 3: abuse_reported, -1: removed
    state smallint not null default 0,
    obj jsonb not null default '{}' :: jsonb,
    ts_vector tsvector not null default to_tsvector(''),
    created_at timestamp with time zone not null default now(),
    updated_at timestamp with time zone not null default now()
);
create index article_weight_idx on "article" (weight);
create index article_fame_idx on "article" (fame);
-- NOTE(review): index name says "create_by" but the column is "created_by";
-- consider renaming to article_created_by_idx for consistency.
create index article_create_by_idx on "article" (created_by);
create index article_obj_gin on "article" using gin (obj jsonb_path_ops);
create index article_search_idx on "article" using gin (ts_vector);
-- Rating
-- One row per rating a user gives to an article. No FK constraints are
-- declared; created_by/article_id presumably reference "user".id and
-- "article".id -- verify against application code.
create table "rating" (
    id serial primary key,
    created_by integer not null,
    article_id integer not null,
    rating numeric not null default 0.0,
    -- per-rating weight, presumably folded into "article".rating -- confirm
    weight numeric not null default 1.0,
    obj jsonb not null default '{}' :: jsonb,
    created_at timestamp with time zone not null default now()
);
create index rating_created_by_idx on "rating" (created_by);
create index rating_article_id_idx on "rating" (article_id);
-- Comment
-- Comments on articles. author_*, article_title and the reference_* columns
-- denormalize the commenter, the article and the replied-to comment so a
-- comment can be rendered without joins -- TODO confirm against callers.
create table "comment" (
    id serial primary key,
    created_by integer not null,
    author varchar(256) not null,
    author_profile varchar(256) not null,
    article_id integer not null,
    article_title varchar(256) not null,
    -- id of the comment this one replies to -- presumably 0 when top-level; verify
    refer_to integer not null,
    reference_author varchar(256) not null,
    reference_author_profile varchar(256) not null,
    reference_content text not null,
    reference_created_at timestamp with time zone not null,
    content text not null,
    advocators_count integer not null default 0,
    obj jsonb not null default '{}' :: jsonb,
    -- 0:normal, 1: abuse_reported, -1: removed
    state smallint not null default 0,
    created_at timestamp with time zone not null default now(),
    updated_at timestamp with time zone not null default now()
);
create index comment_created_by_idx on "comment" (created_by);
create index comment_article_id_idx on "comment" (article_id);
-- Interaction
-- Generic user interaction events (likes, follows, ...).
-- NOTE(review): no legend is given for "type" values, and refer_to's target
-- table depends on "type" -- document both once confirmed in the application.
create table "interaction" (
    id serial primary key,
    "type" smallint not null,
    created_by integer not null,
    refer_to integer not null,
    obj jsonb not null default '{}' :: jsonb,
    created_at timestamp with time zone not null default now()
);
create index interaction_created_by_idx on "interaction" (created_by);
create index interaction_refer_to_idx on "interaction" (refer_to);
-- Conversation
-- Chat conversations; members/muted_by hold "user".id values.
create table "conversation" (
    id serial primary key,
    created_by integer not null,
    members integer [] not null,
    title varchar(128) not null,
    muted_by integer [] not null default '{}',
    obj jsonb not null default '{}'::jsonb,
    created_at timestamp with time zone not null default now(),
    updated_at timestamp with time zone not null default now()
);
create index conversation_created_by_idx on "conversation" (created_by);
-- Fixed: members is integer[], so the GIN index must use the default array
-- operator class; jsonb_path_ops only applies to jsonb columns and made the
-- original statement fail.
create index conversation_members_idx on "conversation" using gin (members);
-- Message
-- Messages within a conversation; nickname/profile denormalize the sender.
create table "message" (
    id serial primary key,
    "uuid" uuid not null,
    created_by integer not null,
    nickname varchar(256) not null,
    profile varchar(256) not null,
    conversation_id integer not null,
    -- 0: text, 1: image, 2: video
    "type" smallint not null default 0,
    "text" text not null default '',
    -- Pathname of the resource, not the whole uri
    "file" varchar(256) not null default '',
    obj jsonb not null default '{}'::jsonb,
    created_at timestamp with time zone not null default now(),
    updated_at timestamp with time zone not null default now()
);
-- Renamed from uuid_idx to follow the <table>_<col>_idx convention used by
-- every other index in this schema.
create index message_uuid_idx on "message" ("uuid");
create index message_created_by_idx on "message" (created_by);
create index message_conversation_id_idx on "message" (conversation_id);
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-N-VB-IP/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-N-VB-IP/7-512+512+512-only-pad-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function pad_first_third_full --eval_function last_element_eval |
#!/bin/sh
# Copyright 2020 Coinbase, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# Inspired by: https://github.com/golangci/golangci-lint/blob/master/install.sh
usage() {
this=$1
cat <<EOF
$this: download pre-compiled Docker images for ScArFaCe2020/rosetta-euno
Usage: $this [-d]
-d turns on debug logging
EOF
exit 2
}
parse_args() {
while getopts "dh?" arg; do
case "$arg" in
d) log_set_priority 10 ;;
h | \?) usage "$0" ;;
esac
done
shift $((OPTIND - 1))
TAG=$1
}
execute() {
tmpdir=$(mktemp -d)
log_info "downloading image into ${tmpdir}"
http_download "${tmpdir}/${TARBALL}" "${TARBALL_URL}" "" "1"
docker load --input "${tmpdir}/${TARBALL}"
docker tag "rosetta-euno:${TAG}" "rosetta-euno:latest"
log_info "loaded rosetta-euno:${TAG} and tagged as rosetta-euno:latest"
rm -rf "${tmpdir}"
log_info "removed temporary directory ${tmpdir}"
}
github_tag() {
log_info "checking GitHub for latest tag"
REALTAG=$(github_release "$OWNER/$REPO" "${TAG}")
TAG="$REALTAG"
}
cat /dev/null <<EOF
------------------------------------------------------------------------
https://github.com/client9/shlib - portable posix shell functions
Public domain - http://unlicense.org
https://github.com/client9/shlib/blob/master/LICENSE.md
but credit (and pull requests) appreciated.
------------------------------------------------------------------------
EOF
is_command() {
command -v "$1" >/dev/null
}
echoerr() {
echo "$@" 1>&2
}
log_prefix() {
echo "$0"
}
_logp=6
log_set_priority() {
_logp="$1"
}
log_priority() {
if test -z "$1"; then
echo "$_logp"
return
fi
[ "$1" -le "$_logp" ]
}
log_tag() {
case $1 in
0) echo "emerg" ;;
1) echo "alert" ;;
2) echo "crit" ;;
3) echo "err" ;;
4) echo "warning" ;;
5) echo "notice" ;;
6) echo "info" ;;
7) echo "debug" ;;
*) echo "$1" ;;
esac
}
log_debug() {
log_priority 7 || return 0
echoerr "$(log_prefix)" "$(log_tag 7)" "$@"
}
log_info() {
log_priority 6 || return 0
echoerr "$(log_prefix)" "$(log_tag 6)" "$@"
}
log_err() {
log_priority 3 || return 0
echoerr "$(log_prefix)" "$(log_tag 3)" "$@"
}
log_crit() {
log_priority 2 || return 0
echoerr "$(log_prefix)" "$(log_tag 2)" "$@"
}
untar() {
tarball=$1
case "${tarball}" in
*.tar.gz | *.tgz) tar --no-same-owner -xzf "${tarball}" ;;
*.tar) tar --no-same-owner -xf "${tarball}" ;;
*.zip) unzip "${tarball}" ;;
*)
log_err "untar unknown archive format for ${tarball}"
return 1
;;
esac
}
http_download_curl() {
local_file=$1
source_url=$2
header=$3
loud=$4
quiet_var="-L"
if [ -z "$loud" ]; then
quiet_var="-sL"
fi
if [ -z "$header" ]; then
code=$(curl -w '%{http_code}' "$quiet_var" -o "$local_file" "$source_url")
else
code=$(curl -w '%{http_code}' "$quiet_var" -H "$header" -o "$local_file" "$source_url")
fi
if [ "$code" != "200" ]; then
log_debug "http_download_curl received HTTP status $code"
return 1
fi
return 0
}
# http_download_wget <local_file> <source_url> [header] [loud]
# Download source_url to local_file with wget; quiet unless `loud` is set.
http_download_wget() {
    local_file=$1
    source_url=$2
    header=$3
    loud=$4
    # Build the optional quiet flag in "$@": the original passed "$quiet_var"
    # unconditionally, so in loud mode wget received an empty string argument
    # and rejected it as an invalid URL.
    set --
    if [ -z "$loud" ]; then
        set -- -q
    fi
    if [ -z "$header" ]; then
        wget "$@" -O "$local_file" "$source_url"
    else
        wget "$@" --header "$header" -O "$local_file" "$source_url"
    fi
}
http_download() {
log_debug "http_download $2"
if is_command curl; then
http_download_curl "$@"
return
elif is_command wget; then
http_download_wget "$@"
return
fi
log_crit "http_download unable to find wget or curl"
return 1
}
http_copy() {
tmp=$(mktemp)
http_download "${tmp}" "$1" "$2" || return 1
body=$(cat "$tmp")
rm -f "${tmp}"
echo "$body"
}
# github_release <owner/repo> [version]
# Resolve a release version for a GitHub repository; an empty version means
# "latest". Prints the release's tag_name on stdout; returns 1 on failure.
github_release() {
    owner_repo=$1
    version=$2
    test -z "$version" && version="latest"
    giturl="https://github.com/${owner_repo}/releases/${version}"
    json=$(http_copy "$giturl" "Accept:application/json")
    test -z "$json" && return 1
    # NOTE(review): tag_name is extracted with tr/sed rather than a JSON
    # parser; this assumes the field appears first with no embedded quotes --
    # fragile, but the dependency-free norm for shell installers.
    version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//')
    test -z "$version" && return 1
    echo "$version"
}
cat /dev/null <<EOF
------------------------------------------------------------------------
End of functions from https://github.com/client9/shlib
------------------------------------------------------------------------
EOF
BINARY=rosetta-euno
FORMAT=tar.gz
OWNER=ScArFaCe2020
REPO="rosetta-euno"
PREFIX="$OWNER/$REPO"
# use in logging routines
log_prefix() {
echo "$PREFIX"
}
GITHUB_DOWNLOAD=https://github.com/${OWNER}/${REPO}/releases/download
parse_args "$@"
github_tag
log_info "found version: ${TAG}"
NAME=${BINARY}-${TAG}
TARBALL=${NAME}.${FORMAT}
TARBALL_URL=${GITHUB_DOWNLOAD}/${TAG}/${TARBALL}
execute
|
# Launch Hyperledger Fabric peer2.org1.example.com on the posta-net network.
# Fix: CORE_PEER_ID previously said peer1.org1.example.com, inconsistent with
# the container name, CORE_PEER_NETWORKID, chaincode listen address and gossip
# endpoint (all peer2) — almost certainly a copy/paste slip from the peer1
# launch command. Also removed the dangling trailing backslash after
# `peer node start`, which swallowed the following line when sourced.
docker run --rm -it \
    --network="posta-net" \
    --link orderer0.example.com:orderer0.example.com \
    --link orderer1.example.com:orderer1.example.com \
    --link orderer2.example.com:orderer2.example.com \
    --name peer2.org1.example.com -p 11051:7051 -p 11053:7053 \
    -e CORE_PEER_ADDRESSAUTODETECT=true \
    -e CORE_PEER_CHAINCODELISTENADDRESS=peer2.org1.example.com:7052 \
    -e CORE_VM_ENDPOINT=unix:///host/var/run/docker.sock \
    -e CORE_LOGGING_LEVEL=INFO \
    -e CORE_PEER_NETWORKID=peer2.org1.example.com \
    -e CORE_NEXT=true \
    -e CORE_PEER_ENDORSER_ENABLED=true \
    -e CORE_PEER_ID=peer2.org1.example.com \
    -e CORE_PEER_PROFILE_ENABLED=true \
    -e CORE_PEER_GOSSIP_ORGLEADER=false \
    -e CORE_PEER_GOSSIP_EXTERNALENDPOINT=peer2.org1.example.com:7051 \
    -e CORE_PEER_GOSSIP_IGNORESECURITY=true \
    -e CORE_PEER_LOCALMSPID=Org1MSP \
    -e CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE=posta-net \
    -e CORE_PEER_GOSSIP_BOOTSTRAP=peer0.org1.example.com:7051 \
    -e CORE_PEER_GOSSIP_USELEADERELECTION=true \
    -e CORE_PEER_TLS_ENABLED=false -v /var/run/:/host/var/run/ \
    -v "$(pwd)"/crypto-config/peerOrganizations/org1.example.com/peers/peer2.org1.example.com/msp:/etc/hyperledger/fabric/msp \
    -v /var/hyperledger/peer12:/var/hyperledger/production \
    -w /opt/gopath/src/github.com/hyperledger/fabric/peer hyperledger/fabric-peer peer node start
|
#
#/**
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements. See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership. The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License. You may obtain a copy of the License at
# *
# * http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
# included in all the hbase scripts with source command
# should not be executable directly
# also should not be passed any arguments, since we need original $*
# Modelled after $HADOOP_HOME/bin/hadoop-env.sh.
# resolve links - "${BASH_SOURCE-$0}" may be a softlink
this="${BASH_SOURCE-$0}"
while [ -h "$this" ]; do
ls=`ls -ld "$this"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
this="$link"
else
this=`dirname "$this"`/"$link"
fi
done
# convert relative path to absolute path
bin=`dirname "$this"`
script=`basename "$this"`
bin=`cd "$bin">/dev/null; pwd`
this="$bin/$script"
# the root of the hbase installation
if [ -z "$HBASE_HOME" ]; then
export HBASE_HOME=`dirname "$this"`/..
fi
#check to see if the conf dir or hbase home are given as an optional arguments
while [ $# -gt 1 ]
do
    if [ "--config" = "$1" ]
    then
        # --config <dir>: override the HBase configuration directory.
        shift
        confdir=$1
        shift
        HBASE_CONF_DIR=$confdir
    elif [ "--hosts" = "$1" ]
    then
        # --hosts <file>: override the region-servers host-list file.
        shift
        hosts=$1
        shift
        HBASE_REGIONSERVERS=$hosts
    elif [ "--auth-as-server" = "$1" ]
    then
        shift
        # Consumed by scripts that source this file; intentionally unused here.
        # shellcheck disable=SC2034
        AUTH_AS_SERVER="true"
    else
        # Presume we are at end of options and break
        break
    fi
done
# Allow alternate hbase conf dir location.
HBASE_CONF_DIR="${HBASE_CONF_DIR:-$HBASE_HOME/conf}"
# List of hbase regions servers.
HBASE_REGIONSERVERS="${HBASE_REGIONSERVERS:-$HBASE_CONF_DIR/regionservers}"
# List of hbase secondary masters.
HBASE_BACKUP_MASTERS="${HBASE_BACKUP_MASTERS:-$HBASE_CONF_DIR/backup-masters}"
# Thrift JMX opts
if [[ -n "$HBASE_JMX_OPTS" && -z "$HBASE_THRIFT_JMX_OPTS" ]]; then
HBASE_THRIFT_JMX_OPTS="$HBASE_JMX_OPTS -Dcom.sun.management.jmxremote.port=10103"
fi
# Thrift opts
if [ -z "$HBASE_THRIFT_OPTS" ]; then
export HBASE_THRIFT_OPTS="$HBASE_THRIFT_JMX_OPTS"
fi
# REST JMX opts
if [[ -n "$HBASE_JMX_OPTS" && -z "$HBASE_REST_JMX_OPTS" ]]; then
HBASE_REST_JMX_OPTS="$HBASE_JMX_OPTS -Dcom.sun.management.jmxremote.port=10105"
fi
# REST opts
if [ -z "$HBASE_REST_OPTS" ]; then
export HBASE_REST_OPTS="$HBASE_REST_JMX_OPTS"
fi
# Source the hbase-env.sh. Will have JAVA_HOME defined.
# HBASE-7817 - Source the hbase-env.sh only if it has not already been done. HBASE_ENV_INIT keeps track of it.
if [ -z "$HBASE_ENV_INIT" ] && [ -f "${HBASE_CONF_DIR}/hbase-env.sh" ]; then
. "${HBASE_CONF_DIR}/hbase-env.sh"
export HBASE_ENV_INIT="true"
fi
# Verify if hbase has the mlock agent
if [ "$HBASE_REGIONSERVER_MLOCK" = "true" ]; then
MLOCK_AGENT="$HBASE_HOME/lib/native/libmlockall_agent.so"
if [ ! -f "$MLOCK_AGENT" ]; then
cat 1>&2 <<EOF
Unable to find mlockall_agent, hbase must be compiled with -Pnative
EOF
exit 1
fi
if [ -z "$HBASE_REGIONSERVER_UID" ] || [ "$HBASE_REGIONSERVER_UID" == "$USER" ]; then
HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -agentpath:$MLOCK_AGENT"
else
HBASE_REGIONSERVER_OPTS="$HBASE_REGIONSERVER_OPTS -agentpath:$MLOCK_AGENT=user=$HBASE_REGIONSERVER_UID"
fi
fi
# Newer versions of glibc use an arena memory allocator that causes virtual
# memory usage to explode. Tune the variable down to prevent vmem explosion.
export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
# Now having JAVA_HOME defined is required
if [ -z "$JAVA_HOME" ]; then
cat 1>&2 <<EOF
+======================================================================+
| Error: JAVA_HOME is not set |
+----------------------------------------------------------------------+
| Please download the latest Sun JDK from the Sun Java web site |
| > http://www.oracle.com/technetwork/java/javase/downloads |
| |
| HBase requires Java 1.6 or later. |
+======================================================================+
EOF
exit 1
fi
|
def eliminate_element(arr, element):
    """Remove every occurrence of ``element`` from ``arr`` in place.

    The original deleted items while iterating over ``range(len(arr))``,
    which skips the element immediately after each deletion and raises
    IndexError once the list shrinks below its original length.

    Args:
        arr: list to filter; mutated in place.
        element: value to remove (compared with ``==``).

    Returns:
        The same list object, as the original did.
    """
    # Slice assignment preserves the in-place mutation contract.
    arr[:] = [item for item in arr if item != element]
    return arr
#!/bin/bash
# Scrub backup/cloud credentials (duplicity-style passphrases, FTP and AWS
# keys) from the current environment so they do not leak into subsequent
# commands. Must be sourced, not executed, to affect the caller's shell.
unset PASSPHRASE
unset SIGN_PASSPHRASE
unset FTP_PASSWORD
unset AWS_ACCESS_KEY_ID
unset AWS_SECRET_ACCESS_KEY
|
const YTDL = require('ytdl-core');
const Commando = require('discord.js-commando');
// Join the invoking member's voice channel and start playback.
// `args` is the requested track (URL or search string): empty is a no-op,
// and the literal "René" selects a hard-coded easter-egg track.
// Relies on the pre-v12 discord.js API (voiceChannel / voiceConnection)
// -- TODO confirm the discord.js version pinned in package.json.
function JoinAndPlayMusic(message, args){
    if(message.member.voiceChannel){
        // Only join when the bot has no active connection in this guild.
        if(!message.guild.voiceConnection){
            message.member.voiceChannel.join().then(connection => {
                if(args == ""){
                    //PlayMusic(connection, "https://www.youtube.com/watch?v=owtl9rk_UL0", message);
                }else if(args == "René"){
                    PlayMusic(connection, "https://www.youtube.com/watch?v=owtl9rk_UL0", message);
                } else {
                    PlayMusic(connection, args, message);
                }
            })
            .catch(console.log);
        }
    } else {
        // Reply (French): "You must be in a voice channel to do that".
        message.reply("Tu dois être dans un channel vocal pour faire ça");
    }
}
// Stream `music` (a YouTube URL or anything ytdl-core accepts) into the
// given voice connection, then disconnect when the track ends.
function PlayMusic(connection, music, message){
    // audioonly avoids downloading the video stream.
    const dispatcher = connection.playStream(YTDL(music, {filter: "audioonly"}));
    dispatcher.on("end", function(){
        message.guild.voiceConnection.disconnect();
    });
    dispatcher.on('error', e=> {
        console.log(e);
    });
    // Fixed low volume -- presumably to avoid blasting the channel; confirm.
    dispatcher.setVolume(0.3);
}
// Commando command `play` (group: soundplayer): joins the invoker's voice
// channel and plays the requested track via JoinAndPlayMusic.
class SoundPlayer extends Commando.Command {
    constructor(client){
        super(client, {
            name: 'play',
            group: 'soundplayer',
            memberName: 'play',
            description: 'Play Sound in a voice channel'
        });
    }
    // args: raw command argument string forwarded as the track request.
    async run(message, args){
        JoinAndPlayMusic(message, args);
    }
}
module.exports = SoundPlayer; |
from django.db import models
class Download(models.Model):
    """A single download job tracked by the application."""

    # Free-text query the download was created from -- TODO confirm semantics
    # against the view/task that creates Download rows.
    query = models.TextField()
    # True once the job has completed.
    finished = models.BooleanField(default=False)
    # Resolved media URL -- presumably filled in when finished; verify caller.
    url = models.TextField()
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
"""Tests for parsing ISA assay files"""
import io
import os
import pytest
from altamisa.constants import table_headers
from altamisa.exceptions import IsaWarning
from altamisa.isatab import models
from altamisa.isatab import (
InvestigationReader,
InvestigationValidator,
AssayRowReader,
AssayReader,
AssayValidator,
)
def test_assay_row_reader_minimal_assay(minimal_investigation_file, minimal_assay_file):
    """Use ``AssayRowReader`` to read in minimal assay file."""
    # NOTE(review): minimal_investigation_file is requested but never used
    # here -- presumably for fixture side effects; confirm or drop it.
    # Create new row reader and check read headers
    row_reader = AssayRowReader.from_stream("S1", "A1", minimal_assay_file)
    assert 5 == len(row_reader.header)
    # Read all rows in assay
    rows = list(row_reader.read())
    # Check results: one row of four nodes
    # (sample -> sequencing process -> two raw data files).
    assert 1 == len(rows)
    first_row = rows[0]
    assert 4 == len(first_row)
    # Node 1: the input sample material.
    expected = models.Material(
        "Sample Name",
        "S1-sample-0815-N1",
        "0815-N1",
        None,
        (),
        (),
        (),
        None,
        [table_headers.SAMPLE_NAME],
    )
    assert expected == first_row[0]
    # Node 2: the sequencing process producing the data files.
    expected = models.Process(
        "nucleic acid sequencing",
        "S1-A1-0815-N1-DNA1-WES1-3",
        "0815-N1-DNA1-WES1",
        "Assay Name",
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
    )
    assert expected == first_row[1]
    # Nodes 3 and 4: the paired-end R1/R2 raw data files.
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL4",
        "0815-N1-DNA1-WES1_L???_???_R1.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == first_row[2]
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL5",
        "0815-N1-DNA1-WES1_L???_???_R2.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == first_row[3]
def test_assay_reader_minimal_assay(minimal_investigation_file, minimal_assay_file):
"""Use ``AssayReader`` to read in minimal assay file.
Using the ``AssayReader`` instead of the ``AssayRowReader`` gives us
``Assay`` objects instead of just the row-wise nodes.
"""
# Load investigation (tested elsewhere)
investigation = InvestigationReader.from_stream(minimal_investigation_file).read()
with pytest.warns(IsaWarning) as record:
InvestigationValidator(investigation).validate()
# Check warnings
assert 2 == len(record)
# Create new row reader and check read headers
reader = AssayReader.from_stream("S1", "A1", minimal_assay_file)
assert 5 == len(reader.header)
# Read and validate assay
assay = reader.read()
AssayValidator(
investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
).validate()
# Check results
assert os.path.normpath(str(assay.file)).endswith(
os.path.normpath("data/i_minimal/a_minimal.txt")
)
assert 5 == len(assay.header)
assert 3 == len(assay.materials)
assert 1 == len(assay.processes)
assert 3 == len(assay.arcs)
expected = models.Material(
"Sample Name",
"S1-sample-0815-N1",
"0815-N1",
None,
(),
(),
(),
None,
[table_headers.SAMPLE_NAME],
)
assert expected == assay.materials["S1-sample-0815-N1"]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL4",
"0815-N1-DNA1-WES1_L???_???_R1.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == assay.materials["S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL4"]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL5",
"0815-N1-DNA1-WES1_L???_???_R2.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == assay.materials["S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL5"]
expected = models.Process(
"nucleic acid sequencing",
"S1-A1-0815-N1-DNA1-WES1-3",
"0815-N1-DNA1-WES1",
"Assay Name",
None,
None,
(),
(),
None,
None,
None,
[table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
)
assert expected == assay.processes["S1-A1-0815-N1-DNA1-WES1-3"]
expected = (
models.Arc("S1-sample-0815-N1", "S1-A1-0815-N1-DNA1-WES1-3"),
models.Arc(
"S1-A1-0815-N1-DNA1-WES1-3", "S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL4"
),
models.Arc(
"S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL4",
"S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL5",
),
)
assert expected == assay.arcs
def test_assay_row_reader_small_assay(small_investigation_file, small_assay_file):
"""Use ``AssayRowReader`` to read in small assay file."""
# Create new row reader and check read headers
row_reader = AssayRowReader.from_stream("S1", "A1", small_assay_file)
assert 9 == len(row_reader.header)
# Read all rows in assay
rows = list(row_reader.read())
# Check results
assert 2 == len(rows)
first_row = rows[0]
second_row = rows[1]
assert 8 == len(first_row)
expected = models.Material(
"Sample Name",
"S1-sample-0815-N1",
"0815-N1",
None,
(),
(),
(),
None,
[table_headers.SAMPLE_NAME],
)
assert expected == first_row[0]
expected = models.Process(
"library preparation",
"S1-A1-library preparation-2-1",
None,
None,
None,
None,
(),
(),
None,
None,
None,
[table_headers.PROTOCOL_REF],
)
assert expected == first_row[1]
expected = models.Material(
"Library Name",
"S1-A1-0815-N1-DNA1-COL3",
"0815-N1-DNA1",
None,
(),
(),
(),
None,
[table_headers.LIBRARY_NAME],
)
assert expected == first_row[2]
expected = models.Process(
"nucleic acid sequencing",
"S1-A1-0815-N1-DNA1-WES1-5",
"0815-N1-DNA1-WES1",
"Assay Name",
None,
None,
(),
(),
None,
None,
None,
[table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
)
assert expected == first_row[3]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
"0815-N1-DNA1-WES1_L???_???_R1.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == first_row[4]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
"0815-N1-DNA1-WES1_L???_???_R2.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == first_row[5]
expected = models.Process(
"Unknown",
"S1-A1-somatic variant calling-1-8",
"somatic variant calling-1",
"Data Transformation Name",
None,
None,
(),
(),
None,
None,
None,
[table_headers.DATA_TRANSFORMATION_NAME],
)
assert expected == first_row[6]
expected = models.Material(
"Derived Data File",
"S1-A1-0815-somatic.vcf.gz-COL9",
"0815-somatic.vcf.gz",
None,
(),
(),
(),
None,
[table_headers.DERIVED_DATA_FILE],
)
assert expected == first_row[7]
assert 8 == len(second_row)
expected = models.Material(
"Sample Name",
"S1-sample-0815-T1",
"0815-T1",
None,
(),
(),
(),
None,
[table_headers.SAMPLE_NAME],
)
assert expected == second_row[0]
expected = models.Process(
"library preparation",
"S1-A1-library preparation-2-2",
None,
None,
None,
None,
(),
(),
None,
None,
None,
[table_headers.PROTOCOL_REF],
)
assert expected == second_row[1]
expected = models.Material(
"Library Name",
"S1-A1-0815-T1-DNA1-COL3",
"0815-T1-DNA1",
None,
(),
(),
(),
None,
[table_headers.LIBRARY_NAME],
)
assert expected == second_row[2]
expected = models.Process(
"nucleic acid sequencing",
"S1-A1-0815-T1-DNA1-WES1-5",
"0815-T1-DNA1-WES1",
"Assay Name",
None,
None,
(),
(),
None,
None,
None,
[table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
)
assert expected == second_row[3]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-T1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
"0815-T1-DNA1-WES1_L???_???_R1.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == second_row[4]
expected = models.Material(
"Raw Data File",
"S1-A1-0815-T1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
"0815-T1-DNA1-WES1_L???_???_R2.fastq.gz",
None,
(),
(),
(),
None,
[table_headers.RAW_DATA_FILE],
)
assert expected == second_row[5]
expected = models.Process(
"Unknown",
"S1-A1-somatic variant calling-1-8",
"somatic variant calling-1",
"Data Transformation Name",
None,
None,
(),
(),
None,
None,
None,
[table_headers.DATA_TRANSFORMATION_NAME],
)
assert expected == second_row[6]
expected = models.Material(
"Derived Data File",
"S1-A1-0815-somatic.vcf.gz-COL9",
"0815-somatic.vcf.gz",
None,
(),
(),
(),
None,
[table_headers.DERIVED_DATA_FILE],
)
assert expected == second_row[7]
def test_assay_reader_small_assay(small_investigation_file, small_assay_file):
    """Use ``AssayReader`` to read in small assay file.

    Checks the fully parsed object graph of ``a_small.txt``: every material,
    every process, and the complete arc (edge) set for the two sample chains
    (0815-N1 and 0815-T1).
    """
    # Load investigation (tested elsewhere)
    investigation = InvestigationReader.from_stream(small_investigation_file).read()
    with pytest.warns(IsaWarning) as record:
        InvestigationValidator(investigation).validate()
    # Check warnings
    assert 2 == len(record)
    # Create new row reader and check read headers
    reader = AssayReader.from_stream("S1", "A1", small_assay_file)
    assert 9 == len(reader.header)
    # Read assay
    with pytest.warns(IsaWarning) as record:
        assay = reader.read()
        AssayValidator(
            investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
        ).validate()
    # Check warnings
    assert 1 == len(record)
    # Check results
    assert os.path.normpath(str(assay.file)).endswith(os.path.normpath("data/i_small/a_small.txt"))
    assert 9 == len(assay.header)
    assert 9 == len(assay.materials)
    assert 5 == len(assay.processes)
    assert 13 == len(assay.arcs)
    # The positional arguments below follow the models.Material / models.Process
    # constructors -- TODO confirm exact field order against the models module.
    # The two sample materials (0815-N1 and 0815-T1):
    expected = models.Material(
        "Sample Name",
        "S1-sample-0815-N1",
        "0815-N1",
        None,
        (),
        (),
        (),
        None,
        [table_headers.SAMPLE_NAME],
    )
    assert expected == assay.materials["S1-sample-0815-N1"]
    expected = models.Material(
        "Sample Name",
        "S1-sample-0815-T1",
        "0815-T1",
        None,
        (),
        (),
        (),
        None,
        [table_headers.SAMPLE_NAME],
    )
    assert expected == assay.materials["S1-sample-0815-T1"]
    # Raw data files (paired R1/R2 reads for each sample):
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
        "0815-N1-DNA1-WES1_L???_???_R1.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == assay.materials["S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL6"]
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
        "0815-N1-DNA1-WES1_L???_???_R2.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == assay.materials["S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL7"]
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-T1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
        "0815-T1-DNA1-WES1_L???_???_R1.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == assay.materials["S1-A1-0815-T1-DNA1-WES1_L???_???_R1.fastq.gz-COL6"]
    expected = models.Material(
        "Raw Data File",
        "S1-A1-0815-T1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
        "0815-T1-DNA1-WES1_L???_???_R2.fastq.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.RAW_DATA_FILE],
    )
    assert expected == assay.materials["S1-A1-0815-T1-DNA1-WES1_L???_???_R2.fastq.gz-COL7"]
    # The single derived data file both chains converge on:
    expected = models.Material(
        "Derived Data File",
        "S1-A1-0815-somatic.vcf.gz-COL9",
        "0815-somatic.vcf.gz",
        None,
        (),
        (),
        (),
        None,
        [table_headers.DERIVED_DATA_FILE],
    )
    assert expected == assay.materials["S1-A1-0815-somatic.vcf.gz-COL9"]
    # Processes (library preparation per sample, sequencing per sample):
    expected = models.Process(
        "library preparation",
        "S1-A1-library preparation-2-1",
        None,
        None,
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF],
    )
    assert expected == assay.processes["S1-A1-library preparation-2-1"]
    expected = models.Process(
        "library preparation",
        "S1-A1-library preparation-2-2",
        None,
        None,
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF],
    )
    assert expected == assay.processes["S1-A1-library preparation-2-2"]
    expected = models.Process(
        "nucleic acid sequencing",
        "S1-A1-0815-N1-DNA1-WES1-5",
        "0815-N1-DNA1-WES1",
        "Assay Name",
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
    )
    assert expected == assay.processes["S1-A1-0815-N1-DNA1-WES1-5"]
    expected = models.Process(
        "nucleic acid sequencing",
        "S1-A1-0815-T1-DNA1-WES1-5",
        "0815-T1-DNA1-WES1",
        "Assay Name",
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF, table_headers.ASSAY_NAME],
    )
    assert expected == assay.processes["S1-A1-0815-T1-DNA1-WES1-5"]
    # Expected arcs: for each sample, sample -> library prep -> library ->
    # sequencing -> R1 -> R2 -> somatic variant calling; both chains end at
    # the shared VCF node.
    expected = (
        models.Arc("S1-sample-0815-N1", "S1-A1-library preparation-2-1"),
        models.Arc("S1-A1-library preparation-2-1", "S1-A1-0815-N1-DNA1-COL3"),
        models.Arc("S1-A1-0815-N1-DNA1-COL3", "S1-A1-0815-N1-DNA1-WES1-5"),
        models.Arc(
            "S1-A1-0815-N1-DNA1-WES1-5", "S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL6"
        ),
        models.Arc(
            "S1-A1-0815-N1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
            "S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
        ),
        models.Arc(
            "S1-A1-0815-N1-DNA1-WES1_L???_???_R2.fastq.gz-COL7", "S1-A1-somatic variant calling-1-8"
        ),
        models.Arc("S1-A1-somatic variant calling-1-8", "S1-A1-0815-somatic.vcf.gz-COL9"),
        models.Arc("S1-sample-0815-T1", "S1-A1-library preparation-2-2"),
        models.Arc("S1-A1-library preparation-2-2", "S1-A1-0815-T1-DNA1-COL3"),
        models.Arc("S1-A1-0815-T1-DNA1-COL3", "S1-A1-0815-T1-DNA1-WES1-5"),
        models.Arc(
            "S1-A1-0815-T1-DNA1-WES1-5", "S1-A1-0815-T1-DNA1-WES1_L???_???_R1.fastq.gz-COL6"
        ),
        models.Arc(
            "S1-A1-0815-T1-DNA1-WES1_L???_???_R1.fastq.gz-COL6",
            "S1-A1-0815-T1-DNA1-WES1_L???_???_R2.fastq.gz-COL7",
        ),
        models.Arc(
            "S1-A1-0815-T1-DNA1-WES1_L???_???_R2.fastq.gz-COL7", "S1-A1-somatic variant calling-1-8"
        ),
    )
    assert expected == assay.arcs
def test_assay_reader_small2_assay(small2_investigation_file, small2_assay_file):
    """Use ``AssayReader`` to read in small assay file.

    This variant exercises a proteomics-style assay with splitting/pooling:
    the full expected arc set (74 arcs) is compared order-insensitively.
    """
    # Load investigation (tested elsewhere)
    investigation = InvestigationReader.from_stream(small2_investigation_file).read()
    with pytest.warns(IsaWarning) as record:
        InvestigationValidator(investigation).validate()
    # Check warnings
    assert 1 == len(record)
    # Create new row reader and check read headers
    reader = AssayReader.from_stream("S1", "A1", small2_assay_file)
    assert 14 == len(reader.header)
    # Read assay
    assay = reader.read()
    AssayValidator(
        investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
    ).validate()
    # Check results
    assert os.path.normpath(str(assay.file)).endswith(
        os.path.normpath("data/i_small2/a_small2.txt")
    )
    assert 14 == len(assay.header)
    assert 25 == len(assay.materials)
    assert 41 == len(assay.processes)
    assert 74 == len(assay.arcs)
    # Comments
    expected = models.Comment(name="Replicate", value="B")
    assert assay.materials["S1-A1-0815-T1-Pro1-B-115-COL5"].comments[0] == expected
    # Expected arcs: extraction -> labeling (replicates A..F per sample) ->
    # chromatography -> mass spectrometry / pooling -> raw files ->
    # data transformation / analysis -> result files.
    expected = (
        models.Arc("S1-sample-0815-N1", "S1-A1-extraction-2-1"),
        models.Arc("S1-sample-0815-T1", "S1-A1-extraction-2-2"),
        models.Arc("S1-A1-extraction-2-1", "S1-A1-0815-N1-Pro1-COL3"),
        models.Arc("S1-A1-extraction-2-2", "S1-A1-0815-T1-Pro1-COL3"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-1"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-2"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-3"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-4"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-5"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-6"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-7"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-8"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-9"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-10"),
        models.Arc("S1-A1-0815-N1-Pro1-COL3", "S1-A1-labeling-4-11"),
        models.Arc("S1-A1-0815-T1-Pro1-COL3", "S1-A1-labeling-4-12"),
        models.Arc("S1-A1-labeling-4-1", "S1-A1-0815-N1-Pro1-A-114-COL5"),
        models.Arc("S1-A1-labeling-4-2", "S1-A1-0815-T1-Pro1-A-115-COL5"),
        models.Arc("S1-A1-labeling-4-3", "S1-A1-0815-N1-Pro1-B-114-COL5"),
        models.Arc("S1-A1-labeling-4-4", "S1-A1-0815-T1-Pro1-B-115-COL5"),
        models.Arc("S1-A1-labeling-4-5", "S1-A1-0815-N1-Pro1-C-114-COL5"),
        models.Arc("S1-A1-labeling-4-6", "S1-A1-0815-T1-Pro1-C-115-COL5"),
        models.Arc("S1-A1-labeling-4-7", "S1-A1-0815-N1-Pro1-D-114-COL5"),
        models.Arc("S1-A1-labeling-4-8", "S1-A1-0815-T1-Pro1-D-115-COL5"),
        models.Arc("S1-A1-labeling-4-9", "S1-A1-0815-N1-Pro1-E-114-COL5"),
        models.Arc("S1-A1-labeling-4-10", "S1-A1-0815-T1-Pro1-E-115-COL5"),
        models.Arc("S1-A1-labeling-4-11", "S1-A1-0815-N1-Pro1-F-114-COL5"),
        models.Arc("S1-A1-labeling-4-12", "S1-A1-0815-T1-Pro1-F-115-COL5"),
        models.Arc("S1-A1-0815-N1-Pro1-A-114-COL5", "S1-A1-chromatography-8-1"),
        models.Arc("S1-A1-0815-T1-Pro1-A-115-COL5", "S1-A1-chromatography-8-2"),
        models.Arc("S1-A1-0815-N1-Pro1-B-114-COL5", "S1-A1-chromatography-8-3"),
        models.Arc("S1-A1-0815-T1-Pro1-B-115-COL5", "S1-A1-chromatography-8-4"),
        models.Arc("S1-A1-0815-N1-Pro1-C-114-COL5", "S1-A1-chromatography-8-5"),
        models.Arc("S1-A1-0815-T1-Pro1-C-115-COL5", "S1-A1-chromatography-8-6"),
        models.Arc("S1-A1-0815-N1-Pro1-D-114-COL5", "S1-A1-chromatography-8-7"),
        models.Arc("S1-A1-0815-T1-Pro1-D-115-COL5", "S1-A1-chromatography-8-8"),
        models.Arc("S1-A1-0815-N1-Pro1-E-114-COL5", "S1-A1-chromatography-8-9"),
        models.Arc("S1-A1-0815-T1-Pro1-E-115-COL5", "S1-A1-chromatography-8-10"),
        models.Arc("S1-A1-0815-N1-Pro1-F-114-COL5", "S1-A1-chromatography-8-11"),
        models.Arc("S1-A1-0815-T1-Pro1-F-115-COL5", "S1-A1-chromatography-8-12"),
        models.Arc("S1-A1-chromatography-8-1", "S1-A1-poolA-10"),
        models.Arc("S1-A1-chromatography-8-2", "S1-A1-poolA-10"),
        models.Arc("S1-A1-chromatography-8-3", "S1-A1-mass spectrometry-9-3"),
        models.Arc("S1-A1-chromatography-8-4", "S1-A1-mass spectrometry-9-4"),
        models.Arc("S1-A1-chromatography-8-5", "S1-A1-poolC-10"),
        models.Arc("S1-A1-chromatography-8-6", "S1-A1-poolC-10"),
        models.Arc("S1-A1-chromatography-8-7", "S1-A1-mass spectrometry-9-7"),
        models.Arc("S1-A1-chromatography-8-8", "S1-A1-mass spectrometry-9-8"),
        models.Arc("S1-A1-chromatography-8-9", "S1-A1-poolE-10"),
        models.Arc("S1-A1-chromatography-8-10", "S1-A1-poolE-10"),
        models.Arc("S1-A1-chromatography-8-11", "S1-A1-poolF-10"),
        models.Arc("S1-A1-chromatography-8-12", "S1-A1-poolF-10"),
        models.Arc("S1-A1-poolA-10", "S1-A1-poolA.raw-COL11"),
        models.Arc("S1-A1-mass spectrometry-9-3", "S1-A1-poolB.raw-COL11"),
        models.Arc("S1-A1-mass spectrometry-9-4", "S1-A1-poolB.raw-COL11"),
        models.Arc("S1-A1-poolC-10", "S1-A1-Empty Raw Spectral Data File-11-5"),
        models.Arc("S1-A1-mass spectrometry-9-7", "S1-A1-Empty Raw Spectral Data File-11-7"),
        models.Arc("S1-A1-mass spectrometry-9-8", "S1-A1-Empty Raw Spectral Data File-11-8"),
        models.Arc("S1-A1-poolE-10", "S1-A1-poolE.raw-COL11"),
        models.Arc("S1-A1-poolF-10", "S1-A1-Empty Raw Spectral Data File-11-11"),
        models.Arc("S1-A1-poolA.raw-COL11", "S1-A1-data transformation-12-1"),
        models.Arc("S1-A1-poolB.raw-COL11", "S1-A1-data transformation-12-3"),
        models.Arc("S1-A1-Empty Raw Spectral Data File-11-5", "S1-A1-data transformation-12-5"),
        models.Arc("S1-A1-Empty Raw Spectral Data File-11-7", "S1-A1-data transformation-12-7"),
        models.Arc("S1-A1-Empty Raw Spectral Data File-11-8", "S1-A1-data transformation-12-8"),
        models.Arc("S1-A1-poolE.raw-COL11", "S1-A1-data transformation-12-9"),
        models.Arc("S1-A1-Empty Raw Spectral Data File-11-11", "S1-A1-data analysis-13"),
        models.Arc("S1-A1-data transformation-12-1", "S1-A1-results.csv-COL14"),
        models.Arc("S1-A1-data transformation-12-3", "S1-A1-results.csv-COL14"),
        models.Arc("S1-A1-data transformation-12-5", "S1-A1-results.csv-COL14"),
        models.Arc("S1-A1-data transformation-12-7", "S1-A1-results.csv-COL14"),
        models.Arc("S1-A1-data transformation-12-8", "S1-A1-results.csv-COL14"),
        models.Arc("S1-A1-data transformation-12-9", "S1-A1-Empty Derived Data File-14-9"),
        models.Arc("S1-A1-data analysis-13", "S1-A1-results.csv-COL14"),
    )
    # Order-insensitive comparison: arc ordering is an implementation detail here.
    assert sorted(expected) == sorted(assay.arcs)
def test_assay_reader_gelelect(gelelect_investigation_file, gelelect_assay_file):
    """Use ``AssayReader`` to read in small assay file.

    Exercises gel-electrophoresis specific columns (Scan Name, Gel
    Electrophoresis Assay Name, First/Second Dimension with ontology refs).
    """
    # The whole load + validate sequence is expected to produce 5 warnings in total.
    with pytest.warns(IsaWarning) as record:
        # Load investigation
        investigation = InvestigationReader.from_stream(gelelect_investigation_file).read()
        InvestigationValidator(investigation).validate()
        # Create new row reader and check read headers
        reader = AssayReader.from_stream("S1", "A1", gelelect_assay_file)
        assert 22 == len(reader.header)
        # Read assay
        assay = reader.read()
        AssayValidator(
            investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
        ).validate()
    # Check warnings
    assert 5 == len(record)
    # Check results
    assert os.path.normpath(str(assay.file)).endswith(
        os.path.normpath(
            "data/test_gelelect/a_study01_protein_expression_profiling_gel_electrophoresis.txt"
        )
    )
    assert 22 == len(assay.header)
    assert 10 == len(assay.materials)
    assert 11 == len(assay.processes)
    assert 20 == len(assay.arcs)
    # Positional arguments follow the models.Material / models.Process
    # constructors -- TODO confirm exact field order against the models module.
    expected = models.Material(
        "Image File",
        "S1-A1-Image01.jpeg-COL19",
        "Image01.jpeg",
        None,
        (),
        (),
        (),
        None,
        [table_headers.IMAGE_FILE],
    )
    assert expected == assay.materials["S1-A1-Image01.jpeg-COL19"]
    expected = models.Process(
        "data collection",
        "S1-A1-Scan02-18",
        "Scan02",
        "Scan Name",
        None,
        None,
        (),
        (),
        None,
        None,
        None,
        [table_headers.PROTOCOL_REF, table_headers.SCAN_NAME],
    )
    assert expected == assay.processes["S1-A1-Scan02-18"]
    # Shared header layout for the two electrophoresis processes below.
    header_electrophoresis = [
        table_headers.PROTOCOL_REF,
        table_headers.GEL_ELECTROPHORESIS_ASSAY_NAME,
        table_headers.FIRST_DIMENSION,
        table_headers.TERM_SOURCE_REF,
        table_headers.TERM_ACCESSION_NUMBER,
        table_headers.SECOND_DIMENSION,
        table_headers.TERM_SOURCE_REF,
        table_headers.TERM_ACCESSION_NUMBER,
    ]
    # Named assay with empty-string ontology refs for both dimensions:
    expected = models.Process(
        "electrophoresis",
        "S1-A1-Assay01-10",
        "Assay01",
        "Gel Electrophoresis Assay Name",
        None,
        None,
        (),
        (),
        None,
        models.OntologyTermRef("", "", ""),
        models.OntologyTermRef("", "", ""),
        header_electrophoresis,
    )
    assert expected == assay.processes["S1-A1-Assay01-10"]
    # Unnamed assay (empty name) with plain-text dimension terms:
    expected = models.Process(
        "electrophoresis",
        "S1-A1-electrophoresis-9-2",
        "",
        "Gel Electrophoresis Assay Name",
        None,
        None,
        (),
        (),
        None,
        models.OntologyTermRef("AssayX", None, None),
        models.OntologyTermRef("AssayY", None, None),
        header_electrophoresis,
    )
    assert expected == assay.processes["S1-A1-electrophoresis-9-2"]
def test_assay_reader_minimal_assay_iostring(minimal_investigation_file, minimal_assay_file):
    """Read the minimal assay via ``io.StringIO`` with an explicit filename."""
    # Investigation parsing itself is covered elsewhere; just load it here.
    investigation_buffer = io.StringIO(minimal_investigation_file.read())
    investigation = InvestigationReader.from_stream(investigation_buffer).read()
    with pytest.warns(IsaWarning) as record:
        InvestigationValidator(investigation).validate()
    assert len(record) == 2  # exactly two validation warnings expected
    # Feed the assay through a StringIO, telling the reader its original path.
    assay_buffer = io.StringIO(minimal_assay_file.read())
    reader = AssayReader.from_stream("S1", "A1", assay_buffer, filename="data/i_minimal/a_minimal.txt")
    assert len(reader.header) == 5
    # Read and validate assay
    assay = reader.read()
    AssayValidator(
        investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
    ).validate()
    # The reader should report the filename that was passed in explicitly.
    assert os.path.normpath(str(assay.file)).endswith(
        os.path.normpath("data/i_minimal/a_minimal.txt")
    )
    assert len(assay.header) == 5
    assert len(assay.materials) == 3
    assert len(assay.processes) == 1
    assert len(assay.arcs) == 3
def test_assay_reader_minimal_assay_iostring2(minimal_investigation_file, minimal_assay_file):
    """Read the minimal assay via ``io.StringIO`` without any filename."""
    # Investigation parsing itself is covered elsewhere; just load it here.
    investigation_buffer = io.StringIO(minimal_investigation_file.read())
    investigation = InvestigationReader.from_stream(investigation_buffer).read()
    with pytest.warns(IsaWarning) as record:
        InvestigationValidator(investigation).validate()
    assert len(record) == 2  # exactly two validation warnings expected
    # Create new assay reader and read from StringIO with no filename indicated
    assay_buffer = io.StringIO(minimal_assay_file.read())
    reader = AssayReader.from_stream("S1", "A1", assay_buffer)
    assert len(reader.header) == 5
    # Read and validate assay
    assay = reader.read()
    AssayValidator(
        investigation, investigation.studies[0], investigation.studies[0].assays[0], assay
    ).validate()
    # Without a filename hint, the reader falls back to the "<no file>" marker.
    assert str(assay.file) == os.path.normpath("<no file>")
    assert len(assay.header) == 5
    assert len(assay.materials) == 3
    assert len(assay.processes) == 1
    assert len(assay.arcs) == 3
|
#!/bin/bash
# Build each (or each selected) subdirectory as a docker image named
# local/<dir>, bypassing the build cache.

# Pull DOCKER_OPTS from config file when not already set in the environment.
if [ -z "$DOCKER_OPTS" ]; then
    # Guard against a missing config file instead of letting `source` error out.
    [ -f /etc/default/docker ] && source /etc/default/docker
fi

# Allow user to specify images to build; default to every subdirectory.
images=$*
if [ -z "$images" ]; then
    for i in */; do images+="$i "; done
fi

for i in $images; do
    img=${i%/}  # strip the trailing slash to get the image name
    echo "Building image local/$img"
    # DOCKER_OPTS is intentionally unquoted: it may hold multiple options.
    # Abort on the first failed build, propagating docker's exit code.
    docker ${DOCKER_OPTS} build --no-cache -t "local/$img" "$img" || exit $?
done
|
#!/bin/bash
# Run the go-cloudlog tests with coverage and render an HTML report.

# Throwaway directory for the intermediate coverage profiles.
GO_CLOUDLOG_COVERAGE_TMP=$(mktemp -d)

# Remove the temp dir on any exit (including Ctrl-C), not just the happy path.
trap 'rm -rf "${GO_CLOUDLOG_COVERAGE_TMP}"' EXIT

go test -v -coverprofile="${GO_CLOUDLOG_COVERAGE_TMP}/go_cloudlog.coverage" ./

# A merged profile must start with exactly one "mode:" line.
echo "mode: set" > "${GO_CLOUDLOG_COVERAGE_TMP}/combined.coverage"
# grep -E replaces the deprecated `egrep` alias.
grep -E -v '^mode:' "${GO_CLOUDLOG_COVERAGE_TMP}/go_cloudlog.coverage" >> "${GO_CLOUDLOG_COVERAGE_TMP}/combined.coverage"

go tool cover -html="${GO_CLOUDLOG_COVERAGE_TMP}/combined.coverage" -o coverage.html
|
package appcenter
import (
"context"
"github.com/elko-dev/spawn/appcenter/accounts"
"github.com/elko-dev/spawn/appcenter/apps"
"github.com/elko-dev/spawn/appcenter/builds"
"github.com/elko-dev/spawn/appcenter/organization"
"github.com/elko-dev/spawn/constants"
log "github.com/sirupsen/logrus"
)
const (
	// Names of the build environment variables injected into AppCenter builds.
	androidKeyStoreKey = "ANDROID_KEYSTORE_KEY"
	authSecretName     = "AUTH_CONFIG"
)

// Platform struct to create AppCenter
type Platform struct {
	orgClient           organization.Client // creates AppCenter organizations
	appClient           apps.Client         // creates AppCenter apps
	buildClient         builds.Client       // configures repos/builds and triggers builds
	accountsClient      accounts.Client     // manages distribution groups and members
	organizationName    string
	projectName         string
	distributionMembers []string // emails added to the default distribution group
	authSecret          string   // value exported as the AUTH_CONFIG build variable
	externalUserID      string   // forwarded to repo configuration for GitLab only
}
// Create AppCenter config
func (platform Platform) Create(repoURL string, repoID string, latestGitConfig string, gitType string) error {
// create organization
ctx := context.Background()
log.WithFields(log.Fields{
"organizationName": platform.organizationName,
"projectName": platform.projectName,
"repoURL": repoURL,
"latestGitConfig": latestGitConfig,
}).Debug("Creating appcenter organization")
_, err := platform.orgClient.CreateOrganization(ctx, &organization.CreateOrganizationArgs{
DisplayName: &platform.organizationName,
Name: &platform.organizationName,
})
if err != nil {
return err
}
distributionResponse, err := platform.accountsClient.CreateDistributionGroup(ctx, &accounts.DistributionGroupArg{
DisplayName: platform.organizationName,
Name: platform.organizationName,
}, &platform.organizationName)
if err != nil {
return err
}
// TODO: create a team and add app to team
// create app
androidName, err := createAndroidApp(ctx, &platform, &repoURL, &latestGitConfig, &distributionResponse.ID, repoID, gitType)
if err != nil {
return err
}
iosName, err := createIOSApp(ctx, &platform, &repoURL, &latestGitConfig, &distributionResponse.ID, repoID, gitType)
if err != nil {
return err
}
apps := make([]accounts.Apps, 0)
apps = append(apps, accounts.Apps{Name: androidName})
apps = append(apps, accounts.Apps{Name: iosName})
err = platform.accountsClient.CreateAppsDistributionGroup(ctx, &accounts.AppsForDistributionArg{
Apps: apps,
}, &platform.organizationName, &platform.organizationName)
if err != nil {
return err
}
return platform.accountsClient.AddMemberToDistribution(ctx, &accounts.AddMemberArgs{
UserEmails: platform.distributionMembers,
}, &platform.organizationName, &platform.organizationName)
}
// CreateApp for app center
func (platform Platform) CreateApp(ctx context.Context,
description *string,
os *string,
platformType *string,
releaseType *string,
repoURL *string,
latestGitConfig *string,
distributionID *string,
environmentVariables []builds.EnvironmentVariables,
repoID string,
gitType string) (string, error) {
println("Creating Firebase Project and Apps")
projectName := normalizeProjectName(platform.projectName, *os)
_, err := platform.appClient.CreateApp(ctx, &apps.CreateAppArgs{
DisplayName: &projectName,
Name: &projectName,
Description: description,
OS: os,
Platform: platformType,
ReleaseType: releaseType,
}, platform.organizationName)
logContext := log.WithFields(log.Fields{
"organizationName": platform.organizationName,
"projectName": platform.projectName,
"repoURL": *repoURL,
"latestGitConfig": *latestGitConfig,
"description": *description,
"os": *os,
"platformType": *platformType,
})
if err != nil {
logContext.Info("Error creating appcenter app")
return "", err
}
repoConfig := builds.RepoConfigArgs{
RepoURL: *repoURL,
RepoID: repoID,
}
if gitType == constants.Gitlab {
repoConfig.ExternalUserID = platform.externalUserID
}
_, err = platform.buildClient.ConfigureRepo(ctx, &repoConfig, platform.organizationName, projectName)
if err != nil {
logContext.Info("Error configuring appcenter app")
return "", err
}
args := builds.CreateConfigArgs(distributionID, environmentVariables, &builds.Keystore{
KeyAlias: "app",
KeyPassword: "<PASSWORD>",
KeystoreFilename: "my.keystore",
KeystorePassword: "<PASSWORD>",
})
_, err = platform.buildClient.ConfigureBuild(ctx,
args,
platform.organizationName,
projectName)
if err != nil {
logContext.Info("Error creating appcenter build")
return "", err
}
_, err = platform.buildClient.Build(ctx, &builds.BuildArgs{
SourceVersion: *latestGitConfig,
Debug: true,
}, platform.organizationName, projectName)
return projectName, err
}
// createAndroidApp creates the Android flavor of the project's app, exposing
// the (placeholder) keystore secret and the auth config as build environment
// variables. Returns the created app's name.
func createAndroidApp(ctx context.Context,
	platform *Platform,
	repoURL *string,
	latestGitConfig *string,
	distributionID *string,
	repoID string,
	gitType string) (string, error) {
	description := "Mobile application"
	os := "Android"
	platformType := "React-Native"
	releaseType := "Production" // fixed identifier typo (was "releastType")
	//TODO: implement me
	encryptToken := "<PASSWORD>"
	environmentVariables := []builds.EnvironmentVariables{
		{
			Name:  androidKeyStoreKey,
			Value: encryptToken,
		},
		{
			Name:  authSecretName,
			Value: platform.authSecret,
		},
	}
	return platform.CreateApp(ctx, &description, &os, &platformType, &releaseType, repoURL, latestGitConfig, distributionID, environmentVariables, repoID, gitType)
}
// createIOSApp creates the iOS flavor of the project's app with the auth
// config exposed as a build environment variable. Returns the app's name.
func createIOSApp(ctx context.Context,
	platform *Platform,
	repoURL *string,
	latestGitConfig *string,
	distributionID *string,
	repoID string,
	gitType string) (string, error) {
	description := "Mobile application"
	os := "iOS"
	platformType := "React-Native"
	releaseType := "Production" // fixed identifier typo (was "releastType")
	environmentVariables := []builds.EnvironmentVariables{
		{
			Name:  authSecretName,
			Value: platform.authSecret,
		},
	}
	return platform.CreateApp(ctx, &description, &os, &platformType, &releaseType, repoURL, latestGitConfig, distributionID, environmentVariables, repoID, gitType)
}
// normalizeProjectName derives the per-OS AppCenter app name ("<project>-<os>").
func normalizeProjectName(projectName string, os string) string {
	name := projectName + "-" + os
	return name
}
// NewPlatform init: wires the AppCenter API clients and project configuration
// into a ready-to-use Platform value.
func NewPlatform(orgClient organization.Client,
	appClient apps.Client,
	buildClient builds.Client,
	accountsClient accounts.Client,
	organizationName string,
	projectName string,
	members []string,
	authSecret string,
	externalUserID string) Platform {
	// Named fields instead of positional initialization: resilient to field
	// reordering and self-documenting.
	return Platform{
		orgClient:           orgClient,
		appClient:           appClient,
		buildClient:         buildClient,
		accountsClient:      accountsClient,
		organizationName:    organizationName,
		projectName:         projectName,
		distributionMembers: members,
		authSecret:          authSecret,
		externalUserID:      externalUserID,
	}
}
|
<reponame>raoulmillais/js-ipfs<filename>src/core/ipns/routing/utils.js
'use strict'

const multibase = require('multibase')

/**
 * Encode a buffer in base32, dropping the single-character multibase codec
 * prefix that `multibase.encode` prepends.
 */
module.exports.encodeBase32 = function encodeBase32 (buffer) {
  const encoded = multibase.encode('base32', buffer)
  return encoded.slice(1) // slice off multibase codec
}
|
#ifndef UTIL_H
#define UTIL_H

#include <stddef.h> /* size_t, required by util_strlen below */

/* Maximum string length handled by the string utilities. */
#define UTIL_STR_MAX_LEN 65536

/* Utility Functions: copy a scalar/string value into heap-allocated storage.
 * Callers own the returned pointer and must free() it. */
int* util_int_to_heap(int);
float* util_float_to_heap(float);
double* util_double_to_heap(double);
unsigned* util_unsigned_to_heap(unsigned);
long* util_long_to_heap(long);
char* util_char_to_heap(char);
char* util_str_to_heap(char*);
size_t util_strlen(char*);

/* Node Class Structure (opaque; defined in the implementation file) */
struct Node;
struct Node* node_new(void*);
void node_destroy(struct Node*);

/* Node Setters and Getters */
void node_set_value(struct Node*, void*);
void node_set_next(struct Node*, struct Node*);
void* node_get_value(struct Node*);
struct Node* node_get_next(struct Node*);

/* LinkedList Class Structure (opaque) */
struct LinkedList;
struct LinkedList* linkedlist_new(void);
void linkedlist_destroy(struct LinkedList*);

/* LinkedList Object Functions */
void linkedlist_append(struct LinkedList*, void*);
void* linkedlist_value_at(struct LinkedList*, int);
int linkedlist_length(struct LinkedList*);

/* LinkedList Setters and Getters */
void linkedlist_set_head(struct LinkedList*, struct Node*);
struct Node* linkedlist_get_head(struct LinkedList*);

#endif
|
// Static photo metadata keyed by photo id; each entry holds the display
// title, the location it was taken, and the relative image path.
const gallery = {
  photo1: {
    title: 'My first photo',
    location: 'Tibet',
    img: 'images/photo1.jpg'
  },
  photo2: {
    title: 'My second photo',
    location: 'Nepal',
    img: 'images/photo2.jpg'
  },
  photo3: {
    title: 'My third photo',
    location: 'Thailand',
    img: 'images/photo3.jpg'
  },
  photo4: {
    title: 'My fourth photo',
    location: 'Japan',
    img: 'images/photo4.jpg'
  }
};
/**
 *
 * TableRowEmpty
 *
 * Full-width placeholder table row shown when a table has no data: one
 * fixed-height centered cell using the standard empty-state typography and
 * a thin top border matching the table grid.
 */

import styled from 'styled-components';

import colors from '../../assets/styles/colors';
import sizes from '../../assets/styles/sizes';

const TableRowEmpty = styled.tr`
  width: 100%;
  height: 108px;
  background: #ffffff;
  td {
    height: 106px;
    line-height: 106px;
    font-size: 1.3rem;
    font-weight: ${sizes.fontWeight.regular};
    color: ${colors.blueTxt};
    text-align: center;
    border-collapse: collapse;
    /* stylelint-disable */
    border-top: 1px solid #f1f1f2 !important;
    /* stylelint-enable */
  }
`;

export default TableRowEmpty;
|
def classify_boolean(x, conditions):
    """Evaluate each expression string in ``conditions`` against ``x``.

    Returns a list with one ``True``/``False`` per condition, in order.
    SECURITY: the strings are run through ``eval`` -- only pass trusted input.
    NOTE: ``eval`` must stay in this function's own frame (not a comprehension)
    so that the expressions can resolve the local name ``x``.
    """
    results = []
    for expression in conditions:
        results.append(True if eval(expression) else False)
    return results
# Output: [False, True, True]
<gh_stars>1-10
import { Component } from '@angular/core';
import { ProductService} from './product.service';
import { Product} from './product.model';
import { Router, ActivatedRoute } from '@angular/router';
import { LoginService } from '../auth/login.service';
import { OrderService } from '../order/order.service';
import { Order } from '../order/order.model';
import { ProductAmount} from './productamount.model';
@Component({
  selector: 'app-singproduct',
  templateUrl: './singleProduct.component.html',
  styleUrls: ['./product.component.css']
})
export class SingleProductComponent {
  // Product shown on this detail page, loaded by route id in the constructor.
  product: Product;
  // Product + quantity pair handed to the order service.
  pAmount: ProductAmount;
  // The user's current order, fetched in ngOnInit.
  order: Order;
  // Quantity selected in the template; reset to 0 on init.
  qty: number;

  constructor(private router: Router, activatedRoute: ActivatedRoute, public service: ProductService, public loginService: LoginService, private orderService: OrderService) {
    // Resolve the product id from the route snapshot and load the product eagerly.
    const id = activatedRoute.snapshot.params.id;
    service.getProductById(id).subscribe((product => this.product = product), (error) => console.error(error));
  }

  // NOTE(review): declared without `implements OnInit`; Angular still invokes
  // it by name, but declaring the interface would make the contract explicit.
  ngOnInit() {
    this.qty = 0;
    this.orderService.getCurrentOrder().subscribe(
      order => this.order = order,
      error => console.log(error)
    );
  }

  // Delete the displayed product, then navigate back to the product list.
  deleteProduct() {
    this.service.deleteProduct(this.product).subscribe((_) => this.router.navigate(['/product']), (error) => console.error(error));
  }

  // Add `qt` units of `productAux` to the current order, then show the order page.
  addProductToOrder(productAux: Product, qt: number){
    let productAmount = { product: productAux, amount: qt }
    this.pAmount = productAmount;
    this.orderService.addProduct(this.pAmount, this.order.id ).subscribe(
      order => { this.order = order;
        this.router.navigate(['/order']);
      },error => console.log(error));
  }
}
|
define([
], function () {
function AllowClear () { }
AllowClear.prototype.bind = function (decorated, container, $container) {
var self = this;
decorated.call(this, container, $container);
this.$selection.on('mousedown', '.select2-selection__clear',
function (evt) {
// Ignore the event if it is disabled
if (self.options.get('disabled')) {
return;
}
evt.stopPropagation();
self.$element.val(self.placeholder.id).trigger('change');
self.trigger('toggle');
});
};
AllowClear.prototype.update = function (decorated, data) {
decorated.call(this, data);
if (this.$selection.find('.select2-selection__placeholder').length > 0 ||
data.length === 0) {
return;
}
var $remove = $(
'<span class="select2-selection__clear">' +
'×' +
'</span>'
);
this.$selection.find('.select2-selection__rendered').append($remove);
};
return AllowClear;
});
|
#! /bin/bash
# Terminate every process whose full command line mentions "agent".
# pkill -f matches the whole command line (like the old `ps -ef | grep agent`)
# but cannot match itself and does nothing quietly when no process is found,
# replacing the fragile ps | grep | grep -v grep | awk | xargs pipeline.
pkill -f agent
|
<filename>Problem Solving/Data Structures/Stacks/Equal Stacks/full-solution.java
import java.io.*;
import java.math.*;
import java.security.*;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.regex.*;
import java.util.stream.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
class Result {
// Solution
public static int equalStacks(List<Integer> h1, List<Integer> h2, List<Integer> h3) {
// Getting sum of elements in each array
int sum1 = 0, sum2 = 0, sum3 = 0;
sum1 = sumOfElements(h1);
sum2 = sumOfElements(h2);
sum3 = sumOfElements(h3);
// Comparing sums and decreasing stacks
boolean flag = true;
while (flag) {
if (sum1 == sum2 && sum1 == sum3) {
flag = false;
return sum1;
}
else if (sum1 >= sum2 && sum1 >= sum3) {
int value = h1.remove(0);
sum1 -= value;
continue;
}
else if (sum2 >= sum1 && sum2 >= sum3) {
int value = h2.remove(0);
sum2 -= value;
continue;
}
else if (sum3 >= sum1 && sum3 >= sum2) {
int value = h3.remove(0);
sum3 -= value;
continue;
}
else return 0;
}
return 0;
}
// Sum array
public static int sumOfElements(List<Integer> arr) {
int sum = 0;
for (int i = 0; i < arr.size(); i++) {
sum += arr.get(i);
}
return sum;
}
}
public class Solution {
    /**
     * HackerRank driver: reads the three stack sizes (parsed but otherwise
     * unused) and the three stacks from stdin, then writes the result of
     * {@link Result#equalStacks} to the file named by OUTPUT_PATH.
     */
    public static void main(String[] args) throws IOException {
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(System.getenv("OUTPUT_PATH")));

        // First line: "n1 n2 n3" (the stack heights); trailing whitespace stripped.
        String[] firstMultipleInput = bufferedReader.readLine().replaceAll("\\s+$", "").split(" ");

        int n1 = Integer.parseInt(firstMultipleInput[0]);

        int n2 = Integer.parseInt(firstMultipleInput[1]);

        int n3 = Integer.parseInt(firstMultipleInput[2]);

        // Next three lines: the stacks themselves, top element first.
        List<Integer> h1 = Stream.of(bufferedReader.readLine().replaceAll("\\s+$", "").split(" "))
            .map(Integer::parseInt)
            .collect(toList());

        List<Integer> h2 = Stream.of(bufferedReader.readLine().replaceAll("\\s+$", "").split(" "))
            .map(Integer::parseInt)
            .collect(toList());

        List<Integer> h3 = Stream.of(bufferedReader.readLine().replaceAll("\\s+$", "").split(" "))
            .map(Integer::parseInt)
            .collect(toList());

        int result = Result.equalStacks(h1, h2, h3);

        bufferedWriter.write(String.valueOf(result));
        bufferedWriter.newLine();

        bufferedReader.close();
        bufferedWriter.close();
    }
}
|
<filename>src/classes/server.js
const exec = require("../utils/exec")
const fs = require("fs")
const EventEmitter = require('events');
const { stderr, exit } = require("process");
// Matches the server's "Done (…s)! For help, type \"help\"" startup-complete line.
const doneRegex = /Done \([0-9]+.[0-9]+s\)! For help, type "help"/
// Captures player count, max players, and the comma-separated name list from `/list`.
// NOTE(review): [A-z] also matches the ASCII characters between 'Z' and 'a'
// (e.g. '_', '['); probably intended as [A-Za-z] -- confirm before tightening.
const listRegex = /There are ([0-9]+) of a max of ([0-9]+) players online: ?([A-z0-9,_ ]+)/
// Captures "<player> joined/left the game" events (name, action verb).
const joinLeaveRegex = /([A-z0-9_]+) ([A-z]+) the game/
class Server {
    /**
     * Wraps a Minecraft server jar as a managed child process.
     *
     * Events emitted on `this.event`:
     * @event "event" (event) -> server started event
     * @event "complete" (event) -> server completed event
     * @event "action" (player, action) -> player has done action
     * @event "crash" () -> server has crashed
     *
     * @param {string} directory working directory containing server.jar
     * @param {string} [java] java executable override (default "java")
     * @param {string} [jvm] space-separated JVM args override
     */
    constructor (directory, java, jvm) {
        this.directory = directory;
        this.jarFile = "server.jar"
        this.jvmArgs = [ "-Xmx6144M", "-Xms2048M" ]
        this.javaExe = "java"
        // Optional overrides for JVM arguments and java executable.
        if (jvm !== undefined) this.jvmArgs = jvm.split(" ")
        if (java !== undefined) this.javaExe = java
        this.event = new EventEmitter()
        // console event listeners
        this.event.on("event", (event) => this.log(`${event}`))
        this.event.on("complete", (event) => this.log(`${event} complete`))
        this.event.on("action", (player, action) => this.log(`${player} has ${action}`))
        this.event.on("crash", () => this.log("Crashed!"))
    }

    /** Write a tagged line to stderr (stdout is the server console stream). */
    log(str) { stderr.write(`[Server] ${str}\n`) }

    /** Translate one line of server console output into emitted events. */
    #parseLine(line) {
        let matches
        if (matches = line.match(doneRegex)) { // Server Started
            this.event.emit("complete", "start")
        } else if (matches = line.match(joinLeaveRegex)) { // Player has joined/left
            this.event.emit("action", matches[1], matches[2])
        }
    }

    /**
     * Spawn the server process. Resolves once the "Done" line appears on
     * stdout; emits "crash" if the process exits non-zero.
     */
    start() {
        this.event.emit("event", "start") // Server Starting
        return new Promise((resolve, reject) => {
            this.serverProcess = exec(this.javaExe, this.jvmArgs.concat(["-jar", this.jarFile, "nogui"]), this.directory);
            this.serverProcess.stdout.on("data", (data) => {
                if (data.toString().match(doneRegex)) resolve() // once server started resolves
                this.#parseLine(data.toString())
            })
            this.serverProcess.once("close", (code) => {
                if (code != 0) {
                    this.log(`Exited with code ${code}`)
                    this.event.emit("crash")
                } else {
                    this.event.emit("complete", "stop")
                }
            })
        })
    }

    /**
     * Ask the server to shut down via the `stop` console command.
     * BUGFIX: the returned promise previously never settled (resolve was
     * never called); it now resolves with the console acknowledgement.
     */
    stop() {
        this.event.emit("event", "stop")
        return this.execute("stop")
    }

    /** Run `/list` and parse player count / max / names from the response. */
    list() {
        return new Promise((resolve, reject) => {
            this.execute("list").then(str => {
                let matches = str.match(listRegex)
                // broken when there are players for some reason
                resolve( {playerCount: parseInt(matches[1]), playerMax: parseInt(matches[2]), playerList: matches[3].split(", ")} )
            })
        })
    }

    /**
     * Write a console command and resolve with the next stdout chunk.
     * NOTE(review): the next chunk is not guaranteed to be this command's
     * response when the server logs concurrently -- confirm before relying on it.
     */
    execute(command) {
        return new Promise((resolve, reject) => {
            this.serverProcess.stdin.write(command + "\n")
            this.serverProcess.stdout.once("data", (data) => {
                let returnStr = data.toString()
                if (returnStr.includes("Unknown or incomplete command")) {
                    // there was an error executing the given command
                }
                resolve(returnStr)
            })
        })
    }
}

module.exports = Server
package function
import (
"fmt"
"math/rand"
"time"
)
// RangeInt returns a pseudo-random integer in the half-open interval
// [min, max). It panics if min > max; when min == max it returns min.
func RangeInt(min int, max int) int {
	if min > max {
		// Fixed message: it previously said "rangeFloat" and "larger then".
		panic(fmt.Errorf("RangeInt: the min `%d` is larger than max `%d`", min, max))
	}
	// Degenerate range: only one possible value (also avoids Intn(0) panic).
	if min == max {
		return min
	}
	// Seed from the current time so repeated calls differ across runs.
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	// Intn(max-min) yields [0, max-min); shifting by min gives a uniform
	// value in [min, max). The original `Intn(max)` + "add min when small"
	// logic could return values >= max (e.g. min=6, max=10 could yield 11)
	// and was not uniformly distributed.
	return min + r.Intn(max-min)
}
|
<reponame>Adeon18/Solver_Algorithms<gh_stars>1-10
import pygame
class Sudoku:
    """Recursive-backtracking sudoku solver; the puzzle is read from a text file."""

    def __init__(self, path, vizual=None):
        """
        path: puzzle file with 9 rows of 9 whitespace-separated numbers
              (0 marks an empty cell)
        vizual: optional visualizer exposing draw(), events() and TIMESTEP
        """
        self.grid = self.read_grid(path)
        # Always define the attribute: the original only set it when a
        # visualizer was passed, so solve() raised AttributeError for
        # instances constructed without one.
        self.vizual = vizual

    def read_grid(self, path):
        """
        takes the path to conditions
        and converts them to grid --
        -- list of lines where each line is a list of numbers in it
        saves all the numbers as integers
        return the grid (list of lists of int)
        """
        grid = []
        with open(path, 'r', encoding='utf-8') as raw_grid:
            for line in raw_grid:
                # split() already discards the newline; the previous
                # line[:-1] silently chopped the last digit whenever the
                # file had no trailing newline.
                grid.append([int(x) for x in line.split()])
        return grid

    def safe_to_place_in_row(self, row, col, number):
        """
        checks whether the number on the coordinates (col, row)
        is the only one like that in the row
        return bool
        """
        if number in self.grid[row]:
            return False
        return True

    def safe_to_place_in_box(self, row, col, number):
        """
        checks whether the number on the coordinates (col, row)
        is the only one like that in the box 3x3
        return bool
        """
        # Offsets of (row, col) inside its 3x3 box; subtracting them gives
        # the box's top-left corner.
        box_col = col % 3
        box_row = row % 3
        for row_index in range(row - box_row, row - box_row + 3):
            for col_index in range(col - box_col, col - box_col + 3):
                if self.cell_is_number(row_index, col_index, number):
                    return False
        return True

    def safe_to_place_in_col(self, row, col, number):
        """
        checks whether the number on the coordinates (col, row)
        is the only one like that in the column
        return bool
        """
        for line_index in range(len(self.grid)):
            if self.cell_is_number(line_index, col, number):
                return False
        return True

    def cell_is_number(self, row, col, number):
        """
        checks whether the element on the coordinates (col, row)
        is equal to number
        return bool
        """
        if self.grid[row][col] == number:
            return True
        return False

    def safe_to_place(self, row, col, number):
        """
        checks whether the cell is empty and the number fits all
        the sudoku criteria (row, column and 3x3 box)
        return bool
        """
        if self.cell_is_number(row, col, 0) \
                and self.safe_to_place_in_box(row, col, number) \
                and self.safe_to_place_in_col(row, col, number) \
                and self.safe_to_place_in_row(row, col, number):
            return True
        return False

    def empty_cell(self):
        """
        returns the coordinates [row, col] of the first empty cell on the grid
        if there is no such cell, return False
        """
        for row_index in range(9):
            for col_index in range(9):
                if self.cell_is_number(row_index, col_index, 0):
                    return [row_index, col_index]
        return False

    def solve(self):
        """
        solves the sudoku in place with backtracking using recursion
        return True if there are no empty cells left
        return False if it is impossible to solve the sudoku
        """
        cell = self.empty_cell()
        if not cell:
            return True
        row = cell[0]
        col = cell[1]
        for number in range(1, 10):
            if self.safe_to_place(row, col, number):
                self.grid[row][col] = number
                # the place where the new number is placed
                # Visuals (only when a visualizer was supplied)
                if self.vizual:
                    self.vizual.draw()
                    self.vizual.events()
                    pygame.time.wait(self.vizual.TIMESTEP)
                if self.solve():
                    return True
                self.grid[row][col] = 0
                # the place when the assumption did not work and algorithm goes back
        return False

    def __str__(self):
        """Pretty 9x9 board with 3x3 separators; empty cells shown as '.'."""
        line_splitter = ' -------------------------\n'
        col_splitter = ' | '
        to_print = ''
        for row, line in enumerate(self.grid):
            if row % 3 == 0:
                to_print += line_splitter
            for col, number in enumerate(line):
                if col % 3 == 0:
                    to_print += col_splitter
                else:
                    to_print += ' '
                if number == 0:
                    number = '.'
                to_print += f'{number}'
            to_print += f'{col_splitter}\n'
        to_print += line_splitter
        return to_print
# Demo entry point: load the puzzle from condition1.txt (no visualizer),
# print the unsolved board, solve in place, then dump the raw grid.
if __name__ == "__main__":
    sudoku = Sudoku("condition1.txt")
    print(sudoku)
    sudoku.solve()
    print(sudoku.grid)
|
#!/usr/bin/env bash
# Generate a development patch for the OPA fork: clone the source repo,
# bump version/version.go to "<version>-dev", open a new "## Unreleased"
# section in CHANGELOG.md, then print the resulting diff.
set -e

OPA_DIR=/go/src/github.com/meta-quick/opa

# Print CLI usage.
usage() {
    echo "gen-dev-patch.sh --source-url=<git-url>"
    echo " --version=<mj.mn.pt>"
}

# Parse --source-url=<url> and --version=<version> arguments.
for i in "$@"; do
    case $i in
    --source-url=*)
        SOURCE_URL="${i#*=}"
        shift
        ;;
    --version=*)
        VERSION="${i#*=}"
        shift
        ;;
    *)
        usage
        exit 1
        ;;
    esac
done

# Both arguments are required.
if [ -z "$SOURCE_URL" ]; then
    usage
    exit 1
elif [ -z "$VERSION" ]; then
    usage
    exit 1
fi

# Quote expansions so URLs/paths containing special characters survive
# word splitting (they were unquoted before).
git clone "$SOURCE_URL" "$OPA_DIR"
cd "$OPA_DIR"

# Most recent tag with the leading "v" stripped.
LAST_VERSION=$(git describe --abbrev=0 --tags | cut -c 2-)

# Despite its name, this updates version/version.go (not the Makefile).
update_makefile() {
    # NOTE(review): "-i=''" makes GNU sed keep a backup with an "=" suffix;
    # it looks like a BSD/GNU portability hack gone wrong — confirm intent.
    sed -i='' -e "s/Version\s\+=\s\+\".\+\"$/Version = \"$VERSION-dev\"/" version/version.go
}

# Insert an "## Unreleased" section above the latest release's notes.
update_changelog() {
    cat >_CHANGELOG.md <<EOF
$(awk "1;/## $LAST_VERSION/{exit}" CHANGELOG.md | sed '$d')
## Unreleased
## $LAST_VERSION
$(sed "1,/## $LAST_VERSION/d" CHANGELOG.md)
EOF
    mv _CHANGELOG.md CHANGELOG.md
}

main() {
    update_makefile
    update_changelog
    git --no-pager diff --no-color
}

main
|
#!/bin/bash
###
# call with `update_default_python.sh <PYTHON_VERSION>`
#
# Rewrites the default PYTHON_VERSION in every build.sh and
# docker-compose.yml under the repository root.
###
NEW=$1
echo "Changing to python ${NEW}..."

# Update the PYTHON_VERSION default in each build.sh.
for fname in $(find $(dirname $0)/.. -name build.sh); do
    echo "Updating ${fname}"
    # Quote the filename so sed/rm do not break on unusual characters
    # (it was unquoted in the sed call before).
    sed -i.bak "s|PYTHON_VERSION=\${PYTHON_VERSION:-.*$|PYTHON_VERSION=\$\{PYTHON_VERSION:-${NEW}\}|" "$fname"
    rm "${fname}.bak"
done

# Docker doesn't like `.*` for version tags, so truncate from tag
TAG="${NEW}"
if [[ ${TAG: -1} == "*" ]]; then
    # Strips the trailing ".*" (two characters) when the version ends in "*".
    TAG=${TAG:0:${#TAG}-2}
fi

# Update the PYTHON_VERSION build arg and image tag in each docker-compose.yml.
for fname in $(find $(dirname $0)/.. -name docker-compose.yml); do
    echo "Updating ${fname}"
    sed -i.bak "s|PYTHON_VERSION:.*$|PYTHON_VERSION: ${NEW}|" "$fname"
    sed -i.bak "s|python-.*$|python-${TAG}|" "$fname"
    rm "${fname}.bak"
done
echo "Done."
|
import logging
from alpyne.client import alpyne_client
from alpyne.client.alpyne_client import AlpyneClient
from alpyne.data.spaces import Action
# Demo: run the exported "Traffic Light RL" AnyLogic model via alpyne,
# always taking the do_next_phase=0 action until the sim terminates.
if __name__ == "__main__":
    # Verbose client-side logging for this demo run.
    alpyne_client.LOG_LEVEL = logging.DEBUG

    # Connect to the exported model; blocking=True makes calls synchronous.
    client = AlpyneClient(r"Exported\Traffic Light RL\model.jar", blocking=True, verbose=True)

    # Inspect the model's RL interface (config/observation/action/outputs).
    print(client.configuration_template)
    print(client.observation_template)
    print(client.action_template)
    print(client.output_names)

    # Fill in the run configuration.
    cfg = client.configuration_template
    cfg.secs_per_phase = 300
    cfg.secs_between_actions = 300
    cfg.schedule_ns = "none_all_day"
    cfg.schedule_ew = "constant_heavy"
    cfg.engine_seed = 1 # reproducible runs
    cfg.engine_stop_time = 20 # minutes

    sim = client.create_reinforcement_learning(cfg)\
        .run()
    # Step until the episode ends, printing each observation.
    while not sim.is_terminal():
        print(sim.get_observation())
        sim.take_action(Action(do_next_phase=0))

    # Final model outputs of interest.
    ops = sim.get_outputs(["totalPassed", "meanTIS"])
    print(ops)
from typing import Dict, List
def filter_shapes_by_parts(nparts_dict: Dict[str, int], min_n_parts: int, max_n_parts: int) -> List[str]:
    """Return the shape names whose part count lies in [min_n_parts, max_n_parts].

    The order of the returned names follows the dict's iteration order.
    """
    return [
        shape_name
        for shape_name, n_parts in nparts_dict.items()
        if min_n_parts <= n_parts <= max_n_parts
    ]
# Run the diffusion test: $1 is the solver executable, verified against
# verify.py; generated artifacts are cleaned up afterwards.
if [ ! -f "$1" ]
then
    echo "File $1 not found"
    # Exit nonzero so callers can detect the failure (previously exited 0,
    # which reported success on a missing executable).
    exit 1
fi
"$1" ./Test_Diffusion.xml
python3 ./verify.py
rm -f *.log
rm -f *.vtk
# Delete generated .dat files, keeping the *_ref.dat reference data.
# NOTE(review): the exit status below reflects this cleanup pipeline, not
# the verify.py result — confirm that is intended.
ls *.dat | grep -v '_ref.dat' | xargs rm -r
if [ $? -eq 0 ]
then
    exit 0
else
    exit 1
fi
|
<reponame>BZahorodnii/mail-builder
import * as React from 'react';
import menuContentComponents from '../../collections/menuContentComponents';
import {
ControlPanelContentWrapper,
ControlPanelContentItem
} from '../../styles';
// Renders the control-panel list of menu content component titles.
const ContentItems: React.FC = () => {
  return (
    <ControlPanelContentWrapper>
      {
        menuContentComponents && menuContentComponents.length ? menuContentComponents.map((item, i) => {
          return (
            // Interpolate the index: the original template literal used the
            // literal text "-i", yielding duplicate keys for repeated titles.
            <ControlPanelContentItem key={`${item.title}-${i}`}>
              {item.title}
            </ControlPanelContentItem>
          )
        }) : null
      }
    </ControlPanelContentWrapper>
  );
};
export default ContentItems;
<reponame>Lostr9/webservice<gh_stars>0
package com.webservice.dao;
import com.webservice.domain.Contact;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
 * Hibernate-backed DAO for Contact entities. All methods use the current
 * session, so they expect to run inside an active transaction.
 */
@Repository
public class ContactDAOImpl implements ContactDAO {

	@Autowired
	private SessionFactory sessionFactory;

	/** Persists a new contact. */
	public void addContact(Contact contact) {
		sessionFactory.getCurrentSession().save(contact);
	}

	/** Returns all contacts. */
	@SuppressWarnings("unchecked")
	public List<Contact> listContact() {
		return sessionFactory.getCurrentSession()
				.createQuery("from Contact", Contact.class)
				.list();
	}

	/** Deletes the contact with the given id, if present. */
	public void removeContact(Integer id) {
		Contact contact = (Contact) sessionFactory.getCurrentSession().load(
				Contact.class, id);
		// NOTE(review): Hibernate load() returns a proxy and typically throws
		// ObjectNotFoundException rather than returning null, so this null
		// check may be dead code — confirm, or switch to get().
		if (null != contact) {
			sessionFactory.getCurrentSession().delete(contact);
		}
	}
}
|
#include <iostream>
#include <string>
#include <unordered_map>
// Simulated input class for testing
// Stand-in input source used for exercising the game loop in tests.
class Input {
public:
    // Report whether the given key was pressed. In this simulation only
    // the 'P' key ever registers as pressed.
    bool wasKeyPressed(char key) {
        return key == 'P';
    }
};
// Game state manager class
// Tracks a paused/resumed flag per named game state.
class StateManager {
private:
    // state name -> paused flag; a missing entry means "not paused".
    std::unordered_map<std::string, bool> statePaused;

public:
    StateManager() {}

    // Mark a game state as paused.
    void PauseState(const std::string& state) {
        statePaused[state] = true;
        std::cout << "State '" << state << "' paused." << std::endl;
    }

    // Mark a game state as resumed (not paused).
    void ResumeState(const std::string& state) {
        statePaused[state] = false;
        std::cout << "State '" << state << "' resumed." << std::endl;
    }

    // Query whether a state is paused. Uses find() so the lookup no longer
    // inserts a default entry the way operator[] did; unknown states still
    // report false, matching the original observable behavior. Now const.
    bool IsStatePaused(const std::string& state) const {
        auto it = statePaused.find(state);
        return it != statePaused.end() && it->second;
    }
};
// Demonstration driver: on the simulated 'P' key press, pause the "PAUSE"
// state, then immediately resume it; the loop exits after one iteration.
int main() {
    // Create instances of Input and StateManager
    Input input;
    StateManager stateManager;
    // Simulate game loop
    while (true) {
        // Check for 'P' key press to pause a state
        if (input.wasKeyPressed('P')) {
            stateManager.PauseState("PAUSE");
        }
        // Simulate other game logic and state changes
        // ...
        // Check if 'PAUSE' state is paused and resume if needed
        if (stateManager.IsStatePaused("PAUSE")) {
            // Resume the 'PAUSE' state
            stateManager.ResumeState("PAUSE");
        }
        // Simulate other game logic and state changes
        // ...
        // Break the loop after a few iterations for demonstration
        break;
    }
    return 0;
}
# Evaluate the 1024+0+512-shuffled-N-VB model on the WikiText-103 raw
# validation set (batch size 1, partial batches dropped), applying the
# replace_all_but_nouns_first_third_sixth augmentation and scoring with
# penultimate_sixth_eval; results go to the eval-outputs directory.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N-VB/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N-VB/512+512+512-N-IP-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function replace_all_but_nouns_first_third_sixth --eval_function penultimate_sixth_eval
<gh_stars>1-10
// Once the page is ready, wire the invoice-download button to a server call.
frappe.ready(function (){
	// Fixed: the event name must be the string 'click'; the bare identifier
	// `click` was undefined and raised a ReferenceError at bind time.
	$('.download-invoice').on('click', (e) => {
		frappe.call({
			// NOTE(review): frappe.call normally takes a dotted method path,
			// not an "/api/method/..." URL, and this path looks truncated —
			// confirm the intended endpoint.
			method: '/api/method/frappe.utils.'
		})
	})
})
|
<gh_stars>0
package dev.patika.quixotic95.model;
import javax.persistence.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * JPA entity representing a course: name, code, credit score, the enrolled
 * students and the teaching instructor. Equality and hashing are based on
 * courseCode only.
 */
@Entity
public class Course {

	@Id
	@GeneratedValue(strategy = GenerationType.IDENTITY)
	private int id;

	private String courseName;
	private String courseCode;
	private double creditScore;

	// Students enrolled in this course.
	@ManyToMany
	private List<Student> courseStudents = new ArrayList<>();

	// Instructor teaching this course.
	@ManyToOne
	private Instructor courseInstructor;

	/** No-arg constructor required by JPA. */
	public Course() {
	}

	public Course(String courseName, String courseCode, double creditScore) {
		this.courseName = courseName;
		this.courseCode = courseCode;
		this.creditScore = creditScore;
	}

	public String getCourseName() {
		return courseName;
	}

	public void setCourseName(String courseName) {
		this.courseName = courseName;
	}

	public String getCourseCode() {
		return courseCode;
	}

	public void setCourseCode(String courseCode) {
		this.courseCode = courseCode;
	}

	public double getCreditScore() {
		return creditScore;
	}

	public void setCreditScore(double creditScore) {
		this.creditScore = creditScore;
	}

	/** Courses are equal when their course codes are equal. */
	@Override
	public boolean equals(Object o) {
		if (this == o) return true;
		if (o == null || getClass() != o.getClass()) return false;
		Course course = (Course) o;
		return Objects.equals(courseCode, course.courseCode);
	}

	@Override
	public int hashCode() {
		return Objects.hash(courseCode);
	}

	public List<Student> getCourseStudents() {
		return courseStudents;
	}

	public void setCourseStudents(List<Student> courseStudents) {
		this.courseStudents = courseStudents;
	}

	public Instructor getCourseInstructor() {
		return courseInstructor;
	}

	public void setCourseInstructor(Instructor courseInstructor) {
		this.courseInstructor = courseInstructor;
	}

	public int getId() {
		return id;
	}

	@Override
	public String toString() {
		return "Course{" +
				"courseName='" + courseName + '\'' +
				", courseCode='" + courseCode + '\'' +
				", creditScore=" + creditScore +
				'}';
	}
}
|
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
* <p>
*/
package org.olat.core.util;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
import java.util.function.Predicate;
import javax.servlet.http.HttpSessionBindingEvent;
import javax.servlet.http.HttpSessionBindingListener;
import org.apache.logging.log4j.Logger;
import org.olat.core.CoreSpringFactory;
import org.olat.core.commons.persistence.DBFactory;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.control.Event;
import org.olat.core.id.Identity;
import org.olat.core.id.IdentityEnvironment;
import org.olat.core.id.OLATResourceable;
import org.olat.core.id.Roles;
import org.olat.core.id.context.BusinessControl;
import org.olat.core.id.context.ContextEntry;
import org.olat.core.id.context.HistoryPoint;
import org.olat.core.id.context.HistoryPointImpl;
import org.olat.core.logging.Tracing;
import org.olat.core.logging.activity.ThreadLocalUserActivityLoggerInstaller;
import org.olat.core.util.coordinate.CoordinatorManager;
import org.olat.core.util.event.EventBus;
import org.olat.core.util.event.GenericEventListener;
import org.olat.core.util.prefs.Preferences;
import org.olat.core.util.prefs.PreferencesFactory;
import org.olat.core.util.resource.OresHelper;
import org.olat.core.util.resource.WindowedResourceableList;
import org.olat.core.util.session.UserSessionManager;
import org.olat.course.assessment.model.TransientAssessmentMode;
/**
* Description: <BR/>the httpsession contains an instance of this class. the
* UserSession is either authenticated or not; and if it is, then it also
* contains things like the Identity, the locale etc. of the current user. <P/>
*
* @author <NAME>
*/
public class UserSession implements HttpSessionBindingListener, GenericEventListener, Serializable {

	private static final Logger log = Tracing.createLoggerFor(UserSession.class);

	private static final long serialVersionUID = 1975177605776990868L;

	// the environment (identity, locale, ..) of the identity
	private IdentityEnvironment identityEnvironment;
	// metadata about the HTTP session; may be null (see getSessionInfo())
	private SessionInfo sessionInfo;
	// primary resource locked during an assessment mode, plus its mode
	private OLATResourceable lockResource;
	private TransientAssessmentMode lockMode;
	private List<TransientAssessmentMode> assessmentModes;
	// additional locked resources, e.g. documents edited within the locked course
	private final List<OLATResourceable> secondaryLockResources = new ArrayList<>();
	private transient WindowedResourceableList resourceList = new WindowedResourceableList();
	// per-session key/value store; cleared when the user signs on
	private transient Map<String,Object> store;
	/**
	 * things to put into that should not be clear when signing on (e.g. remember url for a direct jump)
	 */
	private transient Map<String,Object> nonClearedStore = new HashMap<>();
	// guards access to store and nonClearedStore
	private transient Object lockStores = new Object();
	private boolean authenticated = false;
	// true when this session was restored from serialized state (see readResolve())
	private boolean savedSession = false;
	private transient Preferences guiPreferences;
	private transient EventBus singleUserSystemBus;
	// identifiers of open chat windows; lazily created in getChats()
	private List<String> chats;
	// navigation history, most recent on top, capped at 20 entries
	private final Stack<HistoryPoint> history = new Stack<>();
	private String csrfToken;

	/** Creates a fresh session with a random CSRF token. */
	public UserSession() {
		init();
		csrfToken = UUID.randomUUID().toString();
	}

	/** Creates a fresh session reusing the given CSRF token. */
	public UserSession(String csrfToken) {
		init();
		this.csrfToken = csrfToken;
	}

	/** (Re)initializes the transient per-session state. */
	public void init() {
		store = new HashMap<>(4);
		identityEnvironment = new IdentityEnvironment();
		singleUserSystemBus = CoordinatorManager.getInstance().getCoordinator().createSingleUserInstance();
		authenticated = false;
		sessionInfo = null;
	}

	// Rebuilds transient fields after deserialization; the restored session
	// is marked saved and de-authenticated.
	protected Object readResolve() {
		store = new HashMap<>(4);
		nonClearedStore = new HashMap<>();
		lockStores = new Object();
		resourceList = new WindowedResourceableList();
		singleUserSystemBus = CoordinatorManager.getInstance().getCoordinator().createSingleUserInstance();
		savedSession = true;
		authenticated = false;//reset authentication
		return this;
	}

	/**
	 * @return true if is authenticated
	 */
	public boolean isAuthenticated() {
		return authenticated;
	}

	public void setAuthenticated(boolean authenticated) {
		this.authenticated = authenticated;
	}

	public boolean isSavedSession() {
		return savedSession;
	}

	public void setSavedSession(boolean savedSession) {
		this.savedSession = savedSession;
	}

	public String getCsrfToken() {
		return csrfToken;
	}

	/** @return a snapshot of all values in the clearable store */
	public List<Object> getStoreValues() {
		List<Object> values;
		synchronized(lockStores) {
			values = new ArrayList<>(store.values());
		}
		return values;
	}

	/**
	 * @param key
	 * @param o
	 */
	public void putEntry(String key, Object o) {
		synchronized(lockStores) {
			store.put(key, o);
		}
	}

	// Atomically stores o under key unless already present; note the return
	// value follows Map.put semantics (previous value, i.e. null on insert).
	public Object putEntryIfAbsent(String key, Object o) {
		synchronized(lockStores) {
			if (!store.containsKey(key)) {
				return store.put(key, o);
			} else {
				return store.get(key);
			}
		}
	}

	/**
	 * Looks up the key in the clearable store first, then in the
	 * non-cleared store.
	 *
	 * @param key
	 * @return entry
	 */
	public Object getEntry(String key) {
		if (key == null) {
			return null;
		}
		synchronized(lockStores) {
			if (store.get(key) != null) {
				return store.get(key);
			}
			if (nonClearedStore.get(key) != null) {
				return nonClearedStore.get(key);
			}
		}
		return null;
	}

	/**
	 * @param key
	 * @return removed entry
	 */
	public Object removeEntry(String key) {
		synchronized(lockStores) {
			return store.remove(key);
		}
	}

	/**
	 * put an entry in the usersession that even survives login/logouts from the
	 * users. needed e.g. for a direct jump url, when the url is remembered in the
	 * dmz, but used in auth. since a login occurs, all data from the previous
	 * user will be cleared, that is why we introduced this store.
	 *
	 * @param key
	 * @param o
	 */
	public void putEntryInNonClearedStore(String key, Object o) {
		synchronized(lockStores) {
			nonClearedStore.put(key, o);
		}
	}

	/**
	 * @param key
	 * @return removed entry
	 */
	public Object removeEntryFromNonClearedStore(String key) {
		synchronized(lockStores) {
			return nonClearedStore.remove(key);
		}
	}

	// Lazily creates the chats list.
	// NOTE(review): double-checked lazy init without volatile; presumably
	// benign because a session is used by one user — confirm.
	public List<String> getChats() {
		if(chats == null) {
			synchronized(lockStores) {
				if(chats == null) {
					chats = new ArrayList<>(5);
				}
			}
		}
		return chats;
	}

	/**
	 * @return Locale
	 */
	public Locale getLocale() {
		return identityEnvironment.getLocale();
	}

	/**
	 * @return Identity
	 */
	public Identity getIdentity() {
		return identityEnvironment.getIdentity();
	}

	/**
	 * Sets the locale.
	 *
	 * @param locale The locale to set
	 */
	public void setLocale(Locale locale) {
		identityEnvironment.setLocale(locale);
	}

	/**
	 * Sets the identity and (re)subscribes this session to external
	 * GUI-preferences change events for that identity.
	 *
	 * @param identity The identity to set
	 */
	public void setIdentity(Identity identity) {
		identityEnvironment.setIdentity(identity);
		//event on GUI Preferences external changes
		if(identity.getKey() != null) {
			OLATResourceable ores = OresHelper.createOLATResourceableInstance(Preferences.class, identity.getKey());
			CoordinatorManager.getInstance().getCoordinator().getEventBus().deregisterFor(this, ores);
			CoordinatorManager.getInstance().getCoordinator().getEventBus().registerFor(this, null, ores);
		}
	}

	/**
	 * @return Roles (may be null; a warning is logged in that case)
	 */
	public Roles getRoles() {
		Roles result = identityEnvironment.getRoles();
		if (result == null) {
			log.warn("getRoles: null, this="+this, new RuntimeException("getRoles"));
		}
		return result;
	}

	/**
	 * Sets the roles.
	 *
	 * @param roles The roles to set
	 */
	public void setRoles(Roles roles) {
		identityEnvironment.setRoles(roles);
	}

	/**
	 * @return identity environment
	 */
	public IdentityEnvironment getIdentityEnvironment() {
		return identityEnvironment;
	}

	/** @return true while any assessment-mode lock or pending mode exists */
	public boolean isInAssessmentModeProcess() {
		return lockResource != null || lockMode != null
				|| (assessmentModes != null && assessmentModes.size() > 0);
	}

	public OLATResourceable getLockResource() {
		return lockResource;
	}

	public TransientAssessmentMode getLockMode() {
		return lockMode;
	}

	public void setLockResource(OLATResourceable lockResource, TransientAssessmentMode mode) {
		this.lockMode = mode;
		this.lockResource = lockResource;
	}

	/** Clears the primary lock, its mode and all secondary lock resources. */
	public void unlockResource() {
		lockMode = null;
		lockResource = null;
		secondaryLockResources.clear();
	}

	/**
	 * @param ores The OLAT resource to compare
	 * @return true if the specified resource matches the primary or one
	 * 		of the secondary resources.
	 */
	public boolean matchLockResource(OLATResourceable ores) {
		return matchPrimaryLockResource(ores) || matchSecondaryResource(ores);
	}

	/**
	 *
	 * @param ores The OLAT resource to compare
	 * @return true if a locked resource is present and match the specified one
	 */
	public boolean matchPrimaryLockResource(OLATResourceable ores) {
		return (lockResource != null && OresHelper.equals(lockResource, ores));
	}

	/**
	 * @param ores The OLAT resource to compare
	 * @return true if the specified resource matches one of the secondary resources
	 */
	public boolean matchSecondaryResource(OLATResourceable ores) {
		for(OLATResourceable secondaryLockResource:secondaryLockResources) {
			if(OresHelper.equals(secondaryLockResource, ores)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * Can add a secondary resources. The primary is the course,
	 * secondary ones are document edited within the course.
	 *
	 * @param ores Additional resources
	 */
	public void addSecondaryLockResource(OLATResourceable ores) {
		if(ores != null) {
			secondaryLockResources.add(OresHelper.clone(ores));
		}
	}

	public List<TransientAssessmentMode> getAssessmentModes() {
		return assessmentModes;
	}

	public void setAssessmentModes(List<TransientAssessmentMode> assessmentModes) {
		this.assessmentModes = assessmentModes;
	}

	/**
	 * may be null
	 * <p>
	 * @return session info object
	 */
	public SessionInfo getSessionInfo() {
		return sessionInfo;
	}

	/**
	 * @param sessionInfo
	 */
	public void setSessionInfo(SessionInfo sessionInfo) {
		this.sessionInfo = sessionInfo;
	}

	/**
	 * @return Returns the guiPreferences.
	 */
	public Preferences getGuiPreferences() {
		return guiPreferences;
	}

	/** Reloads the GUI preferences for the current identity. */
	public void reloadPreferences() {
		Identity identity = identityEnvironment.getIdentity();
		guiPreferences = PreferencesFactory.getInstance().getPreferencesFor(identity, identityEnvironment.getRoles().isGuestOnly());
	}

	/**
	 * This is the olatsystembus to broadcast event amongst controllers of a single user only
	 * (the one whom this usersession belongs to)
	 *
	 * @return the olatsystembus for the local user
	 */
	public EventBus getSingleUserEventCenter() {
		return singleUserSystemBus;
	}

	/** @return a copy of the navigation history, oldest first */
	public List<HistoryPoint> getHistoryStack() {
		return new ArrayList<>(history);
	}

	public HistoryPoint getLastHistoryPoint() {
		if(history.isEmpty()) {
			return null;
		}
		return history.lastElement();
	}

	/** @return the most recent history point accepted by the predicate, or null */
	public HistoryPoint getLastHistoryPoint(Predicate<HistoryPoint> accept) {
		if(history.isEmpty()) {
			return null;
		}
		// iterate from the newest entry backwards
		for(int i=history.size(); i-->0; ) {
			HistoryPoint point = history.get(i);
			if(accept.test(point)) {
				return point;
			}
		}
		return null;
	}

	/** @return the history point with the given UUID, or null */
	public HistoryPoint getHistoryPoint(String id) {
		if(history.isEmpty()) {
			return null;
		}
		for(HistoryPoint point:history) {
			if(id.equals(point.getUuid())) {
				return point;
			}
		}
		return null;
	}

	// Discards the current point and returns the previous one (popping both).
	public HistoryPoint popLastHistoryEntry() {
		if(history.isEmpty()) return null;
		history.pop();//current point
		if(history.isEmpty()) return null;
		return history.pop();//remove last point from history
	}

	/** Appends the given point to the history under the request's UUID. */
	public void addToHistory(UserRequest ureq, HistoryPoint point) {
		if(point == null) return;
		history.add(new HistoryPointImpl(ureq.getUuid(), point.getBusinessPath(), point.getEntries()));
	}

	// Pushes the business control onto the history, replacing any existing
	// entry with the same request UUID and trimming the stack to 20 entries.
	public void addToHistory(UserRequest ureq, BusinessControl businessControl) {
		List<ContextEntry> entries = businessControl.getEntries();
		String businessPath = businessControl.getAsString();
		if(StringHelper.containsNonWhitespace(businessPath)) {
			String uuid = ureq.getUuid();
			if(!history.isEmpty()) {
				//consolidate
				synchronized(history) {
					for(Iterator<HistoryPoint> it=history.iterator(); it.hasNext(); ) {
						HistoryPoint p = it.next();
						if(uuid.equals(p.getUuid())) {
							it.remove();
						}
					}
				}
			}
			history.push(new HistoryPointImpl(ureq.getUuid(), businessPath, entries));
			if(history.size() > 20) {
				history.remove(0);
			}
		}
	}

	// Removes every history point whose business path starts with the
	// given control's path.
	public void removeFromHistory(BusinessControl businessControl) {
		String businessPath = businessControl.getAsString();
		synchronized(history) {
			for(Iterator<HistoryPoint> it=history.iterator(); it.hasNext(); ) {
				String path = it.next().getBusinessPath();
				if(path.startsWith(businessPath)) {
					it.remove();
				}
			}
		}
	}

	public WindowedResourceableList getResourceList() {
		return resourceList;
	}

	@Override
	public void valueBound(HttpSessionBindingEvent be) {
		if (log.isDebugEnabled()) {
			log.debug("Opened UserSession: {}", this);
		}
	}

	/**
	 * called when the session is invalidated either by app. server timeout or manual session.invalidate (logout)
	 *
	 * @see javax.servlet.http.HttpSessionBindingListener#valueUnbound(javax.servlet.http.HttpSessionBindingEvent)
	 */
	@Override
	public void valueUnbound(HttpSessionBindingEvent be) {
		try {
			// the identity can be null if an loginscreen only session gets invalidated
			// (no user was authenticated yet but a tomcat session was created)
			Identity ident = identityEnvironment.getIdentity();
			CoreSpringFactory.getImpl(UserSessionManager.class).signOffAndClear(this);
			log.debug("Closed UserSession: identity = {}", (ident == null ? "n/a" : ident.getKey()));
			//we do not have a request in the null case (app. server triggered) and user not yet logged in
			//-> in this case we use the special empty activity logger
			if (ident == null) {
				ThreadLocalUserActivityLoggerInstaller.initEmptyUserActivityLogger();
			}
		} catch (Exception e) {
			log.error("exception while session was unbound!", e);
		}
		// called by tomcat's timer thread -> we need to close!! since the next unbound will be called from the same tomcat-thread
		finally {
			//o_clusterNOK: put into managed transaction wrapper
			DBFactory.getInstance().commitAndCloseSession();
		}
	}

	/**
	 * only for preference changed event
	 */
	@Override
	public void event(Event event) {
		if("preferences.changed".equals(event.getCommand())) {
			reloadPreferences();
		}
	}

	@Override
	public String toString() {
		return "Session of " + identityEnvironment + ", " + super.toString();
	}
}
#!/bin/bash
# Copyright 2018 WSO2 Inc. (http://wso2.org)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ----------------------------------------------------------------------------
# Create a summary report from JMeter results
# ----------------------------------------------------------------------------
# Path to the GCViewer JAR used to summarize GC logs (required).
gcviewer_path=$1
#true or false argument
include_all=$2
if [[ ! -f $gcviewer_path ]]; then
    echo "Please specify the path to GCViewer JAR file. Example: $0 gcviewer_jar_file include_all->(true/false)"
    exit 1
fi
# Emit the CSV header columns for one host's GC statistics.
# $1 = host label used as the column-name prefix. No trailing newline.
get_gc_headers() {
    local prefix="$1"
    printf ',%s GC Throughput (%%),%s Footprint (M),%s Average of Footprint After Full GC (M)' "$prefix" "$prefix" "$prefix"
    printf ',%s Standard Deviation of Footprint After Full GC (M)' "$prefix"
}
# Emit the CSV header columns for one host's load averages.
# $1 = host label used as the column-name prefix. No trailing newline.
get_loadavg_headers() {
    local prefix="$1"
    printf ',%s Load Average - Last 1 minute,%s Load Average - Last 5 minutes,%s Load Average - Last 15 minutes' "$prefix" "$prefix" "$prefix"
}
filename="summary.csv"
if [[ ! -f $filename ]]; then
# Create File and save headers
echo -n "Heap Size","Ballerina File","Observability","Concurrent Users","Message Size (Bytes)","Sleep Time (ms)", > $filename
echo -n "# Samples","Error Count","Error %","Average (ms)","Min (ms)","Max (ms)", >> $filename
echo -n "90th Percentile (ms)","95th Percentile (ms)","99th Percentile (ms)","Throughput", >> $filename
echo -n "Received (KB/sec)","Sent (KB/sec)" >> $filename
echo -n $(get_gc_headers "Ballerina") >> $filename
if [ "$include_all" = true ] ; then
echo -n $(get_gc_headers "Netty Service") >> $filename
echo -n $(get_gc_headers "JMeter Client") >> $filename
echo -n $(get_gc_headers "JMeter Server 01") >> $filename
echo -n $(get_gc_headers "JMeter Server 02") >> $filename
fi
echo -n $(get_loadavg_headers "Ballerina") >> $filename
if [ "$include_all" = true ] ; then
echo -n $(get_loadavg_headers "Netty Service") >> $filename
echo -n $(get_loadavg_headers "JMeter Client") >> $filename
echo -n $(get_loadavg_headers "JMeter Server 01") >> $filename
echo -n $(get_loadavg_headers "JMeter Server 02") >> $filename
fi
echo -ne "\r\n" >> $filename
else
echo "$filename already exists"
exit 1
fi
# Append one value from the JMeter dashboard statistics table to $filename
# as a CSV column. $1 = the statistics-table JSON, $2 = index into .overall.data.
write_column() {
    statisticsTableData=$1
    index=$2
    echo -n "," >> $filename
    echo -n "$(echo $statisticsTableData | jq -r ".overall | .data[$index]")" >> $filename
}
# Extract the value for key $2 from the semicolon-separated GCViewer summary
# file $1, stripping thousands separators. Only the first match is used.
get_value_from_gc_summary() {
    echo $(grep -m 1 $2\; $1 | sed -r 's/.*\;(.*)\;.*/\1/' | sed 's/,//g')
}
# Append the GC summary columns (throughput, footprint, avg and stddev of
# footprint after full GC) for host $1 by running GCViewer over its GC log.
# Relies on $message_size_dir/$sleep_time_dir/$filename from the caller.
write_gc_summary_details() {
    #Verify whether sleep time directory is available
    if [ ! -z "$(find $message_size_dir -maxdepth 1 -name '*ms_sleep' )" ]; then
        gc_log_file=$sleep_time_dir/$1_gc.log
    else
        gc_log_file=$message_size_dir/$1_gc.log
    fi
    gc_summary_file=/tmp/gc.txt
    echo "Reading $gc_log_file"
    # GCViewer -t SUMMARY writes a key;value summary file for the log.
    java -Xms128m -Xmx128m -jar $gcviewer_path $gc_log_file $gc_summary_file -t SUMMARY &> /dev/null
    echo -n ",$(get_value_from_gc_summary $gc_summary_file throughput)" >> $filename
    echo -n ",$(get_value_from_gc_summary $gc_summary_file footprint)" >> $filename
    echo -n ",$(get_value_from_gc_summary $gc_summary_file avgfootprintAfterFullGC)" >> $filename
    # The key genuinely ends with the sigma character in GCViewer's output.
    echo -n ",$(get_value_from_gc_summary $gc_summary_file avgfootprintAfterFullGCσ)" >> $filename
}
# Append the 1/5/15-minute load-average columns for host $1, read from the
# second-to-last line of its loadavg capture; writes N/A columns when the
# file is missing. Relies on $message_size_dir/$sleep_time_dir/$filename.
write_loadavg_details() {
    #Verify whether sleep time directory is available
    if [ ! -z "$(find $message_size_dir -maxdepth 1 -name '*ms_sleep' )" ]; then
        loadavg_file=$sleep_time_dir/$1_loadavg.txt
    else
        loadavg_file=$message_size_dir/$1_loadavg.txt
    fi
    if [[ -f $loadavg_file ]]; then
        echo "Reading $loadavg_file"
        loadavg_values=$(tail -2 $loadavg_file | head -1)
        loadavg_array=($loadavg_values)
        # Assumes an uptime-style line where fields 3-5 (0-based) hold the
        # three load averages — TODO confirm against the capture format.
        echo -n ",${loadavg_array[3]}" >> $filename
        echo -n ",${loadavg_array[4]}" >> $filename
        echo -n ",${loadavg_array[5]}" >> $filename
    else
        echo -n ",N/A,N/A,N/A" >> $filename
    fi
}
# Emit one CSV row for a single benchmark result directory.
#   $1 - directory containing the dashboard-measurement output
#   $2 - sleep time value to record in the row ("N/A" when not applicable)
# Relies on the surrounding loop variables (heap_size_dir, bal_file_dir,
# flags_dir, user_dir, message_size_dir) and appends to $filename.
# Fix: the original duplicated this ~45-line body verbatim in both the
# with-sleep and no-sleep branches; any future change had to be made twice.
write_result_row() {
    local result_dir=$1
    local sleep_time=$2
    local dashboard_data_file=$result_dir/dashboard-measurement/content/js/dashboard.js
    if [[ ! -f $dashboard_data_file ]]; then
        echo "WARN: Dashboard data file not found: $dashboard_data_file"
        return
    fi
    # Extract the JSON object bound to "#statisticsTable" in the generated
    # dashboard.js.
    statisticsTableData=$(grep '#statisticsTable' $dashboard_data_file | sed 's/^.*"#statisticsTable"), \({.*}\).*$/\1/')
    echo "Getting data from $dashboard_data_file"
    # Scenario parameters recovered from the directory names.
    heap_size=$(echo $heap_size_dir | sed -r 's/.\/([0-9]+[a-zA-Z])_heap.*/\1/')
    bal_file=$(echo $bal_file_dir | sed -nE 's/.*\/([[:alnum:]_]+.bal)_bal.*/\1/p')
    flags=$(echo $flags_dir | sed -nE 's/.*\/([[:alnum:]]+)_flags.*/\1/p')
    concurrent_users=$(echo $user_dir | sed -r 's/.*\/([0-9]+)_users.*/\1/')
    message_size=$(echo $message_size_dir | sed -r 's/.*\/([0-9]+)B.*/\1/')
    echo -n "$heap_size,$bal_file,$flags,$concurrent_users,$message_size,$sleep_time" >> $filename
    # Columns 1-12 of the JMeter statistics table (samples, errors,
    # percentiles, throughput, KB/sec — matching the header row).
    local index
    for index in 1 2 3 4 5 6 7 8 9 10 11 12; do
        write_column "$statisticsTableData" $index
    done
    write_gc_summary_details ballerina
    if [ "$include_all" = true ] ; then
        write_gc_summary_details netty
        write_gc_summary_details jmeter
        write_gc_summary_details jmeter1
        write_gc_summary_details jmeter2
    fi
    write_loadavg_details ballerina
    if [ "$include_all" = true ] ; then
        write_loadavg_details netty
        write_loadavg_details jmeter
        write_loadavg_details jmeter1
        write_loadavg_details jmeter2
    fi
    # CRLF row terminator (spreadsheet-friendly), as in the header row.
    echo -ne "\r\n" >> $filename
}

# Walk the results tree: <heap>_heap/<bal>_bal/<flags>_flags/<users>_users/
# <size>B[/<sleep>ms_sleep] and emit one CSV row per leaf directory.
for heap_size_dir in $(find . -maxdepth 1 -name '*_heap' | sort -V); do
    echo "Heap Size Dir " + $heap_size_dir
    for bal_file_dir in $(find $heap_size_dir -maxdepth 1 -name '*_bal' | sort -V); do
        echo "bal_file_dir" + $bal_file_dir
        for flags_dir in $(find $bal_file_dir -maxdepth 1 -name '*_flags' | sort -V); do
            echo "flags_dir" + $flags_dir
            for user_dir in $(find $flags_dir -maxdepth 1 -name '*_users' | sort -V); do
                echo "user_dir" + $user_dir
                for message_size_dir in $(find $user_dir -maxdepth 1 -name '*B' | sort -V); do
                    echo "message_size_dir" + $message_size_dir
                    if [ ! -z "$(find $message_size_dir -maxdepth 1 -name '*ms_sleep' )" ]; then
                        for sleep_time_dir in $(find $message_size_dir -maxdepth 1 -name '*ms_sleep' | sort -V); do
                            echo "sleep_time_dir" + $sleep_time_dir
                            sleep_time=$(echo $sleep_time_dir | sed -r 's/.*\/([0-9]+)ms_sleep.*/\1/')
                            write_result_row "$sleep_time_dir" "$sleep_time"
                        done
                    else
                        # No sleep-time sub-directories: results live directly
                        # under the message-size directory.
                        write_result_row "$message_size_dir" "N/A"
                    fi
                done
            done
        done
    done
done
echo "Completed. Open $filename."
|
import axios, { AxiosRequestConfig } from 'axios';
// Shared HTTP client configuration. The base URL is injected at build time
// from the REACT_APP_API_URL environment variable (Create React App
// convention); all requests are sent as JSON.
const axiosConfig: AxiosRequestConfig = {
  baseURL: process.env.REACT_APP_API_URL,
  headers: {
    'Content-Type': 'application/json',
  },
};
// Pre-configured axios instance used throughout the app.
const api = axios.create(axiosConfig);
export default api; |
#include <iostream>
#include <string>
#include <unordered_map>
#include <json/json.h> // Assuming the use of a JSON library
// Holds the three status flags parsed from a status JSON document, plus
// "has been set" markers recording which keys were actually present.
class SystemStatus {
private:
    // Parsed values.
    bool m_setupFailed;
    int m_shuttingDown;
    bool m_startFailed;
    // Presence markers: true once the corresponding JSON key was seen.
    bool m_setupFailedHasBeenSet;
    bool m_shuttingDownHasBeenSet;
    bool m_startFailedHasBeenSet;
public:
    SystemStatus() :
        m_setupFailed(false),
        m_shuttingDown(0),
        m_startFailed(false),
        m_setupFailedHasBeenSet(false),
        m_shuttingDownHasBeenSet(false),
        m_startFailedHasBeenSet(false)
    {
    }
    // Copies the recognized keys out of a parsed JSON object. Keys absent
    // from the document leave the current value and its marker untouched.
    void ProcessStatusJson(const Json::Value& jsonValue) {
        if (jsonValue.isMember("SetupFailed")) {
            m_setupFailedHasBeenSet = true;
            m_setupFailed = jsonValue["SetupFailed"].asBool();
        }
        if (jsonValue.isMember("ShuttingDown")) {
            m_shuttingDownHasBeenSet = true;
            m_shuttingDown = jsonValue["ShuttingDown"].asInt();
        }
        if (jsonValue.isMember("StartFailed")) {
            m_startFailedHasBeenSet = true;
            m_startFailed = jsonValue["StartFailed"].asBool();
        }
    }
    // Each getter falls back to the type's default when the key was absent.
    bool GetSetupFailed() const {
        return m_setupFailedHasBeenSet ? m_setupFailed : false;
    }
    int GetShuttingDown() const {
        return m_shuttingDownHasBeenSet ? m_shuttingDown : 0;
    }
    bool GetStartFailed() const {
        return m_startFailedHasBeenSet ? m_startFailed : false;
    }
};
// Demonstrates parsing a status document and reading it back through the
// SystemStatus getters.
int main() {
    // Example usage
    std::string jsonStr = R"({"SetupFailed": true, "ShuttingDown": 1, "StartFailed": false})";
    Json::Value jsonValue;
    // NOTE(review): Json::Reader is deprecated in recent jsoncpp releases in
    // favor of Json::CharReaderBuilder — confirm the targeted library version.
    Json::Reader reader;
    bool parsingSuccessful = reader.parse(jsonStr, jsonValue);
    if (parsingSuccessful) {
        SystemStatus status;
        status.ProcessStatusJson(jsonValue);
        // Booleans print as 1/0 here (std::boolalpha is not set).
        std::cout << "SetupFailed: " << status.GetSetupFailed() << std::endl;
        std::cout << "ShuttingDown: " << status.GetShuttingDown() << std::endl;
        std::cout << "StartFailed: " << status.GetStartFailed() << std::endl;
    } else {
        std::cerr << "Failed to parse JSON" << std::endl;
    }
    return 0;
} |
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.gui.util;
import org.fxmisc.richtext.model.StyleSpans;
import org.fxmisc.richtext.model.StyleSpansBuilder;
import java.util.Collection;
import java.util.Collections;
/**
 * Convenience wrapper around {@link StyleSpansBuilder} that lets callers add
 * styled spans by absolute character index; any gap between consecutive spans
 * is automatically filled with an unstyled span.
 */
public class EasyStyleSpansBuilder {

    private final StyleSpansBuilder<Collection<String>> builder;

    // Exclusive end offset of the region covered by the spans added so far.
    private int coveredEnd;

    public EasyStyleSpansBuilder() {
        this.builder = new StyleSpansBuilder<>();
        this.coveredEnd = 0;
    }

    /**
     * Adds a span of {@code styles} covering {@code element}, which starts at
     * absolute offset {@code index}; pads any uncovered gap before it.
     */
    public void add(int index, String element, Collection<String> styles) {
        int gap = index - coveredEnd;
        if (gap > 0) {
            builder.add(Collections.emptyList(), gap);
        }
        builder.add(styles, element.length());
        coveredEnd = index + element.length();
    }

    /** @return true while no span has been added yet. */
    public boolean isEmpty() {
        return coveredEnd == 0;
    }

    /** Builds the immutable {@link StyleSpans} collected so far. */
    public StyleSpans<Collection<String>> create() {
        return builder.create();
    }
}
|
#include "Config.h"
#include "OldAccountChange.h"
#include "cache/UserCache.h"
#include "crypto/BSHA1Hash.h"
#include "packets/bnet/BNetOldAccountChange.h"
#include "utils/DataConv.h"
namespace Plugins
{
namespace BNet
{
// Handles the legacy Battle.net account password-change packet: verifies the
// client's proof of the old password against the cached password hash and,
// on success, stores the new one. Always replies with a result packet.
bool OldAccountChange::Process( Network::TcpClient& cl, Utils::Stream& in )
{
// Wire format: 8-byte client token, 20-byte proof of the old password,
// 20-byte new password hash, then the account name as a string.
byte token[8];
byte pwd[20], newpwd[20];
std::string username;
in.read(token, 8);
in.read(pwd, 20);
in.read(newpwd, 20);
in >> username;
// Account names longer than 15 characters are truncated.
if(username.length() > 15)
username.resize(15);
Packets::BNet::BNetOldAccountChange packet;
Cache::UserCacheItem::Pointer user = Cache::userCache[username.c_str()];
if(user.get() != NULL && user->IsExist())
{
// Recompute the expected proof: BSHA1(token || stored password hash).
byte cres[20];
Crypto::BSHA1Context bsha1;
BSHA1Reset(&bsha1);
BSHA1Input(&bsha1, token, 8);
BSHA1Input(&bsha1, user->GetPasswd(), 20);
BSHA1Result(&bsha1, cres);
// Presumably swaps each 32-bit digest word to wire byte order — TODO
// confirm. NOTE(review): the uint* cast aliases a byte buffer (strict
// aliasing / alignment concern on some platforms).
for(int i = 0; i < 5; ++ i)
{
((uint *)cres)[i] = Utils::Reverse(((uint *)cres)[i]);
}
// NOTE(review): memcmp is not constant-time; a timing-safe comparison
// would be preferable for a password proof.
if(memcmp(cres, pwd, 20) == 0)
{
packet.result = 1; // proof matched: accept and store the new hash
user->SetPasswd(newpwd);
}
else
packet.result = 0; // wrong old password
}
else
{
packet.result = 0; // unknown or nonexistent account
}
packet.BuildAndSendTo(cl);
return true;
}
}
}
|
// shuffle array
// Fisher–Yates shuffle: permutes `arr` in place and returns the same array
// instance. Uniform given an unbiased Math.random.
function shuffle(arr) {
  for (let remaining = arr.length; remaining > 0; remaining--) {
    // Pick a random element from the unshuffled prefix [0, remaining).
    const pick = Math.floor(Math.random() * remaining);
    const last = remaining - 1;
    // Swap it into the final position of that prefix.
    [arr[last], arr[pick]] = [arr[pick], arr[last]];
  }
  return arr;
}
// encrypt string
// "Encrypts" a string by randomly permuting its characters.
// Note: this is a one-way scramble — the original order is not recoverable —
// so it is obfuscation, not real encryption.
function encrypt(str) {
  return shuffle(str.split('')).join('');
}
// Demo: scramble a sample message and print the result.
const secret = encrypt('mySecretMessage');
console.log(secret);
// example output (varies per run):
// esmsycMretaec |
// source repo: Infinite-graph/vulcan-next
import { parseEnvVariableArray } from "~/lib/utils";
import debug from "debug";
const debugCors = debug("vns:cors");
const corsWhitelist = parseEnvVariableArray(
process.env.APOLLO_SERVER_CORS_WHITELIST
);
/**
 * CORS options: accept same-origin requests (those without an Origin header)
 * and any origin present in the configured whitelist.
 */
const corsOptions = {
  origin(origin, callback) {
    debugCors("Origin is", origin, "Allowed origins are ", corsWhitelist);
    // A missing Origin header means a same-origin request; otherwise the
    // origin must appear in the whitelist.
    if (!origin || corsWhitelist.indexOf(origin) !== -1) {
      callback(null, true);
    } else {
      callback(new Error(`Not allowed by CORS ${origin}`));
    }
  },
};
export default corsOptions;
|
// polymer-globe/polymer/polymer-all/more-elements/js-beautify/js-beautify/beautify-html.js (1000+ stars)
/*jshint curly:true, eqeqeq:true, laxbreak:true, noempty:false */
/*
The MIT License (MIT)
Copyright (c) 2007-2013 <NAME> and contributors.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Style HTML
---------------
Written by <NAME>, (<EMAIL>)
Based on code initially developed by: <NAME>, <<EMAIL>>
http://jsbeautifier.org/
Usage:
style_html(html_source);
style_html(html_source, options);
The options are:
indent_size (default 4) — indentation size,
indent_char (default space) — character to indent with,
max_char (default 250) - maximum amount of characters per line (0 = disable)
brace_style (default "collapse") - "collapse" | "expand" | "end-expand"
put braces on the same line as control statements (default), or put braces on own line (Allman / ANSI style), or just put end braces on own line.
unformatted (defaults to inline tags) - list of tags, that shouldn't be reformatted
indent_scripts (default normal) - "keep"|"separate"|"normal"
e.g.
style_html(html_source, {
'indent_size': 2,
'indent_char': ' ',
'max_char': 78,
'brace_style': 'expand',
'unformatted': ['a', 'sub', 'sup', 'b', 'i', 'u']
});
*/
(function() {
function style_html(html_source, options, js_beautify, css_beautify) {
//Wrapper function to invoke all the necessary constructors and deal with the output.
var multi_parser,
indent_size,
indent_character,
max_char,
brace_style,
unformatted;
options = options || {};
indent_size = options.indent_size || 4;
indent_character = options.indent_char || ' ';
brace_style = options.brace_style || 'collapse';
max_char = options.max_char === 0 ? Infinity : options.max_char || 250;
unformatted = options.unformatted || ['a', 'span', 'bdo', 'em', 'strong', 'dfn', 'code', 'samp', 'kbd', 'var', 'cite', 'abbr', 'acronym', 'q', 'sub', 'sup', 'tt', 'i', 'b', 'big', 'small', 'u', 's', 'strike', 'font', 'ins', 'del', 'pre', 'address', 'dt', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6'];
function Parser() {
this.pos = 0; //Parser position
this.token = '';
this.current_mode = 'CONTENT'; //reflects the current Parser mode: TAG/CONTENT
this.tags = { //An object to hold tags, their position, and their parent-tags, initiated with default values
parent: 'parent1',
parentcount: 1,
parent1: ''
};
this.tag_type = '';
this.token_text = this.last_token = this.last_text = this.token_type = '';
this.Utils = { //Uilities made available to the various functions
whitespace: "\n\r\t ".split(''),
single_token: 'br,input,link,meta,!doctype,basefont,base,area,hr,wbr,param,img,isindex,?xml,embed,?php,?,?='.split(','), //all the single tags for HTML
extra_liners: 'head,body,/html'.split(','), //for tags that need a line of whitespace before them
in_array: function (what, arr) {
for (var i=0; i<arr.length; i++) {
if (what === arr[i]) {
return true;
}
}
return false;
}
};
this.get_content = function () { //function to capture regular content between tags
var input_char = '',
content = [],
space = false; //if a space is needed
while (this.input.charAt(this.pos) !== '<') {
if (this.pos >= this.input.length) {
return content.length?content.join(''):['', 'TK_EOF'];
}
input_char = this.input.charAt(this.pos);
this.pos++;
this.line_char_count++;
if (this.Utils.in_array(input_char, this.Utils.whitespace)) {
if (content.length) {
space = true;
}
this.line_char_count--;
continue; //don't want to insert unnecessary space
}
else if (space) {
if (this.line_char_count >= this.max_char) { //insert a line when the max_char is reached
content.push('\n');
for (var i=0; i<this.indent_level; i++) {
content.push(this.indent_string);
}
this.line_char_count = 0;
}
else{
content.push(' ');
this.line_char_count++;
}
space = false;
}
content.push(input_char); //letter at-a-time (or string) inserted to an array
}
return content.length?content.join(''):'';
};
this.get_contents_to = function (name) { //get the full content of a script or style to pass to js_beautify
  // Returns ['', 'TK_EOF'] when the parser is already at end of input.
  if (this.pos === this.input.length) {
    return ['', 'TK_EOF'];
  }
  var input_char = '';
  var content = '';
  // Search for the matching closing tag (e.g. </script>) starting at the
  // current position; everything before it is returned verbatim.
  var reg_match = new RegExp('</' + name + '\\s*>', 'igm');
  reg_match.lastIndex = this.pos;
  var reg_array = reg_match.exec(this.input);
  var end_script = reg_array?reg_array.index:this.input.length; //absolute end of script
  if(this.pos < end_script) { //get everything in between the script tags
    content = this.input.substring(this.pos, end_script);
    // Advance to the closing tag so the next token is the tag itself.
    this.pos = end_script;
  }
  return content;
};
this.record_tag = function (tag) { //remember an opened tag and its parent in this.tags
  var count_key = tag + 'count';
  // Bump (or initialize) the per-tag-type counter.
  this.tags[count_key] = (this.tags[count_key] || 0) + 1;
  // e.g. 'div' opened for the 2nd time -> key 'div2'.
  var tag_id = tag + this.tags[count_key];
  this.tags[tag_id] = this.indent_level; //record the indent level at open time
  this.tags[tag_id + 'parent'] = this.tags.parent; //link to the enclosing tag
  this.tags.parent = tag_id; //this tag becomes the current parent
};
this.retrieve_tag = function (tag) { //function to retrieve the opening tag to the corresponding closer
if (this.tags[tag + 'count']) { //if the openener is not in the Object we ignore it
var temp_parent = this.tags.parent; //check to see if it's a closable tag.
while (temp_parent) { //till we reach '' (the initial value);
if (tag + this.tags[tag + 'count'] === temp_parent) { //if this is it use it
break;
}
temp_parent = this.tags[temp_parent + 'parent']; //otherwise keep on climbing up the DOM Tree
}
if (temp_parent) { //if we caught something
this.indent_level = this.tags[tag + this.tags[tag + 'count']]; //set the indent_level accordingly
this.tags.parent = this.tags[temp_parent + 'parent']; //and set the current parent
}
delete this.tags[tag + this.tags[tag + 'count'] + 'parent']; //delete the closed tags parent reference...
delete this.tags[tag + this.tags[tag + 'count']]; //...and the tag itself
if (this.tags[tag + 'count'] === 1) {
delete this.tags[tag + 'count'];
}
else {
this.tags[tag + 'count']--;
}
}
};
this.get_tag = function (peek) { //function to get a full tag and parse its type
var input_char = '',
content = [],
comment = '',
space = false,
tag_start, tag_end,
orig_pos = this.pos,
orig_line_char_count = this.line_char_count;
peek = peek !== undefined ? peek : false;
do {
if (this.pos >= this.input.length) {
if (peek) {
this.pos = orig_pos;
this.line_char_count = orig_line_char_count;
}
return content.length?content.join(''):['', 'TK_EOF'];
}
input_char = this.input.charAt(this.pos);
this.pos++;
this.line_char_count++;
if (this.Utils.in_array(input_char, this.Utils.whitespace)) { //don't want to insert unnecessary space
space = true;
this.line_char_count--;
continue;
}
if (input_char === "'" || input_char === '"') {
if (!content[1] || content[1] !== '!') { //if we're in a comment strings don't get treated specially
input_char += this.get_unformatted(input_char);
space = true;
}
}
if (input_char === '=') { //no space before =
space = false;
}
if (content.length && content[content.length-1] !== '=' && input_char !== '>' && space) {
//no space after = or before >
if (this.line_char_count >= this.max_char) {
this.print_newline(false, content);
this.line_char_count = 0;
}
else {
content.push(' ');
this.line_char_count++;
}
space = false;
}
if (input_char === '<') {
tag_start = this.pos - 1;
}
content.push(input_char); //inserts character at-a-time (or string)
} while (input_char !== '>');
var tag_complete = content.join('');
var tag_index;
if (tag_complete.indexOf(' ') !== -1) { //if there's whitespace, thats where the tag name ends
tag_index = tag_complete.indexOf(' ');
}
else { //otherwise go with the tag ending
tag_index = tag_complete.indexOf('>');
}
var tag_check = tag_complete.substring(1, tag_index).toLowerCase();
if (tag_complete.charAt(tag_complete.length-2) === '/' ||
this.Utils.in_array(tag_check, this.Utils.single_token)) { //if this tag name is a single tag type (either in the list or has a closing /)
if ( ! peek) {
this.tag_type = 'SINGLE';
}
}
else if (tag_check === 'script') { //for later script handling
if ( ! peek) {
this.record_tag(tag_check);
this.tag_type = 'SCRIPT';
}
}
else if (tag_check === 'style') { //for future style handling (for now it justs uses get_content)
if ( ! peek) {
this.record_tag(tag_check);
this.tag_type = 'STYLE';
}
}
else if (this.is_unformatted(tag_check, unformatted)) { // do not reformat the "unformatted" tags
comment = this.get_unformatted('</'+tag_check+'>', tag_complete); //...delegate to get_unformatted function
content.push(comment);
// Preserve collapsed whitespace either before or after this tag.
if (tag_start > 0 && this.Utils.in_array(this.input.charAt(tag_start - 1), this.Utils.whitespace)){
content.splice(0, 0, this.input.charAt(tag_start - 1));
}
tag_end = this.pos - 1;
if (this.Utils.in_array(this.input.charAt(tag_end + 1), this.Utils.whitespace)){
content.push(this.input.charAt(tag_end + 1));
}
this.tag_type = 'SINGLE';
}
else if (tag_check.charAt(0) === '!') { //peek for <!-- comment
if (tag_check.indexOf('[if') !== -1) { //peek for <!--[if conditional comment
if (tag_complete.indexOf('!IE') !== -1) { //this type needs a closing --> so...
comment = this.get_unformatted('-->', tag_complete); //...delegate to get_unformatted
content.push(comment);
}
if ( ! peek) {
this.tag_type = 'START';
}
}
else if (tag_check.indexOf('[endif') !== -1) {//peek for <!--[endif end conditional comment
this.tag_type = 'END';
this.unindent();
}
else if (tag_check.indexOf('[cdata[') !== -1) { //if it's a <[cdata[ comment...
comment = this.get_unformatted(']]>', tag_complete); //...delegate to get_unformatted function
content.push(comment);
if ( ! peek) {
this.tag_type = 'SINGLE'; //<![CDATA[ comments are treated like single tags
}
}
else {
comment = this.get_unformatted('-->', tag_complete);
content.push(comment);
this.tag_type = 'SINGLE';
}
}
else if ( ! peek) {
if (tag_check.charAt(0) === '/') { //this tag is a double tag so check for tag-ending
this.retrieve_tag(tag_check.substring(1)); //remove it and all ancestors
this.tag_type = 'END';
}
else { //otherwise it's a start-tag
this.record_tag(tag_check); //push it on the tag stack
this.tag_type = 'START';
}
if (this.Utils.in_array(tag_check, this.Utils.extra_liners)) { //check if this double needs an extra line
this.print_newline(true, this.output);
}
}
if (peek) {
this.pos = orig_pos;
this.line_char_count = orig_line_char_count;
}
return content.join(''); //returns fully formatted tag
};
this.get_unformatted = function (delimiter, orig_tag) { //function to return unformatted content in its entirety
if (orig_tag && orig_tag.toLowerCase().indexOf(delimiter) !== -1) {
return '';
}
var input_char = '';
var content = '';
var space = true;
do {
if (this.pos >= this.input.length) {
return content;
}
input_char = this.input.charAt(this.pos);
this.pos++;
if (this.Utils.in_array(input_char, this.Utils.whitespace)) {
if (!space) {
this.line_char_count--;
continue;
}
if (input_char === '\n' || input_char === '\r') {
content += '\n';
/* Don't change tab indention for unformatted blocks. If using code for html editing, this will greatly affect <pre> tags if they are specified in the 'unformatted array'
for (var i=0; i<this.indent_level; i++) {
content += this.indent_string;
}
space = false; //...and make sure other indentation is erased
*/
this.line_char_count = 0;
continue;
}
}
content += input_char;
this.line_char_count++;
space = true;
} while (content.toLowerCase().indexOf(delimiter) === -1);
return content;
};
this.get_token = function () { //initial handler for token-retrieval
var token;
if (this.last_token === 'TK_TAG_SCRIPT' || this.last_token === 'TK_TAG_STYLE') { //check if we need to format javascript
var type = this.last_token.substr(7);
token = this.get_contents_to(type);
if (typeof token !== 'string') {
return token;
}
return [token, 'TK_' + type];
}
if (this.current_mode === 'CONTENT') {
token = this.get_content();
if (typeof token !== 'string') {
return token;
}
else {
return [token, 'TK_CONTENT'];
}
}
if (this.current_mode === 'TAG') {
token = this.get_tag();
if (typeof token !== 'string') {
return token;
}
else {
var tag_name_type = 'TK_TAG_' + this.tag_type;
return [token, tag_name_type];
}
}
};
this.get_full_indent = function (level) {
level = this.indent_level + level || 0;
if (level < 1) {
return '';
}
return Array(level + 1).join(this.indent_string);
};
// Decides whether tag_check should be left unformatted. For <a> tags the
// next tag is peeked: an <a> whose first child is itself an isolated
// unformatted tag keeps its content untouched.
this.is_unformatted = function(tag_check, unformatted) {
  //is this an HTML5 block-level link?
  if (!this.Utils.in_array(tag_check, unformatted)){
    return false;
  }
  if (tag_check.toLowerCase() !== 'a' || !this.Utils.in_array('a', unformatted)){
    return true;
  }
  //at this point we have an <a> tag; is its first child something we want to remain
  //unformatted?
  var next_tag = this.get_tag(true /* peek. */);
  // test next_tag to see if it is just an html tag (no external content)
  var tag = (next_tag || "").match(/^\s*<\s*\/?([a-z]*)\s*[^>]*>\s*$/);
  // if next_tag comes back but is not an isolated tag, then
  // let's treat the 'a' tag as having content
  // and respect the unformatted option
  // BUG FIX: compare the captured tag NAME (tag[1]) against the unformatted
  // list. The original passed the whole match array, which in_array's strict
  // equality could never match a string, so the membership test was dead.
  if (!tag || this.Utils.in_array(tag[1], unformatted)){
    return true;
  } else {
    return false;
  }
};
this.printer = function (js_source, indent_character, indent_size, max_char, brace_style) { //handles input/output and some other printing functions
this.input = js_source || ''; //gets the input for the Parser
this.output = [];
this.indent_character = indent_character;
this.indent_string = '';
this.indent_size = indent_size;
this.brace_style = brace_style;
this.indent_level = 0;
this.max_char = max_char;
this.line_char_count = 0; //count to see if max_char was exceeded
for (var i=0; i<this.indent_size; i++) {
this.indent_string += this.indent_character;
}
this.print_newline = function (ignore, arr) {
this.line_char_count = 0;
if (!arr || !arr.length) {
return;
}
if (!ignore) { //we might want the extra line
while (this.Utils.in_array(arr[arr.length-1], this.Utils.whitespace)) {
arr.pop();
}
}
arr.push('\n');
for (var i=0; i<this.indent_level; i++) {
arr.push(this.indent_string);
}
};
this.print_token = function (text) {
this.output.push(text);
};
this.indent = function () {
this.indent_level++;
};
this.unindent = function () {
if (this.indent_level > 0) {
this.indent_level--;
}
};
};
return this;
}
/*_____________________--------------------_____________________*/
multi_parser = new Parser(); //wrapping functions Parser
multi_parser.printer(html_source, indent_character, indent_size, max_char, brace_style); //initialize starting values
while (true) {
var t = multi_parser.get_token();
multi_parser.token_text = t[0];
multi_parser.token_type = t[1];
if (multi_parser.token_type === 'TK_EOF') {
break;
}
switch (multi_parser.token_type) {
case 'TK_TAG_START':
multi_parser.print_newline(false, multi_parser.output);
multi_parser.print_token(multi_parser.token_text);
multi_parser.indent();
multi_parser.current_mode = 'CONTENT';
break;
case 'TK_TAG_STYLE':
case 'TK_TAG_SCRIPT':
multi_parser.print_newline(false, multi_parser.output);
multi_parser.print_token(multi_parser.token_text);
multi_parser.current_mode = 'CONTENT';
break;
case 'TK_TAG_END':
//Print new line only if the tag has no content and has child
if (multi_parser.last_token === 'TK_CONTENT' && multi_parser.last_text === '') {
var tag_name = multi_parser.token_text.match(/\w+/)[0];
var tag_extracted_from_last_output = multi_parser.output[multi_parser.output.length -1].match(/<\s*(\w+)/);
if (tag_extracted_from_last_output === null || tag_extracted_from_last_output[1] !== tag_name) {
multi_parser.print_newline(true, multi_parser.output);
}
}
multi_parser.print_token(multi_parser.token_text);
multi_parser.current_mode = 'CONTENT';
break;
case 'TK_TAG_SINGLE':
// Don't add a newline before elements that should remain unformatted.
var tag_check = multi_parser.token_text.match(/^\s*<([a-z]+)/i);
if (!tag_check || !multi_parser.Utils.in_array(tag_check[1], unformatted)){
multi_parser.print_newline(false, multi_parser.output);
}
multi_parser.print_token(multi_parser.token_text);
multi_parser.current_mode = 'CONTENT';
break;
case 'TK_CONTENT':
if (multi_parser.token_text !== '') {
multi_parser.print_token(multi_parser.token_text);
}
multi_parser.current_mode = 'TAG';
break;
case 'TK_STYLE':
case 'TK_SCRIPT':
if (multi_parser.token_text !== '') {
multi_parser.output.push('\n');
var text = multi_parser.token_text,
_beautifier,
script_indent_level = 1;
if (multi_parser.token_type === 'TK_SCRIPT') {
_beautifier = typeof js_beautify === 'function' && js_beautify;
} else if (multi_parser.token_type === 'TK_STYLE') {
_beautifier = typeof css_beautify === 'function' && css_beautify;
}
if (options.indent_scripts === "keep") {
script_indent_level = 0;
} else if (options.indent_scripts === "separate") {
script_indent_level = -multi_parser.indent_level;
}
var indentation = multi_parser.get_full_indent(script_indent_level);
if (_beautifier) {
// call the Beautifier if avaliable
text = _beautifier(text.replace(/^\s*/, indentation), options);
} else {
// simply indent the string otherwise
var white = text.match(/^\s*/)[0];
var _level = white.match(/[^\n\r]*$/)[0].split(multi_parser.indent_string).length - 1;
var reindent = multi_parser.get_full_indent(script_indent_level -_level);
text = text.replace(/^\s*/, indentation)
.replace(/\r\n|\r|\n/g, '\n' + reindent)
.replace(/\s*$/, '');
}
if (text) {
multi_parser.print_token(text);
multi_parser.print_newline(true, multi_parser.output);
}
}
multi_parser.current_mode = 'TAG';
break;
}
multi_parser.last_token = multi_parser.token_type;
multi_parser.last_text = multi_parser.token_text;
}
return multi_parser.output.join('');
}
if (typeof define === "function") {
// Add support for require.js
define(function(require, exports, module) {
var js_beautify = require('./beautify.js').js_beautify;
var css_beautify = require('./beautify-css.js').css_beautify;
exports.html_beautify = function(html_source, options) {
return style_html(html_source, options, js_beautify, css_beautify);
};
});
} else if (typeof exports !== "undefined") {
// Add support for CommonJS. Just put this file somewhere on your require.paths
// and you will be able to `var html_beautify = require("beautify").html_beautify`.
var js_beautify = require('./beautify.js').js_beautify;
var css_beautify = require('./beautify-css.js').css_beautify;
exports.html_beautify = function(html_source, options) {
return style_html(html_source, options, js_beautify, css_beautify);
};
} else if (typeof window !== "undefined") {
// If we're running a web page and don't have either of the above, add our one global
window.html_beautify = function(html_source, options) {
return style_html(html_source, options, window.js_beautify, window.css_beautify);
};;
}
}());
|
#!/usr/bin/env bash
set -e
# usage: set_env VAR [DEFAULT]
# ie: set_env 'XYZ_DB_PASSWORD' 'example'
# usage: set_env VAR [DEFAULT]
# Exports VAR with its current value, or DEFAULT when VAR is unset or empty.
set_env() {
    local name="$1"
    local fallback="${2:-}"
    # ${!name} is indirect expansion; :- applies the fallback for
    # unset-or-empty, matching the original's non-empty test.
    export "$name"="${!name:-$fallback}"
}
# Loads various settings that are used elsewhere in the script
# This should be called before any other functions
# Populate the environment variables the entrypoint relies on. Must run
# before main() reads NF_VIRT_MEM_LIMIT.
setup_env() {
    # No default: NF_VIRT_MEM_LIMIT stays empty unless provided by the caller.
    set_env 'NF_VIRT_MEM_LIMIT'
}
# usage: process_init_files [file [file [...]]]
# ie: process_init_files /scripts/*
# process initializer files, based on file extensions and permissions
# usage: process_init_files [file [file [...]]]
# ie: process_init_files /scripts/*
# process initializer files, based on file extensions and permissions:
#   *.sh executable     -> executed in a child process
#   *.sh non-executable -> sourced into this shell (may modify environment)
#   anything else       -> skipped with a log line
process_init_files() {
    local f
    for f; do
        case "$f" in
            *.sh)
                if [ -x "$f" ]; then
                    echo "$0: running $f"
                    "$f"
                else
                    echo "$0: sourcing $f"
                    . "$f"
                fi
                ;;
            *)
                echo "$0: ignoring $f"
                ;;
        esac
    done
}
# Entrypoint: when launching one of the capture daemons, run the init
# scripts first, then exec the requested command (optionally under a
# virtual-memory ulimit).
main() {
    setup_env
    if [ "$1" = 'nfcapd' ] || [ "$1" = 'sfcapd' ]; then
        # check dir permissions — with `set -e` this aborts early when the
        # init directory is unreadable
        ls /entrypoint-init.d/ > /dev/null
        process_init_files /entrypoint-init.d/*
    fi
    if [ -z "${NF_VIRT_MEM_LIMIT:-}" ]; then
        exec "$@"
    else
        # Cap virtual memory (ulimit -v takes kilobytes) before handing off.
        ulimit -v ${NF_VIRT_MEM_LIMIT} && exec "$@"
    fi
}
main "$@"
|
import Recipient from '../models/Recipient';
/**
 * CRUD controller for Recipient records (Sequelize model).
 * Fix: "not found" responses previously used HTTP 401 (Unauthorized);
 * 404 (Not Found) is the correct status for a missing resource.
 */
class RecipientController {
  // GET / — list every recipient.
  async index(request, response) {
    const recipients = await Recipient.findAll();
    return response.json(recipients);
  }

  // GET /:id — fetch one recipient by primary key.
  async show(request, response) {
    const recipient = await Recipient.findByPk(request.params.id);
    if (recipient) {
      return response.json(recipient);
    }
    return response.status(404).json({ error: 'Recipient not found. ' });
  }

  // POST / — create a recipient from the request body.
  async store(request, response) {
    const recipient = await Recipient.create(request.body);
    return response.json(recipient);
  }

  // PUT /:id — update an existing recipient in place.
  async update(request, response) {
    const recipient = await Recipient.findByPk(request.params.id);
    if (recipient) {
      await recipient.update(request.body, { returning: true });
      return response.json(recipient);
    }
    return response.status(404).json({ error: 'Recipient not found. ' });
  }

  // DELETE /:id — destroy an existing recipient.
  async delete(request, response) {
    const recipient = await Recipient.findByPk(request.params.id);
    if (recipient) {
      await recipient.destroy();
      return response.json();
    }
    return response.status(404).json({ error: 'Recipient not found. ' });
  }
}
export default new RecipientController();
|
<filename>jframe-demos/actuator/src/main/java/com/jf/health/MyHealthIndicator.java
package com.jf.health;
import com.jf.util.ProcessUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.stereotype.Component;
/**
* Created with IntelliJ IDEA.
* Description: 自定义健康检查
* User: admin
* Date: 2019-08-13
* Time: 10:22
*/
@Component
public class MyHealthIndicator implements HealthIndicator {

    private static final Logger log = LoggerFactory.getLogger(MyHealthIndicator.class);

    /** Shell command that pings the local Redis instance. */
    static final String REDIS_PING = "redis-cli -h 127.0.0.1 -p 6379 ping";
    /** Shell command that reports the redis-cli version. */
    static final String REDIS_CLI = "redis-cli -v";

    /**
     * Reports UP (with the redis-cli version as a detail) when the local
     * Redis answers PONG, DOWN in every other case.
     */
    @Override
    public Health health() {
        String reply = ProcessUtil.process(REDIS_PING);
        if ("PONG".equals(reply)) {
            String version = ProcessUtil.process(REDIS_CLI);
            return Health.up().withDetail("VERSION", version).build();
        }
        return Health.down().build();
    }
}
|
<gh_stars>0
package main
import (
"io"
"os"
"path/filepath"
"strings"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/sebdah/goldie/v2"
)
// testCommand describes one CLI invocation under test.
type testCommand struct {
	args         []string // full argv, program name first
	wantExitCode int      // exit code run() is expected to return
}
// copyFile copies the contents of src into a newly created (or truncated)
// file at dst.
//
// Fix: the destination's Close error is now reported — a failed close can
// mean buffered data never reached disk, which the old version silently
// ignored via the deferred Close.
func copyFile(src, dst string) error {
	s, err := os.Open(src)
	if err != nil {
		return err
	}
	defer s.Close()

	d, err := os.Create(dst)
	if err != nil {
		return err
	}

	if _, err := io.Copy(d, s); err != nil {
		d.Close()
		return err
	}
	return d.Close()
}
// testRun executes each command via run() from inside cwd, injecting a
// --data-dir flag pointing at dataDir, and fails the test on any
// unexpected exit code. The original working directory is restored on exit.
func testRun(t *testing.T, cwd, dataDir string, commands []testCommand) {
	t.Helper()

	origDir, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	defer func() {
		if err := os.Chdir(origDir); err != nil {
			t.Fatal(err)
		}
	}()

	if err := os.Chdir(cwd); err != nil {
		t.Fatal(err)
	}

	for _, cmd := range commands {
		// The flag goes right after the program name, before sub-commands.
		args := []string{cmd.args[0], "--data-dir", dataDir}
		os.Args = append(args, cmd.args[1:]...)
		if diff := cmp.Diff(cmd.wantExitCode, run()); diff != "" {
			t.Fatalf("exit code mismatch (-want +got):\n%s", diff)
		}
	}
}
// testCommands runs the given commands through testRun with os.Stdout and
// os.Stderr redirected into temp files, then golden-compares both captured
// streams against fixtures named after the (lower-cased) test name.
//
// NOTE(review): the process-global os.Stdout/os.Stderr are reassigned and
// never restored, so later tests in the same binary inherit the
// redirection — confirm this is acceptable for this suite.
func testCommands(t *testing.T, commands []testCommand) {
	t.Helper()
	tmpDir := t.TempDir()
	tmpStdout, err := os.CreateTemp(tmpDir, "stdout.*.txt")
	if err != nil {
		t.Fatal(err)
	}
	tmpStderr, err := os.CreateTemp(tmpDir, "stderr.*.txt")
	if err != nil {
		t.Fatal(err)
	}
	// Capture everything the CLI prints from here on.
	os.Stdout = tmpStdout
	os.Stderr = tmpStderr
	// Seed the data dir with the fixture project list.
	err = copyFile("testdata/input/projects.json", filepath.Join(tmpDir, "projects.json"))
	if err != nil {
		t.Fatal(err)
	}
	testRun(t, "/tmp", tmpDir, commands)
	// e.g. "TestRun/pj_list" -> fixture name "testrun_pj_list".
	caseName := strings.ToLower(strings.ReplaceAll(t.Name(), "/", "_"))
	g := goldie.New(t)
	err = g.WithFixtureDir("testdata/stdout")
	if err != nil {
		t.Fatal(err)
	}
	gotStdout, err := os.ReadFile(os.Stdout.Name())
	if err != nil {
		t.Fatal(err)
	}
	g.Assert(t, caseName, gotStdout)
	err = g.WithFixtureDir("testdata/stderr")
	if err != nil {
		t.Fatal(err)
	}
	gotStderr, err := os.ReadFile(os.Stderr.Name())
	if err != nil {
		t.Fatal(err)
	}
	g.Assert(t, caseName, gotStderr)
}
// TestRun drives every top-level CLI invocation through the golden-file
// harness, one subtest per command line.
func TestRun(t *testing.T) {
	commands := []testCommand{
		{args: []string{"pj"}, wantExitCode: 0},
		{args: []string{"pj", "-h"}, wantExitCode: 0},
		{args: []string{"pj", "-v"}, wantExitCode: 0},
		{args: []string{"pj", "init"}, wantExitCode: 0},
		{args: []string{"pj", "init", "awesome-project"}, wantExitCode: 0},
		{args: []string{"pj", "list"}, wantExitCode: 0},
		{args: []string{"pj", "get"}, wantExitCode: 0},
		{args: []string{"pj", "get", "awesome-project"}, wantExitCode: 0},
		// NOTE(review): duplicated invocation — presumably exercises the
		// already-fetched path; confirm intent.
		{args: []string{"pj", "get", "awesome-project"}, wantExitCode: 0},
		{args: []string{"pj", "current"}, wantExitCode: 0},
		{args: []string{"pj", "workspace", "list"}, wantExitCode: 0},
		{args: []string{"pj", "workspace", "change", "workspace-2"}, wantExitCode: 0},
	}
	for _, tc := range cases(commands) {
		tc := tc // capture the loop variable for the closure
		t.Run(strings.Join(tc.args, "_"), func(t *testing.T) {
			testCommands(t, []testCommand{tc})
		})
	}
}

// cases is an identity helper kept for readability at the call site.
func cases(cs []testCommand) []testCommand { return cs }
func TestRun_InitializeMultipleProject(t *testing.T) {
testCmds := []testCommand{
{args: []string{"pj", "init", "awesome-project"}, wantExitCode: 0},
{args: []string{"pj", "init", "new-awesome-project"}, wantExitCode: 0},
}
testCommands(t, testCmds)
}
func TestRun_ChangeCurrentProject(t *testing.T) {
testCmds := []testCommand{
{args: []string{"pj", "init", "awesome-project"}, wantExitCode: 0},
{args: []string{"pj", "change", "awesome-project"}, wantExitCode: 0},
}
testCommands(t, testCmds)
}
func TestRun_AddWorkspace(t *testing.T) {
testCmds := []testCommand{
{args: []string{"pj", "init", "awesome-project"}, wantExitCode: 0},
{args: []string{"pj", "workspace", "add"}, wantExitCode: 0},
}
testCommands(t, testCmds)
}
|
from functools import partial
import jax.nn.initializers as init
import jax.numpy as jnp
from einops import rearrange, repeat
from flax import linen as nn
def default(val, d):
    """Return ``val`` unless it is ``None``, in which case fall back to ``d``."""
    if val is None:
        return d
    return val
def fourier_encode(x: jnp.ndarray, num_encodings=4):
    """Append sin/cos Fourier features at ``num_encodings`` octave scales.

    The input gains a trailing axis of size ``2 * num_encodings + 1``:
    the ``sin`` terms, then the ``cos`` terms, then the raw unscaled value.
    """
    expanded = jnp.expand_dims(x, -1)
    octaves = 2 ** jnp.arange(num_encodings)  # 1, 2, 4, ...
    scaled = expanded / octaves
    return jnp.concatenate(
        [jnp.sin(scaled), jnp.cos(scaled), expanded], axis=-1
    )
class FeedForward(nn.Module):
    """Position-wise MLP: widen by ``mult``, GELU, dropout, project back."""

    mult: int = 4          # hidden-layer width multiplier
    dropout: float = 0.0   # dropout rate applied after the activation

    @nn.compact
    def __call__(self, x, deterministic=False):
        # Output width always matches the input's trailing feature size.
        features = x.shape[-1]
        x = nn.Dense(features * self.mult)(x)
        x = nn.gelu(x)
        # deterministic=True disables dropout (e.g. at evaluation time).
        x = nn.Dropout(self.dropout)(x, deterministic=deterministic)
        x = nn.Dense(features)(x)
        return x
class Attention(nn.Module):
    """Multi-head (optionally cross-) attention.

    Queries come from ``x``; keys and values come from ``context`` when
    given, otherwise from ``x`` (plain self-attention).

    NOTE(review): the ``mask`` argument is accepted but never applied —
    confirm whether masking was intended.
    """

    heads: int = 8            # number of attention heads
    head_features: int = 64   # feature size per head
    dropout: float = 0.0      # dropout rate on the output projection

    @nn.compact
    def __call__(self, x, context=None, mask=None, deterministic=False):
        h = self.heads
        dim = self.head_features * h
        q = nn.Dense(dim, use_bias=False)(x)
        # One projection yields keys and values; split it in two.
        k, v = nn.Dense(dim * 2, use_bias=False)(default(context, x)).split(2, axis=-1)
        # Fold the head axis into the batch axis for the einsum contractions.
        q, k, v = map(
            lambda arr: rearrange(arr, "b n (h d) -> (b h) n d", h=h), (q, k, v)
        )
        # Scaled dot-product similarity.
        sim = jnp.einsum("b i d, b j d -> b i j", q, k) * self.head_features ** -0.5
        attn = nn.softmax(sim, axis=-1)
        out = jnp.einsum("b i j, b j d -> b i d", attn, v)
        out = rearrange(out, "(b h) n d -> b n (h d)", h=h)
        # Project back to the caller's feature size.
        out = nn.Dense(x.shape[-1])(out)
        out = nn.Dropout(self.dropout)(out, deterministic=deterministic)
        return out
class ReZero(nn.Module):
    """Scale input by a learned scalar initialised to zero (the ReZero trick),
    so a residual branch contributes nothing at initialisation."""

    @nn.compact
    def __call__(self, x):
        # Single learnable gate, starts at exactly 0.
        scale = self.param("scale", init.zeros, (1,))
        return scale * x
class Perceiver(nn.Module):
    """Perceiver-style encoder: a learned set of latents repeatedly
    cross-attends to the (Fourier-encoded) input, interleaved with latent
    self-attention and feed-forward blocks.

    Fix: ``tie_layer_weights`` was a bare class attribute (``= False`` with
    no annotation), so flax/dataclasses did not treat it as a field and it
    could not be overridden at construction time; it is now a proper
    ``bool`` field with the same default.
    """

    n_fourier_features: int = 4      # octaves used by fourier_encode
    depth: int = 2                   # number of cross+latent blocks
    n_latents: int = 256             # number of learned latent vectors
    latent_n_heads: int = 8
    latent_head_features: int = 64
    cross_n_heads: int = 2
    cross_head_features: int = 128
    ff_mult: int = 4                 # feed-forward widening factor
    attn_dropout: float = 0.0
    ff_dropout: float = 0.0
    tie_layer_weights: bool = False  # share one set of layer weights across depth

    @nn.compact
    def __call__(self, x):
        bs, dim = x.shape[0], x.shape[-1]
        # Learned latent array, broadcast to one copy per batch element.
        latents = self.param(
            "latents", init.normal(), (self.n_latents, dim * self.ff_mult)
        )
        latent = repeat(latents, "n d -> b n d", b=bs)

        # Fourier-encode the raw input, then flatten the encoding axis.
        x = fourier_encode(x, self.n_fourier_features)
        x = rearrange(x, "b n ... -> b n (...)")

        cross_attn = partial(
            Attention,
            heads=self.cross_n_heads,
            head_features=self.cross_head_features,
            dropout=self.attn_dropout,
        )
        latent_attn = partial(
            Attention,
            heads=self.latent_n_heads,
            head_features=self.latent_head_features,
            dropout=self.attn_dropout,
        )
        ff = partial(FeedForward, mult=self.ff_mult, dropout=self.ff_dropout)

        if self.tie_layer_weights:
            # One shared instance per sub-layer: reusing a flax module
            # instance shares its parameters across all of its calls.
            ca = cross_attn(name="cross_attn")
            la = latent_attn(name="latent_attn")
            cf = ff(name="cross_ff")
            lf = ff(name="latent_ff")
            for i in range(self.depth):
                # NOTE(review): the same ReZero gate is reused by all four
                # residuals of a block — confirm that sharing is intended.
                rz = ReZero(name=f"rezero_{i}")
                latent += rz(ca(latent, x))
                latent += rz(cf(latent))
                latent += rz(la(latent))
                latent += rz(lf(latent))
        else:
            for i in range(self.depth):
                rz = ReZero(name=f"rezero_{i}")
                latent += rz(cross_attn(name=f"cross_attn_{i}")(latent, x))
                latent += rz(ff(name=f"cross_ff_{i}")(latent))
                latent += rz(latent_attn(name=f"latent_attn_{i}")(latent))
                latent += rz(ff(name=f"latent_ff_{i}")(latent))
        return latent
|
#!/bin/bash -xue

# Fail fast when opam is missing entirely.
which opam > /dev/null || { echo 'opam not found!'; exit 1; }

# Register the Incubaid devel repository (no-op when already present).
{ opam remote list | grep Incubaid/opam-repository-devel > /dev/null; } || { opam remote add incubaid-devel -k git git://github.com/Incubaid/opam-repository-devel.git; }

opam update -y
opam switch 4.02.1
# Fix: modern $(...) command substitution instead of legacy backticks.
eval $(opam config env)

# Pinned build dependencies for this compiler switch.
opam install -y "ssl.0.4.7"
opam install -y conf-libev
opam install -y camlbz2
opam install -y snappy
opam install -y "lwt.2.4.7"
# Reinstall camltc at the pinned version.
opam remove -y camltc
opam install -y "camltc.999"
opam install -y bisect
opam install -y quickcheck
|
import React from 'react';
type SocialLoginProvider = 'google' | 'facebook' | 'twitter'; // Add more providers as needed
const useSocialLoginLoading = () => {
  // One boolean flag per provider; every provider starts out idle.
  const [loadingState, setLoadingState] = React.useState<Record<SocialLoginProvider, boolean>>({
    google: false,
    facebook: false,
    twitter: false,
  });

  // Flip a single provider's flag without disturbing the others.
  const setLoadingForSocialLogin = (provider: SocialLoginProvider, isLoading: boolean) => {
    setLoadingState((prev) => ({ ...prev, [provider]: isLoading }));
  };

  return { loadingState, setLoadingForSocialLogin };
};
// Usage in the component
// NOTE(review): this fragment is illustrative only — the `return` below sits
// at module top level, which is not valid TS/JSX; it must live inside a React
// component's render body (where the hook call and `socialLogins` are in scope).
const socialLogins = ['google', 'facebook']; // Example social logins
const { loadingState, setLoadingForSocialLogin } = useSocialLoginLoading();
return (
  <Stack>
    {socialLogins.includes('google') && (
      <Box>
        <Button
          onClick={() => setLoadingForSocialLogin('google', true)}
          disabled={loadingState.google}
        >
          {loadingState.google ? 'Loading...' : 'Google Login'}
        </Button>
      </Box>
    )}
    {socialLogins.includes('facebook') && (
      <Box>
        <Button
          onClick={() => setLoadingForSocialLogin('facebook', true)}
          disabled={loadingState.facebook}
        >
          {loadingState.facebook ? 'Loading...' : 'Facebook Login'}
        </Button>
      </Box>
    )}
  </Stack>
);
# Import the necessary libraries.
# Fix: `pd` and `train_test_split` were used below without ever being
# imported, so the script failed with NameError at runtime.
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

# Load the data: first column is the feature, second column is the target.
data = pd.read_csv('stock_price_data.csv')
X = data.iloc[:, :1]
y = data.iloc[:, 1]

# Split the dataset into training and testing partitions (75/25, seeded).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=42)

# Create and fit the linear regression model
model = LinearRegression().fit(X_train, y_train)

# Make predictions on the test data
y_predictions = model.predict(X_test)

# Evaluate the model (mean squared error; lower is better)
mse = mean_squared_error(y_test, y_predictions)
print(mse)
<filename>datasetutils/pasting.py
from abc import ABC, abstractclassmethod, abstractmethod
from random import randint
from typing import Optional, Tuple

from PIL.Image import Image
class PastingRule(ABC):
    """Strategy interface producing the pixel offset at which to paste an image."""

    # Fix: `@abstractclassmethod` is deprecated (since Python 3.3) and wrong
    # here — every subclass implements `rule` as a plain instance method.
    @abstractmethod
    def rule(self) -> Tuple[int, int]:
        """Return the paste offset as an ``(x, y)`` pixel tuple."""
        pass
class DefaultPastingRule(PastingRule):
    """Always pastes at the top-left corner of the canvas."""

    def rule(self) -> Tuple[int, int]:
        """Return the fixed origin offset."""
        return (0, 0)
class LeftCornerPastingRule(PastingRule):
    """Pastes at a caller-supplied fixed offset.

    Fixes: the constructor used ``left_corner = Tuple[int, int]`` — a typing
    construct as a *default value* instead of an annotation — and ``rule``
    was annotated as a 4-tuple while returning a 2-tuple.
    """

    def __init__(self, left_corner: Tuple[int, int]):
        self.__rule = left_corner

    def rule(self) -> Tuple[int, int]:
        """Return the configured ``(x, y)`` offset."""
        return self.__rule
class RandomPastingRule(PastingRule):
    """Pastes at a uniformly random offset within ``[0, limit]`` on each axis."""

    # Fixes: the return annotation claimed a 4-tuple while a 2-tuple is
    # returned, and ``limit`` was annotated ``Optional[int]`` even though
    # ``randint`` would crash on None — an int default of 20 is the contract.
    def __init__(self, limit: int = 20):
        self.__limit = limit

    def rule(self) -> Tuple[int, int]:
        """Return a fresh random ``(x, y)`` offset."""
        return (randint(0, self.__limit), randint(0, self.__limit))
if __name__ == "__main__":
    # Smoke test: a fixed-corner rule simply echoes the offset it was given.
    demo_rule: PastingRule = LeftCornerPastingRule((10, 10))
    print(demo_rule.rule())
def median_finder(nums):
    """Return the median of ``nums`` without mutating the caller's list.

    For an even count the median is the mean of the two middle values (a
    float); for an odd count it is the middle element itself.

    Fixes: the original sorted the caller's list in place, and raised a bare
    IndexError on empty input.

    Raises:
        ValueError: if ``nums`` is empty.
    """
    if not nums:
        raise ValueError("median of an empty sequence is undefined")
    ordered = sorted(nums)  # sorted() copies, preserving the caller's order
    n = len(ordered)
    mid = n // 2
    if n % 2 == 0:
        return (ordered[mid - 1] + ordered[mid]) / 2
    return ordered[mid]
<gh_stars>1-10
# -*- coding: utf-8 -*-
from pygments.style import Style
from pygments.token import Token
from pygments.styles.default import DefaultStyle
class DocumentStyle(Style):
    """Pygments style for the interactive prompt: teal completion menu plus
    a green host/prompt marker, falling back to the stock defaults for all
    other tokens."""

    styles = {
        # Completion pop-up colours (current selection highlighted).
        Token.Menu.Completions.Completion.Current: 'bg:#00aaaa #000000',
        Token.Menu.Completions.Completion: 'bg:#008888 #ffffff',
        Token.Menu.Completions.ProgressButton: 'bg:#003333',
        Token.Menu.Completions.ProgressBar: 'bg:#00aaaa',
        # Prompt pieces: host label and the '>>>' marker.
        Token.Host: '#00ff00',
        Token.Pound: '#00ff00',
    }
    # Merge in the stock defaults. NOTE(review): update() would overwrite any
    # of the custom keys above if DefaultStyle also defined them — assumed
    # disjoint here; confirm if DefaultStyle ever grows Menu/Host/Pound keys.
    styles.update(DefaultStyle.styles)
def get_prompt_tokens(cli):
    """Build the two-part prompt: host label followed by the '>>> ' marker."""
    host = (Token.Host, 'scm')
    marker = (Token.Pound, '>>> ')
    return [host, marker]
|
<reponame>mission-apprentissage/prise-de-rdv
import * as Yup from "yup";
import { Formik, Field, Form } from "formik";
import { useHistory } from "react-router-dom";
import { Box, Container, Input, Button, Link, Text, Flex } from "@chakra-ui/react";
import useAuth from "../common/hooks/useAuth";
import { _post } from "../common/httpClient";
const LoginPage = () => {
let [, setAuth] = useAuth();
let history = useHistory();
let feedback = (meta, message) => {
return meta.touched && meta.error
? {
feedback: message,
invalid: true,
}
: {};
};
let login = async (values, { setStatus }) => {
try {
let { token } = await _post("/api/login", values);
setAuth(token);
history.push("/");
} catch (e) {
console.error(e);
setStatus({ error: e.prettyMessage });
}
};
return (
<Box p={5} bg="#FAFAFA">
<Container border="1px solid #E0E5ED" bg="white" p={0} maxW="35ch">
<Box borderBottom="1px solid #E0E5ED" p={4}>
<Text fontSize="16px" ml={2}>
Connexion
</Text>
</Box>
<Box mx={5} mt={5}>
<Formik
initialValues={{
username: "",
password: "",
}}
validationSchema={Yup.object().shape({
username: Yup.string().required("Requis"),
password: <PASSWORD>().<PASSWORD>("<PASSWORD>"),
})}
onSubmit={login}
>
{({ status = {} }) => {
return (
<Form>
<Box>
<Text textStyle="h6" fontSize="12px">
Identifiant
</Text>
<Field name="username">
{({ field, meta }) => {
return (
<Input
placeholder="Votre identifiant..."
{...field}
{...feedback(meta, "Identifiant invalide")}
/>
);
}}
</Field>
</Box>
<Box mt={3}>
<Text textStyle="h6" fontSize="12px">
Mot de passe
</Text>
<Field name="password">
{({ field, meta }) => {
return (
<Input
type={"password"}
placeholder="Votre mot de passe..."
{...field}
{...feedback(meta, "Mot de passe invalide")}
/>
);
}}
</Field>
</Box>
<Flex mt={5} justifyContent="space-between">
<Button variant="primary" type={"submit"} fontSize="12px" fontWeight="700">
Connexion
</Button>
<Link href="/forgotten-password" color="info" mt={1}>
Mot de passe oublié
</Link>
</Flex>
<Box mb={5}>{status.error && <Text color="#cd201f">{status.error}</Text>}</Box>
</Form>
);
}}
</Formik>
</Box>
</Container>
</Box>
);
};
export default LoginPage;
|
/*
* This file is generated by jOOQ.
*/
package io.cattle.platform.core.model.tables.records;
import io.cattle.platform.core.model.Setting;
import io.cattle.platform.core.model.tables.SettingTable;
import io.cattle.platform.db.jooq.utils.TableRecordJaxb;
import javax.annotation.Generated;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record3;
import org.jooq.Row3;
import org.jooq.impl.UpdatableRecordImpl;
/**
* This class is generated by jOOQ.
*/
// NOTE(review): generated by jOOQ from the cattle.setting table — do not edit
// by hand; any manual change will be lost on the next code-generation run.
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.9.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
@Entity
@Table(name = "setting", schema = "cattle")
public class SettingRecord extends UpdatableRecordImpl<SettingRecord> implements TableRecordJaxb, Record3<Long, String, String>, Setting {

    private static final long serialVersionUID = 1761153164;

    /**
     * Setter for <code>cattle.setting.id</code>.
     */
    @Override
    public void setId(Long value) {
        set(0, value);
    }

    /**
     * Getter for <code>cattle.setting.id</code>.
     */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "id", unique = true, nullable = false, precision = 19)
    @Override
    public Long getId() {
        return (Long) get(0);
    }

    /**
     * Setter for <code>cattle.setting.name</code>.
     */
    @Override
    public void setName(String value) {
        set(1, value);
    }

    /**
     * Getter for <code>cattle.setting.name</code>.
     */
    @Column(name = "name", nullable = false, length = 255)
    @Override
    public String getName() {
        return (String) get(1);
    }

    /**
     * Setter for <code>cattle.setting.value</code>.
     */
    @Override
    public void setValue(String value) {
        set(2, value);
    }

    /**
     * Getter for <code>cattle.setting.value</code>.
     */
    @Column(name = "value", nullable = false, length = 16777215)
    @Override
    public String getValue() {
        return (String) get(2);
    }

    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Record1<Long> key() {
        return (Record1) super.key();
    }

    // -------------------------------------------------------------------------
    // Record3 type implementation
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Long, String, String> fieldsRow() {
        return (Row3) super.fieldsRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Row3<Long, String, String> valuesRow() {
        return (Row3) super.valuesRow();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<Long> field1() {
        return SettingTable.SETTING.ID;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field2() {
        return SettingTable.SETTING.NAME;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Field<String> field3() {
        return SettingTable.SETTING.VALUE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Long value1() {
        return getId();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value2() {
        return getName();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String value3() {
        return getValue();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SettingRecord value1(Long value) {
        setId(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SettingRecord value2(String value) {
        setName(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SettingRecord value3(String value) {
        setValue(value);
        return this;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SettingRecord values(Long value1, String value2, String value3) {
        value1(value1);
        value2(value2);
        value3(value3);
        return this;
    }

    // -------------------------------------------------------------------------
    // FROM and INTO
    // -------------------------------------------------------------------------

    /**
     * {@inheritDoc}
     */
    @Override
    public void from(Setting from) {
        setId(from.getId());
        setName(from.getName());
        setValue(from.getValue());
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public <E extends Setting> E into(E into) {
        into.from(this);
        return into;
    }

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    /**
     * Create a detached SettingRecord
     */
    public SettingRecord() {
        super(SettingTable.SETTING);
    }

    /**
     * Create a detached, initialised SettingRecord
     */
    public SettingRecord(Long id, String name, String value) {
        super(SettingTable.SETTING);

        // Field indices follow the table column order: id, name, value.
        set(0, id);
        set(1, name);
        set(2, value);
    }
}
|
// Shared coordinates for every sub-project in this build.
organization in Global := "io.getnelson.nelson"

scalaVersion in Global := "2.11.11"

// Generate Scala sources from the .proto files into the managed-sources dir.
PB.targets in Compile := Seq(
  scalapb.gen() -> (sourceManaged in Compile).value
)

// "protobuf" scope: needed when compiling .proto files, not at runtime.
libraryDependencies += "com.thesamet.scalapb" %% "scalapb-runtime" % scalapb.compiler.Version.scalapbVersion % "protobuf"
|
#!/bin/bash
##
# @file contrib/build_sdk.sh
# @brief Builds MEGA SDK static library and static examples
#
# (c) 2013-2014 by Mega Limited, Auckland, New Zealand
#
# This file is part of the MEGA SDK - Client Access Engine.
#
# Applications using the MEGA API must present a valid application key
# and comply with the the rules set forth in the Terms of Service.
#
# The MEGA SDK is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# @copyright Simplified (2-clause) BSD License.
#
# You should have received a copy of the license along with this
# program.
##
# Warn when the shell running this script isn't bash.
if [ -z "$BASH_VERSION" ] ; then
    echo "WARNING: The shell running this script isn't bash."
fi

# Global option flags, toggled by command-line parsing further down the
# script (0 = off, 1 = on; string variables collect extra flags).
use_local=0
use_dynamic=0
disable_freeimage=0
disable_ssl=0
disable_zlib=0
download_only=0
only_build_dependencies=0
enable_megaapi=0
make_opts=""
config_opts=""
no_examples=""
configure_only=0
disable_posix_threads=""
enable_sodium=0
enable_cares=0
enable_curl=0
enable_libuv=0
android_build=0
enable_cryptopp=0
# Trap handler: invoked when any build step fails.
on_exit_error() {
    printf '%s\n' "ERROR! Please check log files. Exiting.."
}
# Trap handler: report what this invocation actually accomplished, based on
# the global mode flags.
on_exit_ok() {
    local msg="Successfully compiled MEGA SDK!"
    if [ $configure_only -eq 1 ]; then
        msg="Successfully configured MEGA SDK!"
    elif [ $download_only -eq 1 ]; then
        msg="Successfully downloaded MEGA SDK dependencies!"
    elif [ $only_build_dependencies -eq 1 ]; then
        msg="Successfully built MEGA SDK dependencies!"
    fi
    echo "$msg"
}
# Suggest the right package-manager command for installing the toolchain on
# this distro family; silent when neither yum nor apt-get is present.
print_distro_help()
{
    # yum: CentOS, Fedora, RedHat
    if type yum >/dev/null 2>&1; then
        echo "Please execute the following command: sudo yum install gcc gcc-c++ libtool unzip autoconf make wget glibc-devel-static"
        return
    fi

    # apt-get: Debian, Ubuntu
    if type apt-get >/dev/null 2>&1; then
        echo "Please execute the following command: sudo apt-get install gcc c++ libtool-bin unzip autoconf m4 make wget"
        echo " (or 'libtool' on older Debian / Ubuntu distro versions)"
        return
    fi
}
# Verify the build toolchain is available; abort with per-distro install
# hints when any required program is missing.
check_apps()
{
    if [ -z "${BASH}" ]
    then
        echo "Please run this script with the BASH shell"
        exit 1
    elif [ ${BASH_VERSINFO} -lt 3 ]
    then
        printf "BASH version 3 or greater is required"
        exit 1
    fi

    APPS=(bash gcc c++ libtool tar unzip autoconf make autoreconf wget automake m4)
    for app in ${APPS[@]}; do
        # Redundant belt-and-braces probe: `type` and `hash` both check
        # availability; either failing aborts with distro-specific help.
        type ${app} >/dev/null 2>&1 || { echo "${app} is not installed. Please install it first and re-run the script."; print_distro_help; exit 1; }
        hash ${app} 2>/dev/null || { echo "${app} is not installed. Please install it first and re-run the script."; print_distro_help; exit 1; }
    done
}
# Download a dependency archive into $local_dir, with a /tmp/megasdkbuild
# cache, a wget -> curl fallback, and an MD5 check (one retry on mismatch).
# usage: package_download <name> <url> <archive-basename> <md5>
package_download() {
    local name=$1
    local url=$2
    local file=$local_dir/$3
    local md5sum=$4

    if [ $use_local -eq 1 ]; then
        echo "Using local file for $name"
        return
    fi

    echo "Downloading $name"

    if [ -f $file ]; then
        rm -f $file || true
    fi

    # use packages previously downloaded in /tmp/megasdkbuild folder
    # if not present download from URL specified
    # if wget fails, try curl
    mkdir -p /tmp/megasdkbuild/
    # cp /srv/dependencies_manually_downloaded/$3 $file 2>/dev/null || \
    cp /tmp/megasdkbuild/$3 $file || \
    wget --no-check-certificate -c $url -O $file --progress=bar:force -t 2 -T 30 || \
    curl -k $url > $file || exit 1

    echo "Checking MD5SUM for $file"
    if ! echo $md5sum \*$file | md5sum -c - ; then
        echo "Downloading $3 again"
        #rm /tmp/megasdkbuild/$3
        # remove the partial file first, otherwise wget -c would report
        # "The file is already fully retrieved" and skip the re-download
        rm $file
        wget --no-check-certificate -c $url -O $file --progress=bar:force -t 2 -T 30 || \
        curl -k $url > $file || exit 1

        echo "Checking (again) MD5SUM for $file"
        if ! echo $md5sum \*$file | md5sum -c - ; then
            echo "Aborting execution due to incorrect MD5SUM for $file. Expected: $md5sum. Calculated:"
            md5sum $file
            exit 1
        fi
    fi

    # cache the verified archive for subsequent builds
    cp $file /tmp/megasdkbuild/$3
}
# Extract a downloaded archive (.tar.gz or .zip) into $dir, replacing any
# previous extraction. usage: package_extract <name> <archive-basename> <dir>
package_extract() {
    local name=$1
    local file=$local_dir/$2
    local dir=$3

    echo "Extracting $name"

    local filename=$(basename "$file")
    local extension="${filename##*.}"

    if [ ! -f $file ]; then
        echo "File $file does not exist!"
        # Fix: previously only warned and fell through, leaving tar/unzip to
        # fail later with a confusing secondary error.
        exit 1
    fi

    # Start from a clean directory so stale files never leak between builds.
    if [ -d $dir ]; then
        rm -fr $dir || exit 1
    fi

    if [ $extension == "gz" ]; then
        tar -xzf $file &> $name.extract.log || exit 1
    elif [ $extension == "zip" ]; then
        unzip $file -d $dir &> $name.extract.log || exit 1
    else
        echo "Unsupported extension!"
        exit 1
    fi
}
# Configure a dependency inside $dir with the given install prefix, running
# ./autogen.sh first when present and preferring an OpenSSL-style ./config
# script over autoconf ./configure.
# usage: package_configure <name> <dir> <install_dir> <extra-params>
package_configure() {
    local name=$1
    local dir=$2
    local install_dir="$3"
    local params="$4"

    local conf_f1="./config"
    local conf_f2="./configure"
    local autogen="./autogen.sh"

    echo "Configuring $name"

    local cwd=$(pwd)
    cd $dir || exit 1

    if [ -f $autogen ]; then
        $autogen
    fi

    if [ -f $conf_f1 ]; then
        $conf_f1 --prefix=$install_dir $params &> ../$name.conf.log || exit 1
    elif [ -f $conf_f2 ]; then
        $conf_f2 $config_opts --prefix=$install_dir $params &> ../$name.conf.log || exit 1
    else
        # Fix: the old message printed "$?" of the preceding [ -f ] test,
        # which never reflected a configure run and always showed a
        # meaningless status — report the real problem instead.
        echo "Failed to configure $name: no $conf_f1 or $conf_f2 script found"
        exit 1
    fi

    cd $cwd
}
# Run make for a dependency, logging to ../<name>.build.log.
# usage: package_build <name> <dir> [target]
package_build() {
    local name=$1
    local dir=$2
    local target=""
    # A target is only honoured when passed as exactly the third argument.
    if [ "$#" -eq 3 ]; then
        target=$3
    fi

    echo "Building $name"

    local prev_dir=$(pwd)
    cd $dir

    echo make $make_opts $target
    make $make_opts $target &> ../$name.build.log
    local status=$?
    if [ $status -ne 0 ]; then
        echo "Failed to build $name, exit status: $status"
        exit 1
    fi

    cd $prev_dir
}
# Run `make install` for a dependency, logging to ../<name>.install.log,
# then mirror any lib64/ output into lib/.
# usage: package_install <name> <dir> <install_dir> [extra-make-args]
package_install() {
    local name=$1
    local dir=$2
    local install_dir=$3
    local target=""
    # Extra make arguments are only honoured as exactly the fourth argument.
    if [ "$#" -eq 4 ]; then
        target=$4
    fi

    echo "Installing $name"

    local prev_dir=$(pwd)
    cd $dir

    make install $target &> ../$name.install.log
    local status=$?
    if [ $status -ne 0 ]; then
        echo "Failed to install $name, exit status: $status"
        exit 1
    fi

    cd $prev_dir

    # some packages install libraries to a "lib64" folder
    local lib64=$install_dir/lib64
    local lib=$install_dir/lib
    if [ -d $lib64 ]; then
        cp -f $lib64/* $lib/
    fi
}
# Download, configure, build and install OpenSSL 1.0.2h into $2.
# Android and 64-bit macOS need hand-rolled configure invocations.
openssl_pkg() {
    local build_dir=$1
    local install_dir=$2
    local name="OpenSSL"
    local openssl_ver="1.0.2h"
    local openssl_url="https://www.openssl.org/source/openssl-$openssl_ver.tar.gz"
    local openssl_md5="9392e65072ce4b614c1392eefc1f23d0"
    local openssl_file="openssl-$openssl_ver.tar.gz"
    local openssl_dir="openssl-$openssl_ver"
    local openssl_params="--openssldir=$install_dir no-shared shared"
    local loc_make_opts=$make_opts

    package_download $name $openssl_url $openssl_file $openssl_md5
    if [ $download_only -eq 1 ]; then
        return
    fi

    package_extract $name $openssl_file $openssl_dir

    if [ $android_build -eq 1 ]; then
        echo "Configuring $name"
        local cwd=$(pwd)
        cd $openssl_dir
        # Strip the "all" prerequisite so `make install` skips a full rebuild.
        perl -pi -e 's/install: all install_docs install_sw/install: install_docs install_sw/g' Makefile.org
        ./config shared no-ssl2 no-ssl3 no-comp no-hw no-engine --prefix=$install_dir
        make depend || exit 1
        cd $cwd
    else
        # handle MacOS
        if [ "$(uname)" == "Darwin" ]; then
            # OpenSSL compiles 32bit binaries, we need to explicitly tell to use x86_64 mode
            if [ "$(uname -m)" == "x86_64" ]; then
                echo "Configuring $name"
                local cwd=$(pwd)
                cd $openssl_dir
                ./Configure darwin64-x86_64-cc --prefix=$install_dir $openssl_params &> ../$name.conf.log || exit 1
                cd $cwd
            else
                package_configure $name $openssl_dir $install_dir "$openssl_params"
            fi
        else
            package_configure $name $openssl_dir $install_dir "$openssl_params"
        fi
    fi

    # OpenSSL has issues with parallel builds, let's use the default options
    make_opts=""
    package_build $name $openssl_dir
    make_opts=$loc_make_opts

    package_install $name $openssl_dir $install_dir
}
# Download, build and install Crypto++ 5.6.3 as a static library into $2.
cryptopp_pkg() {
    local build_dir=$1
    local install_dir=$2
    local name="Crypto++"
    local cryptopp_ver="563"
    local cryptopp_url="http://www.cryptopp.com/cryptopp$cryptopp_ver.zip"
    local cryptopp_md5="3c5b70e2ec98b7a24988734446242d07"
    local cryptopp_file="cryptopp$cryptopp_ver.zip"
    local cryptopp_dir="cryptopp$cryptopp_ver"

    package_download $name $cryptopp_url $cryptopp_file $cryptopp_md5
    if [ $download_only -eq 1 ]; then
        return
    fi

    package_extract $name $cryptopp_file $cryptopp_dir

    # modify Makefile so that it does not use specific cpu architecture optimizations
    sed "s#CXXFLAGS += -march=native#CXXFLAGS += #g" -i $cryptopp_dir/GNUmakefile

    if [ $android_build -eq 1 ]; then
        # Android cross-compiles use the project's GNUmakefile-cross.
        package_build $name $cryptopp_dir "static -f GNUmakefile-cross"
        package_install $name $cryptopp_dir $install_dir "-f GNUmakefile-cross"
    else
        package_build $name $cryptopp_dir static
        package_install $name $cryptopp_dir $install_dir
    fi
}
# Download, configure, build and install libsodium 1.0.12 into $2.
sodium_pkg() {
    local build_dir=$1
    local install_dir=$2
    local name="Sodium"
    local sodium_ver="1.0.12"
    local sodium_url="https://download.libsodium.org/libsodium/releases/libsodium-$sodium_ver.tar.gz"
    local sodium_md5="c308e3faa724b630b86cc0aaf887a5d4"
    local sodium_file="sodium-$sodium_ver.tar.gz"
    local sodium_dir="libsodium-$sodium_ver"
    # Shared vs static follows the global use_dynamic switch.
    if [ $use_dynamic -eq 1 ]; then
        local sodium_params="--enable-shared"
    else
        local sodium_params="--disable-shared --enable-static --disable-pie"
    fi

    package_download $name $sodium_url $sodium_file $sodium_md5
    if [ $download_only -eq 1 ]; then
        return
    fi

    package_extract $name $sodium_file $sodium_dir
    package_configure $name $sodium_dir $install_dir "$sodium_params"
    package_build $name $sodium_dir
    package_install $name $sodium_dir $install_dir
}
# Download, configure, build and install libuv 1.8.0 into $2.
libuv_pkg() {
    local build_dir=$1
    local install_dir=$2
    local name="libuv"
    local libuv_ver="v1.8.0"
    local libuv_url="http://dist.libuv.org/dist/$libuv_ver/libuv-$libuv_ver.tar.gz"
    local libuv_md5="f4229c4360625e973ae933cb92e1faf7"
    local libuv_file="libuv-$libuv_ver.tar.gz"
    local libuv_dir="libuv-$libuv_ver"
    # Shared vs static follows the global use_dynamic switch.
    if [ $use_dynamic -eq 1 ]; then
        local libuv_params="--enable-shared"
    else
        local libuv_params="--disable-shared --enable-static"
    fi

    package_download $name $libuv_url $libuv_file $libuv_md5
    if [ $download_only -eq 1 ]; then
        return
    fi

    package_extract $name $libuv_file $libuv_dir

    # linking with static library requires -fPIC
    if [ $use_dynamic -eq 0 ]; then
        export CFLAGS="-fPIC"
    fi
    package_configure $name $libuv_dir $install_dir "$libuv_params"
    if [ $use_dynamic -eq 0 ]; then
        unset CFLAGS
    fi

    package_build $name $libuv_dir
    package_install $name $libuv_dir $install_dir
}
# Download, configure, build and install zlib 1.2.11 into $2.
# MinGW builds bypass configure and use win32/Makefile.gcc directly.
zlib_pkg() {
    local build_dir=$1
    local install_dir=$2
    local name="Zlib"
    local zlib_ver="1.2.11"
    local zlib_url="http://zlib.net/zlib-$zlib_ver.tar.gz"
    local zlib_md5="1c9f62f0778697a09d36121ead88e08e"
    local zlib_file="zlib-$zlib_ver.tar.gz"
    local zlib_dir="zlib-$zlib_ver"
    local loc_conf_opts=$config_opts
    if [ $use_dynamic -eq 1 ]; then
        local zlib_params=""
    else
        local zlib_params="--static"
    fi

    package_download $name $zlib_url $zlib_file $zlib_md5
    if [ $download_only -eq 1 ]; then
        return
    fi

    package_extract $name $zlib_file $zlib_dir

    # zlib's configure doesn't recognize --host=xxx; drop the global opts here
    config_opts=""

    # Windows must use Makefile.gcc
    if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
        package_configure $name $zlib_dir $install_dir "$zlib_params"
        package_build $name $zlib_dir
        package_install $name $zlib_dir $install_dir
    else
        # Makefile.gcc reads the install layout from these env vars.
        export BINARY_PATH=$install_dir/bin
        export INCLUDE_PATH=$install_dir/include
        export LIBRARY_PATH=$install_dir/lib
        package_build $name $zlib_dir "-f win32/Makefile.gcc"
        package_install $name $zlib_dir $install_dir "-f win32/Makefile.gcc"
        unset BINARY_PATH
        unset INCLUDE_PATH
        unset LIBRARY_PATH
    fi
    config_opts=$loc_conf_opts
}
# Download, build and install SQLite 3.10.1 (autoconf amalgamation).
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: use_dynamic, download_only.
sqlite_pkg() {
local build_dir=$1
local install_dir=$2
local name="SQLite"
local sqlite_ver="3100100"
local sqlite_url="http://www.sqlite.org/2016/sqlite-autoconf-$sqlite_ver.tar.gz"
local sqlite_md5="f315a86cb3e8671fe473baa8d34746f6"
local sqlite_file="sqlite-$sqlite_ver.tar.gz"
# note: the tarball extracts to "sqlite-autoconf-<ver>", not "sqlite-<ver>"
local sqlite_dir="sqlite-autoconf-$sqlite_ver"
if [ $use_dynamic -eq 1 ]; then
local sqlite_params="--enable-shared"
else
local sqlite_params="--disable-shared --enable-static"
fi
package_download $name $sqlite_url $sqlite_file $sqlite_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $sqlite_file $sqlite_dir
package_configure $name $sqlite_dir $install_dir "$sqlite_params"
package_build $name $sqlite_dir
package_install $name $sqlite_dir $install_dir
}
# Download, build and install c-ares 1.10.0 (async DNS resolver used by cURL).
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: use_dynamic, download_only.
cares_pkg() {
local build_dir=$1
local install_dir=$2
local name="c-ares"
local cares_ver="1.10.0"
local cares_url="http://c-ares.haxx.se/download/c-ares-$cares_ver.tar.gz"
local cares_md5="1196067641411a75d3cbebe074fd36d8"
# local archive name intentionally differs from the upstream "c-ares-*" name
local cares_file="cares-$cares_ver.tar.gz"
local cares_dir="c-ares-$cares_ver"
if [ $use_dynamic -eq 1 ]; then
local cares_params="--enable-shared"
else
local cares_params="--disable-shared --enable-static"
fi
package_download $name $cares_url $cares_file $cares_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $cares_file $cares_dir
package_configure $name $cares_dir $install_dir "$cares_params"
package_build $name $cares_dir
package_install $name $cares_dir $install_dir
}
# Download, build and install cURL 7.46.0 with a minimal protocol surface
# (HTTP only; FTP/LDAP/TELNET/etc. disabled), linked against the locally
# built zlib, c-ares and (optionally) OpenSSL.
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: disable_ssl, use_dynamic, download_only.
curl_pkg() {
local build_dir=$1
local install_dir=$2
local name="cURL"
local curl_ver="7.46.0"
local curl_url="http://curl.haxx.se/download/curl-$curl_ver.tar.gz"
local curl_md5="230e682d59bf8ab6eca36da1d39ebd75"
local curl_file="curl-$curl_ver.tar.gz"
local curl_dir="curl-$curl_ver"
local openssl_flags=""
# use local or system OpenSSL
if [ $disable_ssl -eq 0 ]; then
openssl_flags="--with-ssl=$install_dir"
else
openssl_flags="--with-ssl"
fi
# the two branches differ only by --disable-shared for the static build
if [ $use_dynamic -eq 1 ]; then
local curl_params="--disable-ftp --disable-file --disable-ldap --disable-ldaps --disable-rtsp --disable-dict \
--disable-telnet --disable-tftp --disable-pop3 --disable-imap --disable-smtp --disable-gopher --disable-sspi \
--without-librtmp --without-libidn --without-libssh2 --enable-ipv6 --disable-manual \
--with-zlib=$install_dir --enable-ares=$install_dir $openssl_flags"
else
local curl_params="--disable-ftp --disable-file --disable-ldap --disable-ldaps --disable-rtsp --disable-dict \
--disable-telnet --disable-tftp --disable-pop3 --disable-imap --disable-smtp --disable-gopher --disable-sspi \
--without-librtmp --without-libidn --without-libssh2 --enable-ipv6 --disable-manual \
--disable-shared --with-zlib=$install_dir --enable-ares=$install_dir $openssl_flags"
fi
package_download $name $curl_url $curl_file $curl_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $curl_file $curl_dir
package_configure $name $curl_dir $install_dir "$curl_params"
package_build $name $curl_dir
package_install $name $curl_dir $install_dir
}
# Download, build and install GNU Readline 6.3 (used by the example apps).
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: use_dynamic, download_only.
readline_pkg() {
local build_dir=$1
local install_dir=$2
local name="Readline"
local readline_ver="6.3"
local readline_url="ftp://ftp.cwru.edu/pub/bash/readline-$readline_ver.tar.gz"
local readline_md5="33c8fb279e981274f485fd91da77e94a"
local readline_file="readline-$readline_ver.tar.gz"
local readline_dir="readline-$readline_ver"
if [ $use_dynamic -eq 1 ]; then
local readline_params="--enable-shared"
else
local readline_params="--disable-shared --enable-static"
fi
package_download $name $readline_url $readline_file $readline_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $readline_file $readline_dir
package_configure $name $readline_dir $install_dir "$readline_params"
package_build $name $readline_dir
package_install $name $readline_dir $install_dir
}
# Download, build and install GNU Termcap 1.3.1 (Readline dependency for the
# example apps).
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: use_dynamic, download_only.
termcap_pkg() {
local build_dir=$1
local install_dir=$2
local name="Termcap"
local termcap_ver="1.3.1"
local termcap_url="http://ftp.gnu.org/gnu/termcap/termcap-$termcap_ver.tar.gz"
local termcap_md5="ffe6f86e63a3a29fa53ac645faaabdfa"
local termcap_file="termcap-$termcap_ver.tar.gz"
local termcap_dir="termcap-$termcap_ver"
if [ $use_dynamic -eq 1 ]; then
local termcap_params="--enable-shared"
else
local termcap_params="--disable-shared --enable-static"
fi
package_download $name $termcap_url $termcap_file $termcap_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $termcap_file $termcap_dir
package_configure $name $termcap_dir $install_dir "$termcap_params"
package_build $name $termcap_dir
package_install $name $termcap_dir $install_dir
}
# Download, patch, build and install FreeImage 3.17.0.
# $1 = build dir (unused here), $2 = install prefix, $3 = original working
#      directory (used to locate the contrib macOS Makefile).
# Reads globals: download_only, android_build, use_dynamic.
# FreeImage has no "make install" target, so header/library are copied by hand.
freeimage_pkg() {
local build_dir=$1
local install_dir=$2
local cwd=$3
local name="FreeImage"
local freeimage_ver="3170"
local freeimage_url="http://downloads.sourceforge.net/freeimage/FreeImage$freeimage_ver.zip"
local freeimage_md5="459e15f0ec75d6efa3c7bd63277ead86"
local freeimage_file="freeimage-$freeimage_ver.zip"
local freeimage_dir_extract="freeimage-$freeimage_ver"
local freeimage_dir="freeimage-$freeimage_ver/FreeImage"
package_download $name $freeimage_url $freeimage_file $freeimage_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $freeimage_file $freeimage_dir_extract
# patch to fix problem with raw strings (C++11 parses "#X" adjacent to a
# string literal as a raw-string prefix)
find $freeimage_dir_extract/FreeImage/Source/LibWebP -type f -exec sed -i -e 's/"#\([A-X]\)"/" #\1 "/g' {} \;
# patch to fix problem with newest compilers: force C++98 mode
sed -i "s#CXXFLAGS += -D__ANSI__#CXXFLAGS += -D__ANSI__ -std=c++98#g" $freeimage_dir_extract/FreeImage/Makefile.gnu
# replace Makefile on MacOS
if [ "$(uname)" == "Darwin" ]; then
cp $cwd/contrib/FreeImage.Makefile.osx $freeimage_dir/Makefile.osx
fi
# Android toolchains lack search.h; drop the feature macro from LibTIFF config
if [ $android_build -eq 1 ]; then
sed -i '/#define HAVE_SEARCH_H 1/d' $freeimage_dir/Source/LibTIFF4/tif_config.h
fi
if [ $use_dynamic -eq 0 ]; then
export FREEIMAGE_LIBRARY_TYPE=STATIC
fi
if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
package_build $name $freeimage_dir
# manually copy header and library
cp $freeimage_dir/Dist/FreeImage.h $install_dir/include || exit 1
cp $freeimage_dir/Dist/libfreeimage* $install_dir/lib || exit 1
# MinGW
else
package_build $name $freeimage_dir "-f Makefile.mingw"
# manually copy header and library
cp $freeimage_dir/Dist/FreeImage.h $install_dir/include || exit 1
# ignore if not present
# BUGFIX: "|| 1" tried to run a nonexistent command named "1"; the "true"
# builtin is the correct way to deliberately ignore a failed copy.
cp $freeimage_dir/Dist/FreeImage.dll $install_dir/lib || true
cp $freeimage_dir/Dist/FreeImage.lib $install_dir/lib || true
cp $freeimage_dir/Dist/libFreeImage.a $install_dir/lib || true
fi
}
# we can't build vanilla ReadLine under MinGW
# Install a prebuilt GnuWin32 Readline binary package (vanilla Readline does
# not build under MinGW).
# $1 = build dir (unused here), $2 = install prefix.
# Reads globals: download_only.
readline_win_pkg() {
local build_dir=$1
local install_dir=$2
local name="Readline"
local readline_ver="5.0.1"
local readline_url="http://downloads.sourceforge.net/project/gnuwin32/readline/5.0-1/readline-5.0-1-bin.zip?r=&ts=1468492036&use_mirror=freefr"
local readline_md5="91beae8726edd7ad529f67d82153e61a"
local readline_file="readline-bin.zip"
local readline_dir="readline-bin"
package_download $name $readline_url $readline_file $readline_md5
if [ $download_only -eq 1 ]; then
return
fi
package_extract $name $readline_file $readline_dir
# manually copy binary files
cp -R $readline_dir/include/* $install_dir/include/ || exit 1
# fix library name: the import library ships as libreadline.dll.a but the
# SDK's link step expects libreadline.a
cp $readline_dir/lib/libreadline.dll.a $install_dir/lib/libreadline.a || exit 1
}
# Configure and (unless configure_only is set) build and install the MEGA SDK
# itself, pointing every dependency at the locally built install prefix.
# $1 = install prefix, $2 = debug flag ("--enable-debug" or empty).
# Reads globals: use_dynamic, disable_freeimage, enable_megaapi, no_examples,
# disable_ssl, enable_sodium, disable_posix_threads, config_opts,
# configure_only.
build_sdk() {
local install_dir=$1
local debug=$2
local static_flags=""
local readline_flags=""
local freeimage_flags=""
local megaapi_flags=""
local openssl_flags=""
# sodium is opt-in; default to building without it
local sodium_flags="--without-sodium"
local cwd=$(pwd)
echo "Configuring MEGA SDK"
./autogen.sh || exit 1
# use either static build (by the default) or dynamic
if [ $use_dynamic -eq 1 ]; then
static_flags="--enable-shared"
else
static_flags="--disable-shared --enable-static"
fi
# disable freeimage
if [ $disable_freeimage -eq 0 ]; then
freeimage_flags="--with-freeimage=$install_dir"
else
freeimage_flags="--without-freeimage"
fi
# enable megaapi
if [ $enable_megaapi -eq 0 ]; then
megaapi_flags="--disable-megaapi"
fi
# add readline and termcap flags if building examples
if [ -z "$no_examples" ]; then
readline_flags=" \
--with-readline=$install_dir \
--with-termcap=$install_dir \
"
fi
if [ $disable_ssl -eq 0 ]; then
openssl_flags="--with-openssl=$install_dir"
fi
if [ $enable_sodium -eq 1 ]; then
sodium_flags="--with-sodium=$install_dir"
fi
# non-Windows build: cURL + c-ares + (optionally) OpenSSL
if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
./configure \
$static_flags \
--disable-silent-rules \
--disable-curl-checks \
$megaapi_flags \
$openssl_flags \
--with-cryptopp=$install_dir \
$sodium_flags \
--with-zlib=$install_dir \
--with-sqlite=$install_dir \
--with-cares=$install_dir \
--with-curl=$install_dir \
$freeimage_flags \
$readline_flags \
$disable_posix_threads \
$no_examples \
$config_opts \
--prefix=$install_dir \
$debug || exit 1
# Windows (MinGW) build, uses WinHTTP instead of cURL + c-ares, without OpenSSL
else
./configure \
$static_flags \
--disable-silent-rules \
--without-openssl \
$megaapi_flags \
--with-cryptopp=$install_dir \
$sodium_flags \
--with-zlib=$install_dir \
--with-sqlite=$install_dir \
--without-cares \
--without-curl \
--with-winhttp=$cwd \
$freeimage_flags \
$readline_flags \
$disable_posix_threads \
$no_examples \
$config_opts \
--prefix=$install_dir \
$debug || exit 1
fi
echo "MEGA SDK is configured"
if [ $configure_only -eq 0 ]; then
echo "Building MEGA SDK"
make clean
if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
make -j9 || exit 1
else
# NOTE(review): parallel make and error propagation are skipped on MinGW;
# a failed Windows build falls through to "make install" — confirm intended.
make
fi
make install
fi
}
# Print usage and the full option list for this build script.
# Uses $0 (via basename) for the program name; takes no arguments.
display_help() {
local app=$(basename "$0")
echo ""
echo "Usage:"
# FIX: added the previously undocumented -b option (present in getopts).
echo " $app [-a] [-b] [-c] [-h] [-d] [-e] [-f] [-g] [-l] [-m opts] [-n] [-o path] [-p path] [-q] [-r] [-s] [-t] [-w] [-x opts] [-y] [z]"
echo ""
echo "By the default this script builds static megacli executable."
echo "This script can be run with numerous options to configure and build MEGA SDK."
echo ""
echo "Options:"
echo " -a : Enable MegaApi"
echo " -b : Build dependencies only, do not build MEGA SDK itself"
echo " -c : Configure MEGA SDK and exit, do not build it"
echo " -d : Enable debug build"
echo " -e : Enable cares"
echo " -f : Disable FreeImage"
echo " -g : Enable curl"
echo " -l : Use local software archive files instead of downloading"
echo " -n : Disable example applications"
echo " -s : Disable OpenSSL"
echo " -r : Enable Android build"
echo " -t : Disable POSIX Threads support"
echo " -u : Enable Sodium cryptographic library"
echo " -v : Enable libuv"
echo " -w : Download software archives and exit"
echo " -y : Build dynamic library and executable (instead of static)"
echo " -m [opts]: make options"
echo " -x [opts]: configure options"
echo " -o [path]: Directory to store and look for downloaded archives"
echo " -p [path]: Installation directory"
echo " -q : Use Crypto++"
echo " -z : Disable libz"
echo ""
}
# Entry point: parse options, build all requested third-party dependencies
# into ./sdk_build/install, then configure and build the MEGA SDK itself.
main() {
local cwd=$(pwd)
local work_dir=$cwd"/sdk_build"
local build_dir=$work_dir/"build"
local install_dir=$work_dir/"install"
local debug=""
# by the default store archives in work_dir
local_dir=$work_dir
# option parsing; each flag sets a global consumed by the *_pkg helpers
# and build_sdk
while getopts ":habcdefglm:no:p:rstuvyx:wqz" opt; do
case $opt in
h)
display_help $0
exit 1
;;
a)
echo "* Enabling MegaApi"
enable_megaapi=1
;;
b)
only_build_dependencies=1
echo "* Building dependencies only."
;;
c)
echo "* Configure only"
configure_only=1
;;
d)
echo "* DEBUG build"
debug="--enable-debug"
;;
e)
echo "* Enabling external c-ares"
enable_cares=1
;;
f)
echo "* Disabling external FreeImage"
disable_freeimage=1
;;
g)
echo "* Enabling external Curl"
enable_curl=1
;;
l)
echo "* Using local files"
use_local=1
;;
m)
make_opts="$OPTARG"
;;
n)
no_examples="--disable-examples --disable-megacmd"
;;
o)
local_dir=$(readlink -f $OPTARG)
if [ ! -d $local_dir ]; then
mkdir -p $local_dir || exit 1
fi
echo "* Storing local archive files in $local_dir"
;;
p)
install_dir=$(readlink -f $OPTARG)
echo "* Installing into $install_dir"
;;
q)
echo "* Enabling external Crypto++"
enable_cryptopp=1
;;
r)
echo "* Building for Android"
android_build=1
;;
s)
echo "* Disabling OpenSSL"
disable_ssl=1
;;
t)
disable_posix_threads="--disable-posix-threads"
;;
u)
enable_sodium=1
echo "* Enabling external Sodium."
;;
v)
enable_libuv=1
echo "* Enabling external libuv."
;;
w)
download_only=1
echo "* Downloading software archives only."
;;
x)
config_opts="$OPTARG"
echo "* Using configuration options: $config_opts"
;;
y)
use_dynamic=1
echo "* Building dynamic library and executable."
;;
z)
disable_zlib=1
echo "* Disabling external libz."
;;
\?)
display_help $0
exit 1
;;
*)
display_help $0
exit 1
;;
esac
done
shift $((OPTIND-1))
check_apps
# Windows builds link against WinHTTP, which the user must provide manually
if [ "$(expr substr $(uname -s) 1 10)" = "MINGW32_NT" ]; then
if [ ! -f "$cwd/winhttp.h" -o ! -f "$cwd/winhttp.lib" ]; then
echo "ERROR! Windows build requires WinHTTP header and library to be present in MEGA SDK project folder!"
echo "Please get both winhttp.h and winhttp.lib files an put them into the MEGA SDK project's root folder."
exit 1
fi
fi
# report failures via trap until the very end, where it is swapped for on_exit_ok
trap on_exit_error EXIT
if [ $download_only -eq 0 ]; then
if [ ! -d $build_dir ]; then
mkdir -p $build_dir || exit 1
fi
if [ ! -d $install_dir ]; then
mkdir -p $install_dir || exit 1
fi
cd $build_dir
fi
# NOTE(review): removes *.log from the current directory, which is build_dir
# only when download_only=0 — otherwise it is cwd; confirm intended.
rm -fr *.log
# point all dependency builds and pkg-config at the local install prefix
export PREFIX=$install_dir
local old_pkg_conf=$PKG_CONFIG_PATH
export PKG_CONFIG_PATH=$install_dir/lib/pkgconfig/
export LD_LIBRARY_PATH="$install_dir/lib"
export LD_RUN_PATH="$install_dir/lib"
if [ $android_build -eq 1 ]; then
echo "SYSROOT: $SYSROOT"
fi
# build dependencies in link order: ssl -> crypto -> compression -> network
if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
if [ $disable_ssl -eq 0 ]; then
openssl_pkg $build_dir $install_dir
fi
fi
if [ $enable_cryptopp -eq 1 ]; then
cryptopp_pkg $build_dir $install_dir
fi
if [ $enable_sodium -eq 1 ]; then
sodium_pkg $build_dir $install_dir
fi
if [ $disable_zlib -eq 0 ]; then
zlib_pkg $build_dir $install_dir
fi
sqlite_pkg $build_dir $install_dir
if [ $enable_cares -eq 1 ]; then
cares_pkg $build_dir $install_dir
fi
if [ $enable_curl -eq 1 ]; then
curl_pkg $build_dir $install_dir
fi
if [ $enable_libuv -eq 1 ]; then
libuv_pkg $build_dir $install_dir
fi
if [ $disable_freeimage -eq 0 ]; then
freeimage_pkg $build_dir $install_dir $cwd
fi
# Build readline and termcap if no_examples isn't set
if [ -z "$no_examples" ]; then
if [ "$(expr substr $(uname -s) 1 10)" != "MINGW32_NT" ]; then
readline_pkg $build_dir $install_dir
termcap_pkg $build_dir $install_dir
else
readline_win_pkg $build_dir $install_dir
fi
fi
if [ $download_only -eq 0 ] && [ $only_build_dependencies -eq 0 ]; then
cd $cwd
#fix libtool bug (prepends some '=' to certain paths)
for i in `find $install_dir -name "*.la"`; do sed -i "s#=/#/#g" $i; done
if [ $android_build -eq 1 ]; then
export "CXXFLAGS=$CXXFLAGS -std=c++11"
fi
export "CXXFLAGS=$CXXFLAGS -DCRYPTOPP_MAINTAIN_BACKWARDS_COMPATIBILITY_562"
build_sdk $install_dir $debug
fi
# restore the environment modified above
unset PREFIX
unset LD_RUN_PATH
unset LD_LIBRARY_PATH
export PKG_CONFIG_PATH=$old_pkg_conf
trap on_exit_ok EXIT
}
main "$@"
|
<reponame>code-check/github-api
package codecheck.github.operations
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import org.json4s.JArray
import codecheck.github.api.GitHubAPI
import codecheck.github.exceptions.NotFoundException
import codecheck.github.models.Webhook
import codecheck.github.models.WebhookConfig
import codecheck.github.models.WebhookCreateInput
import codecheck.github.models.WebhookUpdateInput
/**
 * Operations on the GitHub repository webhooks API
 * (endpoints under /repos/:owner/:repo/hooks).
 */
trait WebhookOp {
self: GitHubAPI =>
/** Lists all webhooks defined on the given repository. */
def listWebhooks(owner: String, repo: String): Future[List[Webhook]] = {
self.exec("GET", s"/repos/${owner}/${repo}/hooks").map {
_.body match {
case JArray(arr) => arr.map(new Webhook(_))
case _ => throw new IllegalStateException()
}
}
}
/**
 * Fetches a single webhook by id; None when the hook does not exist.
 * NOTE(review): status codes other than 200/404 would raise a MatchError
 * here — confirm that exec surfaces all other codes as failed futures.
 */
def getWebhook(owner: String, repo: String, id: Long): Future[Option[Webhook]] = {
self.exec("GET", s"/repos/${owner}/${repo}/hooks/${id}", fail404=false).map { res =>
res.statusCode match {
case 404 => None
case 200 => Some(new Webhook(res.body))
}
}
}
/** Creates a webhook on the repository and returns the created model. */
def createWebhook(owner: String, repo: String, input: WebhookCreateInput): Future[Webhook] = {
self.exec("POST", s"/repos/${owner}/${repo}/hooks", input.value).map { res =>
new Webhook(res.body)
}
}
//It is apparently an issue with GitHub's Webhook API that add_events and remove_events cannot be done
//in a single operation. To add and remove events, it must be done through two separate calls of updateWebhook.
def updateWebhook(owner: String, repo: String, id: Long, input: WebhookUpdateInput): Future[Webhook] = {
self.exec("PATCH", s"/repos/${owner}/${repo}/hooks/${id}", input.value).map { res =>
new Webhook(res.body)
}
}
/** Triggers a test delivery of the latest push event; true on HTTP 204. */
def testWebhook(owner: String, repo: String, id: Long): Future[Boolean] = {
self.exec("POST", s"/repos/${owner}/${repo}/hooks/${id}/test", fail404=false).map { res =>
res.statusCode match {
case 204 => true
case _ => false
}
}
}
/** Sends a ping event to the webhook; true on HTTP 204. */
def pingWebhook(owner: String, repo: String, id: Long): Future[Boolean] = {
self.exec("POST", s"/repos/${owner}/${repo}/hooks/${id}/pings", fail404=false).map { res =>
res.statusCode match {
case 204 => true
case _ => false
}
}
}
/** Deletes the webhook; true on HTTP 204, false otherwise (e.g. 404). */
def removeWebhook(owner: String, repo: String, id: Long): Future[Boolean] = {
self.exec("DELETE", s"/repos/${owner}/${repo}/hooks/${id}", fail404=false).map { res =>
res.statusCode match {
case 204 => true
case _ => false
}
}
}
}
|
<filename>app/src/main/java/com/qweex/openbooklikes/NavDrawerAdapter.java
package com.qweex.openbooklikes;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import java.util.ArrayList;
/**
 * ListView adapter that renders a Menu (including one level of sub-menus) as
 * a navigation drawer. Sub-menu owners become non-clickable headers; leaf
 * items become clickable entries with optional icon and count badge.
 * The menu is flattened into flatList as a side effect of count().
 */
public class NavDrawerAdapter extends BaseAdapter {
Menu menu;
// cached result of getCount(); -1 means "needs recomputation"
int lastCount = -1;
Context context;
// flattened, depth-first list of visible menu items; rebuilt by count()
ArrayList<MenuItem> flatList = new ArrayList<>();
MenuItem selected;
// [background, text, icon] color resource ids for the two item states
int[] selectedColors = new int[] {R.color.nav_selected_bg, R.color.nav_selected_text, R.color.nav_selected_icon},
unselectedColors = new int[] {android.R.color.transparent, R.color.nav_unselected_text, R.color.nav_unselected_icon};
public Menu getMenu() {
return menu;
}
public NavDrawerAdapter(Menu m, Context c) {
menu = m;
context = c;
}
/** Marks the given item as selected and forces a flatten + redraw. */
public void setSelected(MenuItem item) {
selected = item;
notifyDataSetInvalidated();
}
/** Marks the item with the given id as selected and forces a redraw. */
public void setSelected(int id) {
selected = menu.findItem(id);
notifyDataSetInvalidated();
}
// identity comparison is intentional: "is this the exact selected item"
public boolean isSelected(MenuItem item) {
return selected == item;
}
public int indexOf(MenuItem item) {
return flatList.indexOf(item);
}
@Override
public void notifyDataSetInvalidated() {
// drop caches so the next getCount() re-flattens the menu
lastCount = -1;
flatList.clear();
super.notifyDataSetInvalidated();
}
@Override
public int getCount() {
// lazily (re)computes and caches the flattened item count
return lastCount = lastCount == -1 ? count(menu, 0) : lastCount;
}
/**
 * Depth-first walk over visible items; appends each to flatList and
 * returns the running total. Invisible items (and their sub-menus) are
 * skipped entirely.
 */
int count(Menu m, int sum) {
if(m!=null) {
for (int i = 0; i < m.size(); i++) {
if(!m.getItem(i).isVisible())
continue;
flatList.add(m.getItem(i));
sum = count(m.getItem(i).getSubMenu(), sum+1);
}
}
return sum;
}
@Override
public Object getItem(int i) {
return flatList.get(i);
}
@Override
public long getItemId(int i) {
return ((MenuItem)getItem(i)).getItemId();
}
@Override
public View getView(int i, View view, ViewGroup viewGroup) {
// NOTE(review): the recycled convertView is ignored and a fresh layout is
// inflated on every call — confirm this is acceptable for list size here.
MenuItem item = (MenuItem)getItem(i);
Resources res = context.getResources();
if(item.hasSubMenu()) {
// header row: clickable=true swallows touches so headers don't select
view = LayoutInflater.from(context).inflate(R.layout.nav_list_header, null);
view.setClickable(true);
} else {
view = LayoutInflater.from(context).inflate(R.layout.nav_list_entry, null);
view.setClickable(false);
// the optional badge count is smuggled in via the item's Intent extras
if(item.getIntent()!=null) {
int count = item.getIntent().getIntExtra("count", -1);
((TextView) view.findViewById(R.id.count)).setText(Integer.toString(count));
if(count < 0)
view.findViewById(R.id.count).setVisibility(View.GONE);
} else {
view.findViewById(R.id.count).setVisibility(View.GONE);
}
}
// show a separator at group boundaries and above headers
view.findViewById(R.id.separator).setVisibility(
i > 0 &&
((MenuItem) getItem(i - 1)).getGroupId() != item.getGroupId()
||
item.hasSubMenu()
? View.VISIBLE : View.GONE
);
TextView title = ((TextView)view.findViewById(R.id.title));
int[] colors = item.equals(selected) ? selectedColors : unselectedColors;
view.setBackgroundColor(res.getColor(colors[0]));
title.setTextColor(res.getColor(colors[1]));
if(item.getIcon()!=null && !item.hasSubMenu()) {
Drawable icon = item.getIcon();
ImageView imgV = ((ImageView) view.findViewById(R.id.image_view));
// clear any filter left from a previous bind before tinting
imgV.setColorFilter(null);
imgV.setColorFilter(res.getColor(colors[2]), PorterDuff.Mode.SRC_IN);
imgV.setImageDrawable(icon);
}
title.setText(item.getTitle());
return view;
}
}
|
def towers_of_hanoi(n, from_rod, aux_rod, to_rod):
    """Print the move sequence solving Towers of Hanoi for ``n`` disks.

    Moves ``n`` disks from ``from_rod`` to ``to_rod`` using ``aux_rod`` as
    scratch space, printing one line per move in the form
    ``Move disk <k> from rod <X> to rod <Y>`` (2**n - 1 moves in total).

    Args:
        n: number of disks. Non-positive values print nothing.
        from_rod, aux_rod, to_rod: rod labels (any printable values).
    """
    # BUGFIX: without this guard, n <= 0 recursed without bound and raised
    # RecursionError; callers with n >= 1 are unaffected.
    if n <= 0:
        return
    if n == 1:
        print("Move disk 1 from rod", from_rod, "to rod", to_rod)
        return
    # move the top n-1 disks out of the way, move the largest, then restack
    towers_of_hanoi(n - 1, from_rod, to_rod, aux_rod)
    print("Move disk", n, "from rod", from_rod, "to rod", to_rod)
    towers_of_hanoi(n - 1, aux_rod, from_rod, to_rod)
# Driver code
n = 4
towers_of_hanoi(n, 'A', 'B', 'C')  # prints the 2**4 - 1 = 15 moves
#!/usr/bin/env bash
# Run the address-index-server sbt sub-project in production mode on port 9000,
# pointing the ONS AI native parser library path at the parsers resources dir.
JAVA_OPTS="-DONS_AI_LIBRARY_PATH=../../../../parsers/src/main/resources" sbt "project address-index-server" "runProd 9000"
|
// Package socket enumerates HTTP headerSec-Websocket-* keys and handles
// websocket communications.
package socket
import (
"fmt"
"log"
"net/http"
"os"
"runtime/debug"
"github.com/toba/coreweb"
)
type (
// Request from browser as bytes along with WebSocket client the browser
// communicated through.
Request struct {
Client *Client
Message []byte
}
// RequestHandler processes a socket request and returns a response that
// should be sent to the client or nil if no response is expected.
RequestHandler func(req *Request) []byte
)
// prefix is the common stem of the WebSocket handshake header keys below.
const prefix = "Sec-Websocket-"
// Canonical Sec-Websocket-* header names used during the upgrade handshake.
const (
Accept = prefix + "Accept"
Key = prefix + "Key"
Protocol = prefix + "Protocol"
Version = prefix + "Version"
)
// Handle incoming websocket requests. Create a client object for each
// connection with a read and write event loop.
//
// Having pumps in goroutines allows "collection of memory referenced by the
// caller" according to
//
// https://github.com/gorilla/websocket/commit/ea4d1f681babbce9545c9c5f3d5194a789c89f5b
func Handle(c coreweb.Config, responder RequestHandler) func(w http.ResponseWriter, r *http.Request) {
// (re)initialize the package-level channels and client registry; these are
// declared elsewhere in the package.
// NOTE(review): calling Handle more than once replaces the channels while a
// previous listen goroutine may still be running — confirm single use.
broadcast = make(chan []byte)
request = make(chan *Request)
register = make(chan *Client)
unregister = make(chan *Client)
clients = make(map[*Client]bool)
go listen(responder)
// return standard HTTP handler that upgrades to socket connection
return func(w http.ResponseWriter, r *http.Request) {
// convert panics in the upgrade path into a 500 instead of killing the server
defer func() {
if rvr := recover(); rvr != nil {
fmt.Fprintf(os.Stderr, "Panic: %+v\n", rvr)
debug.PrintStack()
http.Error(w, http.StatusText(http.StatusInternalServerError), http.StatusInternalServerError)
}
}()
conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
log.Println(err)
//http.Error(w, fmt.Sprintf("cannot upgrade: %v", err), http.StatusInternalServerError)
return
}
// register the new client, then start its write/read loops in goroutines
client := &Client{conn: conn, Send: make(chan []byte, 256)}
register <- client
go client.writePump()
go client.readPump()
}
}
// listen is an event loop that continually checks event channels.
func listen(responder RequestHandler) {
for {
select {
case c := <-register:
clients[c] = true
case c := <-unregister:
if _, ok := clients[c]; ok {
delete(clients, c)
close(c.Send)
}
case req := <-request:
res := responder(req)
if res != nil {
req.Client.Send <- res
}
case res := <-broadcast:
for c := range clients {
select {
case c.Send <- res:
default:
close(c.Send)
delete(clients, c)
}
}
}
}
}
// Broadcast puts a message onto the broadcast channel to be sent to all
// connected clients.
// Broadcast puts a message onto the broadcast channel to be sent to all
// connected clients. It is a no-op when the channel has not yet been
// created (Handle not called) or when the message is nil.
func Broadcast(res []byte) {
	if broadcast == nil || res == nil {
		return
	}
	broadcast <- res
}
|
#!/bin/bash
# Run pbench-fio sequential and random I/O benchmarks against all running
# fio pods in the cluster, then upload the results.
# Args: $1 = tmp folder holding job files/scripts, $2 = storage class name
# (used only to label the benchmark configs).
echo "001 $(date)"
if [ "$#" -ne 2 ]; then
echo "need the path of tmp folder and STORAGE_CLASS_NAME"
exit 1
fi
readonly TMP_FOLDER=$1
readonly STORAGE_CLASS_NAME=$2
### pbench-register runs before the test is started if necessary
echo "002 $(date)"
# collect the node/IP (column 7 of "oc get pod -o wide") of every running fio
# pod, space-joined then converted to a comma-separated list
readonly CLIENT_HOSTS_COMMA=$(oc get pod --all-namespaces -o wide --no-headers | grep Running | grep fio | awk '{print $7}' | awk 'BEGIN { ORS = " " } { print }' | tr " " ,)
# strip the trailing comma left by the trailing output separator
readonly CLIENT_HOSTS="${CLIENT_HOSTS_COMMA::-1}"
echo "CLIENT_HOST ${CLIENT_HOSTS}"
#pbench-fio --test-types=read --clients="${CLIENT_HOSTS}" --config="SEQ_IO_${STORAGE_CLASS_NAME}" --samples=1 --max-stddev=20 --block-sizes=4 --job-file="${TMP_FOLDER}/config/sequential_io.job" --pre-iteration-script="${TMP_FOLDER}/scripts/drop-cache.sh"
# sequential read/write/mixed at 4k/16k/64k block sizes
pbench-fio --test-types=read,write,rw --clients="${CLIENT_HOSTS}" --config="SEQ_IO_${STORAGE_CLASS_NAME}" --samples=1 --max-stddev=20 --block-sizes=4,16,64 --job-file="${TMP_FOLDER}/config/sequential_io.job" --pre-iteration-script="${TMP_FOLDER}/scripts/drop-cache.sh"
echo "003 $(date)"
# random read/write/mixed at the same block sizes
pbench-fio --test-types=randread,randwrite,randrw --clients="${CLIENT_HOSTS}" --config="RAND_IO_${STORAGE_CLASS_NAME}" --samples=1 --max-stddev=20 --block-sizes=4,16,64 --job-file="${TMP_FOLDER}/config/random_io.job" --pre-iteration-script="${TMP_FOLDER}/scripts/drop-cache.sh"
echo "pbench-copy-results: $(date)"
pbench-copy-results
|
// Convert HarfBuzz shaping output into SVG path strings plus a viewBox.
// result: array of shaped items ({g: glyph id, dx/dy: offsets, ax/ay: advances});
// glyphs: map of glyph id -> array of path commands ({type, values}).
// Returns {bbox, paths} where bbox is a padded "x y w h" viewBox string.
function getSVG(result, glyphs) {
// running bounding box over every emitted coordinate
var xmin = 1000;
var xmax = -1000;
var ymin = 1000;
var ymax = -1000;
// pen position, advanced by each item's ax/ay after it is emitted
let ax = 0;
let ay = 0;
const paths = result
.map(function (x) {
const result = glyphs[x.g].map(function (command) {
if (command.type !== 'Z') {
const result = command.values
.map(function (p, i) {
// apply ax/ay/dx/dy to coords
// (even indices are x coords, odd indices are y coords)
return i % 2 ? p + ay + x.dy : p + ax + x.dx;
})
.map(function (x, i) {
// bbox calc
if (i % 2) {
if (x < ymin) ymin = x;
if (x > ymax) ymax = x;
} else {
if (x < xmin) xmin = x;
if (x > xmax) xmax = x;
}
return x;
});
// serialize back to "<type>x1,y1,..." path-command text
return [command.type].concat(result).toString();
} else {
return command.type;
}
});
// advance the pen for the next glyph
ax += x.ax;
ay += x.ay;
return result;
})
.reduce((acc, val) => {
// one concatenated command string per shaped glyph
return acc.concat(val.join(''));
}, []);
// split each glyph string into individual closed subpaths (M...Z runs)
let parsedPaths = [];
paths.forEach((path) => {
const parsed = path.match(/[mM][^mMzZ]*[zZ]/g);
if (parsed) {
parsed.forEach((p) => {
parsedPaths.push(p);
});
}
});
let width = xmax - xmin;
let height = ymax - ymin;
// pad it a bit
let pad = Math.round(Math.min(width / 10, height / 10));
xmin -= pad;
ymin -= pad;
width += pad * 2;
height += pad * 2;
const bbox = xmin + ' ' + ymin + ' ' + width + ' ' + height;
return {
bbox,
paths: parsedPaths,
};
}
// Shape `text` with HarfBuzz using the font fetched from `font`, and return
// the SVG subpaths plus the shaped glyph ids. `attributes['variations']`
// is forwarded to the variable-font axis settings.
export const convertTextToSvg = async (hb: any, font: string, text: string, attributes: object) => {
  const response = await fetch(font);
  const fontBytes = await response.arrayBuffer();

  // build the HarfBuzz font object chain: blob -> face -> font
  const blob = hb.createBlob(new Uint8Array(fontBytes));
  const face = hb.createFace(blob, 0);
  const hbFont = hb.createFont(face);
  hbFont.setVariations(attributes['variations']);

  // shape the text into positioned glyphs
  const buffer = hb.createBuffer();
  buffer.addText(text);
  buffer.guessSegmentProperties();
  hb.shape(hbFont, buffer);
  const shaped = buffer.json(hbFont);

  // fetch each distinct glyph outline exactly once
  let glyphs = {};
  for (const item of shaped) {
    if (!glyphs[item.g]) {
      glyphs[item.g] = hbFont.glyphToJson(item.g);
    }
  }

  // release native HarfBuzz resources before returning
  buffer.destroy();
  hbFont.destroy();
  face.destroy();
  blob.destroy();

  return {
    paths: getSVG(shaped, glyphs).paths,
    glyphIds: shaped.map((item) => item.g),
  };
};
// Fetch the font from `font`, instantiate it with HarfBuzz, and return the
// SVG path data for a single glyph id (as a one-element array).
export const convertGlyphToSvg = async (hb: any, font: string, glyphId: number, attributes: object) => {
  const response = await fetch(font);
  const fontBytes = await response.arrayBuffer();

  // blob -> face -> font, with variable-font axes applied
  const blob = hb.createBlob(new Uint8Array(fontBytes));
  const face = hb.createFace(blob, 0);
  const hbFont = hb.createFont(face);
  hbFont.setVariations(attributes['variations']);

  const path = hbFont.glyphToPath(glyphId);

  // release native HarfBuzz resources before returning
  hbFont.destroy();
  face.destroy();
  blob.destroy();

  return [path];
};
|
// Page behavior: smooth scrolling, navbar collapse/scrollspy, and the
// contact-form mail submission.
(function($) {
  "use strict"; // Start of use strict

  // Smooth scrolling using jQuery easing
  $('a.js-scroll-trigger[href*="#"]:not([href="#"])').click(function() {
    if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) {
      var target = $(this.hash);
      target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
      if (target.length) {
        $('html, body').animate({
          scrollTop: (target.offset().top - 54)
        }, 1000, "easeInOutExpo");
        return false;
      }
    }
  });

  // Closes responsive menu when a scroll trigger link is clicked
  $('.js-scroll-trigger').click(function() {
    $('.navbar-collapse').collapse('hide');
  });

  // Activate scrollspy to add active class to navbar items on scroll
  $('body').scrollspy({
    target: '#mainNav',
    offset: 56
  });

  // Collapse Navbar
  var navbarCollapse = function() {
    if ($("#mainNav").offset().top > 100) {
      $("#mainNav").addClass("navbar-shrink");
    } else {
      $("#mainNav").removeClass("navbar-shrink");
    }
  };
  // Collapse now if page is not at top
  navbarCollapse();
  // Collapse the navbar when page is scrolled
  $(window).scroll(navbarCollapse);

  // Mail: POST the contact-form fields to /sendMail and report the outcome.
  $('#contact_loader').hide();
  var $mail_container = $('#sendMessageButton');
  $mail_container.on('click', function(e) {
    e.preventDefault();
    let $name = $('#contact_name').val();
    let $email = $('#contact_email').val();
    let $phone = $('#contact_phone').val();
    let $message = $('#contact_message').val();
    // BUGFIX: percent-encode user-supplied values; previously any '&', '#',
    // '=' or '+' in a field corrupted the query string sent to the server.
    let $url = '/sendMail?name=' + encodeURIComponent($name) +
      '&email=' + encodeURIComponent($email) +
      '&phone=' + encodeURIComponent($phone) +
      '&message=' + encodeURIComponent($message);
    $.ajax({
      url: $url,
      type: 'POST',
      dataType: "json",
      beforeSend: function () {
        $('#contact_loader').show();
      },
      success: function(result, status, xhr) {
        $('#contact_loader').hide();
        if(result['success'] == 1) {
          $('#contact_info').text("Your message was send successfully");
          $('#contact_info').css('color', 'green');
        } else {
          $('#contact_info').text("Your message was not send successfully, please try sending me mail at <EMAIL>");
          $('#contact_info').css('color', 'red');
        }
      },
      error: function (xhr, status, error) {
        $('#contact_loader').hide();
        $('#contact_info').text("Your message was not send successfully, please try sending me mail at <EMAIL>");
        $('#contact_info').css('color', 'red');
      }
    });
  });

})(jQuery); // End of use strict
|
module.exports = {
"presets": ["@babel/preset-react", "@babel/preset-typescript"],
"plugins": [
["@babel/plugin-syntax-jsx"],
[
"search-and-replace",
{
"rules": [
{
"search": /nhsuk/,
"replace": "govuk"
}
]
}
]
],
ignore: ['**/__mocks__', '**/__tests__', 'src/setupTests.ts', 'components/**/*.test.*']
} |
/*
Program to print Right Lower Triangle of given height n
Example:
*
**
***
****
*****
******
*******
********
where height(n) of triangle is 8.
*/
#include <iostream>
using namespace std;
// Read the triangle height n from stdin and print a right-aligned triangle
// of '*' of exactly n rows: row i has n-i leading spaces then i stars.
int main()
{
    int i, j, n;
    std::cout << "Enter number of rows/height of triangle: ";
    std::cin >> n;
    // BUGFIX: loops previously ran 0..n inclusive, emitting n+1 rows — the
    // first entirely blank — and an extra leading space column on every row.
    // Starting both loops at 1 yields the documented n-row triangle.
    for (i = 1; i <= n; i++)
    {
        for (j = 1; j <= n; j++)
        {
            // columns beyond n - i are the stars of row i
            if (j > n - i)
            {
                std::cout << "*";
            }
            else
            {
                std::cout << " ";
            }
        }
        std::cout << "\n";
    }
    return 0;
}
package com.alipay.api.response;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.domain.GroupFundUserBill;
import com.alipay.api.AlipayResponse;
/**
* ALIPAY API: alipay.fund.trans.groupfunds.userbills.query response.
*
* @author auto create
* @since 1.0, 2021-07-14 10:09:56
*/
public class AlipayFundTransGroupfundsUserbillsQueryResponse extends AlipayResponse {

    private static final long serialVersionUID = 2127366147312445986L;

    /**
     * Flow/bill records involving the current user; only bills that actually
     * exist are returned. (Translated from the original Chinese field doc.)
     */
    @ApiField("user_bills")
    private GroupFundUserBill userBills;

    public GroupFundUserBill getUserBills() {
        return this.userBills;
    }

    public void setUserBills(GroupFundUserBill userBills) {
        this.userBills = userBills;
    }

}
|
#!/bin/sh
# Run memevents on a .litmus file ($1) to produce an rf-edge diagram, then
# render it to img/<base>.png via neato, widening node padding on the way.
set -eu
# display/filtering options for the generated execution diagrams
OPTS="-showupto 1 -rfdefault generated -progtexdefault generated -show_po false -show_po_edges false -longlegend false -show_simple_vbconds true -squished_graphs true -show_events mem -show_final_rf false -show_fr true -show_poloc false -proc_or_thread false -condensed true"
MOREOPTS="-web_diagrams true -splat_kind true -model minimaluniproc -select_vos all -show_local_global false -axiom no -precise_pco false -speedcheck true -speedlist true"
FILE=$1
memevents ${OPTS} ${MOREOPTS} $FILE
BASE=$(basename $FILE .litmus)
# bump node padding in the generated dot, keep a copy, and render to PNG
sed -e 's/pad="0.0"/pad="0.2"/g' generated/$BASE-rf.dot | tee $BASE.dot |\
neato -Tpng > img/$BASE.png
|
<gh_stars>1-10
/**
* Copyright 2017-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.glowroot.instrumentation.httpurlconnection;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Iterator;
import java.util.Map;
import com.google.common.io.ByteStreams;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.glowroot.instrumentation.test.harness.Container;
import org.glowroot.instrumentation.test.harness.IncomingSpan;
import org.glowroot.instrumentation.test.harness.OutgoingSpan;
import org.glowroot.instrumentation.test.harness.Span;
import org.glowroot.instrumentation.test.harness.impl.JavaagentContainer;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Integration tests verifying that HttpURLConnection client calls are captured
 * as outgoing spans (method, URI, result code) for GET, GET-with-query-string
 * and POST, over both HTTP and HTTPS.
 */
public class HttpURLConnectionIT {

    private static Container container;

    @BeforeClass
    public static void setUp() throws Exception {
        // need to use javaagent container since HttpURLConnection is in the bootstrap class loader
        container = JavaagentContainer.create();
    }

    @AfterClass
    public static void tearDown() throws Exception {
        container.close();
    }

    @After
    public void afterEachTest() throws Exception {
        container.resetAfterEachTest();
    }

    @Test
    public void shouldCaptureHttpGet() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpGet.class, false);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: GET http://localhost:[0-9]+/hello1/");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "GET");
        assertThat((String) detail.get("URI")).matches("http://localhost:[0-9]+/hello1/");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    @Test
    public void shouldCaptureHttpGetWithQueryString() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpGetWithQueryString.class, false);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: GET http://localhost:[0-9]+/hello1\\?abc=xyz");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "GET");
        assertThat((String) detail.get("URI")).matches("http://localhost:[0-9]+/hello1\\?abc=xyz");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    @Test
    public void shouldCaptureHttpPost() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpPost.class, false);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: POST http://localhost:[0-9]+/hello1/");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "POST");
        assertThat((String) detail.get("URI")).matches("http://localhost:[0-9]+/hello1/");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    @Test
    public void shouldCaptureHttpGetHTTPS() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpGet.class, true);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: GET https://localhost:[0-9]+/hello1/");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "GET");
        assertThat((String) detail.get("URI")).matches("https://localhost:[0-9]+/hello1/");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    @Test
    public void shouldCaptureHttpGetWithQueryStringHTTPS() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpGetWithQueryString.class, true);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: GET https://localhost:[0-9]+/hello1\\?abc=xyz");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "GET");
        assertThat((String) detail.get("URI")).matches("https://localhost:[0-9]+/hello1\\?abc=xyz");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    @Test
    public void shouldCaptureHttpPostHTTPS() throws Exception {
        // when
        IncomingSpan incomingSpan = container.execute(ExecuteHttpPost.class, true);

        // then
        Iterator<Span> i = incomingSpan.childSpans().iterator();

        OutgoingSpan outgoingSpan = (OutgoingSpan) i.next();
        assertThat(outgoingSpan.message())
                .matches("http client request: POST https://localhost:[0-9]+/hello1/");
        Map<String, Object> detail = outgoingSpan.detail();
        assertThat(detail).hasSize(3);
        assertThat(detail).containsEntry("Method", "POST");
        assertThat((String) detail.get("URI")).matches("https://localhost:[0-9]+/hello1/");
        assertThat(detail).containsEntry("Result", 200);

        assertThat(i.hasNext()).isFalse();
    }

    /** Issues a plain GET against /hello1/ and fully consumes the response. */
    public static class ExecuteHttpGet extends ExecuteHttpBase {

        @Override
        public void transactionMarker() throws Exception {
            String protocol = useHttps() ? "https" : "http";
            URL obj = new URL(protocol + "://localhost:" + getPort() + "/hello1/");
            HttpURLConnection connection = (HttpURLConnection) obj.openConnection();
            if (connection.getResponseCode() != 200) {
                throw new IllegalStateException(
                        "Unexpected response status code: " + connection.getResponseCode());
            }
            // this is to test header propagation by instrumentation
            if (!"Yes".equals(connection.getHeaderField("X-Test-Harness"))) {
                throw new IllegalStateException("X-Test-Harness header not received");
            }
            InputStream content = connection.getInputStream();
            ByteStreams.exhaust(content);
            content.close();
        }
    }

    /** Issues a GET with a query string and fully consumes the response. */
    public static class ExecuteHttpGetWithQueryString extends ExecuteHttpBase {

        @Override
        public void transactionMarker() throws Exception {
            String protocol = useHttps() ? "https" : "http";
            URL obj = new URL(protocol + "://localhost:" + getPort() + "/hello1?abc=xyz");
            HttpURLConnection connection = (HttpURLConnection) obj.openConnection();
            if (connection.getResponseCode() != 200) {
                throw new IllegalStateException(
                        "Unexpected response status code: " + connection.getResponseCode());
            }
            // this is to test header propagation by instrumentation
            if (!"Yes".equals(connection.getHeaderField("X-Test-Harness"))) {
                throw new IllegalStateException("X-Test-Harness header not received");
            }
            InputStream content = connection.getInputStream();
            ByteStreams.exhaust(content);
            content.close();
        }
    }

    /** Issues a POST with a small body and fully consumes the response. */
    public static class ExecuteHttpPost extends ExecuteHttpBase {

        @Override
        public void transactionMarker() throws Exception {
            String protocol = useHttps() ? "https" : "http";
            URL obj = new URL(protocol + "://localhost:" + getPort() + "/hello1/");
            HttpURLConnection connection = (HttpURLConnection) obj.openConnection();
            connection.setDoOutput(true);
            connection.getOutputStream().write("some data".getBytes());
            connection.getOutputStream().close();
            if (connection.getResponseCode() != 200) {
                throw new IllegalStateException(
                        "Unexpected response status code: " + connection.getResponseCode());
            }
            // this is to test header propagation by instrumentation
            if (!"Yes".equals(connection.getHeaderField("X-Test-Harness"))) {
                throw new IllegalStateException("X-Test-Harness header not received");
            }
            InputStream content = connection.getInputStream();
            ByteStreams.exhaust(content);
            content.close();
        }
    }
}
|
<reponame>evgeny-dmi3ev/js-events
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-07-29 18:11
from __future__ import unicode_literals
from django.db import migrations
import sortedm2m.fields
class Migration(migrations.Migration):
    # Adds a sortable many-to-many "related_locations" field to
    # EventRelatedPlugin, linking event plugins to js_locations.Location rows.

    dependencies = [
        ('js_locations', '0007_auto_20190709_2309'),
        ('js_events', '0021_auto_20190729_1503'),
    ]

    operations = [
        migrations.AddField(
            model_name='eventrelatedplugin',
            name='related_locations',
            field=sortedm2m.fields.SortedManyToManyField(blank=True, help_text=None, to='js_locations.Location', verbose_name='related locations'),
        ),
    ]
|
<filename>routes/users.js<gh_stars>0
// User routes: registration, login, and a JWT-protected session check.
var express = require('express');
var router = express.Router();

var userController = require('../controller/user-controller');
var passport = require('passport');

// Root listing — returns an empty JSON array.
router.get('/', function (req, res) {
    return res.send('[]');
});

// Account creation and authentication endpoints.
router.post('/register', userController.registerUser);
router.post('/login', userController.loginUser);

// Validates the supplied JWT (no session) and greets the logged-in user.
router.get('/check', passport.authenticate('jwt', { session: false }), function (req, res) {
    return res.json({ msg: `${req.user.email} você está logado.` });
});

module.exports = router;
<reponame>nicholascloud/vette
'use strict';
var isNumber = require('../is-number');
var gt = require('./gt');
module.exports = function gteq (number, message) {
number = Number(number);
if (!isNumber(number)) {
throw new TypeError('number must be numeric');
}
message = message || 'value must be greater than or equal to ' + number;
return gt(number, true, message);
}; |
<reponame>fedux-org-attic/vim-taskjuggler
#!/usr/bin/env ruby
# encoding: utf-8
require 'thor'
require 'thor/group'
require 'open4'
require 'tmpdir'
require 'fileutils'
require 'find'
module Vim
  module TaskJuggler
    # Helper mix-in for shelling out: runs external commands, captures their
    # output and raises with a detailed report when they exit non-zero.
    module Cli
      # Runs a command.
      #
      # Strips leading whitespace from the command string, executes it via
      # Open4, waits for completion, and raises if the exit status is not in
      # the accepted set (0 plus any options[:ignore_exit_codes]).
      # NOTE(review): the child's stdin handle from popen4 is never closed —
      # presumably harmless for these commands, but worth confirming.
      def run(command, options = {})
        command.gsub!(/^\s+/, "")
        process_data = Hash.new
        pid, stdin, stdout, stderr = Open4::popen4(command)
        ignored, process_data[:status] = Process::waitpid2(pid)
        process_data[:stdout] = stdout.read
        process_data[:stderr] = stderr.read
        # Exit code 0 is always acceptable in addition to caller-supplied codes.
        process_data[:ignore_exit_codes] = ((options[:ignore_exit_codes] || Array.new) << 0).uniq
        raise_if_command_failed!(command_name(command), process_data)
      end

      # checks if command exited with an error code -eq 0
      #
      # Raises a RuntimeError containing the exit code plus captured
      # stdout/stderr (indented for readability) when the command's exit code
      # is not in process_data[:ignore_exit_codes].
      def raise_if_command_failed!(utility, process_data)
        unless process_data[:ignore_exit_codes].include?(process_data[:status].to_i)
          exception_string = "Failed to run \"#{ utility }\" on \"#{ RUBY_PLATFORM }\".\n\n" +
            "Exit code was:\n\n\s\s#{ process_data[:status].to_i }\n\n"
          exception_string << "STDOUT was:\n\n\s\s#{ process_data[:stdout].gsub("\n", "\n\s\s") }" unless process_data[:stdout].empty?
          exception_string << "STDERR was:\n\n\s\s#{ process_data[:stderr].gsub("\n", "\n\s\s") }" unless process_data[:stderr].empty?
          raise RuntimeError, exception_string
        # else
        #  info_string = "Everything ran fine!\n\n"
        #  info_string << "STDOUT was:\n\n\s\s#{ process_data[:stdout].gsub("\n", "\n\s\s") }" unless process_data[:stdout].empty?
        #  puts info_string
        end
      end

      # normalize command name: the basename of the command's first word
      # (e.g. "/usr/bin/git clone ..." -> "git").
      def command_name(command)
        command.slice(0, command.index(/\s/)).split('/')[-1]
      end
    end
  end
end
module Vim
  module TaskJuggler
    # NOTE(review): syntax_path is computed but never referenced below —
    # looks like dead code; confirm before removing.
    syntax_path=File.expand_path('syntax' , File.dirname(__FILE__))

    # Top-level Thor CLI entry point; subcommands are registered onto it.
    class Default < Thor
    end

    # "install" subcommands: vim extension, taskjuggler gem, syntax file.
    class Install < Thor
      include Cli

      desc "extension [PATH]" , "Install vim extension to path (Default ~/.vim/bundle.available/vim-taskjuggler)"
      # Clones the extension repository on first install, or pulls updates
      # when the target directory already exists.
      # NOTE(review): Dir.exists? is deprecated in modern Ruby (use Dir.exist?).
      def extension(remote_path = 'https://github.com/maxmeyer/vim-taskjuggler' ,
                    local_path = '~/.vim/bundle.available/vim-taskjuggler'
                   )
        local_path = File.expand_path(local_path)
        unless Dir.exists?(local_path)
          run("git clone #{remote_path} #{local_path}")
        else
          run("cd #{local_path}; git pull")
        end
      end

      desc "gem" , "Install taskjuggler gem"
      # Installs the taskjuggler gem system-wide via `gem install`.
      def gem
        run("gem install taskjuggler")
      end

      desc "syntax_file" , "Install new version of the vim syntax-file for taskjuggler"
      # Fetches the taskjuggler gem into a temp dir, extracts data/tjp.vim
      # from its data.tar.gz, and copies it into the extension's syntax dir.
      def syntax_file(extension_path = '~/.vim/bundle.available/vim-taskjuggler')
        extension_path = File.expand_path(extension_path)
        tmpdir = Dir.mktmpdir
        run("cd #{tmpdir}; gem fetch taskjuggler")
        result = Array.new
        # Locate the downloaded .gem file (exact filename carries the version).
        Find.find(tmpdir) do |path|
          result << path if File.fnmatch('*.gem', path)
        end
        syntax_file = 'data/tjp.vim'
        # A .gem is a tar containing data.tar.gz; stream-extract just tjp.vim.
        run(%Q[tar -O -xf "#{result[0]}" data.tar.gz | tar -xz -C "#{tmpdir}" "#{syntax_file}"])
        unless Dir.exists?(extension_path)
          FileUtils.mkdir_p extension_path
        end
        FileUtils.cp "#{tmpdir}/#{syntax_file}" , File.join(extension_path,'syntax')
      end
    end

    Default.register(
      Install,
      'install',
      'install [subcommand]',
      'install various things, e.g. gem, vim-extension, vim syntax file'
    )
  end
end

# Hand control to the Thor CLI with the process's ARGV.
Vim::TaskJuggler::Default.start
|
<gh_stars>1-10
// Aggregated public entry point: bundles the context/effect factories and the
// render helpers into a single default export.
import context, { CreateFunctionContext } from './context/main';
import effect from './effect/main';
import { CreateVariableContext, CreateReturnContext } from './render/main';

export default {
    context,
    effect,
    CreateVariableContext,
    CreateReturnContext,
    CreateFunctionContext
}
#!/bin/bash -e
# Builds the named image by delegating to the shared shell.sh helper.
IMAGE="xeon-centos75-dldt-ffmpeg"
# Resolve this script's directory so the helper is sourced relative to it,
# regardless of the caller's working directory.
DIR=$(dirname $(readlink -f "$0"))
. "${DIR}/../../../script/shell.sh"
|
#! /usr/bin/env ruby
require 'gooddata'
require 'json'

# Script parameters are injected by the execution platform; default to empty.
$SCRIPT_PARAMS ||= {}

puts "GoodData::VERSION = #{GoodData::VERSION}"

# Authenticate with the SST token supplied via script parameters and dump
# the current user's JSON profile.
client = GoodData.connect :sst_token => $SCRIPT_PARAMS['GDC_SST']
puts JSON.pretty_generate(client.user.json)
|
<filename>iot-suite-server-ability/src/main/java/com/tuya/iot/suite/ability/asset/connector/AssetConnector.java
package com.tuya.iot.suite.ability.asset.connector;
import com.tuya.connector.api.annotations.*;
import com.tuya.connector.open.api.model.PageResult;
import com.tuya.iot.suite.ability.asset.ability.AssetAbility;
import com.tuya.iot.suite.ability.asset.model.*;
import java.util.List;
/**
* Description TODO
*
* @author Chyern
* @date 2021/3/10
*/
/**
 * Declarative HTTP connector binding {@link AssetAbility} operations to the
 * Tuya open-API asset endpoints via connector annotations.
 */
public interface AssetConnector extends AssetAbility {

    // --- Asset CRUD ---

    @Override
    @POST("/v1.0/iot-02/assets")
    String addAsset(@Body AssetAddRequest request);

    @Override
    @PUT("/v1.0/iot-02/assets/{asset_id}")
    Boolean modifyAsset(@Path("asset_id") String assetId, @Body AssetModifyRequest body);

    @Override
    @DELETE("/v1.0/iot-02/assets/{asset_id}")
    Boolean deleteAsset(@Path("asset_id") String assetId);

    @Override
    @GET("/v1.0/iot-02/assets/{asset_id}")
    Asset selectAsset(@Path("asset_id") String assetId);

    // Batch lookup by comma-separated asset ids (v1.0) and paged lookup (v1.1).
    @Override
    @GET("/v1.0/iot-02/assets")
    List<Asset> selectAssets(@Query("asset_ids") String assetIds);

    @Override
    @GET("/v1.1/iot-02/assets")
    PageResult<Asset> selectAssets(@Query("asset_ids") String assetIds, @Query("asset_name") String assetName,
                                   @Query("last_row_key") String lastRowKey, @Query("page_size") Integer pageSize);

    // --- Child asset / device paging (row-key based cursors) ---

    @Override
    @GET("/v1.0/iot-02/assets/{asset_id}/sub-assets")
    PageResult<Asset> selectChildAssets(@Path("asset_id") String assetId, @Query("last_row_key") String lastRowKey,
                                        @Query("page_size") String pageSize);

    @Override
    @GET("/v1.0/iot-02/assets/{asset_id}/devices")
    PageResult<Asset> selectChildDevices(@Path("asset_id") String assetId, @Query("last_row_key") String lastRowKey,
                                         @Query("page_size") String pageSize);

    // --- User authorization over assets ---

    @Override
    @POST("/v1.0/iot-03/assets/actions/user-authorized")
    Boolean authorized(@Body AssetAuthorizationRequest assetAuthorizationRequest);

    @Override
    @POST("/v1.0/iot-03/users/{uid}/actions/batch-assets-authorized")
    Boolean batchAssetsAuthorizedToUser(@Path("uid") String userId, @Body AssetAuthBatchToUser assetAuthBatchToUser);

    @Override
    @POST("/v1.0/iot-03/users/{uid}/actions/batch-assets-unauthorized")
    Boolean batchAssetsUnAuthorizedToUser(@Path("uid")String userId, @Body AssetAuthBatchToUser assetAuthBatchToUser);

    @Override
    @POST("/v1.0/iot-03/users/{uid}/actions/assets-unauthorized")
    Boolean assetsUnAuthorizedToUser(@Path("uid")String userId,@Body AssetAuthToUser assetAuthToUser);

    // NOTE(review): the first parameter is bound to {uid} but named assetId —
    // presumably it is a user id; confirm against callers.
    @Override
    @GET("/v1.0/iot-03/users/{uid}/assets")
    PageResultWithTotal<AuthorizedAsset> pageListAuthorizedAssets(@Path("uid") String assetId,
                                                                  @Query("pageNo") int pageNo,
                                                                  @Query("pageSize") int pageSize);
}
|
<filename>nightwatch/commands/mountReactComponent.js
// Nightwatch custom command: mounts a React component inside the Vite-served
// test page and resolves to a Nightwatch element wrapping its root DOM node.
module.exports = class Command {
  // componentName: module path of the component to mount.
  // props: plain object, JSON string, or a function evaluated in the page.
  // cb: optional callback receiving the created component element.
  async command(componentName, props, cb = function() {}) {
    const reactEntryPoint = this.api.globals.entryPoint || '/node_modules/vite-plugin-nightwatch/src/react_index.js';
    let propsFromFn = '';
    if (typeof props == 'function') {
      // Serialize the function and invoke it in the browser context.
      propsFromFn = `(${props.toString()})()`;
    }

    // ES-module script injected into the page: imports React + the component,
    // renders into #app, and exposes the element/class on window for later use.
    let scriptContent = `
      import ReactLibs from '${reactEntryPoint}';
      const {React, ReactDOM} = ReactLibs;
      import Component from '${componentName}';
      const props = ${propsFromFn || (typeof props == 'string' ? props : JSON.stringify(props))};
      const element = React.createElement(Component, props);
      ReactDOM.render(element, document.getElementById('app'));
      window['@component_element'] = element;
      window['@component_class'] = Component;
    `;

    // Runs in the browser: appends the module script so the import graph loads.
    const scriptFn = function(scriptContent) {
      var scriptEl = document.createElement('script');
      scriptEl.type = 'module';
      scriptEl.innerHTML = scriptContent;
      document.body.appendChild(scriptEl);
    }

    const renderedElement = await this.api
      .launchComponentRenderer()
      // Pauses give the module script time to load and render.
      .pause(1000)
      .execute(scriptFn, [scriptContent])
      .pause(this.client.argv.debug ? 0 : 500)
      .execute(function() {
        // The mounted component is the first child of the #app mount point.
        return document.querySelectorAll('#app')[0].firstElementChild
      }, [], (result) => {
        if (!result || !result.value) {
          throw new Error('Could not mount the component. Run nightwatch with --devtools and --debug flags (Chrome only) and investigate the error in the browser console.')
        }

        const componentInstance = this.api.createElement(result.value, {
          isComponent: true,
          type: 'react'
        });
        cb(componentInstance);

        return componentInstance;
      });

    return renderedElement;
  }
}
|
#!/usr/bin/env bash
# Build and install libpostal, keeping its (large) data files under
# /opt/libpostal_data, then refresh the shared-library cache.
# Abort on the first failing step — without this a failed ./configure
# would still run make install.
set -euo pipefail

./bootstrap.sh
mkdir -p /opt/libpostal_data
./configure --datadir=/opt/libpostal_data
make
make install
ldconfig
|
#!/usr/bin/env bash
# Run the repo's .NET build inside a throwaway SDK container, mounting the
# Docker socket so the build can itself drive Docker, and forwarding the
# feed key / CI build number. All script arguments pass through to the build.
set -e

# Quote $PWD and the env expansions: an unquoted workspace path containing
# spaces would otherwise split into multiple docker arguments.
docker run --rm --name lib-build \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v "$PWD":/repo \
  -w /repo \
  -e FEEDZ_LOGICALITY_API_KEY="$FEEDZ_LOGICALITY_API_KEY" \
  -e BUILD_NUMBER="$GITHUB_RUN_NUMBER" \
  --network host \
  damianh/dotnet-sdks:6 \
  dotnet run -p build/Build.csproj -c Release -- "$@"
<reponame>tanshuai/reference-wallet
# Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0
import uuid
from dataclasses import dataclass
from enum import Enum
from typing import NewType

from dataclasses_json import dataclass_json

from .currency import Currency

# Distinct UUID type identifying a single debt record.
DebtId = NewType("DebtId", uuid.UUID)


@dataclass_json
@dataclass
class DebtData:
    """JSON-serializable record of an outstanding debt between the Wallet and the LP."""

    # Unique identifier of this debt entry.
    debt_id: DebtId
    # Currency the amount is denominated in.
    currency: Currency
    amount: int  # Positive value - Wallet owes LP, negative value - LP owes Wallet
|
package main
import (
"flag"
"fmt"
"io/ioutil"
"math/rand"
"net/http"
"time"
"github.com/prometheus/common/log"
)
var randChars = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
// Generates a random string for transactions.
func randSeq(n int) string {
b := make([]rune, n)
for i := range b {
b[i] = randChars[rand.Intn(len(randChars))]
}
return string(b)
}
// main floods a local RPC endpoint with random broadcast_tx_commit
// transactions of configurable key/value sizes, logging each successful
// response and sleeping between requests.
func main() {
	rand.Seed(time.Now().UnixNano())

	portNr := flag.String("portNr", "26657", "Port numbers: 26657, 26660, 26662, and 26664. Default 26657")
	TXNr := flag.Int("TXNr", 100, "Number of transactions to send. Default: 100")
	TXKeySize := flag.Int("kSize", 100, "Specify the size of the transaction (key) in bytes. Default is 100 bytes.")
	TXValueSize := flag.Int("vSize", 100, "Specify the size of the transaction (value) in bytes. Default is 100 bytes.")
	TXTime := flag.Int("TXTime", 1000, "Time spent between transactions. In milliseconds.")
	flag.Parse()

	for i := 0; i < *TXNr; i++ {
		resp, err := http.Get("http://127.0.0.1:" + *portNr + "/broadcast_tx_commit?tx=\"" + randSeq(*TXKeySize) +
			"=" + randSeq(*TXValueSize) + "\"")
		if err != nil {
			panic(err)
		}

		if resp.StatusCode == http.StatusOK {
			bodyBytes, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				resp.Body.Close()
				panic(err)
			}
			bodyString := string(bodyBytes)
			log.Info(bodyString)
		}
		// Close each body inside the loop: the original used `defer` here,
		// which keeps every response body open until main returns and
		// leaks connections across the whole run.
		resp.Body.Close()

		fmt.Println("Sleeping", *TXTime, "milliseconds...", i+1)
		time.Sleep(time.Duration(*TXTime) * time.Millisecond)
	}
}
|
// Storybook stories for OcDropboxComponent: default, scrollable, and empty
// configurations of the dropbox search control.
import { moduleMetadata } from '@storybook/angular';
import { OcCommonLibModule } from '@openchannel/angular-common-components/src/lib/common-components';
import { OcDropboxComponent } from '@openchannel/angular-common-components';

// Shared Angular module metadata for every story.
const modules = {
    imports: [OcCommonLibModule],
};

export default {
    title: 'Dropbox search [BEM]',
    component: OcDropboxComponent,
    decorators: [moduleMetadata(modules)],
    // Log selection events in the Storybook actions panel.
    argTypes: { selectedItem: { action: 'Get selected' } },
};

// Story factory: binds the supplied args as component props.
const DropboxComponent = (args: OcDropboxComponent) => ({
    component: OcDropboxComponent,
    moduleMetadata: modules,
    props: args,
});

// Small item list; the form clears after a selection.
export const DefaultDropbox = DropboxComponent.bind({});
DefaultDropbox.args = {
    placeHolder: 'Default place holder',
    items: ['first', 'second', 'third'],
    clearFormAfterSelect: true,
};

// Enough items to force the dropdown to scroll; selection is kept.
export const ScrollDropbox = DropboxComponent.bind({});
ScrollDropbox.args = {
    placeHolder: 'Default place holder',
    items: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15'],
    clearFormAfterSelect: false,
};

// All inputs null: exercises the component's empty/fallback rendering.
export const EmptyDropbox = DropboxComponent.bind({});
EmptyDropbox.args = {
    placeHolder: null,
    items: null,
    clearFormAfterSelect: null,
};
|
public List<String> substrings(String inputString)
{
// List to store all substrings
List<String> substrings = new ArrayList<String>();
// Iterate over the length of the input string
// starting from the left character
for (int start = 0; start < inputString.length(); start++) {
// Iterate over the remaining length of the input string
// ending at the right character
for (int end = start + 1; end <= inputString.length(); end++) {
// Get the substring of the input string
// starting from the left index till
// the right index
String substr = inputString.substring(start, end);
// Add the substring to the list
substrings.add(substr);
}
}
// Return the list of all substrings
return substrings;
} |
<gh_stars>0
import { urlFriendly } from "./url-friendly";
import { ProjectType, Project } from "../queries";
/**
 * Builds the site-relative URL for a project, optionally appending its slug.
 * Artistic projects live under /artistic-works/<group>; everything else goes
 * under /curated-works/<project group title>.
 */
export const getProjectUrl = (project: Project, withSlug?: boolean): string => {
    const slugSuffix = withSlug ? "/" + project.slug : "";
    const base =
        project.type === ProjectType.Artistic
            ? `/artistic-works/${urlFriendly(project.group)}`
            : `/curated-works/${urlFriendly(project.projectGroup.title)}`;
    return base + slugSuffix;
}
|
#!/bin/sh
# Smoke test against the fountain test server using mutual-TLS client certs:
# grant admin rights to administrator 3, then read the flag back with jq.
# -k skips server certificate verification (test environment only).
curl -k --key spec/files/cert/jrc_prime256v1.key --cert spec/files/cert/jrc_prime256v1.crt -H "Accept: */*" -H "Content-Type: application/json" --data-binary '{"admin": true}' -X PUT https://fountain-test.example.com:8443/administrators/3
curl -k --key spec/files/cert/jrc_prime256v1.key --cert spec/files/cert/jrc_prime256v1.crt https://fountain-test.example.com:8443/administrators/3 | jq ".administrator.admin"
|
<gh_stars>0
// Save button: submit the worker form and report the outcome.
// The API signals success with result.id == 1.
$('#btnSave').click(function () {
    save_worker(getWorkersFormVal(), function (result) {
        if (result.id == 1) {
            alertSuccessMessage('Saved Successfully');
            refresh();
        } else {
            alertErrorMessage('Something Went Wrong!');
        }
    });
});
/**
 * Persists a worker record via the API.
 *
 * @param {Object} obj - form-values payload; a falsy value aborts the call.
 * @param {Function} [callBack] - invoked with the server response on success.
 * @returns {boolean|undefined} false when no payload was supplied.
 */
function save_worker(obj, callBack) {
    if (!obj) {
        return false;
    }
    $.ajax({
        type: "POST",
        headers: {
            // The API token is exposed to the page through a meta tag.
            "Authorization": "Bearer " + $('meta[name=api-token]').attr("content"),
            "Accept": "application/json"
        },
        url: "/api/worker",
        data: obj,
        cache: false,
        success: function (result) {
            // Only call back when the caller supplied a real function.
            if (typeof callBack !== 'undefined' && callBack != null && typeof callBack === "function") {
                callBack(result);
            }
        }
    });
}
|
// Inserts `text` at the current caret position of the textarea with id
// `areaId`, then restores the caret to just after the inserted text and
// preserves the scroll position. Handles both the standards selection API
// ("ff" branch) and the legacy IE document.selection API ("ie" branch).
function insertAtCaret(areaId, text) {
    var txtarea = document.getElementById(areaId);
    if (!txtarea) { return; }

    var scrollPos = txtarea.scrollTop;
    var strPos = 0;
    // Feature-detect which selection API is available:
    // "ff" = selectionStart/selectionEnd, "ie" = document.selection ranges.
    var br = ((txtarea.selectionStart || txtarea.selectionStart == '0') ?
        "ff" : (document.selection ? "ie" : false ) );
    if (br == "ie") {
        // Legacy IE: measure the caret offset by extending a range back
        // to the start of the value and reading its text length.
        txtarea.focus();
        var range = document.selection.createRange();
        range.moveStart ('character', -txtarea.value.length);
        strPos = range.text.length;
    } else if (br == "ff") {
        strPos = txtarea.selectionStart;
    }

    // Splice the new text into the value at the caret offset.
    var front = (txtarea.value).substring(0, strPos);
    var back = (txtarea.value).substring(strPos, txtarea.value.length);
    txtarea.value = front + text + back;
    strPos = strPos + text.length;
    if (br == "ie") {
        // Rebuild a collapsed range at the new caret offset and select it.
        txtarea.focus();
        var ieRange = document.selection.createRange();
        ieRange.moveStart ('character', -txtarea.value.length);
        ieRange.moveStart ('character', strPos);
        ieRange.moveEnd ('character', 0);
        ieRange.select();
    } else if (br == "ff") {
        txtarea.selectionStart = strPos;
        txtarea.selectionEnd = strPos;
        txtarea.focus();
    }

    // Writing .value resets scrolling; restore the saved position.
    txtarea.scrollTop = scrollPos;
}
|
<filename>package.js
// Meteor package manifest for alethes:pages (out-of-the-box pagination).
Package.describe({
  "name": "alethes:pages",
  "summary": "State of the art, out of the box Meteor pagination",
  "version": "1.8.6",
  "git": "https://github.com/alethes/meteor-pages"
});

Package.onUse(function(api){
  api.versionsFrom("METEOR@1.0.3.1")
  // Core dependencies shared by client and server.
  api.use([
    "meteor-platform",
    "check",
    "tracker",
    "underscore",
    "coffeescript",
    "mongo",
    "ejson"
  ]);
  // iron:router integration is optional (weak dependency).
  api.use("iron:router@1.0.0", ["client", "server"], { weak: true });
  // Client-only UI dependencies.
  api.use([
    "templating",
    "spacebars",
    "blaze",
    "session"
  ], "client");
  api.addFiles([
    "lib/pages.coffee"
  ]);
  api.addFiles([
    "client/templates.html",
    "client/controllers.coffee",
    "client/main.css"
  ], "client");
  api.addAssets([
    "public/loader.gif"
  ], ["client"]);
});

Package.onTest(function(api){
  api.use([
    "meteor-platform",
    "coffeescript",
    "alethes:pages"
  ]);
  api.addFiles([
    "tests/test_templates.html",
    "tests/tests.coffee"
  ]);
});
|
#!/bin/bash
# Initialize pyenv and its virtualenv plugin in this shell, then activate
# the "python-3.7" virtualenv for subsequent commands.
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"
pyenv activate python-3.7
|
<reponame>1aurabrown/ervell
import gql from 'graphql-tag';

import selectableChannelFragment from 'react/components/ConnectionSelectionList/components/SelectableChannel/fragments/selectableChannel';

// Fetches the current user's five most recent channel connections,
// shaped by the SelectableChannel fragment for the selection list UI.
export default gql`
  query RecentChannelsQuery {
    me {
      __typename
      id
      recent_channels: recent_connections(per: 5) {
        ...SelectableChannel
      }
    }
  }
  ${selectableChannelFragment}
`;
|
package com.example.demo.util;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
import android.util.DisplayMetrics;
import android.util.Pair;
import java.lang.reflect.Field;
/**
* Created by yummyLau on 18-7-11
* Email: <EMAIL>
* blog: yummylau.com
*/
/** Static helpers for screen metrics, orientation, and status-bar height. */
public class DisplayUtils {

    /** Converts a dip value to pixels, rounding to the nearest integer. */
    public static int dip2px(Context context, float dipValue) {
        final float scale = context.getResources().getDisplayMetrics().density;
        return (int) (dipValue * scale + 0.5f);
    }

    /** Returns the screen size as a (widthPixels, heightPixels) pair. */
    public static Pair<Integer,Integer> getScreenSize(Context context) {
        DisplayMetrics dm = context.getResources().getDisplayMetrics();
        return new Pair<Integer,Integer>(dm.widthPixels, dm.heightPixels);
    }

    /** Whether the current configuration's orientation is portrait. */
    public static boolean isPortrait(Context context){
        return context.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT;
    }

    /** Toggles the activity between landscape and portrait orientation. */
    public static void checkoutOrientation(Activity activity) {
        activity.setRequestedOrientation(isPortrait(activity) ? ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE : ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
    }

    /**
     * Returns the status-bar height in pixels.
     *
     * Reads the internal com.android.internal.R$dimen.status_bar_height
     * resource via reflection — a known hack against a non-public API that
     * may break on some Android versions; failures fall through to the
     * 25dp fallback below.
     */
    public static int getStatusBarHeight(Context context) {
        int result = 0;
        try {
            Class<?> c = Class.forName("com.android.internal.R$dimen");
            Object o = c.newInstance();
            Field field = c.getField("status_bar_height");
            int x = (Integer)field.get(o);
            result = context.getResources().getDimensionPixelSize(x);
        } catch (Exception var6) {
            var6.printStackTrace();
        }
        // Fallback: 25dp is the traditional default status-bar height.
        if (result == 0) {
            result = dip2px(context, 25.0F);
        }
        return result;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.