text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
###############################################################################
#
# # `deploy-source.sh`
#
# This script will check the local `source` directory for any csv files,
# gzip them, and upload them to S3 for use with the build script.
#
# Required environment variables (may come from .env.local):
#   AWS_ACCESS_ID, AWS_SECRET_KEY, DATA_INPUT (destination S3 bucket)
#
###############################################################################
# load env vars
ENV_FILE=.env.local
if [ -f "$ENV_FILE" ]; then
echo "loading environment vars from $ENV_FILE"
# strip comments and CRs, expand $VAR references, then export each pair
export $(echo $(cat "$ENV_FILE" | sed 's/#.*//g' | sed 's/\r//g' | xargs) | envsubst)
fi
if [ ! -d "./source" ]
then
echo "source directory does not exist. create a source directory and place csv files in it." >&2
exit 1
fi
# configure aws cli -- fail fast if any required variable is missing
if [[ -z "${AWS_ACCESS_ID}" ]]; then
printf '%s\n' "Missing AWS_ACCESS_ID environment variable, could not configure AWS CLI." >&2
exit 1
fi
if [[ -z "${AWS_SECRET_KEY}" ]]; then
printf '%s\n' "Missing AWS_SECRET_KEY environment variable, could not configure AWS CLI." >&2
exit 1
fi
if [[ -z "${DATA_INPUT}" ]]; then
printf '%s\n' "Missing DATA_INPUT environment variable, no destination S3 bucket." >&2
exit 1
fi
aws configure set aws_access_key_id "$AWS_ACCESS_ID"
aws configure set aws_secret_access_key "$AWS_SECRET_KEY"
aws configure set default.region us-east-1
for f in ./source/*.csv
do
# when no csv exists the glob stays literal -- skip it instead of failing gzip
[ -e "$f" ] || continue
echo "Processing $f file..."
gzip "$f" || exit 1
done
aws s3 cp ./source "s3://$DATA_INPUT/" --recursive
#!/bin/bash
# Issue a Consul server certificate from Vault, install the CA chain,
# render the Consul server config and systemd unit, then (re)start the agent.
set -e
export VAULT_ADDR="http://$VAULT_HOSTNAME.$DOMAIN:8200"
# The token is a credential -- confirm it is present but never log its value.
: "${VAULT_TOKEN:?VAULT_TOKEN must be set}"
echo "Vault token is set (value not logged)"
config_dir="/etc/consul.d"
mkdir -p "$config_dir/ca"
# Request a server cert; the JSON response carries cert, key and CA chain.
response=$(vault write pki/issue/cert -format=json \
common_name="$HOSTNAME.$DOMAIN" \
alt_names="server.dc1.consul")
# Write each CA-chain certificate to its own file, named by its commonName.
chain_len=$(echo "$response" | jq '.data.ca_chain | length')
for (( i=0; i<chain_len; i++ )); do
cert=$(echo "$response" | jq -r ".data.ca_chain[$i]")
name=$(echo "$cert" | openssl x509 -noout -subject -nameopt multiline | sed -n 's/ *commonName *= //p' | sed 's/\s//g')
echo "$cert" > "$config_dir/ca/$name.pem"
done
echo "$response" | jq -r .data.private_key > "$config_dir/consul.key"
echo "$response" | jq -r .data.certificate > "$config_dir/consul.crt"
echo "$response" | jq -r .data.issuing_ca >> "$config_dir/consul.crt"
# NOTE(review): the gossip "encrypt" key below is hardcoded in the script;
# consider sourcing it from Vault/env instead of committing it.
(
cat <<-EOF
{
"bootstrap_expect": 3,
"client_addr": "0.0.0.0",
"data_dir": "/var/consul",
"leave_on_terminate": true,
"rejoin_after_leave": true,
"retry_join": ["consul1.$DOMAIN", "consul2.$DOMAIN", "consul3.$DOMAIN"],
"server": true,
"ui": true,
"encrypt": "oNMJiPZRlaP8RnQiQo9p8MMK5RSJ+dXA2u+GjFm1qx8=",
"verify_incoming_rpc": true,
"verify_incoming_https": false,
"verify_outgoing": true,
"verify_server_hostname": true,
"ca_path": "$config_dir/ca/",
"cert_file": "$config_dir/consul.crt",
"key_file": "$config_dir/consul.key",
"ports": {
"http": -1,
"https": 8501
}
}
EOF
) | sudo tee "$config_dir/consul.json"
# \$MAINPID must reach the unit file literally so systemd expands it;
# unescaped it is expanded (to empty) by this shell, breaking ExecReload.
(
cat <<-EOF
[Unit]
Description=consul agent
Requires=network-online.target
After=network-online.target
[Service]
Restart=on-failure
ExecStart=/usr/bin/consul agent -config-file=$config_dir/consul.json -bind '{{ GetInterfaceIP "eth0" }}'
ExecReload=/bin/kill -HUP \$MAINPID
[Install]
WantedBy=multi-user.target
EOF
) | sudo tee /etc/systemd/system/consul.service
sudo systemctl enable consul.service
sudo systemctl restart consul
|
<?php
// Send a one-off HTML email whose body comes from a template file.
// configuration
$from = 'admin@example.org';
$nameFrom = 'Admin';
$to = 'recipient@example.org';
$subject = 'Email Subject';
$templateDirectory = __DIR__ . DIRECTORY_SEPARATOR . 'templates' . DIRECTORY_SEPARATOR;
$templatePath = $templateDirectory . 'email-template.html';
// file_get_contents() returns false on failure; without this check a
// missing/unreadable template would silently send an empty email body.
$message = @file_get_contents($templatePath);
if ($message === false) {
    exit('Could not read email template: ' . $templatePath);
}
// Setup PHPMailer
require 'vendor/autoload.php';
$mail = new PHPMailer\PHPMailer\PHPMailer();
$mail->setFrom($from, $nameFrom);
$mail->addAddress($to);
$mail->Subject = $subject;
$mail->Body = $message;
$mail->isHTML(true);
// send email
if(!$mail->send()) {
echo 'Message could not be sent.';
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
else {
echo "Message has been sent";
}
#!/bin/sh
#
# Script for starting/stopping LibreOffice without restarting Alfresco
#
# Copyright 2013-2016 Loftux AB, Peter Löfgren
# Distributed under the Creative Commons Attribution-ShareAlike 3.0 Unported License (CC BY-SA 3.0)
# -------
# JDK locations
export JAVA_HOME="/usr/lib/jvm/java-8-oracle"
export JRE_HOME=$JAVA_HOME/jre
# User under which tomcat will run
USER=alfresco
ALF_HOME=/opt/alfresco
cd "$ALF_HOME"
# export LC_ALL else openoffice may use en settings on dates etc
# @@LOCALESUPPORT@@ looks like a placeholder substituted at packaging time -- TODO confirm
export LC_ALL=@@LOCALESUPPORT@@
export CATALINA_PID="${ALF_HOME}/tomcat.pid"
# Script exit status; never changed below, returned as-is at the end.
RETVAL=0
# Start the headless LibreOffice process used by Alfresco for document
# conversion -- but only if it is not already up AND Alfresco (tomcat)
# itself appears to be running.
start() {
# Count processes whose command line mentions both "office" and "8100".
# NOTE(review): this ps|grep match is fragile (any command line containing
# both strings counts); pgrep -f would be sturdier.
OFFICE_PORT=`ps ax|grep office|grep 8100|wc -l`
if [ $OFFICE_PORT -ne 0 ]; then
echo "Alfresco Open Office service already started"
# Report the PID of the first matching process.
CURRENT_PROCID=`ps axf|grep office|grep 8100|awk -F " " 'NR==1 {print $1}'`
echo $CURRENT_PROCID
else
#Only start if Alfresco is already running
# Tomcat's shutdown port (8005) in LISTEN state is used as the liveness probe.
SHUTDOWN_PORT=`netstat -vatn|grep LISTEN|grep 8005|wc -l`
export JAVA_HOME=$JAVA_HOME
if [ $SHUTDOWN_PORT -ne 0 ]; then
# Launch soffice headless as $USER, accepting UNO connections on localhost:8100.
/bin/su -s /bin/bash $USER -c "/opt/libreoffice6.4/program/soffice.bin \"--accept=socket,host=localhost,port=8100;urp;StarOffice.ServiceManager\" \"-env:UserInstallation=file:///opt/alfresco/alf_data/oouser\" --nologo --headless --nofirststartwizard --norestore --nodefault &" >/dev/null
echo "Alfresco Open Office starting"
logger Alfresco Open Office service started
fi
fi
}
# Stop the headless LibreOffice process if it is running.
# Only LibreOffice is touched; Tomcat/Alfresco keeps running.
stop() {
# Same fragile ps|grep match as in start(): any process whose command line
# contains both "office" and "8100" is counted.
OFFICE_PORT=`ps ax|grep office|grep 8100|wc -l`
if [ $OFFICE_PORT -ne 0 ]; then
echo "Alfresco Open Office started, killing"
# First matching PID only; sends the default SIGTERM.
CURRENT_PROCID=`ps axf|grep office|grep 8100|awk -F " " 'NR==1 {print $1}'`
echo $CURRENT_PROCID
kill $CURRENT_PROCID
logger Alfresco Open Office service stopped
fi
}
# Report whether the headless LibreOffice process (UNO port 8100) is up.
status() {
  # Any process mentioning both "office" and "8100" counts as running.
  office_count=`ps ax|grep office|grep 8100|wc -l`
  if [ "$office_count" -ne 0 ]; then
    echo "Alfresco LibreOffice service started"
  else
    echo "Alfresco LibreOffice service NOT started"
  fi
}
# Dispatch on the requested action; unknown actions print usage and fail.
case "$1" in
  start)
    start
    ;;
  stop)
    stop
    ;;
  restart)
    # Give the old process a moment to release port 8100 before restarting.
    stop
    sleep 2
    start
    ;;
  status)
    status
    ;;
  *)
    echo "Usage: $0 {start|stop|restart|status}"
    exit 1
    ;;
esac
exit $RETVAL
|
#!/usr/bin/env bash
# CI script: provision and boot an Android emulator, build the Uno
# SamplesApp UI-test assembly, run it under the NUnit console runner,
# and collect screenshots / device logs as build artifacts.
# Requires CI-provided env vars: BUILD_SOURCESDIRECTORY,
# BUILD_ARTIFACTSTAGINGDIRECTORY, ANDROID_HOME, ANDROID_SIMULATOR_APILEVEL,
# UITEST_SNAPSHOTS_ONLY.
set -euo pipefail
IFS=$'\n\t'
export BUILDCONFIGURATION=Release
cd $BUILD_SOURCESDIRECTORY/build
# uncomment the following lines to override the installed Xamarin.Android SDK
# wget -nv https://jenkins.mono-project.com/view/Xamarin.Android/job/xamarin-android-d16-2/49/Azure/processDownloadRequest/xamarin-android/xamarin-android/bin/BuildRelease/Xamarin.Android.Sdk-OSS-9.4.0.59_d16-2_6d9b105.pkg
# sudo installer -verbose -pkg Xamarin.Android.Sdk-OSS-9.4.0.59_d16-2_6d9b105.pkg -target /
# Install AVD files
# NOTE(review): the next line hardcodes android-28 while the following one
# uses $ANDROID_SIMULATOR_APILEVEL -- likely a leftover; confirm which is needed.
echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install 'system-images;android-28;google_apis;x86'
echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install "system-images;android-$ANDROID_SIMULATOR_APILEVEL;google_apis;x86"
# Create emulator
echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd -n xamarin_android_emulator -k "system-images;android-$ANDROID_SIMULATOR_APILEVEL;google_apis;x86" --sdcard 128M --force
echo $ANDROID_HOME/emulator/emulator -list-avds
echo "Starting emulator"
# Start emulator in background
nohup $ANDROID_HOME/emulator/emulator -avd xamarin_android_emulator -skin 1280x800 -memory 2048 -no-audio -no-snapshot -netfast -qemu > /dev/null 2>&1 &
export IsUiAutomationMappingEnabled=true
# build the tests, while the emulator is starting
msbuild /r /p:Configuration=$BUILDCONFIGURATION $BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.UITests/SamplesApp.UITests.csproj
# Wait for the emulator to finish booting
$BUILD_SOURCESDIRECTORY/build/android-uitest-wait-systemui.sh
$ANDROID_HOME/platform-tools/adb devices
echo "Emulator started"
# Snapshot-only runs use a dedicated NUnit filter and artifact folder name.
if [ "$UITEST_SNAPSHOTS_ONLY" == 'true' ];
then
export TEST_FILTERS="namespace == 'SamplesApp.UITests.Snap'"
export SCREENSHOTS_FOLDERNAME=android-$ANDROID_SIMULATOR_APILEVEL-Snap
else
export TEST_FILTERS="namespace != 'SamplesApp.UITests.Snap'"
export SCREENSHOTS_FOLDERNAME=android-$ANDROID_SIMULATOR_APILEVEL
fi
export UNO_UITEST_SCREENSHOT_PATH=$BUILD_ARTIFACTSTAGINGDIRECTORY/screenshots/$SCREENSHOTS_FOLDERNAME
export UNO_UITEST_PLATFORM=Android
export UNO_UITEST_ANDROIDAPK_PATH=$BUILD_SOURCESDIRECTORY/build/uitests-android-build/android/uno.platform.unosampleapp-Signed.apk
cd $BUILD_SOURCESDIRECTORY/build
mono nuget/NuGet.exe install NUnit.ConsoleRunner -Version 3.10.0
mkdir -p $UNO_UITEST_SCREENSHOT_PATH
# Move to the screenshot directory so that the output path is the proper one, as
# required by Xamarin.UITest
cd $UNO_UITEST_SCREENSHOT_PATH
# '|| true' keeps the script alive on test failures so logs are still collected.
mono $BUILD_SOURCESDIRECTORY/build/NUnit.ConsoleRunner.3.10.0/tools/nunit3-console.exe \
--inprocess \
--agents=1 \
--workers=1 \
--result=$BUILD_SOURCESDIRECTORY/build/TestResult.xml \
--where "$TEST_FILTERS" \
$BUILD_SOURCESDIRECTORY/src/SamplesApp/SamplesApp.UITests/bin/$BUILDCONFIGURATION/net47/SamplesApp.UITests.dll \
|| true
# Capture the device log and the APK that was tested as artifacts.
$ANDROID_HOME/platform-tools/adb shell logcat -d > $BUILD_ARTIFACTSTAGINGDIRECTORY/screenshots/$SCREENSHOTS_FOLDERNAME/android-device-log.txt
cp $UNO_UITEST_ANDROIDAPK_PATH $BUILD_ARTIFACTSTAGINGDIRECTORY
|
<filename>modules/redis/example/deps.ts<gh_stars>10-100
// Barrel re-export of the framework primitives used by the Redis example,
// so example files import everything from this local deps.ts (Deno convention).
export {
Application,
Context,
Controller,
Get,
isHttpError,
Module,
NestFactory,
send,
Status,
} from "../../../mod.ts";
|
# Compute a Subresource Integrity (SRI) digest for a file.
# usage: sha FILENAME.js
# Output: "sha384-<base64 digest>" suitable for an integrity="" attribute.
sha() {
  # Read the file directly (no useless cat); -A keeps base64 on one line.
  tempShaOutput=$(openssl dgst -sha384 -binary < "$1" | openssl base64 -A)
  printf 'sha384-%s\n' "$tempShaOutput"
}
sha keyboard-focus-trap.js
|
#!/bin/bash
# Licensed to Systerel under one or more contributor license
# agreements. See the NOTICE file distributed with this work
# for additional information regarding copyright ownership.
# Systerel licenses this file to you under the Apache
# License, Version 2.0 (the "License"); you may not use this
# file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Script to generate data necessary to build the S2OPC project:
#
# Steps (if necessary):
# - generate sources files from B model
# - generate sources files for examples address space for tests
#
# This generation script uses the free AtelierB
BMODEL_DIR=bsrc
PREBUILD=pre-build
PROJET=s2opc_genc
LOGPATH="$(pwd)/pre-build.log"
# Redirect all output and errors to log file
echo "Pre-build log" > "$LOGPATH"
EXITCODE=0
echo "Prepare B project and tools configuration" | tee -a "$LOGPATH"
# Set a pre-build local environment for "m" script
export ATELIERB_VERSION="4.2.1"
export STARTBB=startBB
BASE="$(pwd)/$PREBUILD"
export BASE
# Create pre-build directory to check B model and generate C code
mkdir -p "$BASE" >> "$LOGPATH"
# Configure "m" script environment
# liste_projet points at the projects list file read by the AtelierB tooling
export liste_projet="$BASE/liste_projets.txt"
echo $PROJET . > "$liste_projet"
export TOOLING_DIR=/home/tooling
export TRAD_JAVA="java -jar $TOOLING_DIR/bin/trad/b2c.jar"
export ROOT="toolkit_header"
# Make symbolic link to all files in bsrc/
cd "$BASE" >> "$LOGPATH" || exit 1
find ../$BMODEL_DIR -maxdepth 1 -type f -exec ln -f -s {} . \; >> "$LOGPATH" || exit 1
cd - || exit 1
# Put the "m" and translator tools first on PATH for the make step below
PATH="$TOOLING_DIR"/bin/m:"$TOOLING_DIR"/bin/trad:"$PATH"
echo "Generate C sources files from B model" | tee -a "$LOGPATH"
if ! make VERBOSE=1 -C "$BASE" >> "$LOGPATH";
then
echo "ERROR: generating C source files from B model" | tee -a "$LOGPATH"
EXITCODE=1
fi
# Remove pre-build directory in any case
rm -rf ./$PREBUILD
if [[ $EXITCODE -eq 0 ]]; then
echo "Completed with SUCCESS" | tee -a "$LOGPATH"
else
echo "Completed with ERRORS" | tee -a "$LOGPATH"
fi
exit $EXITCODE
|
<gh_stars>0
/**
 * A transaction output: `value` units payable to `recipient`.
 * (Presumably `recipient` is an address/public-key string -- TODO confirm
 * against the code that constructs these.)
 */
export interface TxOutModel {
recipient: string;
value: number;
}
#!/bin/bash
# Build a Debian package of HSTR: regenerate debian/changelog, create the
# upstream tarball, then run debuild (binary and source builds).
export SCRIPTHOME=`pwd`
# Pulls in HH, HHSRC, HHFULLVERSION, UBUNTUVERSION, HHBZRMSG used below.
. ./debian-env.sh
# Write a one-entry debian/changelog to the file named by $1.
function createChangelog() {
# Debian tooling changelog hints:
# - signature line MUST have one whitespace prefix
# - signature line MUST have double space between email and timestamp
# - trailing lines must have exactly one space
export MYTS=`date "+%a, %d %b %Y %H:%M:%S"`
echo "Changelog timestamp: ${MYTS}"
echo -e "hstr (${HHFULLVERSION}) ${UBUNTUVERSION}; urgency=low" > $1
echo " " >> $1
echo -e " * ${HHBZRMSG}" >> $1
echo " " >> $1
# NOTE(review): the +0100 offset is hardcoded while `date` emits local time --
# confirm this is always run in a CET environment.
echo " -- Martin Dvorak (Dvorka) <martin.dvorak@mindforger.com> ${MYTS} +0100" >> $1
}
# Create the upstream tarball (../${HH}.tgz and ../${HH}.orig.tar.gz) from a
# clean copy of the source tree with bzr metadata removed.
# Must be called from inside ${HH}; returns with ../${HH} as the cwd.
function createTarball() {
cd ..
mkdir work
cd work
cp -vrf ../${HH} .
# Strip VCS metadata so it does not end up in the .orig tarball.
rm -rvf ${HH}/.bzr
tar zcf ../${HH}.tgz ${HH}
cp -vf ../${HH}.tgz ../${HH}.orig.tar.gz
cd ../${HH}
}
echo -e "\n_ HSTR deb build _______________________________________________\n"
# Stage a fresh debian/ directory next to the source tree and regenerate
# the changelog and packaging control files from ${HHSRC}.
rm -rvf ../debian
cp -rvf ${HHSRC}/debian ..
createChangelog ../debian/changelog
cp -vf debian/rules ../debian/rules
cp -vf debian/hstr.dirs ../debian/hstr.dirs
cp -vf debian/watch ../debian/watch
# cleanup
rm -vrf ../dist ../bin ../doc ../pad.xml
# Rename the checkout to the versioned directory name debuild expects.
cd ../..
mv hstr ${HH}
cd ${HH}
createTarball
# Binary build (unsigned), then source-only build.
debuild -us -uc
debuild -S
# eof
|
"use strict";
// Auto-generated icon definition ("circle_notifications", 24x24 viewBox):
// a transparent bounding-box path plus the bell-in-circle glyph path.
// Do not hand-edit the path data; regenerate from the source SVG instead.
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_circle_notifications = void 0;
var ic_circle_notifications = {
"viewBox": "0 0 24 24",
"children": [{
"name": "path",
"attribs": {
"d": "M0 0h24v24H0z",
"fill": "none"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 16.5c-.83 0-1.5-.67-1.5-1.5h3c0 .83-.67 1.5-1.5 1.5zm5-2.5H7v-1l1-1v-2.61C8 9.27 9.03 7.47 11 7v-.5c0-.57.43-1 1-1s1 .43 1 1V7c1.97.47 3 2.28 3 4.39V14l1 1v1z"
},
"children": []
}]
};
exports.ic_circle_notifications = ic_circle_notifications;
// Doxygen-generated navigation index for TestDynamicBackend.cpp
// (symbol name -> documentation anchor). Do not edit by hand.
var _test_dynamic_backend_8cpp =
[
[ "BackendFactory", "_test_dynamic_backend_8cpp.xhtml#a6a075b7c32d5511f95903749eef44b22", null ],
[ "GetBackendId", "_test_dynamic_backend_8cpp.xhtml#adaff295134ed2825ae43a8e9281b6f2a", null ],
[ "GetVersion", "_test_dynamic_backend_8cpp.xhtml#aa8f09f94b0356f870c9bdb9c594cddfc", null ],
[ "TestDynamicBackendId", "_test_dynamic_backend_8cpp.xhtml#aec75c1d78333f881f2516f55a0ed00df", null ]
];
<filename>externals/cmsis/CMSIS_5/docs/Driver/html/group__nand__interface__gr_structARM__NAND__CAPABILITIES.js
// Doxygen-generated member index for the ARM_NAND_CAPABILITIES struct
// (field name -> documentation anchor). Do not edit by hand.
var group__nand__interface__gr_structARM__NAND__CAPABILITIES =
[
[ "event_device_ready", "group__nand__interface__gr.html#a5f347e9b63764bbb657f52dc20682128", null ],
[ "reentrant_operation", "group__nand__interface__gr.html#ae0514834750c7452431717a881471e2b", null ],
[ "sequence_operation", "group__nand__interface__gr.html#afa4b798731b1154878c26dda3f090acf", null ],
[ "vcc", "group__nand__interface__gr.html#a35cfa22b2140b109fe24b97c42d5a5ed", null ],
[ "vcc_1v8", "group__nand__interface__gr.html#a0e7d3b9258d468492b22de55d855a06e", null ],
[ "vccq", "group__nand__interface__gr.html#ab1cdfce6eb051bed7b904e0fd1719afa", null ],
[ "vccq_1v8", "group__nand__interface__gr.html#a1896a7548bb6fab285f23cc0d0b23d7d", null ],
[ "vpp", "group__nand__interface__gr.html#a75b97f7c917bba90b2f5c747d6857d23", null ],
[ "wp", "group__nand__interface__gr.html#afe7f5b149b8d92859398315b1ad31ddc", null ],
[ "ce_lines", "group__nand__interface__gr.html#ad5dd0fcdd7f6d5e5cd739f73323a2b11", null ],
[ "ce_manual", "group__nand__interface__gr.html#a2b8044d986995b183b057217643466bf", null ],
[ "rb_monitor", "group__nand__interface__gr.html#a69f5e734ee4a9bb501718cf78a740c3e", null ],
[ "data_width_16", "group__nand__interface__gr.html#a0f22baea13daa9101bf6fc1fdfddc747", null ],
[ "ddr", "group__nand__interface__gr.html#aa9acfde38637fe749aa9271c0a8dae1a", null ],
[ "ddr2", "group__nand__interface__gr.html#ae086693990cbd5d628014c0fcc7c1f2c", null ],
[ "sdr_timing_mode", "group__nand__interface__gr.html#a21036f2047273d90c0af0e97031df5a9", null ],
[ "ddr_timing_mode", "group__nand__interface__gr.html#a00c1f5db7d7c4abe7556733c36da7783", null ],
[ "ddr2_timing_mode", "group__nand__interface__gr.html#a6d9b66da0e56d04d545e0bb6841891b2", null ],
[ "driver_strength_18", "group__nand__interface__gr.html#ae672b2a65dd3d0b93812c088491c4552", null ],
[ "driver_strength_25", "group__nand__interface__gr.html#ae87c19872b838dac7d3136a3fd466f6a", null ],
[ "driver_strength_50", "group__nand__interface__gr.html#aef3d6e1522a6cf7fb87fd113dcd43ad5", null ],
[ "reserved", "group__nand__interface__gr.html#aa43c4c21b173ada1b6b7568956f0d650", null ]
];
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# don't add licenses to the site directory, it will break the docs
# and will add them to the theme which is a submodule (bad)
# 'command -v' is the portable existence check ('which' is non-standard).
# NOTE(review): 'go get' for tool installation is deprecated in modern Go;
# consider 'go install github.com/google/addlicense@latest'.
command -v addlicense >/dev/null 2>&1 || go get github.com/google/addlicense
# NOTE(review): parsing `ls` breaks on names with whitespace; acceptable only
# because repo top-level names are known -- otherwise use find -print0|xargs -0.
ls | grep -v site | grep -v docs | grep -v e2e | xargs $GOBIN/addlicense -y 2021 -l apache
# process e2e separately because expected result files shouldn't have license
# otherwise the test will fail
find e2e -type f | grep -v '.expected/results.yaml' | xargs $GOBIN/addlicense -y 2021 -l apache
|
/* Sidebar container: 20%-wide column floated to the left. */
#sidebar {
float: left;
width: 20%;
background-color: #eee;
}
/* Strip default list chrome (bullets, spacing) inside the sidebar. */
#sidebar ul {
margin: 0;
padding: 0;
list-style-type: none;
}
/* Block-level links so the whole row is clickable. */
#sidebar li a {
display: block;
padding: 10px;
background-color: #ccc;
text-decoration: none;
color: #222;
}
/* Darken on hover for visual feedback. */
#sidebar li a:hover {
background-color: #555;
color: #fff;
}
package libs.trustconnector.scdp.smartcard.application.bl;
import libs.trustconnector.scdp.smartcard.application.*;
import libs.trustconnector.scdp.smartcard.*;
import libs.trustconnector.scdp.util.*;
import libs.trustconnector.scdp.smartcard.SmartCardReader;
import libs.trustconnector.scdp.smartcard.application.Application;
import libs.trustconnector.scdp.util.ByteArray;
/**
 * Host-side client for a smart-card chip's boot-loader application.
 * Wraps the APDU exchanges used to enter the boot loader, select a flash
 * bank, read/write/erase flash, and get/set HAL parameters.
 * Success is checked by comparing the response status word to 36864
 * (0x9000, the ISO 7816 "success" SW).
 */
public class BootLoader extends Application
{
// Currently selected flash bank; -1 means no bank selected yet.
int curBank;
// HAL parameter identifiers accepted by getParam()/setParam().
public static final byte HAL_PARAM_LOCK = 0;
public static final byte HAL_PARAM_T0_ATR = 1;
public static final byte HAL_PARAM_SWP_TYPEA_UID = 2;
public static final byte HAL_PARAM_SWP_TYPEA_SAK = 3;
public static final byte HAL_PARAM_SWP_TYPEA_ATQA = 4;
public static final byte HAL_PARAM_SWP_TYPEA_ATS_HIS = 5;
public static final byte HAL_PARAM_SWP_TYPEA_FWI_SFGI = 6;
public static final byte HAL_PARAM_SWP_TYPEA_CID = 7;
public static final byte HAL_PARAM_SWP_TYPEA_DATARATE = 8;
public static final byte HAL_PARAM_SWP_TYPEB_PUPI = 9;
public static final byte HAL_PARAM_SWP_TYPEB_AFI = 10;
public static final byte HAL_PARAM_SWP_TYPEB_ATQB = 11;
public static final byte HAL_PARAM_SWP_TYPEB_HIGH_LAYER_RSP = 12;
public static final byte HAL_PARAM_SWP_TYPEB_DATARATE = 13;
public static final byte HAL_PARAM_TCLA_UID = 14;
public static final byte HAL_PARAM_TCLA_SAK = 15;
public static final byte HAL_PARAM_TCLA_ATQA = 16;
public static final byte HAL_PARAM_TCLA_ATS = 17;
public static final byte HAL_PARAM_TCLB_PUPI = 18;
public static final byte HAL_PARAM_TCLB_ATQB = 19;
public static final byte HAL_PARAM_CHIP_TYPE = 20;
public static final byte HAL_PARAM_CHIP_ID = 21;
// Extended parameter ids start at 0x80 (Byte.MIN_VALUE == -128).
// NOTE(review): _START and _VER share the same value (-128) -- confirm intentional.
public static final byte HAL_PARAM_CHIP_EXT_START = Byte.MIN_VALUE;
public static final byte HAL_PARAM_CHIP_EXT_VER = Byte.MIN_VALUE;
public static final byte HAL_PARAM_CHIP_EXT_FC = -127;
public static final byte HAL_PARAM_CHIP_EXT_RZM = -126;
public static final byte HAL_PARAM_CHIP_EXT_CLK = -125;
/** Binds this boot-loader client to a card reader; no bank selected yet. */
public BootLoader(final SmartCardReader reader) {
super(reader, null);
this.curBank = -1;
}
/**
 * Reset the card and send the "Enter BL" APDU (CLA/INS 00F6).
 * @param rsKey optional key replacing the default 8-byte command data; may be null
 * @return true if the card answered SW 0x9000
 */
public boolean enterBL(final byte[] rsKey) {
this.reader.reset();
this.apdu.setCAPDU("Enter BL", "00F60000083232323232323232");
if (rsKey != null) {
this.apdu.setCData(rsKey);
}
this.reader.transmit(this.apdu);
return this.apdu.getSW() == 36864;
}
/**
 * Select the flash bank addressed by subsequent read/write/erase commands.
 * Caches the bank id in curBank on success.
 * @return true if the card answered SW 0x9000
 */
public boolean setBank(final int bankID) {
this.apdu.setCAPDU("Set Bank " + String.format("0x%02X", bankID), "00F1000001FF");
final byte[] bank = { (byte)bankID };
this.apdu.setCData(bank);
this.reader.transmit(this.apdu);
if (this.apdu.getSW() == 36864) {
this.curBank = bankID;
return true;
}
return false;
}
/**
 * Read flash, switching banks as needed (bank = bits 16..23 of the offset)
 * and chunking reads into at most 256 bytes per APDU.
 * @return the concatenated data, or null if a bank switch failed
 */
public byte[] readFlash(int offset, int length) {
final ByteArray rsp = new ByteArray();
while (length > 0) {
final int b = offset >> 16 & 0xFF;
if (b != this.curBank && !this.setBank(offset >> 16 & 0xFF)) {
return null;
}
final int readLen = (length > 256) ? 256 : length;
this.apdu.setCAPDU("Read Flash Offset=" + String.format("0x%04X", offset), "00F3000000");
// P1/P2 carry the 16-bit in-bank offset, P3 the chunk length.
this.apdu.setP1(offset >> 8);
this.apdu.setP2(offset & 0xFF);
this.apdu.setP3(readLen);
this.reader.transmit(this.apdu);
offset += readLen;
length -= readLen;
rsp.append(this.apdu.getRData());
}
return rsp.toBytes();
}
/**
 * Write content to flash in chunks of at most 128 bytes per APDU,
 * switching banks as needed.
 * @return false if a bank switch fails or any chunk is rejected (SW != 0x9000)
 */
public boolean writeFlash(int offset, final byte[] content) {
int off = 0;
int leftLen = content.length;
while (leftLen > 0) {
final int b = offset >> 16 & 0xFF;
if (b != this.curBank && !this.setBank(offset >> 16 & 0xFF)) {
return false;
}
final int updateLen = (leftLen > 128) ? 128 : leftLen;
this.apdu.setCAPDU("Write Flash Offset=" + String.format("0x%04X", offset), "00F7000000");
this.apdu.setP1(offset >> 8);
this.apdu.setP2(offset & 0xFF);
this.apdu.setCData(content, off, updateLen);
this.reader.transmit(this.apdu);
leftLen -= updateLen;
off += updateLen;
offset += updateLen;
if (this.apdu.getSW() != 36864) {
return false;
}
}
return true;
}
/**
 * Erase pageCount flash pages starting at offset (single APDU).
 * @return true if the card answered SW 0x9000
 */
public boolean eraseFlash(final int offset, final int pageCount) {
final int b = offset >> 16 & 0xFF;
if (b != this.curBank && !this.setBank(offset >> 16 & 0xFF)) {
return false;
}
this.apdu.setCAPDU("Erase Flash Offset=" + String.format("0x%04X", offset), "00F5000000");
this.apdu.setP1(offset >> 8);
this.apdu.setP2(offset & 0xFF);
final byte[] bank = { (byte)pageCount };
this.apdu.setCData(bank);
this.reader.transmit(this.apdu);
return this.apdu.getSW() == 36864;
}
/**
 * Read a HAL parameter (one of the HAL_PARAM_* ids, passed in P1).
 * @return the response data, or null if SW != 0x9000
 */
byte[] getParam(final int paramType) {
this.apdu.setCAPDU("Get Param", "0030000000");
this.apdu.setP1(paramType);
this.reader.transmit(this.apdu);
if (this.apdu.getSW() == 36864) {
return this.apdu.getRData();
}
return null;
}
/**
 * Write a HAL parameter (id in P1, new value as command data).
 * @return true if the card answered SW 0x9000
 */
boolean setParam(final int paramType, final byte[] newParam) {
this.apdu.setCAPDU("Set Param", "8030000000");
this.apdu.setP1(paramType);
this.apdu.setCData(newParam);
this.reader.transmit(this.apdu);
return this.apdu.getSW() == 36864;
}
}
|
# Run a single SQL string as the MySQL root user.
# -p with no value prompts interactively for the password on every call.
function mysqlexec {
mysql -u root -p -e "$1";
}
# Create database $1 with the utf8 character set.
# NOTE: hyphenated function names are a bash extension, not POSIX sh.
function mysql-create-db {
mysqlexec "create database \`$1\` CHARACTER SET utf8;";
}
# Drop database $1 if it exists (no error when absent).
function mysql-drop-db {
mysqlexec "drop database if exists \`$1\`;";
}
# Drop (if present) and recreate database $1 in a single mysql invocation,
# so the root password is prompted for only once.
function mysql-clear-db {
mysqlexec "drop database if exists \`$1\`; create database \`$1\` CHARACTER SET utf8;"
}
/*==================================================================*\
| EXIP - Embeddable EXI Processor in C |
|--------------------------------------------------------------------|
| This work is licensed under BSD 3-Clause License |
| The full license terms and conditions are located in LICENSE.txt |
\===================================================================*/
/**
* @file check_streamIO.c
* @brief Tests the interface to the EXI stream reader/decoder
*
* @date Aug 18, 2010
* @author <NAME>
* @author <NAME>
* @version 0.5
* @par[Revision] $Id$
*/
#include <stdlib.h>
#include <check.h>
#include "streamRead.h"
#include "streamWrite.h"
#include "streamDecode.h"
#include "streamEncode.h"
#include "procTypes.h"
#include "errorHandle.h"
#include "stringManipulate.h"
#include "memManagement.h"
#include "ioUtil.h"
/* BEGIN: streamRead tests */
/* readNextBit(): single-bit reads must return the correct bit value,
 * advance bitPointer, roll bufferIndx over at byte boundaries, and report
 * EXIP_BUFFER_END_REACHED once bufContent is exhausted. */
START_TEST (test_readNextBit)
{
EXIStream testStream;
char buf[2];
boolean bit_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
/* No backing I/O stream: the buffer is the whole input. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = readNextBit(&testStream, &bit_val);
fail_unless (bit_val == 1,
"The bit 1 from the stream is read as 0");
fail_unless (err == EXIP_OK,
"readNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The readNextBit function did not move the bit Pointer of the stream correctly");
// Set the bit pointer to the first byte boundary
testStream.context.bitPointer = 7;
err = readNextBit(&testStream, &bit_val);
fail_unless (bit_val == 0,
"The bit 0 from the stream is read as 1");
fail_unless (err == EXIP_OK,
"readNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0 && testStream.context.bufferIndx == 1,
"The readNextBit function did not move the bit Pointer of the stream correctly");
// Set the bit pointer to the second byte boundary
testStream.context.bitPointer = 7;
err = readNextBit(&testStream, &bit_val);
fail_unless (err == EXIP_OK,
"readNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0 && testStream.context.bufferIndx == 2,
"The readNextBit function did not move the bit Pointer of the stream correctly");
/* bufferIndx == bufContent now: the next read must fail cleanly. */
err = readNextBit(&testStream, &bit_val);
fail_unless (err == EXIP_BUFFER_END_REACHED, "Incorrect error code");
}
END_TEST
/* readBits(): multi-bit reads must decode the correct value, including
 * reads that straddle a byte boundary, and fail with
 * EXIP_BUFFER_END_REACHED when fewer bits remain than requested. */
START_TEST (test_readBits)
{
EXIStream testStream;
char buf[2];
unsigned int bits_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
/* First 4 bits are 1101 = 13. */
err = readBits(&testStream, 4, &bits_val);
fail_unless (bits_val == 13,
"The bits 1101 from the stream are read as %d", bits_val);
fail_unless (err == EXIP_OK,
"readBits returns error code %d", err);
fail_unless (testStream.context.bitPointer == 4,
"The readBits function did not move the bit Pointer of the stream correctly");
// Set the bit pointer to the first byte boundary
testStream.context.bitPointer = 7;
/* 5 bits spanning the byte boundary: 00110 = 6. */
err = readBits(&testStream, 5, &bits_val);
fail_unless (bits_val == 6,
"The bits 00110 from the stream are read as %d", bits_val);
fail_unless (err == EXIP_OK,
"readNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 4 && testStream.context.bufferIndx == 1,
"The readBits function did not move the bit Pointer of the stream correctly");
/* Only 4 bits remain; asking for 5 must report end-of-buffer. */
err = readBits(&testStream, 5, &bits_val);
fail_unless (err == EXIP_BUFFER_END_REACHED, "Incorrect error code");
}
END_TEST
/* END: streamRead tests */
/* BEGIN: streamWrite tests */
/* writeNextBit(): single-bit writes must set/clear the addressed bit,
 * advance bitPointer (rolling bufferIndx over at byte boundaries), and
 * report EXIP_BUFFER_END_REACHED when the buffer is full. */
START_TEST (test_writeNextBit)
{
EXIStream testStream;
char buf[2];
errorCode err = EXIP_UNEXPECTED_ERROR;
int test;
testStream.context.bitPointer = 0;
buf[0] = (char) 0x01; /* 0b00000001 */
buf[1] = (char) 0x00; /* 0b00000000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = writeNextBit(&testStream, 1);
/* The MSB of the first byte must now be set. */
test = (buf[0] & 0x80 /* 0b10000000 */ ) != 0;
fail_unless (test == 1,
"The bit 1 was written as 0");
fail_unless (err == EXIP_OK,
"writeNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The writeNextBit function did not move the bit Pointer of the stream correctly");
// Set the bit pointer to the first byte boundary
testStream.context.bitPointer = 7;
err = writeNextBit(&testStream, 0);
/* The LSB (preset to 1 above) must have been cleared. */
test = (buf[0] & 0x01) != 0;
fail_unless (test == 0,
"The bit 0 was written as 1");
fail_unless (err == EXIP_OK,
"writeNextBit returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0 && testStream.context.bufferIndx == 1,
"The writeNextBit function did not move the bit Pointer of the stream correctly");
/* Position past the end of the buffer: the write must fail cleanly. */
testStream.context.bufferIndx = 2;
testStream.context.bitPointer = 0;
err = writeNextBit(&testStream, 0);
fail_unless (err == EXIP_BUFFER_END_REACHED, "Incorrect error code");
}
END_TEST
/* writeNBits(): multi-bit writes must encode the value in the addressed
 * bit range (including ranges spanning a byte boundary) and report
 * EXIP_BUFFER_END_REACHED when the value does not fit. */
START_TEST (test_writeNBits)
{
EXIStream testStream;
char buf[2];
errorCode err = EXIP_UNEXPECTED_ERROR;
int test, test1;
testStream.context.bitPointer = 0;
buf[0] = (char) 0xA0; /* 0b10100000 */
buf[1] = (char) 0xE0; /* 0b11100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.buffer.bufContent = 2;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
/* Write 19 into the top 7 bits of the first byte. */
err = writeNBits(&testStream, 7, 19);
test = ((unsigned int) buf[0]) >> 1;
fail_unless (test == 19,
"The number 19 was written as %d", test);
fail_unless (err == EXIP_OK,
"writeNBits returns error code %d", err);
fail_unless (testStream.context.bitPointer == 7,
"The writeNBits function did not move the bit Pointer of the stream correctly");
// Set the bit pointer to the first byte boundary
testStream.context.bitPointer = 7;
/* Write 5 bits (value 9) across the byte boundary: last bit of buf[0]
 * plus the top nibble of buf[1]. */
err = writeNBits(&testStream, 5, 9);
test = (buf[0] & 0x01 ) != 0;
test1 = (buf[1] & 0xF0 /* 0b11110000 */ ) >> 4;
fail_unless (test == 0 && test1 == 9,
"writeNBits function doesn't write correctly");
fail_unless (err == EXIP_OK,
"writeNBits returns error code %d", err);
fail_unless (testStream.context.bitPointer == 4 && testStream.context.bufferIndx == 1,
"The writeNBits function did not move the bit Pointer of the stream correctly");
/* Only 4 bits remain; writing 5 must report end-of-buffer. */
err = writeNBits(&testStream, 5, 16);
fail_unless (err == EXIP_BUFFER_END_REACHED, "Incorrect error code");
}
END_TEST
/* END: streamWrite tests */
/* BEGIN: streamDecode tests */
/* decodeNBitUnsignedInteger(): an n-bit unsigned value must be read MSB
 * first and bitPointer advanced by exactly n bits. */
START_TEST (test_decodeNBitUnsignedInteger)
{
EXIStream testStream;
char buf[2];
unsigned int bit_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
/* First 6 bits are 110101 = 53. */
err = decodeNBitUnsignedInteger(&testStream, 6, &bit_val);
fail_unless (bit_val == 53,
"The 110101 from the stream is read as %d", bit_val);
fail_unless (err == EXIP_OK,
"decodeNBitUnsignedInteger returns error code %d", err);
fail_unless (testStream.context.bitPointer == 6,
"The decodeNBitUnsignedInteger function did not move the bit Pointer of the stream correctly");
}
END_TEST
/* decodeBoolean(): a boolean is encoded as a single bit; it must be
 * decoded correctly and advance bitPointer by one. */
START_TEST (test_decodeBoolean)
{
EXIStream testStream;
char buf[2];
boolean bit_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = decodeBoolean(&testStream, &bit_val);
fail_unless (bit_val == 1,
"The the bit 1 from the stream is read as %d", bit_val);
fail_unless (err == EXIP_OK,
"decodeBoolean returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The decodeBoolean function did not move the bit Pointer of the stream correctly");
}
END_TEST
/* decodeUnsignedInteger(): a multi-byte varint-style Unsigned Integer
 * must decode to the expected value and leave the stream positioned on a
 * byte boundary after the consumed bytes. */
START_TEST (test_decodeUnsignedInteger)
{
EXIStream testStream;
char buf[3];
UnsignedInteger bit_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
buf[2] = (char) 0x48; /* 0b01001000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 3;
testStream.buffer.bufContent = 3;
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
/* 0xD4 has the continuation bit set, 0x60 does not: two bytes consumed. */
err = decodeUnsignedInteger(&testStream, &bit_val);
fail_unless (bit_val == 12372,
"The UnsignedInteger 12372 from the stream is read as %d", bit_val);
fail_unless (err == EXIP_OK,
"decodeUnsignedInteger returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0,
"The decodeUnsignedInteger function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 2,
"The decodeUnsignedInteger function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Decode a length-prefixed string: length octet 0x02 followed by the
 * two characters 'e' and 'T'. The trailing byte 0x52 must remain
 * unread (bufferIndx ends at 3). */
START_TEST (test_decodeString)
{
EXIStream testStream;
char buf[4];
String bit_val;
CharType cht[100];
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0x02; /* 0b00000010 */
buf[1] = (char) 0x65; /* 0b01100101 */ // e - ASCII
buf[2] = (char) 0x54; /* 0b01010100 */ // T - ASCII
buf[3] = (char) 0x52; /* 0b01010010 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 4;
testStream.buffer.bufContent = 4;
/* No backing I/O stream: decode straight from the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
/* Pre-point the String at a local buffer large enough for the result. */
bit_val.length = 0;
bit_val.str = cht;
err = decodeString(&testStream, &bit_val);
fail_unless (bit_val.length == 2,
"The String length of 2 is reported as %d from decodeString", bit_val.length);
fail_unless (bit_val.str[0] == 'e' && bit_val.str[1] == 'T',
"The String \"eT\" is decoded wrong by decodeString");
fail_unless (err == EXIP_OK,
"decodeString returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0,
"The decodeString function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 3,
"The decodeString function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Decode two consecutive length-prefixed binary blobs from one buffer:
 * first a 5-byte blob (length octet 0x05 at buf[0]), then an 8-byte
 * blob (length octet 0x08 at buf[6]). Each decode must return the raw
 * bytes unchanged and advance bufferIndx past the blob. */
START_TEST (test_decodeBinary)
{
EXIStream testStream;
char buf[20];
char testbuf[20];
int i;
char* res;
Index bytes = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
int same=1;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0x05; /* 0b00000101 */ //5
buf[1] = (char) 0xF0; /* 0b11110000 */
buf[2] = (char) 0xCC; /* 0b11001100 */
buf[3] = (char) 0xAA; /* 0b10101010 */
buf[4] = (char) 0x55; /* 0b01010101 */
buf[5] = (char) 0x33; /* 0b00110011 */
buf[6] = (char) 0x08; /* 0b00001000 */ //8
buf[7] = (char) 0x6E; /* 0b01101110 */
buf[8] = (char) 0xCA; /* 0b11001010 */
buf[9] = (char) 0x59; /* 0b01011001 */
buf[10] = (char) 0xD8; /* 0b11011000 */
buf[11] = (char) 0x59; /* 0b01011001 */
buf[12] = (char) 0xCA; /* 0b11001010 */
buf[13] = (char) 0x6C; /* 0b01101100 */
buf[14] = (char) 0xD8; /* 0b11011000 */
buf[15] = (char) 0x07; /* 0b00000111 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 20;
testStream.buffer.bufContent = 20;
/* No backing I/O stream: decode straight from the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
initAllocList(&testStream.memList);
/* Keep a pristine copy to compare the decoded output against. */
for(i=0;i<20;i++) testbuf[i]=buf[i];
testStream.context.bufferIndx = 0;
//Test1: 5-byte blob starting after the length octet at buf[0]
err = decodeBinary(&testStream, &res, &bytes);
for(i=0;i<5;i++)
{
if(res[i]!=testbuf[i+1])
{
same=0;
break;
}
}
fail_unless (err == EXIP_OK,
"decodeBinary returns error code %d", err);
fail_unless (bytes == 5,
"The length of the binary content is read as %d (actual : %d)", bytes,5);
fail_unless (same == 1,
"The binary content is read wrongly");
fail_unless (testStream.context.bitPointer == 0,
"The decodeBinary function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 6,
"The decodeBinary function did not move the byte Pointer of the stream correctly");
//Test2: 8-byte blob starting after the length octet at buf[6]
bytes=0;
err = EXIP_UNEXPECTED_ERROR;
err = decodeBinary(&testStream, &res, &bytes);
same = 1;
for(i=0;i<8;i++)
{
if(res[i]!=testbuf[i+7])
{
same=0;
break;
}
}
fail_unless (err == EXIP_OK,
"decodeBinary returns error code %d", err);
fail_unless (bytes == 8,
"The length of the binary content is read as %d (actual : %d)", bytes,8);
fail_unless (same == 1,
"The binary content is read wrongly");
fail_unless (testStream.context.bitPointer == 0,
"The decodeBinary function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 15,
"The decodeBinary function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Decode an EXI Float (mantissa + base-10 exponent) representing
 * 5 x 10^2 = 500 and compare after expanding the exponent manually.
 * Fix: the bit/byte-pointer failure messages wrongly named
 * "decodeBinary" (copy-paste); they now name decodeFloatValue. */
START_TEST (test_decodeFloat)
{
  EXIStream testStream;
  char buf[3];
  Float fl_val;
  double expected_res = 500; // 5 x 10^2
  errorCode err = EXIP_UNEXPECTED_ERROR;
  double actual_res = 0;
  makeDefaultOpts(&testStream.header.opts);
  fl_val.exponent = 0;
  fl_val.mantissa = 0;
  buf[0] = (char) 0x02; /* 0b00000010 */
  buf[1] = (char) 0x80; /* 0b10000000 */
  buf[2] = (char) 0x92; /* 0b10010010 */
  testStream.buffer.buf = buf;
  testStream.buffer.bufLen = 3;
  testStream.buffer.bufContent = 3;
  /* No backing I/O stream: decode straight from the buffer. */
  testStream.buffer.ioStrm.readWriteToStream = NULL;
  testStream.buffer.ioStrm.stream = NULL;
  testStream.context.bufferIndx = 0;
  testStream.context.bitPointer = 0;
  initAllocList(&testStream.memList);
  err = decodeFloatValue(&testStream, &fl_val);
  /* Expand mantissa * 10^exponent step by step.
   * NOTE(review): for a negative exponent this multiplies instead of
   * divides; it is only exercised with a positive exponent here. */
  actual_res = fl_val.mantissa;
  while(fl_val.exponent)
  {
    if(fl_val.exponent > 0)
    {
      fl_val.exponent--;
    }
    else
    {
      fl_val.exponent++;
    }
    actual_res *= 10;
  }
  fail_unless (err == EXIP_OK,
    "decodeFloatValue returns error code %d", err);
  fail_unless (actual_res == expected_res,
    "The float value is read as %f (actual : %f)", actual_res, expected_res);
  fail_unless (testStream.context.bitPointer == 2,
    "The decodeFloatValue function did not move the bit Pointer of the stream correctly");
  fail_unless (testStream.context.bufferIndx == 2,
    "The decodeFloatValue function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Decode two signed integers (sign bit followed by unsigned magnitude):
 * 0x94... decodes to -41, then the buffer is patched to 0x14... which
 * decodes to +40. Pointer positions are re-checked after each decode. */
START_TEST (test_decodeIntegerValue)
{
EXIStream testStream;
char buf[3];
Integer bit_val = 0;
errorCode err = EXIP_UNEXPECTED_ERROR;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0x94; /* 0b10010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
buf[2] = (char) 0x48; /* 0b01001000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 3;
testStream.buffer.bufContent = 3;
/* No backing I/O stream: decode straight from the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = decodeIntegerValue(&testStream, &bit_val);
fail_unless (bit_val == -41,
"The IntegerValue -41 from the stream is read as %d", bit_val);
fail_unless (err == EXIP_OK,
"decodeIntegerValue returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The decodeIntegerValue function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 1,
"The decodeIntegerValue function did not move the byte Pointer of the stream correctly");
/* Second case: clear the sign bit and rewind the stream. */
buf[0] = (char) 0x14; /* 0b00010100 */
testStream.context.bitPointer = 0;
testStream.context.bufferIndx = 0;
err = decodeIntegerValue(&testStream, &bit_val);
fail_unless (bit_val == 40,
"The IntegerValue 40 from the stream is read as %d", bit_val);
fail_unless (err == EXIP_OK,
"decodeIntegerValue returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The decodeIntegerValue function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 1,
"The decodeIntegerValue function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Decode an EXI Decimal representing 5.001 (mantissa 5001, exponent -3)
 * and compare both components.
 * Fix: the bit/byte-pointer failure messages wrongly named
 * "decodeIntegerValue" (copy-paste); they now name decodeDecimalValue. */
START_TEST (test_decodeDecimalValue)
{
  EXIStream testStream;
  char buf[3];
  errorCode err = EXIP_UNEXPECTED_ERROR;
  Decimal dec_val;
  Decimal res;
  dec_val.mantissa = 0;
  dec_val.exponent = 0;
  /* Expected result: 5001 x 10^-3 = 5.001 */
  res.mantissa = 5001;
  res.exponent = -3;
  makeDefaultOpts(&testStream.header.opts);
  buf[0] = (char) 0x02; /* 0b00000010 */
  buf[1] = (char) 0xB2; /* 0b10110010 */
  buf[2] = (char) 0x12; /* 0b00010010 */
  testStream.buffer.buf = buf;
  testStream.buffer.bufLen = 3;
  testStream.buffer.bufContent = 3;
  /* No backing I/O stream: decode straight from the buffer. */
  testStream.buffer.ioStrm.readWriteToStream = NULL;
  testStream.buffer.ioStrm.stream = NULL;
  testStream.context.bufferIndx = 0;
  testStream.context.bitPointer = 0;
  initAllocList(&testStream.memList);
  err = decodeDecimalValue(&testStream, &dec_val);
  fail_unless (res.mantissa == dec_val.mantissa && res.exponent == dec_val.exponent, "The value 5.001 is decoded as %d*10^%d", dec_val.mantissa, dec_val.exponent);
  fail_unless (err == EXIP_OK,
    "decodeDecimalValue returns error code %d", err);
  fail_unless (testStream.context.bitPointer == 1,
    "The decodeDecimalValue function did not move the bit Pointer of the stream correctly");
  fail_unless (testStream.context.bufferIndx == 2,
    "The decodeDecimalValue function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* END: streamDecode tests */
/* BEGIN: streamEncode tests */
/* Encode 412 (0b110011100) in 9 bits: the first octet must become
 * 0b11001110 = 206 and the ninth (top bit of buf[1]) must be 0.
 * The stream should end at bufferIndx 1, bitPointer 1. */
START_TEST (test_encodeNBitUnsignedInteger)
{
EXIStream testStream;
char buf[2];
errorCode err = EXIP_UNEXPECTED_ERROR;
unsigned char test, test2;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
/* Buffer pre-filled with the expected pattern; encode must reproduce it. */
buf[0] = (char) 0xCE; /* 0b11001110 */
buf[1] = (char) 0xE0; /* 0b11100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = encodeNBitUnsignedInteger(&testStream, 9, 412);
test = buf[0] | 0;
test2 = (unsigned char) buf[1] >> 7;
fail_unless (err == EXIP_OK,
"encodeNBitUnsignedInteger returns error code %d", err);
fail_unless (test == 206 && test2 == 0,
"encodeNBitUnsignedInteger does not encode correctly");
fail_unless (testStream.context.bitPointer == 1 && testStream.context.bufferIndx == 1,
"The encodeNBitUnsignedInteger function did not move the bit Pointer of the stream correctly");
}
END_TEST
/* Encode TRUE then FALSE into consecutive bits: after both writes the
 * top two bits of buf[0] must be 0b10 (so buf[0] >> 6 == 2) and the
 * bit pointer must have advanced to 2. */
START_TEST (test_encodeBoolean)
{
EXIStream testStream;
char buf[2];
errorCode err = EXIP_UNEXPECTED_ERROR;
unsigned char bit_val = 0;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0x54; /* 0b01010100 */
buf[1] = (char) 0x60; /* 0b01100000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 2;
testStream.buffer.bufContent = 2;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = encodeBoolean(&testStream, 1);
bit_val = (unsigned char) buf[0] >> 7; /* first written bit */
fail_unless (err == EXIP_OK,
"encodeBoolean returns error code %d", err);
fail_unless (bit_val == 1,
"encodeBoolean does not write correctly");
fail_unless (testStream.context.bitPointer == 1,
"The encodeBoolean function did not move the bit Pointer of the stream correctly");
err = encodeBoolean(&testStream, 0);
bit_val = (unsigned char) buf[0] >> 6; /* top two bits: 0b10 == 2 */
fail_unless (err == EXIP_OK,
"encodeBoolean returns error code %d", err);
fail_unless (bit_val == 2,
"encodeBoolean does not write correctly");
fail_unless (testStream.context.bitPointer == 2,
"The encodeBoolean function did not move the bit Pointer of the stream correctly");
}
END_TEST
/* Encode unsigned integers in 7-bit-per-octet form: 421 must become the
 * two octets 165 (0xA5, continuation bit set) and 3; the value 0 must
 * occupy a single zero octet. Byte-pointer advance is checked both times. */
START_TEST (test_encodeUnsignedInteger)
{
EXIStream testStream;
char buf[3];
errorCode err = EXIP_UNEXPECTED_ERROR;
unsigned char test1, test2;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0xD4; /* 0b11010100 */
buf[1] = (char) 0x00; /* 0b00000000 */
buf[2] = (char) 0x00; /* 0b00000000 */
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 3;
testStream.buffer.bufContent = 3;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
initAllocList(&testStream.memList);
err = encodeUnsignedInteger(&testStream, 421);
test1 = (unsigned char) buf[0];
test2 = (unsigned char) buf[1];
fail_unless (err == EXIP_OK,
"encodeUnsignedInteger returns error code %d", err);
fail_unless (test1 == 165 && test2 == 3,
"The encodeUnsignedInteger function doesn't work correctly");
fail_unless (testStream.context.bitPointer == 0,
"The encodeUnsignedInteger function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 2,
"The encodeUnsignedInteger function did not move the byte Pointer of the stream correctly");
/* Second case: encode zero after resetting buffer and stream position. */
buf[0] = (char) 0x10; /* 0b00010000 */
buf[1] = (char) 0x00; /* 0b00000000 */
buf[2] = (char) 0x00; /* 0b00000000 */
testStream.context.bufferIndx = 0;
testStream.context.bitPointer = 0;
err = EXIP_UNEXPECTED_ERROR;
err = encodeUnsignedInteger(&testStream, 0);
test1 = (unsigned char) buf[0];
test2 = (unsigned char) buf[1];
fail_unless (err == EXIP_OK,
"encodeUnsignedInteger returns error code %d", err);
fail_unless (test1 == 0 && test2 == 0,
"The encodeUnsignedInteger function doesn't work correctly");
fail_unless (testStream.context.bitPointer == 0,
"The encodeUnsignedInteger function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 1,
"The encodeUnsignedInteger function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Encode the 19-character string "TEST encodeString()" and check the
 * length prefix, the first two characters and the final byte position.
 * The initial buf contents are irrelevant - encodeString overwrites them.
 * (The original binary-literal comments for buf[2]/buf[3] were wrong
 * and have been corrected.) */
START_TEST (test_encodeString)
{
EXIStream testStream;
char buf[50];
String testStr;
errorCode err = EXIP_UNEXPECTED_ERROR;
unsigned char str_len;
testStream.context.bitPointer = 0;
makeDefaultOpts(&testStream.header.opts);
buf[0] = (char) 0x02; /* 0b00000010 */
buf[1] = (char) 0x65; /* 0b01100101 */
buf[2] = (char) 0x64; /* 0b01100100 */
buf[3] = (char) 0x62; /* 0b01100010 */
initAllocList(&testStream.memList);
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 50;
testStream.buffer.bufContent = 50;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
asciiToString("TEST encodeString()", &testStr, &testStream.memList, FALSE);
err = encodeString(&testStream, &testStr);
str_len = buf[0]; /* first octet is the character count */
fail_unless (err == EXIP_OK,
"encodeString returns error code %d", err);
fail_unless (str_len == 19,
"The String length is not encoded correctly");
fail_unless (buf[1] == 'T' && buf[2] == 'E',
"encodeString doesn't encode correctly");
fail_unless (testStream.context.bitPointer == 0,
"The encodeString function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 20,
"The encodeString function did not move the byte Pointer of the stream correctly");
}
END_TEST
/* Encode a 5-byte binary blob: the output must be the length octet 5
 * followed by the five input bytes verbatim, leaving bufferIndx at 6. */
START_TEST (test_encodeBinary)
{
EXIStream testStream;
char buf[50];
char bin_data[50];
errorCode err = EXIP_UNEXPECTED_ERROR;
makeDefaultOpts(&testStream.header.opts);
bin_data[0] = (char) 0x22; /* 0b00100010 */
bin_data[1] = (char) 0x65; /* 0b01100101 */
bin_data[2] = (char) 0xD4; /* 0b11010100 */
bin_data[3] = (char) 0x5A; /* 0b01011010 */
bin_data[4] = (char) 0xD7; /* 0b11010111 */
initAllocList(&testStream.memList);
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 50;
testStream.buffer.bufContent = 50;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
testStream.context.bitPointer = 0;
err = encodeBinary(&testStream, bin_data, 5);
fail_unless (err == EXIP_OK,
"encodeBinary returns error code %d", err);
fail_unless (testStream.context.bitPointer == 0,
"The encodeBinary function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 6,
"The encodeBinary function did not move the byte Pointer of the stream correctly");
fail_unless(testStream.buffer.buf[0] == 5, "Incorrect encoding during encodeBinary 1");
fail_unless(testStream.buffer.buf[1] == (signed char) 0x22, "Incorrect encoding during encodeBinary 2");
fail_unless(testStream.buffer.buf[2] == (signed char) 0x65, "Incorrect encoding during encodeBinary 3");
fail_unless(testStream.buffer.buf[3] == (signed char) 0xD4, "Incorrect encoding during encodeBinary 4");
fail_unless(testStream.buffer.buf[4] == (signed char) 0x5A, "Incorrect encoding during encodeBinary 5");
fail_unless(testStream.buffer.buf[5] == (signed char) 0xD7, "Incorrect encoding during encodeBinary 6");
}
END_TEST
/* Round-trip an EXI Float (5 x 10^2): encode, check the stream position,
 * then rewind and decode back, expecting an identical mantissa/exponent. */
START_TEST (test_encodeFloatValue)
{
EXIStream testStream;
char buf[10];
Float test_val;
Float test_dec;
errorCode err = EXIP_UNEXPECTED_ERROR;
// 5 x 10^2
test_val.mantissa = 5;
test_val.exponent = 2;
makeDefaultOpts(&testStream.header.opts);
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 10;
testStream.buffer.bufContent = 10;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
testStream.context.bitPointer = 0;
initAllocList(&testStream.memList);
err = encodeFloatValue(&testStream, test_val);
fail_unless (err == EXIP_OK,
"encodeFloatValue returns error code %d", err);
fail_unless (testStream.context.bitPointer == 2,
"The encodeFloatValue function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 2,
"The encodeFloatValue function did not move the byte Pointer of the stream correctly");
/* Rewind and decode what was just written. */
testStream.context.bitPointer = 0;
testStream.context.bufferIndx = 0;
test_dec.mantissa = 0;
test_dec.exponent = 0;
err = decodeFloatValue(&testStream, &test_dec);
fail_unless(test_val.exponent == test_dec.exponent && test_val.mantissa == test_dec.mantissa
, "Incorrect encoding of float value");
}
END_TEST
/* Round-trip a signed integer (-913): encode, check the stream position,
 * then rewind and decode back, expecting the original value.
 * Fix: the final failure message read "The encodeIntegerValue encodes
 * correctly", i.e. it asserted success while reporting a failure; it
 * now describes the actual failure. */
START_TEST (test_encodeIntegerValue)
{
  EXIStream testStream;
  char buf[5];
  errorCode err = EXIP_UNEXPECTED_ERROR;
  Integer test_dec = 0;
  makeDefaultOpts(&testStream.header.opts);
  testStream.buffer.buf = buf;
  testStream.buffer.bufLen = 5;
  testStream.buffer.bufContent = 5;
  /* No backing I/O stream: encode straight into the buffer. */
  testStream.buffer.ioStrm.readWriteToStream = NULL;
  testStream.buffer.ioStrm.stream = NULL;
  testStream.context.bufferIndx = 0;
  testStream.context.bitPointer = 0;
  initAllocList(&testStream.memList);
  err = encodeIntegerValue(&testStream, -913);
  fail_unless (err == EXIP_OK,
    "encodeIntegerValue returns error code %d", err);
  fail_unless (testStream.context.bitPointer == 1,
    "The encodeIntegerValue function did not move the bit Pointer of the stream correctly");
  fail_unless (testStream.context.bufferIndx == 2,
    "The encodeIntegerValue function did not move the byte Pointer of the stream correctly");
  /* Rewind and decode what was just written. */
  testStream.context.bitPointer = 0;
  testStream.context.bufferIndx = 0;
  err = decodeIntegerValue(&testStream, &test_dec);
  fail_unless (test_dec == -913,
    "The encodeIntegerValue function did not encode -913 correctly");
}
END_TEST
/* Round-trip an EXI Decimal (5001 x 10^-3 = 5.001): encode, check the
 * stream position, then rewind and decode back, expecting an identical
 * mantissa/exponent pair. */
START_TEST (test_encodeDecimalValue)
{
EXIStream testStream;
char buf[30];
errorCode err = EXIP_UNEXPECTED_ERROR;
Decimal dec_val;
Decimal res;
dec_val.mantissa = 0;
dec_val.exponent = 0;
res.mantissa = 5001;
res.exponent = -3;
makeDefaultOpts(&testStream.header.opts);
testStream.buffer.buf = buf;
testStream.buffer.bufLen = 30;
testStream.buffer.bufContent = 30;
/* No backing I/O stream: encode straight into the buffer. */
testStream.buffer.ioStrm.readWriteToStream = NULL;
testStream.buffer.ioStrm.stream = NULL;
testStream.context.bufferIndx = 0;
testStream.context.bitPointer = 0;
initAllocList(&testStream.memList);
err = encodeDecimalValue(&testStream, res);
fail_unless (err == EXIP_OK,
"encodeDecimalValue returns error code %d", err);
fail_unless (testStream.context.bitPointer == 1,
"The encodeDecimalValue function did not move the bit Pointer of the stream correctly");
fail_unless (testStream.context.bufferIndx == 2,
"The encodeDecimalValue function did not move the byte Pointer of the stream correctly");
/* Rewind and decode what was just written. */
testStream.context.bitPointer = 0;
testStream.context.bufferIndx = 0;
err = decodeDecimalValue(&testStream, &dec_val);
fail_unless (res.mantissa == dec_val.mantissa && res.exponent == dec_val.exponent, "The value 5.001 is decoded as %d*10^%d", dec_val.mantissa, dec_val.exponent);
}
END_TEST
/* END: streamEncode tests */
/* START: ioUtil tests */
/* Advance the stream position by an arbitrary number of bits and check
 * the resulting (byte, bit) pair: crossing one or more byte boundaries
 * must carry into bufferIndx. */
START_TEST (test_moveBitPointer)
{
EXIStream strm;
strm.context.bitPointer = 3;
strm.context.bufferIndx = 0;
moveBitPointer(&strm, 13); /* 3 + 13 = 16 bits -> byte 2, bit 0 */
fail_unless(strm.context.bitPointer == 0 && strm.context.bufferIndx == 2, "incorrect moving of the BitPointer");
strm.context.bitPointer = 7;
strm.context.bufferIndx = 0;
moveBitPointer(&strm, 1); /* 7 + 1 = 8 bits -> byte 1, bit 0 */
fail_unless(strm.context.bitPointer == 0 && strm.context.bufferIndx == 1, "incorrect moving of the BitPointer");
strm.context.bitPointer = 3;
strm.context.bufferIndx = 0;
moveBitPointer(&strm, 12); /* 3 + 12 = 15 bits -> byte 1, bit 7 */
fail_unless(strm.context.bitPointer == 7 && strm.context.bufferIndx == 1, "incorrect moving of the BitPointer");
}
END_TEST
/* getBitsNumber(n) must return the number of bits needed to represent n
 * (e.g. 63 fits in 6 bits, 64 needs 7). */
START_TEST (test_getBitsNumber)
{
fail_unless(getBitsNumber(99) == 7);
fail_unless(getBitsNumber(63) == 6);
fail_unless(getBitsNumber(64) == 7);
fail_unless(getBitsNumber(4095) == 12);
fail_unless(getBitsNumber(824) == 10);
fail_unless(getBitsNumber(16383) == 14);
fail_unless(getBitsNumber(7234) == 13);
}
END_TEST
/* log2INT(n) must return floor(log2(n)) - one less than getBitsNumber
 * for the same inputs. */
START_TEST (test_log2INT)
{
fail_unless(log2INT(99) == 6);
fail_unless(log2INT(63) == 5);
fail_unless(log2INT(64) == 6);
fail_unless(log2INT(4095) == 11);
fail_unless(log2INT(824) == 9);
fail_unless(log2INT(16383) == 13);
fail_unless(log2INT(7234) == 12);
}
END_TEST
/* END: ioUtil tests */
/* Build the Check suite: one test case per functional area
 * (read, write, decode, encode, ioUtil helpers). */
Suite * streamIO_suite (void)
{
Suite *s = suite_create ("StreamIO");
{
/* StreamRead test case */
TCase *tc_sRead = tcase_create ("StreamRead");
tcase_add_test (tc_sRead, test_readNextBit);
tcase_add_test (tc_sRead, test_readBits);
suite_add_tcase (s, tc_sRead);
}
{
/* StreamWrite test case */
TCase *tc_sWrite = tcase_create ("StreamWrite");
tcase_add_test (tc_sWrite, test_writeNextBit);
tcase_add_test (tc_sWrite, test_writeNBits);
suite_add_tcase (s, tc_sWrite);
}
{
/* StreamDecode test case */
TCase *tc_sDecode = tcase_create ("StreamDecode");
tcase_add_test (tc_sDecode, test_decodeNBitUnsignedInteger);
tcase_add_test (tc_sDecode, test_decodeBoolean);
tcase_add_test (tc_sDecode, test_decodeUnsignedInteger);
tcase_add_test (tc_sDecode, test_decodeString);
tcase_add_test (tc_sDecode, test_decodeBinary);
tcase_add_test (tc_sDecode, test_decodeFloat);
tcase_add_test (tc_sDecode, test_decodeIntegerValue);
tcase_add_test (tc_sDecode, test_decodeDecimalValue);
suite_add_tcase (s, tc_sDecode);
}
{
/* StreamEncode test case */
TCase *tc_sEncode = tcase_create ("StreamEncode");
tcase_add_test (tc_sEncode, test_encodeNBitUnsignedInteger);
tcase_add_test (tc_sEncode, test_encodeBoolean);
tcase_add_test (tc_sEncode, test_encodeUnsignedInteger);
tcase_add_test (tc_sEncode, test_encodeString);
tcase_add_test (tc_sEncode, test_encodeBinary);
tcase_add_test (tc_sEncode, test_encodeFloatValue);
tcase_add_test (tc_sEncode, test_encodeIntegerValue);
tcase_add_test (tc_sEncode, test_encodeDecimalValue);
suite_add_tcase (s, tc_sEncode);
}
{
/* ioUtil test case */
TCase *tc_ioUtil = tcase_create ("ioUtil");
tcase_add_test (tc_ioUtil, test_moveBitPointer);
tcase_add_test (tc_ioUtil, test_getBitsNumber);
tcase_add_test (tc_ioUtil, test_log2INT);
suite_add_tcase (s, tc_ioUtil);
}
return s;
}
/* Run the whole suite and map the failure count onto the process
 * exit status (0 = all tests passed). */
int main (void)
{
int number_failed;
Suite *s = streamIO_suite();
SRunner *sr = srunner_create (s);
#ifdef _MSC_VER
/* MSVC has no fork(); run the tests in-process. */
srunner_set_fork_status(sr, CK_NOFORK);
#endif
srunner_run_all (sr, CK_NORMAL);
number_failed = srunner_ntests_failed (sr);
srunner_free (sr);
return (number_failed == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
}
|
<gh_stars>10-100
// Build script: bundle ./src/js/<package-name>.js into ./dist with
// browserify, creating the dist directory first if it is missing.
var path = require('path');
var fs = require('fs');
var browserify = require('browserify');

var bundleName = process.env.npm_package_name + '.js';
var sourceDir = './src/js';
var outputDir = './dist';
var entryPoints = [path.join(sourceDir, bundleName)];

// Ensure the output directory exists before streaming into it.
if (!fs.existsSync(outputDir)) {
  fs.mkdirSync(outputDir);
}

var bundler = browserify(entryPoints);
var outStream = fs.createWriteStream(path.join(outputDir, bundleName));
bundler.bundle().pipe(outStream);
def filter_list(arr1, arr2):
    """Return the elements of arr1 that do not appear in arr2, in order.

    The exclusion collection is converted to a set once so membership
    checks are O(1), making the filter O(len(arr1) + len(arr2)) instead
    of the original O(len(arr1) * len(arr2)). If arr2 contains
    unhashable items, fall back to the original linear scan so behavior
    is unchanged for those inputs.
    """
    try:
        exclude = set(arr2)
    except TypeError:
        # Unhashable elements in arr2: keep the original semantics.
        exclude = arr2
    return [i for i in arr1 if i not in exclude]


arr1 = [1, 2, 3, 4]
arr2 = [2, 4, 6]
filtered_list = filter_list(arr1, arr2)
print(filtered_list)
<gh_stars>1-10
package com.lookfor.iwannatravel.dto;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
/**
 * Travel-restriction information for a single country.
 * Getters, setters and toString are generated by Lombok.
 */
@Getter
@Setter
@ToString
public class CountryDto {
// Country name.
private String name;
// Whether tourist travel is currently allowed.
private boolean tourism;
// Free-text description of required entry documents.
private String documents;
// Whether quarantine is required on arrival.
private boolean quarantine;
// Length of the required quarantine, in days.
private int quarantineDays;
// Additional details about the quarantine rules.
private String quarantineNote;
// General free-text note about the country.
private String note;
}
|
#!/bin/sh
# Extract every resource from the compiled GNOME Shell theme bundle into
# ~/shell-theme, recreating the bundle's directory layout so the theme
# can be inspected or modified.
#
# Fixes over the original: all expansions are quoted, the redundant
# "test -d before mkdir -p" check is dropped (mkdir -p is idempotent),
# and the two identical `gresource list` loops are merged into one.
gst=/usr/share/gnome-shell/gnome-shell-theme.gresource
workdir="${HOME}/shell-theme"
for r in $(gresource list "$gst"); do
# Path relative to the bundle root, e.g. theme/gnome-shell.css
rel=${r#/org/gnome/shell/}
mkdir -p "$workdir/${rel%/*}"
gresource extract "$gst" "$r" > "$workdir/$rel"
done
#!/bin/bash
# Convert a MACS2 bedGraph pileup track into bigWig format for genome
# browser display, via the UCSC bedGraphToBigWig tool.
# Fixes over the original: expansions are quoted and the output
# directory is created if missing.
module load ucsc_tools
MACS2_DIR=/gpfs/group/pipkin/hdiao/T_Cell_ChIP/2_MACS2
OUT_DIR=/gpfs/group/pipkin/hdiao/T_Cell_ChIP/2_MACS2_bw
bdg_name="$MACS2_DIR/2017_PNAS_Li___H3K27Ac_IL2-UP-deletion-CD4_treat_pileup.bdg"
bdg_srt_name="$MACS2_DIR/2017_PNAS_Li___H3K27Ac_IL2-UP-deletion-CD4_treat_pileup_srt.bdg"
bw_name="$OUT_DIR/2017_PNAS_Li___H3K27Ac_IL2-UP-deletion-CD4.bw"
chrom_size=/gpfs/group/pipkin/hdiao/ref_resources/mm/release102/GRCm38.genome.sizes
mkdir -p "$OUT_DIR"
##### Sort bedgraph file
# bedGraphToBigWig requires byte-wise (LC_COLLATE=C) chrom/start order.
LC_COLLATE=C sort -k1,1 -k2,2n "$bdg_name" > "$bdg_srt_name"
##### Convert
bedGraphToBigWig "$bdg_srt_name" "$chrom_size" "$bw_name"
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2626-1
#
# Security announcement date: 2013-02-17 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:29 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: i386
#
# Vulnerable packages fix on version:
# - lighttpd:1.4.28-2+squeeze1.2
#
# Last versions recommended by security team:
# - lighttpd:1.4.28-2+squeeze1.7
#
# CVE List:
# - CVE-2009-3555
# - CVE-2012-4929
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade lighttpd (only if already installed) to the patched version.
sudo apt-get install --only-upgrade lighttpd=1.4.28-2+squeeze1.7 -y
|
<reponame>DorinR/Politisense
const Action = require('../JobAction').AbstractJobAction
/**
 * Job action that adapts a bill-link fetch result into the parameter
 * object expected by the next stage: the first selected link plus the
 * bill id and parliament number carried in from the constructor params.
 */
class BillLinkFetchAdapterAction extends Action {
constructor (params) {
super()
// Expected to carry at least { bill, parliament } — TODO confirm with callers.
this.params = params
}
/**
 * @param {Object} links - result object with a `selected` array of URLs
 * @returns {Promise<{url: string, id: *, parliament: *}>}
 */
async perform (links) {
console.log(`INFO: Retrieved Bill link: ${links.selected[0]}`)
return {
url: links.selected[0],
id: this.params.bill,
parliament: this.params.parliament
}
}
}
module.exports.BillLinkFetchAdapterAction = BillLinkFetchAdapterAction
|
require 'rails_helper'
# Feature spec (currently skipped): a candidate whose chosen course
# location fills up after completing the form must be warned at
# submission time and blocked from proceeding, even though another
# location for the same course still has vacancies.
RSpec.feature 'Candidate submits the application with full course choice location', skip: true do
include CandidateHelper
scenario 'The location that the candidate picked is full but others have vacancies' do
given_i_complete_my_application
and_the_selected_course_options_is_now_full
and_an_alternative_course_option_has_vacancies
and_i_submit_my_application
then_i_see_a_warning_that_there_are_no_vacancies_at_my_chosen_location
and_i_cannot_proceed
end
def given_i_complete_my_application
candidate_completes_application_form
end
# Mark the candidate's chosen course option as having no vacancies.
def and_the_selected_course_options_is_now_full
course_option = current_candidate.current_application.application_choices.first.course_option
course_option.update!(vacancy_status: 'no_vacancies')
end
# Create a second site for the same course that still has vacancies.
def and_an_alternative_course_option_has_vacancies
alternative_site = create(:site, name: 'Alternative site', code: 'B', provider: @provider)
create(
:course_option,
site: alternative_site,
course: current_candidate.current_application.application_choices.first.course,
)
end
def and_i_submit_my_application
click_link 'Check and submit your application'
end
def then_i_see_a_warning_that_there_are_no_vacancies_at_my_chosen_location
expect(page).to have_content("Your chosen location for ‘#{current_candidate.current_application.application_choices.first.course.provider_and_name_code}’ has no vacancies")
end
# Attempting to continue must surface a validation error, not submit.
def and_i_cannot_proceed
click_link t('continue')
expect(page).to have_content('There is a problem')
expect(page).to have_content("Your chosen location for ‘#{current_candidate.current_application.application_choices.first.course.provider_and_name_code}’ has no vacancies")
end
end
|
#!/bin/bash
# Provision a throwaway AKS cluster (Linux system pool + Windows node
# pool) for testing, then fetch kubectl credentials.
# Fixes over the original: all variable expansions are quoted.
set -x
version="${1:-1.20.2}"
rg="test-aks-$RANDOM"
location="${location:-eastus}"
WINDOWS_USERNAME="azureuser"
# NOTE(review): default password is a placeholder; prefer supplying
# ADMIN_PASS via the environment. Passing secrets on the command line
# is visible in `ps` and in the set -x trace.
ADMIN_PASS="${ADMIN_PASS:-changeMe2234}"
clustername="$rg-cluster"
az group create --name "$rg" --location "$location"
az aks create \
--resource-group "$rg" \
--name "$clustername" \
--node-count 1 \
--enable-addons monitoring \
--generate-ssh-keys \
--windows-admin-username "$WINDOWS_USERNAME" \
--vm-set-type VirtualMachineScaleSets \
--kubernetes-version "$version" \
--network-plugin azure \
--windows-admin-password "$ADMIN_PASS"
# Add the Windows node pool (os-type Windows requires the azure CNI above).
az aks nodepool add \
--resource-group "$rg" \
--cluster-name "$clustername" \
--os-type Windows \
--name npwin \
--node-count 2
az aks get-credentials --resource-group "$rg" --name "$clustername"
#!/bin/bash
################################################################################
# Copyright (c) 2021 Vladislav Trifochkin
#
# Unified build script for Linux distributions
#
# Changelog:
# 2021.05.20 Initial version.
# 2021.11.07 Added PROJECT_OPT_PREFIX variable.
# 2021.11.08 SOURCE_DIR recognition modified.
#
# Review fixes:
# - `[ -n $VAR ]` tests were unquoted, so they were always true
#   (`[ -n ]` with a single argument tests the literal string "-n");
#   they are now quoted.
# - The final `&& ctest && ... Coverage` chain made a successful build
#   exit non-zero whenever BUILD_TESTS was unset, and only ran the
#   coverage target when tests were also enabled; the steps are now
#   independent `if` blocks.
################################################################################
CMAKE_OPTIONS="${CMAKE_OPTIONS}"
if [ -z "$PROJECT_OPT_PREFIX" ] ; then
echo "ERROR: PROJECT_OPT_PREFIX is mandatory." >&2
exit 1
fi
if [ -z "$BUILD_GENERATOR" ] ; then
if command -v ninja > /dev/null ; then
BUILD_GENERATOR=Ninja
else
echo "WARN: Preferable build system 'ninja' not found, use default." >&2
BUILD_GENERATOR="Unix Makefiles"
fi
fi
# Normalize boolean-ish env vars: any case variant of "on" becomes ON,
# everything else unsets the variable.
if [ -n "$BUILD_STRICT" ] ; then
case $BUILD_STRICT in
[Oo][Nn])
BUILD_STRICT=ON
;;
*)
unset BUILD_STRICT
;;
esac
fi
if [ -n "$BUILD_STRICT" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -D${PROJECT_OPT_PREFIX}BUILD_STRICT=$BUILD_STRICT"
fi
if [ -n "$CXX_STANDARD" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_CXX_STANDARD=$CXX_STANDARD"
fi
if [ -n "$C_COMPILER" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_C_COMPILER=$C_COMPILER"
fi
if [ -n "$CXX_COMPILER" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_CXX_COMPILER=$CXX_COMPILER"
fi
if [ -z "$BUILD_TYPE" ] ; then
BUILD_TYPE=Debug
fi
CMAKE_OPTIONS="$CMAKE_OPTIONS -DCMAKE_BUILD_TYPE=$BUILD_TYPE"
if [ -n "$BUILD_TESTS" ] ; then
case $BUILD_TESTS in
[Oo][Nn])
BUILD_TESTS=ON
;;
*)
unset BUILD_TESTS
;;
esac
fi
if [ -n "$BUILD_TESTS" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -D${PROJECT_OPT_PREFIX}BUILD_TESTS=$BUILD_TESTS"
fi
if [ -n "$BUILD_DEMO" ] ; then
case $BUILD_DEMO in
[Oo][Nn])
BUILD_DEMO=ON
;;
*)
unset BUILD_DEMO
;;
esac
fi
if [ -n "$BUILD_DEMO" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -D${PROJECT_OPT_PREFIX}BUILD_DEMO=$BUILD_DEMO"
fi
if [ -n "$ENABLE_COVERAGE" ] ; then
case $ENABLE_COVERAGE in
[Oo][Nn])
ENABLE_COVERAGE=ON
;;
*)
unset ENABLE_COVERAGE
;;
esac
fi
if [ -n "$ENABLE_COVERAGE" ] ; then
CMAKE_OPTIONS="$CMAKE_OPTIONS -D${PROJECT_OPT_PREFIX}ENABLE_COVERAGE=$ENABLE_COVERAGE"
fi
# Build directory name encodes compiler, C++ standard and coverage mode.
BUILD_DIR=builds/${CXX_COMPILER:-default}.cxx${CXX_STANDARD:-}${ENABLE_COVERAGE:+.coverage}
# We are inside source directory
if [ -d .git ] ; then
if [ -z "$ENABLE_COVERAGE" ] ; then
SOURCE_DIR=$(pwd)
fi
BUILD_DIR="../$BUILD_DIR"
fi
if [ -z "$SOURCE_DIR" ] ; then
# We are inside subdirectory (usually from scripts directory)
if [ -d ../.git ] ; then
SOURCE_DIR=$(pwd)/..
BUILD_DIR="../../$BUILD_DIR"
else
echo "ERROR: SOURCE_DIR must be specified" >&2
exit 1
fi
fi
# Configure and build; CMAKE_OPTIONS is intentionally unquoted so it
# word-splits into separate -D arguments.
mkdir -p "${BUILD_DIR}" \
&& cd "${BUILD_DIR}" \
&& cmake -G "${BUILD_GENERATOR}" $CMAKE_OPTIONS "$SOURCE_DIR" \
&& cmake --build . \
|| exit 1
# Run tests and the coverage target independently of each other.
if [ -n "$BUILD_TESTS" ] ; then
ctest || exit 1
fi
if [ -n "$ENABLE_COVERAGE" ] ; then
cmake --build . --target Coverage || exit 1
fi
|
#!/usr/bin/env bash
# Dotfiles installer: sync the bundled dotbot submodule, then run dotbot
# against the main config and, when running under WSL, a WSL-specific one.
set -e
CONFIG="install.conf.yaml"
CONFIG_WSL="install-wsl.conf.yaml"
DOTBOT_DIR="dotbot"
DOTBOT_BIN="bin/dotbot"
# Directory containing this script, regardless of the caller's cwd.
BASEDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "${BASEDIR}"
git -C "${DOTBOT_DIR}" submodule sync --quiet --recursive
git submodule update --init --recursive "${DOTBOT_DIR}"
/usr/bin/env python3 "${BASEDIR}/${DOTBOT_DIR}/${DOTBOT_BIN}" -d "${BASEDIR}" -c "${CONFIG}" "${@}"
# WSLENV is set by Windows Subsystem for Linux; apply the extra config there.
if [ "$WSLENV" ]; then
/usr/bin/env python3 "${BASEDIR}/${DOTBOT_DIR}/${DOTBOT_BIN}" -d "${BASEDIR}" -c "${CONFIG_WSL}" "${@}"
fi
|
<gh_stars>1-10
package io.github.rcarlosdasilva.weixin.model.response.media;
import com.google.gson.annotations.SerializedName;
/**
 * Response model for the Weixin (WeChat) "add temporary media" API call.
 * Field names are mapped from the JSON payload via Gson's @SerializedName.
 */
public class MediaAddTemporaryResponse {
    private String type;
    @SerializedName("media_id")
    private String mediaId;
    @SerializedName("created_at")
    private long createdAt;
    @SerializedName("thumb_media_id")
    private String thumbMediaId;

    /**
     * Media file type: image, voice, video, or thumb (thumbnails used mainly
     * for video and music formats).
     *
     * @return type
     */
    public String getType() {
        return type;
    }

    /**
     * Unique identifier assigned to the media file after upload, used to
     * fetch it later.
     *
     * @return media id
     */
    public String getMediaId() {
        return mediaId;
    }

    /**
     * Upload timestamp of the media file.
     *
     * @return time
     */
    public long getCreatedAt() {
        return createdAt;
    }

    /**
     * Unique identifier assigned after upload, used to fetch the media file
     * (thumbnail variant).
     *
     * @return thumb_media_id
     */
    public String getThumbMediaId() {
        return thumbMediaId;
    }
}
|
<gh_stars>1-10
import React from 'react';
import {BrowserRouter, Switch, Route, Link} from 'react-router-dom';
import Sobre from './paginas/sobre';
import Contato from './paginas/contato';
function Routes(){
return(
<BrowserRouter>
<Link to="/contato">Contato</Link><br/>
<Link to="/sobre">Sobre</Link>
<br/><br/>
<h2>Venha fazer parte de nosso Curso</h2>
<br/><br/>
<Switch>
<Route path="/contato">
<Contato/>
</Route>
<Route path="/sobre">
<Sobre/>
</Route>
</Switch>
</BrowserRouter>
);
};
export default Routes; |
<filename>src/connection_and_streaming/stream_audio.py
from mqtt_client import MqttClient
import logging
import json
import time
import pyaudio
from threading import Thread
# Audio capture parameters (pyaudio): 16-bit mono at 44.1 kHz, read in
# 2048-frame chunks.
CHUNK = 2048
FORMAT = pyaudio.paInt16
CHANNELS = 1
RATE = 44100
# MQTT broker the audio stream is published through.
MQTT_BROKER = "mqtt.item.ntnu.no"
MQTT_PORT = 1883
class StreamAudio():
    """Capture microphone audio and stream it over MQTT.

    On construction this connects to the MQTT broker, subscribes to the
    per-office audio control topic, then loops forever: while ``self.active``
    is set (toggled by "streamstart"/"streamstop" control messages) it reads
    audio chunks from the microphone and publishes them.
    """

    def __init__(self):
        self.number = 8                      # office / topic index
        self.name = "office" + str(self.number)
        self.sendTo = None                   # peer that receives the stream
        self.active = False                  # True while streaming
        self.on = True
        # get the logger object for the component
        self._logger = logging.getLogger(__name__)
        print("logging under name {}.".format(__name__))
        self._logger.info("Starting Component")
        # create a new MQTT client
        self._logger.debug("Connecting to MQTT broker {} at port {}".format(MQTT_BROKER, MQTT_PORT))
        self.mqtt_client = MqttClient("StreamAudio" + str(self.number))
        self.mqtt_client.on_connect = self.on_connect
        self.mqtt_client.on_message = self.on_message
        self.mqtt_client.connect(MQTT_BROKER, MQTT_PORT)
        self.mqtt_client.subscribe("ttm4115/team_1/project/audio" + str(self.number))
        # BUG FIX: the original used target=self.mqtt_client.loop_start(),
        # which CALLED loop_start immediately and handed Thread its return
        # value (None) as the target. Pass the callable itself instead.
        thread = Thread(target=self.mqtt_client.loop_start)
        thread.start()
        p = pyaudio.PyAudio()
        stream = p.open(format=FORMAT,
                        channels=CHANNELS,
                        rate=RATE,
                        input=True,
                        frames_per_buffer=CHUNK)
        while True:
            if self.active:
                data = stream.read(CHUNK)
                # latin-1 round-trips raw bytes 1:1 into a JSON-safe string
                audiostring = data.decode("ISO-8859-1")
                timestamp = str(int(time.time() * 1000))
                self.send_msg("streamaudio", "office" + str(self.number) + "audio", self.sendTo, timestamp, audiostring,
                              "ttm4115/team_1/project/audio" + str(self.number))
            else:
                # avoid a 100% CPU busy-wait while idle
                time.sleep(0.05)
        # NOTE: unreachable (the loop above never breaks); kept from the
        # original as documentation of the intended shutdown sequence.
        self.mqtt_client.loop_stop()
        stream.stop_stream()
        stream.close()
        p.terminate()

    def on_connect(self, client, userdata, flags, rc):
        """Callback when connecting to MQTT."""
        self._logger.debug("MQTT connected to {}".format(client))

    def load_json(self, msg):
        """Deserialize an MQTT message payload; returns None on invalid JSON."""
        try:
            data = json.loads(msg.payload.decode("utf-8"))
        except Exception as err:
            self._logger.error('Message sent to topic {} had no valid JSON. Message ignored. {}'.format(msg.topic, err))
            return
        return data

    def on_message(self, client, userdata, msg):
        """Callback for subscribed topics: handles stream start/stop commands.

        Note: "reciver" (sic) is the key used on the wire by peers; it must
        not be renamed.
        """
        if msg.topic == "ttm4115/team_1/project/audio" + str(self.number):
            data = self.load_json(msg)
            if data is None:
                # BUG FIX: load_json returns None on bad JSON; the original
                # then crashed on data["command"] with a TypeError.
                return
            if data["command"] == "streamstart" and data["reciver"] == self.name + "audio":
                self.active = True
                self.sendTo = data["answer"]
            elif data["command"] == "streamstop" and data["reciver"] == self.name + "audio":
                self.active = False

    def send_msg(self, msg, sender, reciver, timestamp, answer, where):
        """Serialize into a JSON string and publish to an MQTT topic.

        :param where: Topic to publish to
        """
        command = {"command": msg, "sender": sender, "reciver": reciver, "time": timestamp, "answer": answer}
        payload = json.dumps(command)
        self.mqtt_client.publish(where, payload)
if __name__ == "__main__":
    # Configure verbose console logging for this module, then start the
    # (blocking) audio streamer.
    debug_level = logging.DEBUG
    logger = logging.getLogger(__name__)
    logger.setLevel(debug_level)
    ch = logging.StreamHandler()
    ch.setLevel(debug_level)
    formatter = logging.Formatter('%(asctime)s - %(name)-12s - %(levelname)-8s - %(message)s')
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    t = StreamAudio()
|
import { BadRequestException } from "@nestjs/common";
import {registerDecorator, ValidationOptions, ValidationArguments} from "class-validator";
export function MoreThen(property?: string, validationOptions?: ValidationOptions) {
return function (object: Object, propertyName: string) {
registerDecorator({
name: "MoreThen",
target: object.constructor,
propertyName: propertyName,
constraints: [property],
options: validationOptions,
validator: {
validate(value: any, args: ValidationArguments) {
const [relatedPropertyName] = args.constraints;
const relatedValue = (args.object as any)[relatedPropertyName];
if (value>=property) return true
else throw new BadRequestException('Validation failed Date');
}
}
});
};
} |
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.pool.ha;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.sql.DataSource;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.support.logging.Log;
import com.alibaba.druid.support.logging.LogFactory;
/**
* An utility class to create DruidDataSource dynamically.
*
* @author DigitalSonic
*/
public class DataSourceCreator {
    private final static Log LOG = LogFactory.getLog(DataSourceCreator.class);
    // Raw configuration loaded from the given file; keys look like
    // "<name>.url", "<name>.username", "<name>.password".
    private Properties properties = new Properties();
    // Logical DataSource names derived from the "<name>.url" property keys.
    private List<String> nameList = new ArrayList<String>();

    /**
     * Loads the properties file (filesystem first, then classpath) and
     * derives the list of DataSource names from it.
     */
    public DataSourceCreator(String file) {
        loadProperties(file);
        loadNameList();
    }

    /**
     * Creates one DruidDataSource per configured name.
     *
     * @param haDataSource template whose pool settings are copied onto each
     *                     created DataSource
     * @return a concurrent map of name to initialized DataSource; empty if no
     *         names were configured
     * @throws SQLException if a DataSource fails to initialize
     */
    public Map<String, DataSource> createMap(HighAvailableDataSource haDataSource) throws SQLException {
        Map<String, DataSource> map = new ConcurrentHashMap<String, DataSource>();
        if (nameList == null || nameList.isEmpty()) {
            LOG.error("No DataSource will be created!");
            return map;
        }
        for (String n : nameList) {
            String url = properties.getProperty(n + ".url");
            String username = properties.getProperty(n + ".username");
            String password = properties.getProperty(n + ".password");
            LOG.info("Creating " + n + " with url[" + url + "] and username[" + username + "].");
            DruidDataSource dataSource = create(n, url, username, password, haDataSource);
            map.put(n, dataSource);
        }
        LOG.info(map.size() + " DruidDataSource(s) created. ");
        return map;
    }

    /**
     * Builds and initializes a single DruidDataSource, copying every pool /
     * validation / eviction setting from the HA template.
     */
    protected DruidDataSource create(String name, String url, String username, String password,
                                     HighAvailableDataSource haDataSource) throws SQLException {
        DruidDataSource dataSource = new DruidDataSource();
        // identityHashCode suffix keeps names unique across re-creations
        dataSource.setName(name + "-" + System.identityHashCode(dataSource));
        dataSource.setUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        dataSource.setDriverClassName(haDataSource.getDriverClassName());
        dataSource.setConnectProperties(haDataSource.getConnectProperties());
        dataSource.setConnectionProperties(haDataSource.getConnectionProperties());
        dataSource.setInitialSize(haDataSource.getInitialSize());
        dataSource.setMaxActive(haDataSource.getMaxActive());
        dataSource.setMinIdle(haDataSource.getMinIdle());
        dataSource.setMaxWait(haDataSource.getMaxWait());
        dataSource.setValidationQuery(haDataSource.getValidationQuery());
        dataSource.setValidationQueryTimeout(haDataSource.getValidationQueryTimeout());
        dataSource.setTestOnBorrow(haDataSource.isTestOnBorrow());
        dataSource.setTestOnReturn(haDataSource.isTestOnReturn());
        dataSource.setTestWhileIdle(haDataSource.isTestWhileIdle());
        dataSource.setPoolPreparedStatements(haDataSource.isPoolPreparedStatements());
        dataSource.setSharePreparedStatements(haDataSource.isSharePreparedStatements());
        dataSource.setMaxPoolPreparedStatementPerConnectionSize(
                haDataSource.getMaxPoolPreparedStatementPerConnectionSize());
        dataSource.setQueryTimeout(haDataSource.getQueryTimeout());
        dataSource.setTransactionQueryTimeout(haDataSource.getTransactionQueryTimeout());
        dataSource.setTimeBetweenEvictionRunsMillis(haDataSource.getTimeBetweenEvictionRunsMillis());
        dataSource.setMinEvictableIdleTimeMillis(haDataSource.getMinEvictableIdleTimeMillis());
        dataSource.setMaxEvictableIdleTimeMillis(haDataSource.getMaxEvictableIdleTimeMillis());
        dataSource.setPhyTimeoutMillis(haDataSource.getPhyTimeoutMillis());
        dataSource.setTimeBetweenConnectErrorMillis(haDataSource.getTimeBetweenConnectErrorMillis());
        dataSource.setRemoveAbandoned(haDataSource.isRemoveAbandoned());
        dataSource.setRemoveAbandonedTimeoutMillis(haDataSource.getRemoveAbandonedTimeoutMillis());
        dataSource.setLogAbandoned(haDataSource.isLogAbandoned());
        dataSource.setProxyFilters(haDataSource.getProxyFilters());
        dataSource.setFilters(haDataSource.getFilters());
        dataSource.setLogWriter(haDataSource.getLogWriter());
        dataSource.init();
        return dataSource;
    }

    /**
     * Derives the DataSource names: every property key containing ".url"
     * contributes its prefix (e.g. "db1.url" -> "db1").
     */
    private void loadNameList() {
        Set<String> names = new HashSet<String>();
        for (String n : properties.stringPropertyNames()) {
            // NOTE(review): contains(".url") also matches keys like
            // "a.url.b"; presumably keys are always exact "<name>.url".
            if (n.contains(".url")) {
                names.add(n.split("\\.url")[0]);
            }
        }
        if (!names.isEmpty()) {
            nameList.addAll(names);
        }
    }

    /**
     * Loads the properties file, trying the filesystem first and falling back
     * to the classpath. Failures are logged, never thrown.
     */
    private void loadProperties(String file) {
        Properties properties = new Properties();
        if (file == null) {
            return;
        }
        InputStream is = null;
        try {
            LOG.debug("Trying to load " + file + " from FileSystem.");
            is = new FileInputStream(file);
        } catch(FileNotFoundException e) {
            LOG.debug("Trying to load " + file + " from Classpath.");
            try {
                is = DataSourceCreator.class.getResourceAsStream(file);
            } catch (Exception ex) {
                LOG.warn("Can not load resource " + file, ex);
            }
        }
        if (is != null) {
            try {
                properties.load(is);
                // only replace the field once loading fully succeeded
                this.properties = properties;
            } catch(Exception e) {
                LOG.error("Exception occurred while loading " + file, e);
            } finally {
                if (is != null) {
                    try {
                        is.close();
                    } catch (Exception e) {
                        // ignore
                    }
                }
            }
        } else {
            LOG.warn("File " + file + " can't be loaded!");
        }
    }
}
|
#!/bin/bash
# Launch a fresh localnet Tari base node for integration testing: wipe the
# previous test data directory, export the localnet configuration, then
# initialise and run the node via cargo.
set -e

# shellcheck disable=SC2034  # DIR is referenced by the commented-out
# IDENTITY_FILE override below.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"

# Start from a clean data directory every run.
rm -rf temp/data/integration-test-base-node
mkdir -p temp/data/integration-test-base-node

export TARI_BASE_NODE__NETWORK=localnet
export TARI_BASE_NODE__LOCALNET__DATA_DIR=localnet
export TARI_BASE_NODE__LOCALNET__DB_TYPE=lmdb
export TARI_BASE_NODE__LOCALNET__ORPHAN_STORAGE_CAPACITY=10
export TARI_BASE_NODE__LOCALNET__PRUNING_HORIZON=0
export TARI_BASE_NODE__LOCALNET__PRUNED_MODE_CLEANUP_INTERVAL=10000
export TARI_BASE_NODE__LOCALNET__CORE_THREADS=10
export TARI_BASE_NODE__LOCALNET__MAX_THREADS=512
#export TARI_BASE_NODE__LOCALNET__IDENTITY_FILE=$DIR/node1_id.json
export TARI_BASE_NODE__LOCALNET__TOR_IDENTITY_FILE=node_tor_id.json
export TARI_BASE_NODE__LOCALNET__WALLET_IDENTITY_FILE=walletid.json
export TARI_BASE_NODE__LOCALNET__WALLET_TOR_IDENTITY_FILE=wallet_tor_id.json
export TARI_BASE_NODE__LOCALNET__TRANSPORT=tcp
export TARI_BASE_NODE__LOCALNET__TCP_LISTENER_ADDRESS=/ip4/0.0.0.0/tcp/18189
export TARI_BASE_NODE__LOCALNET__ALLOW_TEST_ADDRESSES=true
export TARI_BASE_NODE__LOCALNET__PUBLIC_ADDRESS=/ip4/10.0.0.102/tcp/18189
export TARI_BASE_NODE__LOCALNET__ENABLE_WALLET=true
#export TARI_BASE_NODE__LOCALNET__TOR_CONTROL_ADDRESS=/ip4/127.0.0.1/tcp/9051
#export TARI_BASE_NODE__LOCALNET__TOR_CONTROL_AUTH=none
#export TARI_BASE_NODE__LOCALNET__TOR_FORWARD_ADDRESS=/ip4/127.0.0.1/tcp/0
#export TARI_BASE_NODE__LOCALNET__TOR_ONION_PORT=18999
#export TARI_BASE_NODE__LOCALNET__PUBLIC_ADDRESS=
# NOTE: a second, identical ALLOW_TEST_ADDRESSES export was removed here.
export TARI_BASE_NODE__LOCALNET__GRPC_ENABLED=true
export TARI_BASE_NODE__LOCALNET__GRPC_ADDRESS=127.0.0.1:50051
export TARI_BASE_NODE__LOCALNET__BLOCK_SYNC_STRATEGY=ViaBestChainMetadata
export TARI_BASE_NODE__LOCALNET__ENABLE_MINING=false
export TARI_BASE_NODE__LOCALNET__NUM_MINING_THREADS=1
export TARI_BASE_NODE__LOCALNET__ORPHAN_DB_CLEAN_OUT_THRESHOLD=0
# not used
export TARI_BASE_NODE__LOCALNET__GRPC_WALLET_ADDRESS=127.0.0.1:5999
export TARI_MERGE_MINING_PROXY__LOCALNET__MONEROD_URL=aasdf
export TARI_MERGE_MINING_PROXY__LOCALNET__MONEROD_USE_AUTH=false
export TARI_MERGE_MINING_PROXY__LOCALNET__MONEROD_USERNAME=asdf
export TARI_MERGE_MINING_PROXY__LOCALNET__MONEROD_PASSWORD=asdf
export TARI_MERGE_MINING_PROXY__LOCALNET__PROXY_HOST_ADDRESS=127.0.0.1:50071
export TARI_MERGE_MINING_PROXY__LOCALNET__WAIT_FOR_INITIAL_SYNC_AT_STARTUP=true
# BUG FIX: was FULL — Rust only recognises the lowercase "full" for full
# backtraces (any other non-"0" value behaves like "1").
export RUST_BACKTRACE=full

cd temp/data/integration-test-base-node || exit 1
cargo run --release --bin tari_base_node -- --base-path . --create-id --init
cargo run --release --bin tari_base_node -- --base-path .
|
<filename>ts/dist/assets/cwa/lib/AvatarGUI.js
//-------- js/AvatarGUI.js --------
// Generated by CoffeeScript 1.12.2
(function () {
    // Names captured from the CoffeeScript-generated module scope; cwaenv is
    // the CWASA environment registry used to look up collaborating classes.
    var AvSpeedController, AvatarGUI, Config, HTMLForAvatarGUI, SigningAvatar, console, cwaenv, document, log, setTimeout, theConfig, theSToCA,
        // bind(fn, me): returns fn permanently bound to receiver `me`.
        bind = function (fn, me) {
            return function () {
                return fn.apply(me, arguments);
            };
        };
    cwaenv = this.getCWAEnv();
    console = this.console;
    document = this.document;
    setTimeout = this.setTimeout;
    log = console.log.bind(console);
    Config = cwaenv.get("Config");
    AvSpeedController = cwaenv.get("AvSpeedController");
    SigningAvatar = cwaenv.get("SigningAvatar");
    HTMLForAvatarGUI = cwaenv.get("HTMLForAvatarGUI");
    theSToCA = cwaenv.get("theSToCA");
    theConfig = Config.theConfig;
    // start-of-load marker in the console
    console.log("{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{{");
AvatarGUI = (function () {
function AvatarGUI(avIndex, avSettings) {
    // Build the full GUI for avatar number `avIndex`: inject the generated
    // HTML into every matching container, look up the resulting DOM elements,
    // then create the speed controller and the signing avatar itself.
    var avEvtHandlers, avatarDiv, fillEach, htmlgen, panelDiv, sections, self, si, speedFun;
    this.avIndex = avIndex;
    this.avEvtLoadDone = bind(this.avEvtLoadDone, this);
    this.avEvtLoadStarts = bind(this.avEvtLoadStarts, this);
    this.stat = bind(this.stat, this);
    this.initAv = avSettings.initAv;
    log("Starting AvatarGUI " + this.avIndex + " ...");
    htmlgen = new HTMLForAvatarGUI(this.avIndex, avSettings);
    // The panel and avatar containers are filled on their first match only.
    panelDiv = document.getElementsByClassName("CWASAPanel av" + this.avIndex)[0];
    if (panelDiv != null) {
        panelDiv.innerHTML = htmlgen.allHTML();
    }
    avatarDiv = document.getElementsByClassName("CWASAAvatar av" + this.avIndex)[0];
    if (avatarDiv != null) {
        avatarDiv.innerHTML = htmlgen.htmlForAv();
    }
    // Every other GUI section is generated the same way: find all containers
    // with the section class for this avatar index and fill each with the
    // corresponding generator method (invoked per container, as before).
    fillEach = (function (_this) {
        return function (cssClass, genMethod) {
            var divs, i;
            divs = document.getElementsByClassName(cssClass + " av" + _this.avIndex);
            for (i = 0; i < divs.length; i++) {
                divs[i].innerHTML = htmlgen[genMethod]();
            }
        };
    })(this);
    sections = [
        ["CWASAGUI", "htmlForGUI"],
        ["CWASAAvMenu", "htmlForAvMenu"],
        ["CWASASpeed", "htmlForSpeedCtrl"],
        ["CWASASiGMLURL", "htmlForSiGMLURL"],
        ["CWASASiGMLText", "htmlForSiGMLText"],
        ["CWASAPlay", "htmlForSiGMLPlay"],
        ["CWASAPlayExtra", "htmlForSiGMLPlayExtra"],
        ["CWASAFrames", "htmlForFrameSteps"],
        ["CWASAProgress", "htmlForProgress"],
        ["CWASAStatus", "htmlForStatus"]
    ];
    for (si = 0; si < sections.length; si++) {
        fillEach(sections[si][0], sections[si][1]);
    }
    this.domEls = this._getDOMEls();
    this.guiDisablePlay();
    avEvtHandlers = {
        fps: this.avEvtFPS.bind(this),
        atFrame: this.avEvtAtFrame.bind(this),
        avLoadStarts: this.avEvtLoadStarts.bind(this),
        avLoadDone: this.avEvtLoadDone.bind(this),
        animLoading: this.avEvtSiGMLLoading.bind(this),
        animActive: this.avEvtAnimActive.bind(this),
        animIdle: this.avEvtAnimIdle.bind(this)
    };
    this.speedController = new AvSpeedController(this.domEls, true);
    // SigningAvatar polls the current playback speed through this accessor.
    self = this;
    speedFun = function () {
        return self.speedController.curSpeed();
    };
    this.avatar = new SigningAvatar(this, this.domEls.avCanvas[0], speedFun, avEvtHandlers);
}
AvatarGUI.prototype.stat = function (msg) {
    // Publish a status message: per-avatar "statusExtra" fields get the short
    // form; every shared "statusLog" console gets a timestamped, tagged line.
    var extraEls, k, logEls, longMsg, shortMsg;
    shortMsg = theConfig.animgenProc + ": " + msg;
    extraEls = document.getElementsByClassName("statusExtra av" + this.avIndex);
    for (k = 0; k < extraEls.length; k++) {
        extraEls[k].value = shortMsg;
    }
    // hh:mm:ss.mmm slice of the ISO timestamp, plus the avatar index.
    longMsg = (((new Date).toISOString()).substr(11, 12)) + " [" + this.avIndex + "] " + shortMsg;
    logEls = document.getElementsByClassName("statusLog");
    for (k = 0; k < logEls.length; k++) {
        logEls[k].value += "[av" + this.avIndex + "] " + longMsg + "\n";
    }
    log(longMsg);
    return longMsg;
};
// --- Playback control: thin wrappers that delegate to the SigningAvatar ---

AvatarGUI.prototype.startAvatar = function () {
    // Wire up the GUI, then load the initially-configured avatar.
    this.initGUI();
    return this.avatar.switchAvatar(this.initAv);
};

AvatarGUI.prototype.playSiGMLText = function (sigml) {
    this._sIndex = -1;
    return this.avatar.playSiGML(sigml);
};

AvatarGUI.prototype.playSiGMLURL = function (sigmlurl) {
    this._sIndex = -1;
    return this.avatar.playSiGMLURL(sigmlurl);
};

AvatarGUI.prototype.playSiGMLTextFromEl = function (sigmltxtel) {
    // Play SiGML taken from the first matching text element, if any.
    var srcEl = sigmltxtel[0];
    var sigml = srcEl != null ? srcEl.value : void 0;
    this._sIndex = -1;
    return this.playSiGMLText(sigml);
};

AvatarGUI.prototype.playSiGMLURLFromEl = function (sigmlurlel) {
    var srcEl = sigmlurlel[0];
    var sigmlurl = srcEl != null ? srcEl.value : void 0;
    this._sIndex = -1;
    return this.playSiGMLURL(sigmlurl);
};

AvatarGUI.prototype.stopPlay = function () {
    return this.avatar.stopPlay();
};

AvatarGUI.prototype.suspendPlay = function () {
    this.guiSuspendPlay();
    return this.avatar.suspendPlay();
};

AvatarGUI.prototype.resumePlay = function () {
    this.guiStartPlay();
    return this.avatar.resumePlay();
};

AvatarGUI.prototype.showPreviousFrame = function () {
    return this.avatar.showPreviousFrame();
};

AvatarGUI.prototype.showNextFrame = function () {
    return this.avatar.showNextFrame();
};
AvatarGUI.prototype.handleURLKey = function (evt) {
    // Return key in the SiGML URL field triggers playback of that URL.
    var keyCode = evt.which || evt.keyCode;
    if (keyCode !== theSToCA.RETURN_CHAR) {
        return;
    }
    if (evt.preventDefault != null) {
        evt.preventDefault();
    } else {
        evt.returnValue = false; // legacy IE fallback
    }
    return this.playSiGMLURLFromEl(this.domEls.sigmlURL);
};
// --- Element-collection helpers: set one property on every element.  Each
// returns the per-element result array, mirroring the CoffeeScript
// comprehensions they were generated from. ---

AvatarGUI.prototype._setElsValue = function (els, newval) {
    return Array.prototype.map.call(els, function (el) {
        return el.value = newval;
    });
};

AvatarGUI.prototype._setElsOnclick = function (els, hdlr) {
    return Array.prototype.map.call(els, function (el) {
        return el.onclick = hdlr;
    });
};

AvatarGUI.prototype._setElsDisabled = function (els, isDis) {
    return Array.prototype.map.call(els, function (el) {
        return el.disabled = isDis;
    });
};
// --- Avatar event callbacks (invoked by SigningAvatar) ---

AvatarGUI.prototype.avEvtFPS = function (newfps) {
    // Show the measured frame rate with two decimals.
    return this._setElsValue(this.domEls.fps, "" + (newfps.toFixed(2)));
};

AvatarGUI.prototype.avEvtAtFrame = function (gloss, signIx, frameIx, isDone) {
    // Update the sign/frame readout only when something changed (or on the
    // final frame) to avoid redundant DOM writes.
    var signChanged = signIx !== this._sIndex;
    if (!signChanged && frameIx === this._fIndex && !isDone) {
        return;
    }
    this._setElsValue(this.domEls.signAndFrame, signIx + "/" + frameIx);
    this._fIndex = frameIx;
    if (signChanged) {
        this._setElsValue(this.domEls.gloss, "" + gloss);
        return this._sIndex = signIx;
    }
};

AvatarGUI.prototype.avEvtLoadStarts = function (av) {
    this._setElsValue(this.domEls.avMenu, av);
    return this.guiDisablePlay();
};

AvatarGUI.prototype.avEvtLoadDone = function (av) {
    this._setElsValue(this.domEls.avMenu, av);
    return this.guiStopPlay();
};

AvatarGUI.prototype.avEvtSiGMLLoading = function () {
    return this.guiDisablePlay();
};

AvatarGUI.prototype.avEvtAnimActive = function () {
    return this.guiStartPlay();
};

AvatarGUI.prototype.avEvtAnimIdle = function () {
    return this.guiStopPlay();
};
AvatarGUI.prototype.initGUI = function () {
    // Wire up all GUI controls: buttons, avatar selection menus and the
    // SiGML URL text field.
    var avm, avmenu, i, len, textsu;
    this.nSFD = 0;
    this._setElsOnclick(this.domEls.stop, this.stopPlay.bind(this));
    this._setElsOnclick(this.domEls.suspend, this.suspendPlay.bind(this));
    this._setElsOnclick(this.domEls.resume, this.resumePlay.bind(this));
    // Play buttons read the URL/text field contents at click time.
    this._setElsOnclick(this.domEls.playSU, ((function (_this) {
        return function () {
            return _this.playSiGMLURLFromEl(_this.domEls.sigmlURL);
        };
    })(this)));
    this._setElsOnclick(this.domEls.playST, ((function (_this) {
        return function () {
            return _this.playSiGMLTextFromEl(_this.domEls.sigmlText);
        };
    })(this)));
    this._setElsOnclick(this.domEls.prevF, this.showPreviousFrame.bind(this));
    this._setElsOnclick(this.domEls.nextF, this.showNextFrame.bind(this));
    avmenu = this.domEls.avMenu;
    for (i = 0, len = avmenu.length; i < len; i++) {
        avm = avmenu[i];
        // Each menu gets its own closure over `mnu`, so the handler reads
        // the value of the menu that fired rather than the loop variable.
        avm.onchange = ((function (_this) {
            return function (mnu) {
                return function () {
                    return _this.avatar.switchAvatar(mnu.value);
                };
            };
        })(this))(avm);
    }
    // Return key in the URL field triggers playback (see handleURLKey).
    textsu = this.domEls.sigmlURL[0];
    return textsu != null ? textsu.onkeypress = ((function (_this) {
        return function (evt) {
            return _this.handleURLKey(evt);
        };
    })(this)) : void 0;
};
// --- GUI state presets: enable/disable the control set for each player state ---

AvatarGUI.prototype.guiDisablePlay = function () {
    // Everything off: used while an avatar or animation is loading.
    var els = this.domEls;
    this._setElsDisabled(els.avMenu, true);
    this._setElsDisabled(els.playSU, true);
    this._setElsDisabled(els.playST, true);
    this._setElsDisabled(els.stop, true);
    this._setElsDisabled(els.suspend, true);
    this._setElsDisabled(els.resume, true);
    this._setElsDisabled(els.prevF, true);
    return this._setElsDisabled(els.nextF, true);
};

AvatarGUI.prototype.guiStartPlay = function () {
    // Playing: only stop and suspend are available.
    var els = this.domEls;
    this._setElsDisabled(els.avMenu, true);
    this._setElsDisabled(els.playSU, true);
    this._setElsDisabled(els.playST, true);
    this._setElsDisabled(els.stop, false);
    this._setElsDisabled(els.suspend, false);
    this._setElsDisabled(els.resume, true);
    this._setElsDisabled(els.prevF, true);
    return this._setElsDisabled(els.nextF, true);
};

AvatarGUI.prototype.guiStopPlay = function () {
    // Idle: avatar selection and both play buttons are available.
    var els = this.domEls;
    this._setElsDisabled(els.avMenu, false);
    this._setElsDisabled(els.playSU, false);
    this._setElsDisabled(els.playST, false);
    this._setElsDisabled(els.stop, true);
    this._setElsDisabled(els.suspend, true);
    this._setElsDisabled(els.resume, true);
    this._setElsDisabled(els.prevF, true);
    return this._setElsDisabled(els.nextF, true);
};

AvatarGUI.prototype.guiSuspendPlay = function () {
    // Paused: stop, resume and single-frame stepping are available.
    var els = this.domEls;
    this._setElsDisabled(els.avMenu, true);
    this._setElsDisabled(els.playSU, true);
    this._setElsDisabled(els.playST, true);
    this._setElsDisabled(els.stop, false);
    this._setElsDisabled(els.suspend, true);
    this._setElsDisabled(els.resume, false);
    this._setElsDisabled(els.prevF, false);
    return this._setElsDisabled(els.nextF, false);
};
AvatarGUI.prototype._getDOMEls = function () {
    // Look up every GUI element belonging to this avatar index, grouped by
    // role.  Lookups matching several elements are logged but tolerated.
    var avix = this.avIndex;
    var elForAv = function (tag) {
        var elms = document.getElementsByClassName(tag + " av" + avix);
        if (elms.length > 1) {
            log("Multiple (" + elms.length + ") elements for " + tag + " av" + avix);
        }
        return elms;
    };
    var bttnForAv = function (btag) {
        return elForAv("bttn" + btag);
    };
    var txtForAv = function (ttag) {
        return elForAv("txt" + ttag);
    };
    var txtaForAv = function (tatag) {
        return elForAv("txta" + tatag);
    };
    return {
        avCanvas: elForAv("canvasAv"),
        avMenu: elForAv("menuAv"),
        playSU: bttnForAv("PlaySiGMLURL"),
        playST: bttnForAv("PlaySiGMLText"),
        stop: bttnForAv("Stop"),
        suspend: bttnForAv("Suspend"),
        resume: bttnForAv("Resume"),
        prevF: bttnForAv("PrevF"),
        nextF: bttnForAv("NextF"),
        speedDisplay: txtForAv("LogSpeed"),
        speedDown: bttnForAv("SpeedDown"),
        speedUp: bttnForAv("SpeedUp"),
        speedReset: bttnForAv("SpeedReset"),
        signAndFrame: txtForAv("SF"),
        gloss: txtForAv("Gloss"),
        fps: txtForAv("FPS"),
        sigmlURL: txtForAv("SiGMLURL"),
        sigmlText: txtaForAv("SiGMLText")
    };
};
return AvatarGUI;
})();
cwaenv.add(AvatarGUI, "AvatarGUI");
}).call(this); |
using System;

class Program
{
    // BUG FIX: `r` was never declared, so the file did not compile.  A single
    // shared Random instance is used (creating one per call can repeat
    // sequences when calls happen within the same timer tick).
    private static readonly Random r = new Random();

    // Returns min - 0.5 + k where k is a random integer in [0, max - min],
    // i.e. a value in [min - 0.5, max - 0.5].
    static double RandomNumber(int min, int max)
    {
        double randomNumber = min - 0.5 +
            (double) r.Next(max - min + 1);
        return randomNumber;
    }

    public static void Main()
    {
        int min = 0;
        int max = 5;
        Console.WriteLine(RandomNumber(min, max));
    }
}
// BUG FIX: `express` was used but never required.
const express = require('express');
const autocomplete = require('autocomplete');

const app = express();

// GET /autocomplete?word=... -> { autocomplete: <suggestions for word> }
app.get('/autocomplete', (req, res) => {
  const word = req.query.word;
  const result = autocomplete(word);
  res.status(200).json({
    autocomplete: result
  });
});

// BUG FIX: `name === 'main'` referenced an undefined variable (a Python-ism);
// the Node idiom for "run only when executed directly" is below.
if (require.main === module) {
  app.listen(3000);
}
import arbolDistanciaGenetica2 as ae # arbolEspecie
import mapa
import individuo
import planta
import random
# Hawk/dove evolutionary simulation: seeds one territory with two equal-sized
# populations that differ only in 'estrategia' (0 = dove, 1 = hawk), runs 40
# simulated days, and logs the dove,hawk head-count per day to report.log.
report = open("report.log", "w")
# Create the territory
territorios = []
granValle = mapa.Territorio(50,True,0,[])
territorios.append(granValle)
# Build the species tree and the two founding chromosomes
maynard = ae.ArbolEspecie()
cromosoma0 = {
    'fuerza' : 5,
    'destreza' : 5,
    'constitucion' : 5,
    'velocidad' : 5,
    'inteligencia' : 5,
    'percepcion' : 5,
    'esperanzaVida' : 600,
    'fecundidad' : 0,
    'madurezSexual': 1,
    'estrategia': 0}
cromosoma1 = {
    'fuerza' : 5,
    'destreza' : 5,
    'constitucion' : 5,
    'velocidad' : 5,
    'inteligencia' : 5,
    'percepcion' : 5,
    'esperanzaVida' : 600,
    'fecundidad' : 0,
    'madurezSexual': 1,
    'estrategia': 1}
# DawkinsEEE signature: __init__(self, nodo, especie, edad, sexo, cromosoma)
for i in range(0,239):
    nodo = ae.Nodo(None,None,maynard)
    granValle.newDawkinsEEE([individuo.DawkinsEEE(nodo, maynard, 16, 1, cromosoma1)])
    nodo = ae.Nodo(None,None,maynard)
    granValle.newDawkinsEEE([individuo.DawkinsEEE(nodo, maynard, 16, 1, cromosoma0)])
# Main loop: one iteration per simulated day
for dia in range(0,40):
    random.shuffle(granValle.dawkinsEEEs)
    print ("Día " + str(dia))
    viajes = []
    for tierra in territorios:
        # Per-territory interaction pools for this day
        orgia = []
        llorones = []
        victimas = []
        turnos = []
        # Phase 1: every individual declares its intended action
        print("######### DECLARAR #########")
        declara = tierra.getDeclaraciones()
        for theIndividuo in declara:
            print (str(theIndividuo) + " " + str(theIndividuo.nodo.indice))
            print("Energia " + str(theIndividuo.energia) + "/" + str(theIndividuo.getCapacidadEnergia()))
            print("Inventario " + str(theIndividuo.inventario) + "/" + str(theIndividuo.getCapacidadCarga()))
            print("Felicidad " + str(theIndividuo.felicidad))
            decision = theIndividuo.getDecision(tierra.getAcciones(theIndividuo, orgia, llorones, victimas), orgia, llorones, victimas) # Decision is a tuple
            turnos.append((theIndividuo, decision))
            print (decision)
            print ("\n")
        # Phase 2: actions are executed in initiative order
        print("\n######### ACTUAR #########")
        tierra.getIniciativas(turnos)
        for ii in turnos:
            ii[0].actuar(ii[1], viajes)
        print("\n\n\n")
    # Phase 3: resolve migrations queued during the day
    for viaje in viajes:
        viaje[0].viajar(viaje[1],viaje[2])
    # Phase 4: census (doves vs hawks) and world upkeep
    for tierra in territorios:
        palomas = 0
        halcones = 0
        for bicho in granValle.dawkinsEEEs:
            if bicho.cromosoma["estrategia"]:
                halcones +=1
            else:
                palomas +=1
        report.write(str(palomas)+","+str(halcones)+"\n")
        tierra.elMundoSeMueve()
report.close()
#_7#for i in range(0,200):
#_7# viajes = []
#_7# for tierra in territorios:
#_7# orgia = []
#_7# llorones = []
#_7# victimas = []
#_7# turnos = []
#_7# print("\n\ndeclara")
#_7# declara = tierra.getDeclaraciones()
#_7# for i in declara:
#_7# decision = i.getDecision(tierra.getAcciones(i, orgia, llorones, victimas), orgia, llorones, victimas) # Decision es una tupla
#_7# turnos.append((i, decision))
#_7# print (i.nodo.indice)
#_7# print(str(i.energia) + "/" + str(i.getCapacidadEnergia()))
#_7# print(str(i.inventario) + "/" + str(i.getCapacidadCarga()))
#_7# print(i.felicidad)
#_7# print (decision)
#_7# print("\n\naccion")
#_7# tierra.getIniciativas(turnos)
#_7# for i in turnos:
#_7# i[0].actuar(i[1], viajes)
#_7# print("\n\n\n")
#_7#
#_7# for viaje in viajes:
#_7# print ("jajajaajajajajajaa")
#_7# print(len(granValle.conejos))
#_7# print(len(granValle2.conejos))
#_7# viaje[0].viajar(viaje[1],viaje[2])
#_7# print(len(granValle.conejos))
#_7# print(len(granValle2.conejos))
#_7# for tierra in territorios:
#_7# tierra.elMundoSeMueve()
#_7#
#_7#print(len(granValle.conejos))
#_7#print(len(granValle2.conejos))
#_7#
#_6#for i in range(0,500):
#_6# orgia = []
#_6# llorones = []
#_6# victimas = []
#_6# turnos = []
#_6# print("\n\ndeclara")
#_6# declara = granValle.getDeclaraciones()
#_6# for i in declara:
#_6# decision = i.getDecision(granValle.getAcciones(i, orgia, llorones, victimas), orgia, llorones, victimas) # Decision es una tupla
#_6# turnos.append((i, decision))
#_6# print (i.nodo.indice)
#_6# print(str(i.energia) + "/" + str(i.getCapacidadEnergia()))
#_6# print(str(i.inventario) + "/" + str(i.getCapacidadCarga()))
#_6# print (decision)
#_6# print("\n\naccion")
#_6# granValle.getIniciativas(turnos)
#_6# for i in turnos:
#_6# i[0].actuar(i[1])
#_6#
#_6# granValle.elMundoSeMueve()
#_6# print("\n\n\n")
#_6#print(len(granValle.conejos))
#_5#nodo = ae.Nodo(None,None,oCuniculus)
#_5#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 1, cromosoma2)])
#_5#nodo = ae.Nodo(None,None,oCuniculus)
#_5#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 1, cromosoma2)])
#_5#nodo = ae.Nodo(None,None,oCuniculus)
#_5#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 0, cromosoma1)])
#_5#nodo = ae.Nodo(None,None,oCuniculus)
#_5#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 0, cromosoma1)])
#_5#
#_5#orgia = []
#_5#declara = granValle.getDeclaraciones()
#_5#turnos = []
#_5#print("percepcion")
#_5#for i in declara:
#_5# print (i.nodo.indice)
#_5# print (i.cromosoma)
#_5# turnos.append(granValle.getAcciones(i, orgia))
#_5#print("\n\naccion")
#_5#granValle.getIniciativas(turnos)
#_5#for i in turnos:
#_5# print (i[0].nodo.indice)
#_5# print ("\n")
#_5# for a in turnos:
#_5# print ("estado")
#_5# print(a[0].nodo.indice)
#_5# print (a[0].isFertil())
#_5# print ("\n\n")
#_5# if i[1] == "reproducirse":
#_5# if i[0].isFertil() and i[2].isFertil():
#_5# print ("proc")
#_5# print(i[0].nodo.indice)
#_5# print(i[2].nodo.indice)
#_5# i[0].procrear(i[2])
#_5#for i in granValle.conejos:
#_5# if i.cinta:
#_5# if i.sexo:
#_5# i.parir()
#_5# else:
#_5# granValle.newConejo([i.parir()])
#_5#print(len(granValle.conejos))
#nodo = ae.Nodo(None,None,oCuniculus)
#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 1, cromosoma2)])
#nodo = ae.Nodo(None,None,oCuniculus)
#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 1, cromosoma3)])
#nodo = ae.Nodo(None,None,oCuniculus)
#granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 5, 0, cromosoma4)])
#_4#for h in range(0,1):
#_4# print("\n\nNewronda")
#_4# orgia = []
#_4#
#_4# for i in granValle.conejos:
#_4# print (i.nodo.indice)
#_4# print (i.cromosoma)
#_4#
#_4# print ("\n\n")
#_4# declara = granValle.getDeclaraciones()
#_4# turnos = []
#_4# print("percepcion")
#_4# for i in declara:
#_4# print (i.nodo.indice)
#_4# print (i.cromosoma)
#_4# turnos.append(granValle.getAcciones(i, orgia))
#_4# print("\n\naccion")
#_4# granValle.getIniciativas(turnos)
#_4# for i in turnos:
#_4# print (i[0].nodo.indice)
#_4# print ("\n")
#_4# for a in turnos:
#_4# print ("estado")
#_4# print(a[0].nodo.indice)
#_4# print (a[0].isFertil())
#_4# print ("\n\n")
#_4# if i[1] == "reproducirse":
#_4# if i[0].isFertil() and i[2].isFertil():
#_4# print ("proc")
#_4# print(i[0].nodo.indice)
#_4# print(i[2].nodo.indice)
#_4# i[0].procrear(i[2])
#_4#
#_4# for i in granValle.conejos:
#_4# if i.cinta:
#_4# granValle.newConejo([i.parir()])
#_4#
#_4#
#_4#
#_4#
######nodo = ae.Nodo(None,None,oCuniculus)
######granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,10,1,80,1,2,0)])
######nodo = ae.Nodo(None,None,oCuniculus)
######granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,5,1,100,1,1,0)])
######nodo = ae.Nodo(None,None,oCuniculus)
######granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,4,1,30,1,5,0)])
######nodo = ae.Nodo(None,None,oCuniculus)
######granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,1,1,50,1,3,0)])
######nodo = ae.Nodo(None,None,oCuniculus)
######granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,8,1,81,1,4,0)])
#_3#
#_3#a = granValle.conejos
#_3#for i in granValle.conejos:
#_3# print (i.nodo.indice)
#_3# print (i.cromosoma)
#_3#
#_3#print ("\n\n")
#_3#declara = granValle.getDeclaraciones()
#_3#turnos = []
#_3#print("percepcion")
#_3#for i in declara:
#_3# print (i.nodo.indice)
#_3# print (i.cromosoma)
#_3# turnos.append((i, "jaja"))
#_3#print("\n\naccion")
#_3#granValle.getIniciativas(turnos)
#_3#for i in turnos:
#_3# print (i[0].nodo.indice)
#_3# print (i[0].cromosoma)
#_3#
#_3#
#_3#
#_2#for i in range(0,2):
#_2# nodo = ae.Nodo(None,None,oCuniculus)
#_2# granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 1, 1, 1,1,1,80,1,1,0)])
#_2# nodo = ae.Nodo(None,None,oCuniculus)
#_2# granValle.newConejo([individuo.Individuo(nodo, oCuniculus, 2, 2, 2,2,2,80,2,2,1)])
#_2# for i in granValle.conejos:
#_2# print (i.nodo.indice)
#_2# print (i.cromosoma)
#_2# print("\n")
#_2# granValle.conejos[0].procrear(granValle.conejos[1])
#_2# granValle.newConejo([granValle.conejos[0].parir()])
#_2# for i in granValle.conejos:
#_2# print (i.nodo.indice)
#_2# print (i.cromosoma)
#_2# print("\n\n")
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[1].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[2].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[3].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[4].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[5].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[1].nodo.indice, granValle.conejos[2].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[5].nodo.indice, granValle.conejos[2].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[5].nodo.indice, granValle.conejos[3].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[5].nodo.indice, granValle.conejos[4].nodo.indice))
#_2#granValle.conejos[2].procrear(granValle.conejos[4])
#_2#granValle.newConejo([granValle.conejos[2].parir()])
#_2#print(oCuniculus.getDistancia(granValle.conejos[0].nodo.indice, granValle.conejos[6].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[1].nodo.indice, granValle.conejos[6].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[3].nodo.indice, granValle.conejos[6].nodo.indice))
#_2#print(oCuniculus.getDistancia(granValle.conejos[5].nodo.indice, granValle.conejos[6].nodo.indice))
#_#
#_##Crear lobos
#_#CanisLupus = ae.ArbolEspecie() # Lobo
#_#
#_##Bucle
#_#for i in range(0,15000):
#_# #Conejos comen
#_# for conejo in granValle.conejos:
#_# planta = random.choice(granValle.plantas)
#_# if (planta.serComido(conejo.obtenerComida())):
#_# granValle.morirPlanta(planta)
#_#
#_# #print("conejos comen")
#_# #for planta in granValle.plantas:
#_# # print (planta.__dict__)
#_# #plantas hacen
#_# plantitas = []
#_# for planta in granValle.plantas:
#_# plantitas += planta.crecer()
#_# granValle.NewPlanta(plantitas)
#_#
#_# if not i%5:
#_# esporas = 0
#_# madurez = 0
#_# crecimiento = 0
#_# if not i%100 and len(granValle.plantas) == 100 and i<1200:
#_# for h in range(0,20):
#_# nodo = ae.Nodo(None,None,oCuniculus)
#_# granValle.newConejo([individuo.Individuo(nodo, 1, 1, 1, 1,1,1,80,1,1)])
#_# for planta in granValle.plantas:
#_# esporas += planta.cromosoma['esporas']
#_# madurez += planta.cromosoma['madurez']
#_# crecimiento += planta.cromosoma['crecimiento']
#_# print ("REPORT " + str(i))
#_# print (esporas/len(granValle.plantas))
#_# print (madurez/len(granValle.plantas))
#_# print (crecimiento/len(granValle.plantas))
#_# #print (planta.__dict__)
#_# print (len(granValle.plantas)) |
#!/bin/bash
# Re-fetch the Weex / Benmu iOS SDK sources, refresh CocoaPods dependencies,
# then open the workspace in Xcode.
#
# Fix: abort on the first failure (e.g. a failed git clone) instead of
# continuing into `pod update` with missing sources.
set -e

sudo rm -rf WeexiOSSDK/
sudo rm -rf Benmu-iOS-Library/
#sudo rm -rf Podfile.lock
#sudo rm -rf Pods/
git clone https://github.com/bmfe/WeexiOSSDK.git
#cd WeexiOSSDK
#git checkout 0.18.0
#cd ../
git clone https://github.com/bmfe/Benmu-iOS-Library.git
#cd Benmu-iOS-Library
#git checkout 1.1.8
pod update
echo =========================
echo 🍺 ios资源文件加载完成
echo =========================
open WeexEros.xcworkspace
|
#!/bin/bash
# *****************************************************************************
# *****************************************************************************
#
# testLmsRDomXPathN.bash
#
# *****************************************************************************
#
# @author Jay Wheeler.
# @version 0.0.2
# @copyright © 2016, 2017. EarthWalk Software.
# @license Licensed under the Academic Free License version 3.0
# @package Linux Management Scripts
# @subpackage tests
#
# *****************************************************************************
#
# Copyright © 2016, 2017. EarthWalk Software
# Licensed under the Academic Free License, version 3.0.
#
# Refer to the file named License.txt provided with the source,
# or from
#
# http://opensource.org/licenses/academic.php
#
# *****************************************************************************
#
# Version 0.0.1 - 06-30-2016.
# 0.0.2 - 02-10-2017.
#
# *****************************************************************************
# *****************************************************************************
testlibDir="../../testlib"
. $testlibDir/installDirs.bash
. $testlibDir/stdLibs.bash
. $testlibDir/cliOptions.bash
. $testlibDir/commonVars.bash
# *****************************************************************************
declare lmsscr_Version="0.0.2" # script version
declare lmstst_Declarations="$etcDir/testVariables.xml"
# *****************************************************************************
# *****************************************************************************
#
# External Functions
#
# *****************************************************************************
# *****************************************************************************
. $testlibDir/testDump.bash
. $testlibDir/testUtilities.bash
# *****************************************************************************
# *****************************************************************************
#
# Test Functions
#
# *****************************************************************************
# *****************************************************************************
# *******************************************************
#
# testLmsRDomXPNSet
#
#
# *******************************************************
# *******************************************************
#
# testLmsRDomXPNSet
#
# Per-element callback: print the current DOM entity, its
# attributes (for opening tags), comment and path info.
#
# *******************************************************
testLmsRDomXPNSet()
{
	lmsConioDisplay ""
	lmsConioDisplay "XML_ENTITY : '${lmsxml_Entity}'"

	lmsStrTrim "${lmsxml_Content}" lmsxml_Content
	lmsConioDisplay "XML_CONTENT : '${lmsxml_Content}'"
	lmsConioDisplay "XML_TAG_NAME : '${lmsxml_TagName}'"
	lmsConioDisplay "XML_TAG_TYPE : '${lmsxml_TagType}'"

	# Attributes only exist on opening (or self-closing) tags.
	case "${lmsxml_TagType}" in
	OPEN|OPENCLOSE)
		if [[ -n "${lmsxml_Attributes}" ]]
		then
			lmsRDomParseAtt
			lmsConioDisplay "XML_ATT_COUNT : '${#lmsxml_AttributesArray[@]}'"
			for attribute in "${!lmsxml_AttributesArray[@]}"
			do
				lmsConioDisplay "XML_ATT_NAME : '${attribute}'"
				lmsConioDisplay "XML_ATT_VAL : '${lmsxml_AttributesArray[$attribute]}'"
			done
		fi
		;;
	esac

	lmsStrTrim "${lmsxml_Comment}" lmsxml_Comment
	lmsConioDisplay "XML_COMMENT : '${lmsxml_Comment}'"
	lmsConioDisplay "XML_PATH : '${lmsxml_Path}'"
	lmsConioDisplay "XPATH : '${lmsxml_XPath}'"
}
# *******************************************************
#
# testLmsRDomXPNData
#
# Show the xml data element selected
#
# *******************************************************
# *******************************************************
#
# testLmsRDomXPNData
#
# Show the xml data element selected: entity, content,
# tag name/type, attributes (opening tags only), comment
# and path/XPath information.
#
# Fix: removed the unused `local content` declaration.
#
# *******************************************************
testLmsRDomXPNData()
{
	lmsConioDisplay ""
	lmsConioDisplay "XML_ENTITY : '${lmsxml_Entity}'"
	lmsStrTrim "${lmsxml_Content}" lmsxml_Content
	lmsConioDisplay "XML_CONTENT : '${lmsxml_Content}'"
	lmsConioDisplay "XML_TAG_NAME : '${lmsxml_TagName}'"
	lmsConioDisplay "XML_TAG_TYPE : '${lmsxml_TagType}'"
	# Attributes are only meaningful on OPEN / OPENCLOSE tags.
	if [[ "${lmsxml_TagType}" == "OPEN" || "${lmsxml_TagType}" == "OPENCLOSE" ]]
	then
		if [ -n "${lmsxml_Attributes}" ]
		then
			lmsRDomParseAtt
			lmsConioDisplay "XML_ATT_COUNT : '${#lmsxml_AttributesArray[@]}'"
			for attribute in "${!lmsxml_AttributesArray[@]}"
			do
				lmsConioDisplay "XML_ATT_NAME : '${attribute}'"
				lmsConioDisplay "XML_ATT_VAL : '${lmsxml_AttributesArray[$attribute]}'"
			done
		fi
	fi
	lmsStrTrim "${lmsxml_Comment}" lmsxml_Comment
	lmsConioDisplay "XML_COMMENT : '${lmsxml_Comment}'"
	lmsConioDisplay "XML_PATH : '${lmsxml_Path}'"
	lmsConioDisplay "XPATH : '${lmsxml_XPath}'"
}
# *****************************************************************************
# *****************************************************************************
#
# Start main program below here
#
# *****************************************************************************
# *****************************************************************************
# Record this script's name for logging, then open the test log and run the
# shared start-up initialization from the test library.
lmsScriptFileName $0
. $testlibDir/openLog.bash
. $testlibDir/startInit.bash
# *****************************************************************************
# *****************************************************************************
#
# Run the tests starting here
#
# *****************************************************************************
# *****************************************************************************
# Create the two stacks required by the DOM parser; abort with a debug
# message if either cannot be created.
lmsStackCreate "global" lmstst_guid 8
[[ $? -eq 0 ]] ||
{
	lmsConioDebugExit $LINENO "Debug" "StackCreate Unable to open/create stack 'global'"
}
lmsStackCreate "namespace" lmstst_nsuid 8
[[ $? -eq 0 ]] ||
{
	lmsConioDebugExit $LINENO "Debug" "StackCreate Unable to open/create stack 'namespace'"
}
# *******************************************************
# Optional verbose pass: when the CLI enabled $showData, first dump every
# parsed element through the local test callback before the real test below.
if [[ $showData -ne 0 ]]
then
	lmsRDomCallback "testLmsRDomXPNData"
	[[ $? -eq 0 ]] ||
	{
		lmsConioDebugExit $LINENO "RDomError" "Callback function name is missing"
	}
	lmsRDomParseDOM ${lmstst_Declarations}
	[[ $? -eq 0 ]] ||
	{
		lmsConioDebugExit $LINENO "RDomError" "TDOMParseDOM '${lmstst_Declarations}'"
	}
	lmsConioDisplay "*******************************************************"
fi
# *******************************************************
# The actual test: parse the declarations file with the library's XPath node
# handler installed as the per-element callback.
lmsRDomCallback "lmsRDOMXPathNode"
[[ $? -eq 0 ]] ||
{
	lmsConioDebugExit $LINENO "RDomError" "Callback function name is missing"
}
lmsRDomParse ${lmstst_Declarations}
[[ $? -eq 0 ]] ||
{
	lmsConioDebugExit $LINENO "RDomError" "TDOMParseDOM '${lmstst_Declarations}'"
}
# *****************************************************************************
. $testlibDir/testEnd.bash
# *****************************************************************************
|
#!/bin/bash
# Copyright 2019 Google LLC.
# This script builds DeepVariant binaries and runs DeepVariant for exome.
# Main purpose of this script is to evaluate the total runtime of DeepVariant on
# different computer (cloud instance) types.
# Runtime measurements do not include the time for building binaries and
# localizing test data.
set -euo pipefail
## Preliminaries
# Set a number of shell variables, to make what follows easier to read.
BASE="${HOME}/exome-case-study"   # working root for all inputs/outputs
MODEL_VERSION="0.9.0"
MODEL_NAME="DeepVariant-inception_v3-${MODEL_VERSION}+data-wes_standard"
DEFAULT_MODEL_HTTP_DIR="http://storage.googleapis.com/deepvariant/models/DeepVariant/${MODEL_VERSION}/${MODEL_NAME}"
INPUT_DIR="${BASE}/input"
MODELS_DIR="${INPUT_DIR}/models"
MODEL="${MODELS_DIR}/model.ckpt"   # checkpoint prefix (data/index/meta files)
DATA_DIR="${INPUT_DIR}/data"
REF="${DATA_DIR}/hs37d5.fa.gz"     # bgzipped reference genome
BAM="${DATA_DIR}/151002_7001448_0359_AC7F6GANXX_Sample_HG002-EEogPU_v02-KIT-Av5_AGATGTAC_L008.posiSrt.markDup.bam"
TRUTH_VCF="${DATA_DIR}/HG002_GRCh37_GIAB_highconf_CG-IllFB-IllGATKHC-Ion-10X-SOLID_CHROM1-22_v.3.3.2_highconf_triophased.vcf.gz"
TRUTH_BED="${DATA_DIR}/HG002_GRCh37_GIAB_highconf_CG-IllFB-IllGATKHC-Ion-10X-SOLID_CHROM1-22_v.3.3.2_highconf_noinconsistent.bed"
N_SHARDS=$(nproc)                  # one make_examples shard per CPU core
OUTPUT_DIR="${BASE}/output"
# "@N" is DeepVariant's sharded-file spec, expanded by the tools themselves.
EXAMPLES="${OUTPUT_DIR}/make_examples.tfrecord@${N_SHARDS}.gz"
GVCF_TFRECORDS="${OUTPUT_DIR}/gvcf.tfrecord@${N_SHARDS}.gz"
CALL_VARIANTS_OUTPUT="${OUTPUT_DIR}/call_variants_output.tfrecord.gz"
OUTPUT_VCF="${OUTPUT_DIR}/HG002.output.vcf.gz"
OUTPUT_GVCF="${OUTPUT_DIR}/HG002.output.g.vcf.gz"
LOG_DIR="${OUTPUT_DIR}/logs"
CAPTURE_BED="${DATA_DIR}/agilent_sureselect_human_all_exon_v5_b37_targets.bed"   # exome capture regions
# Build binaries.
# If you're using the pre-built binaries, you can skip these and just run
# ./run-prereq.sh instead. And update the script to point to your *zip binaries.
# Produces the ./bazel-bin/deepvariant/*.zip executables used below.
function build_binaries() {
  ./build-prereq.sh
  ./build_release_binaries.sh
}
# Create the directory layout, install helper packages, and download the
# model checkpoint plus the HG002 exome test data.
# Reads model_http_dir, a local of main() visible here via bash dynamic scoping.
# Fix: an unrecognized model address now aborts instead of echoing and
# carrying on without a model (which made call_variants fail confusingly later).
function setup_test() {
  ## Create local directory structure
  mkdir -p "${OUTPUT_DIR}"
  mkdir -p "${DATA_DIR}"
  mkdir -p "${MODELS_DIR}"
  mkdir -p "${LOG_DIR}"
  ## Download extra packages
  # There are some extra programs we will need.
  # We are going to use [GNU Parallel](https://www.gnu.org/software/parallel/) to
  # run `make_examples`.
  sudo apt-get -y update
  sudo apt-get -y install parallel
  sudo apt-get -y install docker.io
  sudo apt-get -y install aria2
  ## Download models, and test data
  # Copy the model files to your local disk.
  HTTP_ADDRESS="^http:\/\/.*"
  GS_ADDRESS="^gs:\/\/.*"
  if [[ "$model_http_dir" =~ $HTTP_ADDRESS ]];
  then
    aria2c -c -x10 -s10 -d "${MODELS_DIR}" "${model_http_dir}"/model.ckpt.data-00000-of-00001
    aria2c -c -x10 -s10 -d "${MODELS_DIR}" "${model_http_dir}"/model.ckpt.index
    aria2c -c -x10 -s10 -d "${MODELS_DIR}" "${model_http_dir}"/model.ckpt.meta
  elif [[ "$model_http_dir" =~ $GS_ADDRESS ]];
  then
    gsutil cp "${model_http_dir}"/model.ckpt.data-00000-of-00001 "${MODELS_DIR}"
    gsutil cp "${model_http_dir}"/model.ckpt.index "${MODELS_DIR}"
    gsutil cp "${model_http_dir}"/model.ckpt.meta "${MODELS_DIR}"
  else
    # Fail fast: nothing downstream can work without a model checkpoint.
    echo 'Could not copy model. Unknown address prefix.' >&2
    exit 1
  fi
  # Copy the data
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/151002_7001448_0359_AC7F6GANXX_Sample_HG002-EEogPU_v02-KIT-Av5_AGATGTAC_L008.posiSrt.markDup.bai
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/151002_7001448_0359_AC7F6GANXX_Sample_HG002-EEogPU_v02-KIT-Av5_AGATGTAC_L008.posiSrt.markDup.bam
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/HG002_GRCh37_GIAB_highconf_CG-IllFB-IllGATKHC-Ion-10X-SOLID_CHROM1-22_v.3.3.2_highconf_noinconsistent.bed
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/HG002_GRCh37_GIAB_highconf_CG-IllFB-IllGATKHC-Ion-10X-SOLID_CHROM1-22_v.3.3.2_highconf_triophased.vcf.gz
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/HG002_GRCh37_GIAB_highconf_CG-IllFB-IllGATKHC-Ion-10X-SOLID_CHROM1-22_v.3.3.2_highconf_triophased.vcf.gz.tbi
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/agilent_sureselect_human_all_exon_v5_b37_targets.bed
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/hs37d5.fa.gz
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/hs37d5.fa.gz.fai
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/hs37d5.fa.gz.gzi
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/hs37d5.fa.gzi
  aria2c -c -x10 -s10 -d "${DATA_DIR}" http://storage.googleapis.com/deepvariant/exome-case-study-testdata/hs37d5.fa.fai
}
## Run `make_examples`
# In this step, we used the `--regions` flag to constrain the regions we processed
# to the capture region BED file:
# Fan out make_examples over N_SHARDS with GNU Parallel; {} is the shard
# index consumed by --task, and --regions restricts work to the capture BED.
function run_make_examples() {
  echo "Start running make_examples...Log will be in the terminal and also to ${LOG_DIR}/make_examples.log."
  seq 0 $((N_SHARDS-1)) | \
  parallel -k --line-buffer \
    python ./bazel-bin/deepvariant/make_examples.zip \
      --mode calling \
      --ref "${REF}" \
      --reads "${BAM}" \
      --examples "${EXAMPLES}" \
      --regions "${CAPTURE_BED}" \
      --gvcf "${GVCF_TFRECORDS}" \
      --task {}
  echo "Done."
  echo
}
## Run `call_variants`
# Score the sharded examples against the model checkpoint, producing the
# call_variants TFRecord consumed by postprocess_variants.
function run_call_variants() {
  echo "Start running call_variants...Log will be in the terminal and also to ${LOG_DIR}/call_variants.log."
  python ./bazel-bin/deepvariant/call_variants.zip \
    --outfile "${CALL_VARIANTS_OUTPUT}" \
    --examples "${EXAMPLES}" \
    --checkpoint "${MODEL}"
  echo "Done."
  echo
}
## Run `postprocess_variants`, without gVCFs.
# Convert call_variants output into the final VCF (no gVCF output).
# Not invoked by run_deepvariant below, which uses the gVCF variant instead.
function run_postprocess_variants() {
  echo "Start running postprocess_variants (without gVCFs)...Log will be in the terminal and also to ${LOG_DIR}/postprocess_variants.log."
  python ./bazel-bin/deepvariant/postprocess_variants.zip \
    --ref "${REF}" \
    --infile "${CALL_VARIANTS_OUTPUT}" \
    --outfile "${OUTPUT_VCF}"
  echo "Done."
  echo
}
## Run `postprocess_variants`, with gVCFs.
# Convert call_variants output into the final VCF, additionally merging the
# non-variant-site TFRecords into a gVCF.
function run_postprocess_variants_gVCF() {
  echo "Start running postprocess_variants (with gVCFs)...Log will be in the terminal and also to ${LOG_DIR}/postprocess_variants.withGVCF.log."
  python ./bazel-bin/deepvariant/postprocess_variants.zip \
    --ref "${REF}" \
    --infile "${CALL_VARIANTS_OUTPUT}" \
    --outfile "${OUTPUT_VCF}" \
    --nonvariant_site_tfrecord_path "${GVCF_TFRECORDS}" \
    --gvcf_outfile "${OUTPUT_GVCF}"
  echo "Done."
  echo
}
# Run the three DeepVariant stages in order, timing each one into its own
# log file (these timings are the point of this benchmark script).
function run_deepvariant() {
  (time run_make_examples) > "${LOG_DIR}/make_examples.log" 2>&1
  (time run_call_variants) > "${LOG_DIR}/call_variants.log" 2>&1
  (time run_postprocess_variants_gVCF) > "${LOG_DIR}/postprocess_variants.log" 2>&1
}
function main() {
echo 'Starting the test...'
local -r model_http_dir="${1:-$DEFAULT_MODEL_HTTP_DIR}"
echo "Using model from: ${model_http_dir}"
build_binaries
setup_test
(time run_deepvariant) 2>&1 | tee "${LOG_DIR}/deepvariant_runtime.log"
}
main "$@"
|
#!/bin/bash
# Validate the InSpec profile in the current directory (syntax/metadata check).
inspec check .
|
/**
* Copyright © 2014-2021 The SiteWhere Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sitewhere.rest.client;
import org.junit.Before;
import com.sitewhere.spi.ISiteWhereClient;
import com.sitewhere.spi.ITenantAuthentication;
import com.sitewhere.spi.SiteWhereException;
/**
* Base Tenant Test class.
*
* @author <NAME>
*/
public abstract class AbstractRestTest {

    /** SiteWhere Client */
    private ISiteWhereClient client;

    /** Tenant Authentication */
    private ITenantAuthentication tenantAuthentication;

    /**
     * Build and initialize a SiteWhere client with default settings and the
     * default tenant credentials before each test.
     *
     * @throws SiteWhereException if client initialization fails
     */
    @Before
    public void init() throws SiteWhereException {
	this.client = SiteWhereClient.newBuilder().build().initialize();
	this.tenantAuthentication = SiteWhereClient.defaultTenant();
    }

    // Getters/Setters --------------------------------------------------------

    protected ISiteWhereClient getClient() {
	return client;
    }

    /**
     * Get the tenant authentication used by tests (correctly-spelled accessor).
     */
    protected ITenantAuthentication getTenantAuthentication() {
	return tenantAuthentication;
    }

    /**
     * @deprecated misspelled; kept for existing callers — use
     *             {@link #getTenantAuthentication()} instead.
     */
    @Deprecated
    protected ITenantAuthentication getTenatAuthentication() {
	return getTenantAuthentication();
    }
}
|
#!/bin/sh
# Take a window screenshot, run the Ruby posting helper, archive a
# timestamped copy of the image and notify the user.

# pictures directory (falls back to ~/Pictures when the XDG var is unset)
XDG_PICTURES_DIR="${XDG_PICTURES_DIR:-$HOME/Pictures}"
# take a screenshot using gnome-screenshot (-w: current window)
gnome-screenshot -w -f /tmp/sharexin_img.png
# launches ruby script (presumably performs the Twitter upload — confirm)
ruby ./Picture.rb
# date and time for naming
date=$(date +%Y-%m-%d)
time=$(date +%T)
# copies to permanent location.
# Fixes: ensure the target directory exists, and quote the destination so a
# pictures path containing spaces does not word-split.
mkdir -p "$XDG_PICTURES_DIR/ShareXin"
cp /tmp/sharexin_img.png "$XDG_PICTURES_DIR/ShareXin/twitter_window-$date-$time.png"
# makes notification
notify-send "Sent" \
	"To Twitter" -i /tmp/sharexin_img.png -t 2500
|
<filename>arrays/intersection_two_arr.py<gh_stars>0
"""
349. Intersection of Two Arrays
https://leetcode.com/problems/intersection-of-two-arrays/
Given two arrays, write a function to compute their intersection.
Example:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2]
"""
# Runtime: 44ms
class Solution:
    def intersection(self, nums1: List[int], nums2: List[int]) -> List[int]:
        """Return the distinct values present in both input lists.

        Bug fix: the annotation promises ``List[int]`` but the original
        returned a ``set``; convert before returning. Element order is
        unspecified (set iteration order), matching LeetCode's judge.
        """
        return list(set(nums1) & set(nums2))
|
export type MTradeClassicTab = 'charts' | 'trade' | 'open-orders';
|
#!/bin/sh
# Build the i18n target and pack all */lang/stub.ts files into a zip named
# after the current git revision.
build_dir=build-msvc15

set -e
dir="$(dirname -- "$0")"
cd "$dir/.."

# Fix: pushd/popd are bashisms and fail under plain /bin/sh (e.g. dash);
# a subshell keeps the directory change local and stays POSIX.
(
	cd "./$build_dir"
	cmake --build . --target i18n >/dev/null
)

# Fix: spell options out in full — '--tag --alw' only worked through git's
# prefix abbreviation ('--tags', '--always').
rel="$(git describe --tags --always)"
rm -f "$rel"  # NOTE(review): removes a file named after the tag — presumably a stale artifact; confirm
find . -wholename "?*/lang/stub.ts" | zip -q9 "$build_dir/$rel-i18n-stub.zip" -@
|
# NOTE(review): this file is illustrative pseudo-code, not runnable Python:
# `samples`, `length`, `number_of_compartments`, `number_of_axles` and
# `passenger_capacity` are undefined, and the label expression below
# (`1 if sample contains a bus else 0`) is not valid syntax. It sketches a
# tiny binary classifier (bus / not-bus) over 4 numeric features.
import numpy as np
import tensorflow as tf
# Input Data: one row of 4 features per sample.
X_data = np.array([[
    length,
    number_of_compartments,
    number_of_axles,
    passenger_capacity
] for sample in samples])
# Labels Data: 1 = bus present, 0 = not (pseudo-code, see note above).
y_data = np.array([[1 if sample contains a bus else 0] for sample in samples])
# Create model: 4 -> 10 (ReLU) -> 1 (sigmoid) feed-forward network.
model = tf.keras.models.Sequential([
    tf.keras.layers.InputLayer(input_shape = X_data.shape[1:]),
    tf.keras.layers.Dense(10, activation = 'relu'),
    tf.keras.layers.Dense(1, activation = 'sigmoid')
])
# Compile Model: binary cross-entropy matches the single sigmoid output.
model.compile(
    optimizer = 'adam',
    loss = 'binary_crossentropy',
    metrics = ['accuracy'],
)
# Train Model
model.fit(x = X_data, y = y_data, epochs = 5)
# Test Model — NOTE(review): evaluates on the training data, so this measures
# fit, not generalization.
model.evaluate(x=X_data, y=y_data)
package co.binapp.android.data;
/** Shared integer constants for view/screen animations. */
public class AnimationConstants {

    /** Identifiers for transition directions (semantics defined at call sites). */
    public static class Transitions {
	/** Transition entering from the right. */
	public static final int FROM_RIGHT = 1;
	/** Transition entering from the bottom. */
	public static final int FROM_BOTTOM = 2;
    }
}
|
#!/bin/bash
# giter8 migration template: `$className$` / `$className;format=...$` tokens
# are substituted by g8 before this script runs. It wires a new Int question
# page into routes, messages, generators and the check-your-answers helper.
echo ""
echo "Applying migration $className;format="snake"$"
# Register GET/POST routes for both Normal and Check modes.
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET /:srn/new-return/$className;format="decap"$ controllers.$className$Controller.onPageLoad(mode: Mode = NormalMode, srn: String)" >> ../conf/app.routes
echo "POST /:srn/new-return/$className;format="decap"$ controllers.$className$Controller.onSubmit(mode: Mode = NormalMode, srn: String)" >> ../conf/app.routes
echo "GET /:srn/new-return/change$className$ controllers.$className$Controller.onPageLoad(mode: Mode = CheckMode, srn: String)" >> ../conf/app.routes
echo "POST /:srn/new-return/change$className$ controllers.$className$Controller.onSubmit(mode: Mode = CheckMode, srn: String)" >> ../conf/app.routes
# Append the page's i18n keys (title, heading, CYA label, validation errors).
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "$className;format="decap"$.title = $className$" >> ../conf/messages.en
echo "$className;format="decap"$.heading = $className$" >> ../conf/messages.en
echo "$className;format="decap"$.checkYourAnswersLabel = $className$" >> ../conf/messages.en
echo "$className;format="decap"$.error.nonNumeric = Enter your $className;format="decap"$ using numbers" >> ../conf/messages.en
echo "$className;format="decap"$.error.required = Enter your $className;format="decap"$" >> ../conf/messages.en
echo "$className;format="decap"$.error.wholeNumber = Enter your $className;format="decap"$ using whole numbers" >> ../conf/messages.en
echo "$className;format="decap"$.error.outOfRange = $className$ must be between {0} and {1}" >> ../conf/messages.en
# Each awk below injects Scala code immediately after an anchor line and
# rewrites the target file in place via a temp file.
echo "Adding to UserAnswersEntryGenerators"
awk '/trait UserAnswersEntryGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitrary$className$UserAnswersEntry: Arbitrary[($className$Page.type, JsValue)] =";\
    print "    Arbitrary {";\
    print "      for {";\
    print "        page  <- arbitrary[$className$Page.type]";\
    print "        value <- arbitrary[Int].map(Json.toJson(_))";\
    print "      } yield (page, value)";\
    print "    }";\
    next }1' ../test/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test/generators/UserAnswersEntryGenerators.scala
echo "Adding to PageGenerators"
awk '/trait PageGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitrary$className$Page: Arbitrary[$className$Page.type] =";\
    print "    Arbitrary($className$Page)";\
    next }1' ../test/generators/PageGenerators.scala > tmp && mv tmp ../test/generators/PageGenerators.scala
echo "Adding to UserAnswersGenerator"
awk '/val generators/ {\
    print;\
    print "    arbitrary[($className$Page.type, JsValue)] ::";\
    next }1' ../test/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test/generators/UserAnswersGenerator.scala
echo "Adding helper method to CheckYourAnswersHelper"
awk '/class CheckYourAnswersHelper/ {\
    print;\
    print "";\
    print "  def $className;format="decap"$: Option[Row] = userAnswers.get($className$Page) map {";\
    print "    answer =>";\
    print "      Row(";\
    print "        key = Key(msg\"$className;format="decap"$.checkYourAnswersLabel\", classes = Seq(\"govuk-!-width-one-half\")),";\
    print "        value = Value(Literal(answer.toString)),";\
    print "        actions = List(";\
    print "          Action(";\
    print "            content = msg\"site.edit\",";\
    print "            href = controllers.routes.$className$Controller.onPageLoad(CheckMode, srn).url,";\
    print "            visuallyHiddenText = Some(msg\"site.edit.hidden\".withArgs(msg\"$className;format="decap"$.checkYourAnswersLabel\"))";\
    print "          )";\
    print "        )";\
    print "      )";\
    print "  }";\
    next }1' ../app/utils/CYAHelper.scala > tmp && mv tmp ../app/utils/CYAHelper.scala
echo "Migration $className;format="snake"$ completed"
|
#!/bin/bash

# Shared pengwin-setup helpers: confirm(), install_packages(), etc.
# shellcheck source=/usr/local/pengwin-setup.d/common.sh
source "$(dirname "$0")/common.sh" "$@"

# Prompt the user; install Ansible only on an affirmative answer.
if ! (confirm --title "ANSIBLE" --yesno "Would you like to download and install Ansible?" 8 55); then
  echo "Skipping ANSIBLE"
else
  echo "Installing ANSIBLE"
  install_packages ansible
fi
|
package org.nem.core.connect;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpRequestBase;
/**
 * Strategy for coercing an HTTP response into a specific type.
 *
 * @param <T> Type of response.
 */
public interface HttpResponseStrategy<T> {

	/**
	 * Coerces a result of type T given the specified request and response.
	 *
	 * @param request The request.
	 * @param response The response.
	 * @return The coerced result.
	 */
	T coerce(final HttpRequestBase request, final HttpResponse response);

	/**
	 * Gets the supported content type.
	 *
	 * @return The supported content type.
	 */
	String getSupportedContentType();
}
|
from . import program
from . import formatting
|
#!/bin/bash
# Rewrite the top-level HTML files to load jQuery / List.js / elasticlunr
# from a combined jsdelivr CDN URL instead of the bundled local copies
# (a local fallback <script> is injected for offline use).
echo "Replacing local files with combined jsdelivr."
# NUL-delimited find|read keeps filenames intact.
find . -maxdepth 1 -type f -name '*.html' -print0 |
    while IFS= read -r -d $'\0' line; do
        # Drop the <script> tags that referenced the local libraries.
        # Fix: quote "$line" so filenames containing spaces do not word-split.
        sed -n -i '/lib\/jquery.js/!p' "$line"
        sed -n -i '/lib\/list.js/!p' "$line"
        sed -n -i '/lib\/elasticlunr.js/!p' "$line"
        # Lots of messy escapes below, you care about the version number after the @ sign & the comma.
        sed -i -e '/<!--5ETOOLS_SCRIPT_ANCHOR-->/a <script type="text/javascript" src="https://cdn.jsdelivr.net/combine/npm/jquery@3.2/dist/jquery.min.js,npm/list.js@1.5/dist/list.min.js,gh/weixsong/elasticlunr.js@0.9/elasticlunr.min.js"><\/script> <script>window.jQuery || document.write(`<script src="/lib\/jquery.js"><\\\/script>`); window.List || document.write(`<script src="/lib\/list.js"><\\\/script>`);<\/script>' "$line"
    done
|
import { Injectable, HttpService, HttpException, HttpStatus } from '@nestjs/common'
import { map } from 'rxjs/operators'
import * as DTO from './banner.interface'
@Injectable()
export class BannerService {
  constructor(private readonly httpService: HttpService) {}

  /**
   * Proxy for the Bing daily-wallpaper API (HPImageArchive), mapping the raw
   * payload to a compact { start, end, cover, name, search } shape.
   *
   * NOTE(review): HttpService.request returns an RxJS Observable, which is not
   * a thenable — `await` does not subscribe to it, so request errors surface
   * to the eventual subscriber rather than this try/catch. Confirm intended.
   */
  async nodeBanner() {
    try {
      return await this.httpService
        .request<{ images: Array<DTO.BingResponse> }>({
          url: `https://cn.bing.com/HPImageArchive.aspx`,
          method: 'GET',
          params: {
            format: 'js', // JSON payload
            idx: -1, // day offset
            n: 8, // number of images
            mkt: 'zh-CN'
          }
        })
        .pipe(
          map(response => {
            // Bing omits `images` on error payloads; default to empty.
            return (response.data.images || []).map(k => ({
              start: k.startdate,
              end: k.enddate,
              cover: `https://www.bing.com${k.url}`,
              name: k.copyright,
              search: k.copyrightlink
            }))
          })
        )
    } catch (e) {
      throw new HttpException(e.message || e.toString(), HttpStatus.BAD_REQUEST)
    }
  }
}
|
<reponame>werdck/imgbrd-grabber<filename>src/gui/src/monitoring-center.cpp
#include "monitoring-center.h"
#include <QEventLoop>
#include <QSettings>
#include <QSystemTrayIcon>
#include <QTimer>
#include "downloader/download-query-group.h"
#include "downloader/download-queue.h"
#include "downloader/image-downloader.h"
#include "loader/pack-loader.h"
#include "logger.h"
#include "models/favorite.h"
#include "models/image.h"
#include "models/monitor.h"
#include "models/monitor-manager.h"
#include "models/profile.h"
#include "models/search-query/tag-search-query.h"
#include "models/site.h"
#define MONITOR_CHECK_LIMIT 20
#define MONITOR_CHECK_TOTAL 1000
/**
 * Build the monitoring center. Hooks the download queue's `finished` signal
 * so monitoring resumes (queueEmpty) once queued downloads have drained.
 */
MonitoringCenter::MonitoringCenter(Profile *profile, DownloadQueue *downloadQueue, QSystemTrayIcon *trayIcon, QObject *parent)
	: QObject(parent), m_profile(profile), m_downloadQueue(downloadQueue), m_trayIcon(trayIcon)
{
	connect(m_downloadQueue, &DownloadQueue::finished, this, &MonitoringCenter::queueEmpty);
}
/**
 * Start monitoring: schedule the first tick after the configured
 * "Monitoring/startupDelay" (seconds; defaults to 0).
 */
void MonitoringCenter::start()
{
	log(QStringLiteral("Monitoring starting"), Logger::Info);
	m_stop = false;

	const int secsDelay = m_profile->getSettings()->value("Monitoring/startupDelay", 0).toInt();

	// Use the compile-time-checked member-pointer overload instead of the
	// string-based SLOT() macro, consistent with the connect() in the ctor.
	QTimer::singleShot(secsDelay * 1000, this, &MonitoringCenter::tick);
}
/**
 * Check a favorite's monitor, using the favorite's space-separated tags as
 * the search and its post-filters; emits favoritesChanged on new images.
 */
void MonitoringCenter::checkMonitor(Monitor &monitor, const Favorite &favorite)
{
	bool newImages = checkMonitor(monitor, favorite.getName().split(' ', QString::SkipEmptyParts), favorite.getPostFiltering());
	if (newImages) {
		emit m_profile->favoritesChanged();
	}
}
/**
 * Check a monitor for new images matching `search` on each of its sites,
 * optionally notifying the user and queueing downloads.
 *
 * Fix: `firstRun` was initialized but never cleared, so the paging condition
 * `(firstRun || monitor.download())` was always true and non-download
 * monitors kept paging; it is now set to false after the first page.
 *
 * @return true if at least one new image was found.
 */
bool MonitoringCenter::checkMonitor(Monitor &monitor, const SearchQuery &search, const QStringList &postFiltering)
{
	// Images newer than `limit` are ignored this round, letting a configured
	// delay pass before an image is considered.
	const int delay = monitor.delay();
	const QDateTime limit = QDateTime::currentDateTimeUtc().addSecs(-delay);

	QStringList siteNames;
	for (Site *site : monitor.sites()) {
		siteNames.append(site->name());
	}

	emit statusChanged(monitor, MonitoringStatus::Checking);
	log(QStringLiteral("Monitoring new images for '%1' on '%2'").arg(search.toString(), siteNames.join(", ")), Logger::Info);

	int count = 0;
	int newImages = 0;
	QList<QSharedPointer<Image>> newImagesList;
	for (Site *site : monitor.sites()) {
		// Create a pack loader
		DownloadQueryGroup query(m_profile->getSettings(), search, 1, MONITOR_CHECK_LIMIT, MONITOR_CHECK_TOTAL, postFiltering, site);
		PackLoader loader(m_profile, query, MONITOR_CHECK_LIMIT, this);
		loader.start();

		// Load all images. Keep paging only while every image on the page was
		// new (newImagesRun == countRun) — an already-seen image means we've
		// caught up with the previous check.
		bool firstRun = true;
		int countRun = 0;
		int newImagesRun = 0;
		while ((firstRun || monitor.download()) && loader.hasNext() && newImagesRun == countRun) {
			// Load the next page
			QList<QSharedPointer<Image>> allImages = loader.next();
			countRun += allImages.count();

			// Filter out old images
			for (const QSharedPointer<Image> &img : allImages) {
				if (img->createdAt() > monitor.lastCheck() && (delay <= 0 || img->createdAt() <= limit)) {
					newImagesList.append(img);
					newImagesRun++;
				}
			}

			firstRun = false;
		}

		count += countRun;
		newImages += newImagesRun;
	}

	emit statusChanged(monitor, MonitoringStatus::Performing);

	// Send notification
	if (monitor.notify() && newImages > 0 && m_trayIcon != nullptr && m_trayIcon->isVisible()) {
		QString msg;
		if (count == 1) {
			msg = tr("New images found for tag '%1' on '%2'");
		} else if (newImages < count) {
			msg = tr("%n new image(s) found for tag '%1' on '%2'", "", newImages);
		} else {
			msg = tr("More than %n new image(s) found for tag '%1' on '%2'", "", newImages);
		}
		m_trayIcon->showMessage(tr("Grabber monitoring"), msg.arg(search.toString(), siteNames.join(", ")), QSystemTrayIcon::Information);
	}

	// Add images to download queue
	if (monitor.download() && newImages > 0) {
		// Fall back to the profile-wide save path/filename when the monitor
		// does not override them.
		QString path = monitor.pathOverride();
		if (path.isEmpty()) {
			path = m_profile->getSettings()->value("save/path").toString();
		}
		QString filename = monitor.filenameOverride();
		if (filename.isEmpty()) {
			filename = m_profile->getSettings()->value("save/filename").toString();
		}

		for (const QSharedPointer<Image> &img : newImagesList) {
			auto downloader = new ImageDownloader(m_profile, img, filename, path, 0, true, false, this);
			if (!monitor.getBlacklisted()) {
				downloader->setBlacklist(&m_profile->getBlacklist());
			}
			m_downloadQueue->add(DownloadQueue::Background, downloader);
		}

		log(QStringLiteral("Added %1 images to the download queue for monitor '%2' on '%3'").arg(QString::number(newImages), search.toString(), siteNames.join(", ")), Logger::Info);
		m_waitingForQueue = true;
	}

	// Update monitor
	monitor.setLastCheck(limit);
	monitor.setCumulated(monitor.cumulated() + newImages, count != 1 && newImages < count);
	m_changed = true;
	// Defer the save while downloads are pending; queueEmpty will sync later.
	if (!m_waitingForQueue) {
		sync();
	}

	emit statusChanged(monitor, MonitoringStatus::Waiting);

	return newImages > 0;
}
void MonitoringCenter::sync()
{
	// Persist favorites and monitors to disk, but only when something changed
	// since the last save.
	if (!m_changed) {
		return;
	}
	m_profile->syncFavorites();
	m_profile->monitorManager()->save();
	m_changed = false;
}
// One scheduler pass: checks every monitor (favorites first, then standalone
// monitors) whose timeout has expired, then re-arms a single-shot timer for
// the next soonest-expiring monitor.
void MonitoringCenter::tick()
{
	if (m_stop) {
		return;
	}
	// Smallest number of seconds until any monitor is due again (-1 = none seen yet)
	qint64 minNextMonitoring = -1;
	log(QStringLiteral("Monitoring tick"), Logger::Info);
	// Favorites
	QList<Favorite> &favs = m_profile->getFavorites();
	for (int j = 0; j < favs.count(); ++j) {
		Favorite &fav = favs[j];
		for (Monitor &monitor : fav.getMonitors()) {
			// If this favorite's monitoring expired, we check it for updates
			qint64 next = monitor.secsToNextCheck();
			if (next <= 0) {
				checkMonitor(monitor, fav);
				next = monitor.secsToNextCheck();
			}
			// checkMonitor() may have queued downloads; queueEmpty() calls
			// tick() again once the queue drains, so stop this pass here.
			if (m_waitingForQueue) {
				return;
			}
			// Only keep the soonest expiring timeout
			if (next < minNextMonitoring || minNextMonitoring == -1) {
				minNextMonitoring = next;
			}
		}
	}
	// Normal monitors
	for (Monitor &monitor : m_profile->monitorManager()->monitors()) {
		// If this monitor's timeout expired, we check it for updates
		qint64 next = monitor.secsToNextCheck();
		if (next <= 0) {
			checkMonitor(monitor, monitor.query(), monitor.postFilters());
			next = monitor.secsToNextCheck();
		}
		if (m_waitingForQueue) {
			return;
		}
		// Only keep the soonest expiring timeout
		if (next < minNextMonitoring || minNextMonitoring == -1) {
			minNextMonitoring = next;
		}
	}
	// Re-run this method as soon as one of the monitoring timeout expires
	if (minNextMonitoring > 0) {
		log(QStringLiteral("Next monitoring will be in %1 seconds").arg(minNextMonitoring), Logger::Info);
		QTimer::singleShot(minNextMonitoring * 1000, this, SLOT(tick()));
	} else {
		log(QStringLiteral("Monitoring finished"), Logger::Info);
	}
}
void MonitoringCenter::queueEmpty()
{
	// Only relevant while a monitor check is waiting for its queued
	// downloads to finish; otherwise nothing to do.
	if (!m_waitingForQueue) {
		return;
	}
	m_waitingForQueue = false;
	sync();
	tick();
}
// Request the monitoring loop to stop; the flag is checked at the top of
// tick(), so already-scheduled timers fire but do nothing.
void MonitoringCenter::stop()
{
	m_stop = true;
	log(QStringLiteral("Monitoring stopped"), Logger::Info);
}
// Whether the center has not been stopped via stop().
bool MonitoringCenter::isRunning() const
{
	return !m_stop;
}
|
package com.androidapp.cachewebviewlib.utils;
import java.io.BufferedInputStream;
import java.io.InputStream;
public class InputStreamUtils {
private InputStream mInputStream;
private String mEncoding = "UTF-8";
private int mEncodeBuffer = 500;
public InputStreamUtils(InputStream inputStream){
mInputStream = inputStream;
}
public void setEncodeBufferSize(int bufferSize){
if (bufferSize>mEncodeBuffer){
mEncodeBuffer = bufferSize;
}
}
public String getEncoding(){
return mEncoding;
}
public static String inputStream2Str(InputStream inputStream){
StringBuffer sb = new StringBuffer();
if (inputStream == null){
return sb.toString();
}
try {
BufferedInputStream bufferedInputStream = new BufferedInputStream(inputStream);
byte buffer[] = new byte[1024];
int len = 0;
while ((len = bufferedInputStream.read(buffer,0,1024))>0){
sb.append(new String(buffer,0,len));
}
bufferedInputStream.close();
}catch (Exception e){
}
return sb.toString();
}
}
|
package cn.springmvc.model;
/**
 * Plain data holder for a single payment record.
 */
public class PaymentDetail {
	private String order_no;
	private String item_name;
	private String ORDER_CREATE_TIME;

	/** Default constructor. */
	public PaymentDetail() {
		super();
	}

	/** Convenience constructor initialising every field. */
	public PaymentDetail(String order_no, String item_name, String ORDER_CREATE_TIME) {
		super();
		this.order_no = order_no;
		this.item_name = item_name;
		this.ORDER_CREATE_TIME = ORDER_CREATE_TIME;
	}

	public String getOrder_no() {
		return order_no;
	}

	public void setOrder_no(String order_no) {
		this.order_no = order_no;
	}

	public String getItem_name() {
		return item_name;
	}

	public void setItem_name(String item_name) {
		this.item_name = item_name;
	}

	public String getORDER_CREATE_TIME() {
		return ORDER_CREATE_TIME;
	}

	public void setORDER_CREATE_TIME(String oRDER_CREATE_TIME) {
		ORDER_CREATE_TIME = oRDER_CREATE_TIME;
	}

	@Override
	public String toString() {
		// Output kept byte-identical to the historical format (including the
		// missing space before ORDER_CREATE_TIME).
		StringBuilder sb = new StringBuilder();
		sb.append("PaymentDetail [order_no=").append(order_no);
		sb.append(", item_name=").append(item_name);
		sb.append(",ORDER_CREATE_TIME=").append(ORDER_CREATE_TIME);
		sb.append("]");
		return sb.toString();
	}
}
|
<filename>test/shared/test_web_cc_sample.rb<gh_stars>1-10
require 'shared/web_cc_sample.rb'
require 'test/unit'
# Unit tests for WebCCSample (a CruiseControl data source adapter).
class TestWebCCSample < Test::Unit::TestCase
  def setup
    @unit = WebCCSample.new
  end

  # The data map must expose exactly these keys, all initially nil.
  def test_data_map
    @unit = WebCCSample.new
    expected_map = {:name=>nil, :status => nil, :last_failed => nil, :last_passed => nil, :build_number => nil}
    assert_equal expected_map, @unit.data_map, "Maps don't match..."
  end

  def test_source_name_and_version
    assert_equal "CruiseControl", @unit.data_source_name
    assert_equal "2.7.1", @unit.data_source_version
  end

  # Every data_map key must be answerable as a message on the unit.
  def test_accessors_for_all_data_map_values
    valid_send = 0
    @unit.data_map.keys.each do |accessor|
      # Bug fix: the failure message previously named "SimpleHttpSample"
      # (copy-paste from another test), which misled anyone reading failures.
      assert_nil @unit.send(accessor, nil), \
        "WebCCSample does not respond to message '#{accessor}'"
      valid_send += 1
    end
    assert_equal valid_send, @unit.data_map.size, "Should respond to each data_map entry"
  end
end
|
import React from 'react';
import './styles/style_index.css';
import './styles/modal.css';
import 'bootstrap/dist/css/bootstrap.css';
import 'bootstrap/dist/js/bootstrap.bundle.min.js';
import img1 from '../../assets/images/5.jpg';
import img2 from '../../assets/images/11.jpg';
import img3 from '../../assets/images/14.jpg';
import img4 from '../../assets/images/15.jpg';
import menu from '../../assets/images/menu@2x.png';
import plato from '../../assets/images/plato_banner.png';
const seccion_1 = () => {
function mostrarModal(){
var modal = document.getElementById("myModal");
// Get the image and insert it inside the modal - use its "alt" text as a caption
var img = document.getElementById("myImg");
var modalImg = document.getElementById("img01");
var captionText = document.getElementById("caption");
img.onclick = function () {
modal.style.display = "block";
modalImg.src = this.src;
captionText.innerHTML = this.alt;
}
// Get the <span> element that closes the modal
var span = document.getElementsByClassName("close")[0];
// When the user clicks on <span> (x), close the modal
span.onclick = function () {
modal.style.display = "none";
}
}
return (
<>
<div className="py-4">
<div className="card bg-dark text-white">
<img src={plato} className="card-img" alt="..." />
<div className="card-img-overlay">
<p className="card-text fs-1 fw-bolder">¡Saborea el mundo, conoce su historia y tradiciones, con nuestros platillos!</p>
</div>
</div>
</div>
<div className="container py-4">
<div className="row">
<div className="col-md-6">
<div className="row">
<h1 className="fw-light text-center text-lg-start mt-4 mb-0">Recomendación del chef</h1>
<div className="col-6 py-2">
<div className="d-block h-100 mb-4">
<img className="img-fluid" src={img1} alt="" />
{/*<p><small>Plato de prueba.</small></p>*/}
</div>
</div>
<div className="col-6 py-2">
<div className="d-block h-100 mb-4">
<img className="img-fluid" src={img3} alt="" />
</div>
</div>
<div className="col-6">
<div className="d-block h-100 mb-4">
<img className="img-fluid" src={img4} alt="" />
</div>
</div>
<div className="col-6">
<div className="d-block h-100 mb-4">
<img className="img-fluid" src={img2} alt="" />
</div>
</div>
</div>
</div>
<div className="col-md-6">
<div className="row">
{/*<h1 className="fw-light text-center text-lg-start mt-4 mb-0"><br /></h1>*/}
<div className="col-md-12 container-img">
<div className="d-block h-100 mb-4">
{/*<img className="img-fluid" src={menu} alt="" style={{width:'100%'}}/>*/}
<img className="img-fluid" id="myImg" alt="" src={menu} style={{ width: '530px'}} onClick={mostrarModal} />
</div>
<div id="myModal" className="modal">
<span className="close">×</span>
<img className="modal-content" id="img01" alt="" />
<div id="caption"></div>
</div>
</div>
</div>
<div className="row">
<div className="text-center">
{/* <a href="" className="btn btn-primary my-2" download="prueba.pdf" target="_blank">Ver la Carta</a> */}
</div>
</div>
</div>
</div>
</div>
</>
)
}
export default seccion_1
|
// Find the largest element by folding instead of spreading into Math.max.
let arr = [2, 4, 7, 5];
let max = arr.reduce((best, value) => (value > best ? value : best));
console.log(max);
#!/bin/bash
# Publish the generated Javadoc to the gh-pages branch.
# Only runs for non-PR Travis builds of the canonical repo, on master,
# with the oraclejdk7 build (avoids publishing once per matrix entry).
#
# Fixes: quote all $HOME-derived paths, and guard both `cd` calls so a
# failed directory change cannot make the git commands run in the wrong
# working directory.
if [ "$TRAVIS_REPO_SLUG" == "icoretech/audiobox-jlib" ] && [ "$TRAVIS_JDK_VERSION" == "oraclejdk7" ] && [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_BRANCH" == "master" ]; then
  echo -e "Publishing javadoc...\n"
  cp -R build/docs/apidocs "$HOME/javadoc-latest"
  cd "$HOME" || exit 1
  git config --global user.email "travis@travis-ci.org"
  git config --global user.name "travis-ci"
  # GH_TOKEN is a secret Travis env var; clone output is silenced so the
  # token never appears in the build log.
  git clone --quiet --branch=gh-pages "https://${GH_TOKEN}@github.com/icoretech/audiobox-jlib" gh-pages > /dev/null
  cd gh-pages || exit 1
  git rm -rf ./apidocs
  cp -Rf "$HOME/javadoc-latest" ./apidocs
  git add -f .
  git commit -m "Lastest javadoc on successful travis build $TRAVIS_BUILD_NUMBER auto-pushed to gh-pages"
  git push -fq origin gh-pages > /dev/null
  echo -e "Published Javadoc to gh-pages.\n"
fi
#!/bin/sh
# vim:sw=2:ts=2:sts=2:et
# Auto-tunes nginx's worker_processes from the CPUs actually available to
# this container (online CPUs, cgroup v1/v2 cpusets and CFS quotas).
set -eu
# NOTE(review): LC_ALL is assigned but not exported here — it only affects
# the shell itself, not child processes such as sort; confirm intent.
LC_ALL=C
ME=$( basename "$0" )
PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
# Opt-in only: do nothing unless the autotune env var is set (non-empty).
[ "${NGINX_ENTRYPOINT_WORKER_PROCESSES_AUTOTUNE:-}" ] || exit 0
# Bail out gracefully (exit 0) when the config file cannot be modified.
touch /etc/nginx/nginx.conf 2>/dev/null || { echo >&2 "$ME: error: can not modify /etc/nginx/nginx.conf (read-only file system?)"; exit 0; }
# Integer ceiling of $1 / $2, via the classic (num + div - 1) / div trick.
ceildiv() {
  printf '%s\n' "$(( ($1 + $2 - 1) / $2 ))"
}
# Count the CPUs listed in a cpuset file ($1 = directory, $2 = file name).
# The file contains comma-separated entries, each either a single CPU index
# ("7") or an inclusive range ("0-3"). Prints nothing (and returns non-zero)
# when the file does not exist, so callers can fall back to the online count.
#
# Improvement: ranges are counted arithmetically (hi - lo + 1) instead of
# forking `seq | wc -l` per token — same results, no subprocesses.
get_cpuset() {
  cpusetroot=$1
  cpusetfile=$2
  ncpu=0
  [ -f "$cpusetroot/$cpusetfile" ] || return
  for token in $( tr ',' ' ' < "$cpusetroot/$cpusetfile" ); do
    case "$token" in
      *-*)
        # Range "lo-hi" contributes hi - lo + 1 CPUs.
        lo=${token%-*}
        hi=${token#*-}
        ncpu=$(( ncpu + hi - lo + 1 ))
        ;;
      *)
        # Single CPU index.
        ncpu=$(( ncpu+1 ))
        ;;
    esac
  done
  echo "$ncpu"
}
# Derive a CPU count from the cgroup v1 CFS quota under $1.
# Prints nothing when no usable quota exists. Return statuses mirror the
# original: missing files -> 1, unlimited/zero-period -> 0, bad count -> 1.
get_quota() {
  cpuroot=$1
  ncpu=0
  if [ ! -f "$cpuroot/cpu.cfs_quota_us" ]; then return 1; fi
  if [ ! -f "$cpuroot/cpu.cfs_period_us" ]; then return 1; fi
  cfs_quota=$( cat "$cpuroot/cpu.cfs_quota_us" )
  cfs_period=$( cat "$cpuroot/cpu.cfs_period_us" )
  # "-1" means the cgroup is unlimited; a zero period cannot be divided by.
  if [ "$cfs_quota" = "-1" ]; then return 0; fi
  if [ "$cfs_period" = "0" ]; then return 0; fi
  ncpu=$( ceildiv "$cfs_quota" "$cfs_period" )
  if [ "$ncpu" -gt 0 ]; then
    echo "$ncpu"
  else
    return 1
  fi
}
# Derive a CPU count from the cgroup v2 quota under $1.
# cgroup v2 exposes "<quota> <period>" on a single line in cpu.max.
# Return statuses mirror the original: missing file -> 1,
# unlimited ("max") / zero period -> 0, bad count -> 1.
get_quota_v2() {
  cpuroot=$1
  ncpu=0
  if [ ! -f "$cpuroot/cpu.max" ]; then return 1; fi
  cfs_quota=$( cut -d' ' -f 1 < "$cpuroot/cpu.max" )
  cfs_period=$( cut -d' ' -f 2 < "$cpuroot/cpu.max" )
  if [ "$cfs_quota" = "max" ]; then return 0; fi
  if [ "$cfs_period" = "0" ]; then return 0; fi
  ncpu=$( ceildiv "$cfs_quota" "$cfs_period" )
  if [ "$ncpu" -gt 0 ]; then
    echo "$ncpu"
  else
    return 1
  fi
}
# Print the directory of the cgroup v1 hierarchy for controller $1
# ("cpuset" or "cpu"), by reconciling the controller's mount entry from
# /proc/self/mountinfo with this process's cgroup path from /proc/self/cgroup.
# Prints an empty string when the controller is not mounted.
get_cgroup_v1_path() {
  needle=$1
  found=
  foundroot=
  mountpoint=
  [ -r "/proc/self/mountinfo" ] || return
  [ -r "/proc/self/cgroup" ] || return
  # Pass 1: find the cgroup mount for the requested controller.
  # mountinfo fields 4 and 5 are the filesystem root and the mount point;
  # "found" ends up holding "<fs-root> <mount-point>".
  while IFS= read -r line; do
    case "$needle" in
      "cpuset")
        case "$line" in
          *cpuset*)
            found=$( echo "$line" | cut -d ' ' -f 4,5 )
            ;;
        esac
        ;;
      "cpu")
        case "$line" in
          *cpuset*)
            ;;
          *cpu,cpuacct*|*cpuacct,cpu|*cpuacct*|*cpu*)
            found=$( echo "$line" | cut -d ' ' -f 4,5 )
            ;;
        esac
    esac
  done << __EOF__
$( grep -F -- '- cgroup ' /proc/self/mountinfo )
__EOF__
  # Pass 2: find this process's relative cgroup path for the same controller
  # from /proc/self/cgroup ("<id>:<controller>:<path>").
  while IFS= read -r line; do
    controller=$( echo "$line" | cut -d: -f 2 )
    case "$needle" in
      "cpuset")
        case "$controller" in
          cpuset)
            mountpoint=$( echo "$line" | cut -d: -f 3 )
            ;;
        esac
        ;;
      "cpu")
        case "$controller" in
          cpu,cpuacct|cpuacct,cpu|cpuacct|cpu)
            mountpoint=$( echo "$line" | cut -d: -f 3 )
            ;;
        esac
        ;;
    esac
  done << __EOF__
$( grep -F -- 'cpu' /proc/self/cgroup )
__EOF__
  # Combine: if the mount's fs-root is "/", append the process's cgroup path
  # to the mount point; if the fs-root already equals that path, the mount
  # point alone is the directory. Otherwise foundroot stays empty.
  case "${found%% *}" in
    "/")
      foundroot="${found##* }$mountpoint"
      ;;
    "$mountpoint")
      foundroot="${found##* }"
      ;;
  esac
  echo "$foundroot"
}
# Print the directory of the unified cgroup v2 hierarchy for this process,
# combining the cgroup2 mount from /proc/self/mountinfo with the "0::<path>"
# entry from /proc/self/cgroup. Prints nothing when cgroup v2 is not mounted.
get_cgroup_v2_path() {
  found=
  foundroot=
  mountpoint=
  [ -r "/proc/self/mountinfo" ] || return
  [ -r "/proc/self/cgroup" ] || return
  # "found" holds "<fs-root> <mount-point>" of the cgroup2 mount.
  while IFS= read -r line; do
    found=$( echo "$line" | cut -d ' ' -f 4,5 )
  done << __EOF__
$( grep -F -- '- cgroup2 ' /proc/self/mountinfo )
__EOF__
  # "mountpoint" holds this process's cgroup path from the 0:: entry.
  while IFS= read -r line; do
    mountpoint=$( echo "$line" | cut -d: -f 3 )
  done << __EOF__
$( grep -F -- '0::' /proc/self/cgroup )
__EOF__
  # Same reconciliation as the v1 helper: append the process path to the
  # mount point when the mount's fs-root is "/".
  case "${found%% *}" in
    "")
      return
      ;;
    "/")
      foundroot="${found##* }$mountpoint"
      ;;
    "$mountpoint")
      foundroot="${found##* }"
      ;;
  esac
  echo "$foundroot"
}
# Gather candidate CPU counts: online CPUs, cgroup v1 cpuset/quota, and
# cgroup v2 cpuset/quota. Any limit that cannot be determined falls back to
# the online-CPU count so it never wins the minimum below.
ncpu_online=$( getconf _NPROCESSORS_ONLN )
ncpu_cpuset=
ncpu_quota=
ncpu_cpuset_v2=
ncpu_quota_v2=
cpuset=$( get_cgroup_v1_path "cpuset" )
[ "$cpuset" ] && ncpu_cpuset=$( get_cpuset "$cpuset" "cpuset.effective_cpus" )
[ "$ncpu_cpuset" ] || ncpu_cpuset=$ncpu_online
cpu=$( get_cgroup_v1_path "cpu" )
[ "$cpu" ] && ncpu_quota=$( get_quota "$cpu" )
[ "$ncpu_quota" ] || ncpu_quota=$ncpu_online
cgroup_v2=$( get_cgroup_v2_path )
[ "$cgroup_v2" ] && ncpu_cpuset_v2=$( get_cpuset "$cgroup_v2" "cpuset.cpus.effective" )
[ "$ncpu_cpuset_v2" ] || ncpu_cpuset_v2=$ncpu_online
[ "$cgroup_v2" ] && ncpu_quota_v2=$( get_quota_v2 "$cgroup_v2" )
[ "$ncpu_quota_v2" ] || ncpu_quota_v2=$ncpu_online
# The effective worker count is the most restrictive (smallest) candidate.
ncpu=$( printf "%s\n%s\n%s\n%s\n%s\n" \
        "$ncpu_online" \
        "$ncpu_cpuset" \
        "$ncpu_quota" \
        "$ncpu_cpuset_v2" \
        "$ncpu_quota_v2" \
        | sort -n \
        | head -n 1 )
# Comment out the existing worker_processes directive (with an audit trail of
# who changed it and when) and write the tuned value; .bak keeps a backup.
sed -i.bak -r 's/^(worker_processes)(.*)$/# Commented out by '"$ME"' on '"$(date)"'\n#\1\2\n\1 '"$ncpu"';/' /etc/nginx/nginx.conf
|
# Ask psiTurk to shut its experiment server down, then end this shell session.
psiturk -e "server off"
exit
|
#!/usr/bin/env bash
# CI build driver: dispatches on $BUILDER (make/cmake/bazel), $LANGUAGE and
# $TRAVIS_OS_NAME. Trace every command and abort on the first failure.
set -x
set -e
# Print the version of every tool the current BUILDER/LANGUAGE combination
# needs; under `set -e` this doubles as a fail-fast check that they exist.
function checkenv() {
  if [ "${BUILDER}" == make ];then
    make --version
  fi
  cmake --version
  # swig is needed for all wrapped languages, and always for cmake builds.
  if [ "${BUILDER}" == cmake ] || [ "${LANGUAGE}" != cc ]; then
    swig -version
  fi
  if [ "${BUILDER}" == cmake ] || [ "${LANGUAGE}" == python3 ];then
    python3.7 --version
    python3.7 -m pip --version
  elif [ "${LANGUAGE}" == python2 ]; then
    python2.7 --version
    python2.7 -m pip --version
  elif [ "${LANGUAGE}" == java ]; then
    java -version
  elif [ "${LANGUAGE}" == dotnet ]; then
    dotnet --info
  fi
}
################
## MAKEFILE ##
################
# Make-based build: detect the toolchain, build third-party deps, then build
# and test the requested language. The linux and osx branches differ only in
# PATH setup and the detect step. travis_fold markers group the log output.
if [ "${BUILDER}" == make ];then
  if [ "${TRAVIS_OS_NAME}" == linux ];then
    echo 'travis_fold:start:env'
    # Wrapped languages need the SWIG built earlier in $HOME/swig.
    if [ "${LANGUAGE}" != cc ]; then
      export PATH="${HOME}"/swig/bin:"${PATH}"
    fi
    checkenv
    # `make detect` writes Makefile.local with per-language settings.
    if [ "${LANGUAGE}" == cc ]; then
      make detect
    elif [ "${LANGUAGE}" == python2 ]; then
      make detect UNIX_PYTHON_VER=2.7
    elif [ "${LANGUAGE}" == python3 ]; then
      make detect UNIX_PYTHON_VER=3.7
    elif [ "${LANGUAGE}" == java ]; then
      make detect JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
    elif [ "${LANGUAGE}" == dotnet ] ; then
      make detect
    fi
    cat Makefile.local
    echo 'travis_fold:end:env'
    echo 'travis_fold:start:third_party'
    make third_party --jobs=4
    echo 'travis_fold:end:third_party'
    if [ "${LANGUAGE}" == python2 ] || [ "${LANGUAGE}" == python3 ]; then
      echo 'travis_fold:start:python'
      make python --jobs=4
      echo 'travis_fold:end:python'
      echo 'travis_fold:start:test_python'
      make test_python --jobs=4
      echo 'travis_fold:end:test_python'
    elif [ "${LANGUAGE}" == java ]; then
      echo 'travis_fold:start:java'
      make java --jobs=4
      echo 'travis_fold:end:java'
      echo 'travis_fold:start:test_java'
      # Java tests are kept single-job here (unlike the others).
      make test_java --jobs=1
      echo 'travis_fold:end:test_java'
    else
      echo "travis_fold:start:${LANGUAGE}"
      make "${LANGUAGE}" --jobs=4
      echo "travis_fold:end:${LANGUAGE}"
      echo "travis_fold:start:test_${LANGUAGE}"
      make test_"${LANGUAGE}" --jobs=4
      echo "travis_fold:end:test_${LANGUAGE}"
    fi
    # C++ builds additionally run the FlatZinc test suite.
    if [ "${LANGUAGE}" == cc ]; then
      echo "travis_fold:start:flatzinc"
      make test_fz --jobs=2
      echo "travis_fold:end:flatzinc"
    fi
  elif [ "${TRAVIS_OS_NAME}" == osx ];then
    echo 'travis_fold:start:env'
    export PATH="/usr/local/opt/ccache/libexec:$PATH"
    if [ "${LANGUAGE}" == dotnet ]; then
      # Installer changes path but won't be picked up in current terminal session
      # Need to explicitly add location
      export PATH=/usr/local/share/dotnet:"${PATH}"
    fi
    checkenv
    if [ "${LANGUAGE}" == cc ]; then
      make detect
    elif [ "${LANGUAGE}" == python2 ]; then
      make detect UNIX_PYTHON_VER=2.7
    elif [ "${LANGUAGE}" == python3 ]; then
      make detect UNIX_PYTHON_VER=3.7
    elif [ "${LANGUAGE}" == java ] || [ "${LANGUAGE}" == dotnet ] ; then
      make detect
    fi
    cat Makefile.local
    echo 'travis_fold:end:env'
    echo 'travis_fold:start:third_party'
    make third_party --jobs=4
    echo 'travis_fold:end:third_party'
    if [ "${LANGUAGE}" == python2 ] || [ "${LANGUAGE}" == python3 ]; then
      echo 'travis_fold:start:python'
      make python --jobs=4
      echo 'travis_fold:end:python'
      echo 'travis_fold:start:test_python'
      make test_python --jobs=4
      echo 'travis_fold:end:test_python'
    elif [ "${LANGUAGE}" == java ]; then
      echo 'travis_fold:start:java'
      make java --jobs=4
      echo 'travis_fold:end:java'
      echo 'travis_fold:start:test_java'
      make test_java --jobs=1
      echo 'travis_fold:end:test_java'
    else
      echo "travis_fold:start:${LANGUAGE}"
      make "${LANGUAGE}" --jobs=4
      echo "travis_fold:end:${LANGUAGE}"
      echo "travis_fold:start:test_${LANGUAGE}"
      make test_"${LANGUAGE}" --jobs=4
      echo "travis_fold:end:test_${LANGUAGE}"
    fi
    if [ "${LANGUAGE}" == cc ]; then
      echo "travis_fold:start:flatzinc"
      make test_fz --jobs=2
      echo "travis_fold:end:flatzinc"
    fi
  fi
fi
#############
## CMAKE ##
#############
# CMake-based build: configure with bundled deps, build everything, run ctest.
if [ "${BUILDER}" == cmake ];then
  export CMAKE_BUILD_PARALLEL_LEVEL=4
  if [ "${TRAVIS_OS_NAME}" == linux ];then
    echo 'travis_fold:start:env'
    # Add clang support in ccache
    if [[ "${CC}" == "clang" ]]; then
      sudo ln -s ../../bin/ccache /usr/lib/ccache/clang
      export CFLAGS="-Qunused-arguments $CFLAGS"
    fi
    if [[ "${CXX}" == "clang++" ]]; then
      sudo ln -s ../../bin/ccache /usr/lib/ccache/clang++
      export CXXFLAGS="-Qunused-arguments $CXXFLAGS"
    fi
    export PATH="${HOME}/swig/bin:${PATH}"
    pyenv global system 3.7
    checkenv
    echo 'travis_fold:end:env'
  elif [ "${TRAVIS_OS_NAME}" == osx ];then
    echo 'travis_fold:start:env'
    export PATH="/usr/local/opt/ccache/libexec:$PATH"
    checkenv
    echo 'travis_fold:end:env'
  fi
  echo 'travis_fold:start:configure'
  # -DBUILD_DEPS=ON builds bundled third-party dependencies from source.
  cmake -H. -Bbuild -DBUILD_DEPS:BOOL=ON
  echo 'travis_fold:end:configure'
  echo 'travis_fold:start:build'
  cmake --build build --target all
  echo 'travis_fold:end:build'
  echo 'travis_fold:start:test'
  # CTEST_OUTPUT_ON_FAILURE is forwarded to the underlying make/ctest run.
  cmake --build build --target test -- CTEST_OUTPUT_ON_FAILURE=1
  echo 'travis_fold:end:test'
fi
#############
## BAZEL ##
#############
# Bazel-based build: compile and test all targets in the workspace.
if [ "${BUILDER}" == bazel ]; then
  echo 'travis_fold:start:build'
  bazel build --curses=no --copt='-Wno-sign-compare' //...:all
  echo 'travis_fold:end:build'
  echo 'travis_fold:start:test'
  bazel test -c opt --curses=no --copt='-Wno-sign-compare' //...:all
  echo 'travis_fold:end:test'
fi
|
#!/bin/bash
# Refresh the players' rating scores every five minutes.
clear
while :; do
    /usr/bin/python3 -m tracking.query.opgg
    sleep 300
done
|
<gh_stars>0
package com.nostalgia.resource;
import com.nostalgia.*;
import java.awt.image.RenderedImage;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.security.GeneralSecurityException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import javax.imageio.ImageIO;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.codec.binary.Hex;
import org.geojson.Point;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.annotation.Timed;
import com.couchbase.client.java.document.JsonDocument;
import com.google.api.client.auth.openidconnect.IdToken.Payload;
import com.google.api.client.googleapis.auth.oauth2.GoogleIdToken;
import com.google.api.client.googleapis.auth.oauth2.GoogleIdTokenVerifier;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.nostalgia.ImageDownloaderBase64;
import com.nostalgia.LocationRepository;
import com.nostalgia.UserRepository;
import com.nostalgia.VideoRepository;
import com.nostalgia.client.SynchClient;
import com.nostalgia.persistence.model.*;
import facebook4j.Facebook;
import facebook4j.FacebookException;
import facebook4j.FacebookFactory;
import facebook4j.Reading;
import facebook4j.conf.Configuration;
import facebook4j.conf.ConfigurationBuilder;
@Path("/api/v0/user/subscribe")
public class SubscriptionResource {
@Context HttpServletResponse resp;
private static final Logger logger = LoggerFactory.getLogger(SubscriptionResource.class);
private final UserRepository userRepo;
private final LocationRepository locRepo;
private final SynchClient sync;
private final MediaCollectionRepository collRepo;
public User subscribeToLocation(User wantsSubscription, KnownLocation toSubscribeTo) throws Exception{
wantsSubscription.subscribeToLocation(toSubscribeTo.get_id());
sync.setSyncChannels(wantsSubscription);
userRepo.save(wantsSubscription);
return wantsSubscription;
}
public User unsubscribeFromLocation(User wantsSubscription, String idToRemove) throws Exception{
wantsSubscription.unsubscribeFromLocation(idToRemove);
sync.setSyncChannels(wantsSubscription);
userRepo.save(wantsSubscription);
return wantsSubscription;
}
public User subscribeToCollection(User wantsSubscription, MediaCollection toSubscribeTo) throws Exception{
wantsSubscription.addCollection(toSubscribeTo);
sync.setSyncChannels(wantsSubscription);
userRepo.save(wantsSubscription);
return wantsSubscription;
}
public User unsubscribeFromCollection(User wantsSubscription,MediaCollection toRemove) throws Exception{
wantsSubscription.removeCollection(toRemove);
sync.setSyncChannels(wantsSubscription);
userRepo.save(wantsSubscription);
return wantsSubscription;
}
public SubscriptionResource( UserRepository userRepo, LocationRepository locRepo, SynchClient sync, MediaCollectionRepository collRepo) {
this.userRepo = userRepo;
this.locRepo = locRepo;
this.sync = sync;
this.collRepo = collRepo;
//this.sManager = manager;
}
@SuppressWarnings("unused")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/remove")
@Timed
public String removeLocation(@QueryParam("userId") String userId, @QueryParam("id") String id, @Context HttpServletRequest req) throws Exception{
if(id== null){
throw new BadRequestException("no id specified to add");
}
if(userId == null){
throw new BadRequestException("user id required");
}
User adding = userRepo.findOneById(userId);
if(adding == null){
throw new NotFoundException("no user found for id");
}
KnownLocation loc = locRepo.findOneById(id);
MediaCollection coll = null;
if(loc == null){
coll = collRepo.findOneById(id);
}
if(loc != null){
User subscribed = unsubscribeFromLocation(adding, loc.get_id());
return id;
}
if(coll != null){
if(coll.getName().contains("_linked")){
throw new BadRequestException("not allowed to sub to linked colls directly");
}
User subscribed = unsubscribeFromCollection(adding, coll);
return id;
}
return null;
}
@SuppressWarnings("unused")
@POST
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Path("/add")
@Timed
public String newLocation(@QueryParam("userId") String userId, @QueryParam("id") String id,@Context HttpServletRequest req) throws Exception{
if(id== null){
throw new BadRequestException("no id specified to add");
}
if(userId == null){
throw new BadRequestException("user id required");
}
User adding = userRepo.findOneById(userId);
if(adding == null){
throw new NotFoundException("no user found for id");
}
KnownLocation loc = locRepo.findOneById(id);
MediaCollection coll = null;
if(loc == null){
coll = collRepo.findOneById(id);
}
if(loc == null && coll == null){
throw new NotFoundException("no resource found for id: " + id);
}
if(loc != null){
User subscribed = subscribeToLocation(adding, loc);
return id;
}
if(coll != null){
User subscribed = subscribeToCollection(adding, coll);
return id;
}
return null;
}
}
|
package xyz.zkyq.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import xyz.zkyq.entity.User;
/**
 * Spring Data JPA repository for {@code User} entities; CRUD methods are
 * inherited from {@code JpaRepository<entity type, primary-key type>}.
 *
 * @author zkyq
 * @date 1/5/20
 */
public interface UserRepository extends JpaRepository<User, Integer> {
}
|
def mergeSort(arr):
    """Sort ``arr`` in place using recursive top-down merge sort."""
    if len(arr) <= 1:
        return
    mid = len(arr) // 2
    left, right = arr[:mid], arr[mid:]
    mergeSort(left)
    mergeSort(right)
    # Merge the two sorted halves back into arr in a single pass:
    # take from the left half while it holds the strictly smaller element,
    # otherwise (including ties) take from the right half.
    i = j = 0
    for k in range(len(arr)):
        if j >= len(right) or (i < len(left) and left[i] < right[j]):
            arr[k] = left[i]
            i += 1
        else:
            arr[k] = right[j]
            j += 1
package com.mcgrady.xtitlebar.interf;
import android.view.View;
import com.mcgrady.xtitlebar.TitleBarClickAction;
/**
 * <p>Callback interface for double-click events on the title bar.</p>
 *
 * @author: mcgrady
 * @date: 2019/1/25
 */
public interface OnTitleBarDoubleClickListener {
    // view: the widget that was double-clicked; action: which title-bar
    // element triggered the event.
    void onDoubleClick(View view, TitleBarClickAction action);
}
|
import { browser, by, element, promise } from 'protractor';
/** Protractor page object for the application's landing page. */
export class AppPage {
  /** Base URL of the locally served application. */
  private readonly homeUrl = 'http://localhost:9000';

  /** Navigate the browser to the application home page. */
  navigateToHome(): promise.Promise<any> {
    return browser.get(this.homeUrl);
  }

  /** The URL currently loaded in the browser. */
  getCurrentUrl() {
    return browser.getCurrentUrl();
  }

  /** The document title of the current page. */
  getTitle(): promise.Promise<string> {
    return browser.getTitle();
  }

  /** The "enter" button shown on the landing page. */
  getLandingPageButton() {
    return element(by.className('btn btn-lg landing-btn-enter'));
  }

  /** The navigation link that is currently marked active. */
  getActiveLink() {
    return element(by.className('nav-link py-1 active'));
  }
}
|
#pragma once
#include <flowi_core/image.h>
#include <flowi_core/render_commands.h>
#include "types.h"
struct Atlas;
struct FlContext;
struct NSVGimage;
struct NSVGrasterizer;
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Internal per-image state shared between loading, SVG rasterization and
// atlas placement.
typedef struct ImagePrivate {
    u64 handle;
    u8* data;                          // raw pixel data — presumably owned by this struct; confirm
    struct NSVGimage* svg_image;       // parsed SVG, when the source is an SVG
    struct NSVGrasterizer* svg_raster; // rasterizer used to render svg_image
    FlString name;
    u32 texture_id;
    int atlas_x;                       // top-left placement inside the atlas
    int atlas_y;
    FlImageInfo info;
    FlTextureFormat format;
} ImagePrivate;
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Places the image referenced by the command buffer `cmd` into `atlas`;
// returns false on failure.
bool Image_add_to_atlas(const u8* cmd, struct Atlas* atlas);
// Renders the image referenced by `cmd` into the context `ctx`.
bool Image_render(struct FlContext* ctx, const u8* cmd);
<gh_stars>0
from django.db import models
# Create your models here.
class Employee(models.Model):
    """Django ORM model describing one employee record."""
    eno=models.IntegerField()  # employee number
    ename=models.CharField(max_length=100)  # employee name
    esal=models.FloatField()  # salary
    eaddr=models.CharField(max_length=300)  # address
// Public header for a printf(3) reimplementation (42-school ft_printf).
#ifndef FT_PRINTF_H
# define FT_PRINTF_H
# include <stdio.h>
# include <unistd.h>
# include <stdarg.h> //for such as va_start
# include <stdlib.h> //for free(), malloc
# include <limits.h> // for INT_MAX
//# include <stdint.h>
# include "libft.h"
# define ON 1
# define OFF 0
// Per-conversion parse/output state: the buffered output plus the flags,
// width and precision parsed from one %-specifier.
typedef struct list
{
	char	*buffer;
	size_t	done;
	int		status;
	char	type;
	int		len; //length of output
	int		flag_left_align;
	int		flag_zero_padding;
	int		flag_notation;
	int		flag_sign;
	int		flag_space;
	int		width;
	int		precision;
}	t_analyze;
// Byte counts of the pieces making up one formatted field.
typedef struct
{
	size_t	padding;
	size_t	prefix;
	size_t	body;
	size_t	precision;
	size_t	sum;
}	t_length;
// Entry point: formats like printf(3) and returns the number of bytes written.
int		ft_printf(const char *fmt, ...);
//int	ft_printf(const char *input, ...);
// Format parsing and length computation helpers.
int		ft_analyze_fmt(const char *fmt, va_list *args, t_analyze *analyze);
int		ft_formatlen(const char *fmt, va_list *args, t_analyze *analyze);
// Per-type emitters: each renders one conversion into the analyze buffer.
void	ft_write_char(char c, t_analyze *analyze);
void	ft_print_char(int c, t_analyze *analyze);
void	ft_print_string(char *string, t_analyze *analyze);
void	ft_print_address(uintptr_t dec, t_analyze *analyze);
void	ft_print_integer(int num, t_analyze *analyze);
void	ft_print_unsigned(unsigned int num, t_analyze *analyze);
void	ft_print_hex(unsigned int dec, t_analyze *analyze);
void	ft_print_double(double num, t_analyze *analyze);
void	ft_print_percent(t_analyze *analyze);
// Buffer/padding utilities shared by the emitters.
void	ft_push_to_analyze(char *str, int len, t_analyze *analyze);
void	ft_push_char_to_analyze(char ch, t_analyze *analyze);
void	ft_zero_padding(int size, t_analyze *analyze);
void	ft_space_padding(int size, t_analyze *analyze);
int		ft_push(t_analyze *analyze, char *prefix, char *str);
//int8_t ft_add_prefix(t_format *format, int sign);
//void ft_init_format(t_format *format);
#endif
#!/bin/bash
# Run the MPI sample programs over TCP (eth0) with 4 ranks each.
module load libraries/openmpi-2.0.1-gcc-5.4.0
module load mpi/openmpi-x86_64

# run_mpi <banner> <executable> — announce and launch one benchmark.
# Factors out the echo+mpirun pair that was repeated four times.
run_mpi() {
  echo "$1"
  mpirun --mca btl_tcp_if_include eth0 -np 4 "./$2"
}

run_mpi "Rulare Bandwidth Timing Test" mpi_bandwidth
run_mpi "Rulare Round Trip Latency Timing Test" mpi_latency
run_mpi "Rulare Matrix Multiply" mpi_mm
run_mpi "Rulare pi Calculation" mpi_pi_send
package org.quark.microapidemo.config;
/**
 * Global configuration constants.
 */
public class GlobalConfig {
    /**
     * Web-layer constants.
     * Fix: made static (like {@link RateLimiter}) — these nested classes hold
     * only constants and never need a reference to an enclosing instance.
     */
    public static class WebConfig {
        public static final double REDIS_SUBSCRIBER_EXPIRE_HOUR = 0.50;
        public static final String PASSWORD_NOSECURITY = "NONE";
        public static final String DEFAULT_ROLE = "DEFAULT";
        public static final String HEADER_AUTHORIZE = "Approve";
        public static final String HEADER_REFRESHTOKEN = "RefreshToken";
        public static final String CLAIMS_USER_KEY = "UserId";
        public static final String CLAIMS_ROLE_KEY = "Role";
        // No heartbeat for 2 minutes is reported as abnormal on the dashboard.
        public static final int BAD_HEARTBEAT_DASHBOARD_SHOWRULE = 60 * 2;
    }

    /** JWT issuance constants (now static, see WebConfig). */
    public static class JWTConfig {
        public static final String JWTID = "SpringDocker Access Token";
        public static final String JWTREFRESHID = "SpringDocker Refresh Token";
        public static final String SUBJECT = "Online JWT Builder";
        public static final String IISUSER = "StelyLan";
        public static final long TTLMILLIS = 1000 * 30;          // access token TTL: 30s
        public static final long REFRESHTTLMILLIS = 1000 * 60 * 20; // refresh token TTL: 20min
    }

    /** Rate limiter key prefixes. */
    public static class RateLimiter {
        public static final String TOKEN_BUCKET_IDENTITIEFER = "TOKEN_BUCKET_";
    }
}
|
#!/usr/bin/env bash
# Default Raspberry Pi account credentials to be replaced.
OLD_USER_Name="pi"
OLD_PASSWD_Name="raspberry"
# Replacement account credentials.
# NOTE(review): plaintext passwords committed to source control — consider
# reading these from the environment or a secrets file instead.
USER_Name="chell"
PASSWD_Name="portal"
package com.cgfy.user.base.bean;
import lombok.Data;
import lombok.ToString;
import java.text.SimpleDateFormat;
import java.util.Date;
// Workflow-comment DTO. Lombok's @Data generates getters/setters for every
// field, but the explicit getCreated/setCreated/hashCode/equals below take
// precedence over the generated versions.
@Data
@ToString
public class Comment {
    private String id;
    private String content;
    private String noBtnOpinion;
    private String action;
    private Integer actionId;
    private String creatorId;
    private String creatorName;
    private String organId;
    private String organName;
    private String created;   // creation timestamp, "yyyy-MM-dd HH:mm:ss"
    private String node;
    private String formId;
    private String uid;
    private String uname;
    private String oname;
    private String keyId;
    private String agentUid;
    private String agentName;
    private String time;
    public Comment() {
        // Default "created" to the current timestamp when not set.
        if (this.created == null){
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            this.created = simpleDateFormat.format(new Date());
        }
    }
    public String getCreated() {
        return created;
    }
    public void setCreated(String created) {
        // NOTE(review): the constructor always initializes this.created, so
        // this null branch appears unreachable unless a deserializer bypasses
        // the constructor — confirm before simplifying. When taken, it
        // IGNORES the passed value and stores "now" instead.
        if (this.created == null){
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            this.created = simpleDateFormat.format(new Date());
        }else{
            this.created = created;
        }
    }
    // Identity is based solely on id (overrides what @Data would generate).
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        return result;
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Comment other = (Comment) obj;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        return true;
    }
}
|
#!/usr/bin/env python
##############################################################################
#
# tk.py -or- tk.pyw
#
# 1. Globally replace "MYAPP" with the symbol name of the new application.
# 2. Update "application info" at the top of MYAPP class.
#
# http://infohost.nmt.edu/tcc/help/pubs/tkinter/web/index.html
# http://effbot.org/tkinterbook/
#
# Todo:
# - tk.Canvas, tk.Listbox, tk.Message, tk.OptionMenu, tk.Spinbox, tk.Text
# - build a simple text editor with file handling
# - implement some parameterized event handlers
# + StatusBar.useWidget() to swap a label for a user's widget
# - ttk.PanedWindow
# - make an hztk.py module that provides an intermediate class that just
# lets you start making a normal, native-looking GUI immediately
#
##############################################################################
import os
try:
import Tkinter as tk
except ImportError:
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
else:
import tkMessageBox as messagebox
import ttk
#=============================================================================
class MYAPP( ttk.Frame ):
    """ Application root frame """
    # application info
    _author = '<NAME> <<EMAIL>>'
    _date = '2013-06-28'
    #_icon = 'tk.ico'
    _title = 'My Application'
    _version = '0.0.0'
    # layout configs
    # grid config for label-style widgets: small padding, stretch horizontally
    _gridcfg_label = {
        'padx' : 4,
        'pady' : 4,
        'sticky' : ( tk.E + tk.W )
    }
    # grid config for widgets that should fill their cell in both directions
    _gridcfg_stick = {
        'sticky' : ( tk.N + tk.S + tk.E + tk.W )
    }
    # grid config adding uniform padding around a widget
    _gridcfg_wpad = {
        'padx' : 8,
        'pady' : 8
    }
    #=========================================================================
    def __init__( self, parent = None ):
        """ Initializes MYAPP instance """
        # call the parent constructor
        ttk.Frame.__init__( self, parent )
        # create a place to keep widget state
        self.wvars = {}
        # set the application-wide default icon
        if os.name == 'nt':
            ipath = os.path.realpath( 'tk.ico' )
        else:
            ipath = os.path.realpath( 'tk.xpm' )
        if os.path.exists( ipath ):
            try:
                self.master.iconbitmap( ipath )
            except tk.TclError:
                # NOTE(review): tk.PhotoImage's first positional argument is
                # the image *name*, not a file path; loading from disk likely
                # needs tk.PhotoImage( file = ipath ) — confirm before use.
                photo = tk.PhotoImage( ipath )
                self.master.call(
                    'wm',
                    'iconphoto',
                    self.master._w,
                    photo
                )
                # old way
                #self.master.call(
                #    'wm',
                #    'iconbitmap',
                #    self.master._w,
                #    '-default',
                #    ipath
                #)
        # set the application title
        self.master.title( MYAPP._title )
        # this sets up the root frame to resize things
        # any descendent widgets will also need to use _gridcfg_stick
        self.grid( **MYAPP._gridcfg_stick )
        top = self.winfo_toplevel()
        top.rowconfigure( 0, weight = 1 )
        top.columnconfigure( 0, weight = 1 )
        self.rowconfigure( 0, weight = 1 )
        self.columnconfigure( 0, weight = 1 )
        # initialize the GUI's widgets
        self._create_widgets()
    #=========================================================================
    def _create_example_1( self, parent ):
        """ Builds a demo group with Hello and Quit buttons """
        group = ttk.LabelFrame(
            parent,
            text = 'Group Name'
        )
        group.grid( **MYAPP._gridcfg_label )
        button = ttk.Button(
            group,
            text = 'Hello',
            command = self._cmd_hello
        )
        button.grid( row = 0, column = 0, **MYAPP._gridcfg_wpad )
        button = ttk.Button(
            group,
            text = 'Quit',
            command = self.quit
        )
        button.grid( row = 0, column = 1, **MYAPP._gridcfg_wpad )
    #=========================================================================
    def _create_example_2( self, parent ):
        """ Builds demo groups showing basic and complex ttk widgets """
        cfg = MYAPP._gridcfg_wpad
        group = ttk.LabelFrame(
            parent,
            text = 'Basic Widgets'
        )
        group.grid( **MYAPP._gridcfg_label )
        # checkbox example
        self.wvars[ 'cbstate' ] = tk.IntVar()
        self.wvars[ 'cbstate' ].set( 0 )
        ttk.Checkbutton(
            group,
            text = 'Checkbutton',
            variable = self.wvars[ 'cbstate' ]
        ).grid( **cfg )
        # entry example
        self.wvars[ 'etext' ] = tk.StringVar()
        self.wvars[ 'etext' ].set( 'Entry' )
        ttk.Entry( group, textvariable = self.wvars[ 'etext' ] ).grid( **cfg )
        # label example
        ttk.Label( group, text = 'Label' ).grid( **cfg )
        # radio group example (all three buttons share one IntVar)
        self.wvars[ 'radio' ] = tk.IntVar()
        ttk.Radiobutton(
            group,
            text = 'Radio 1',
            value = 1,
            variable = self.wvars[ 'radio' ]
        ).grid()
        ttk.Radiobutton(
            group,
            text = 'Radio 2',
            value = 2,
            variable = self.wvars[ 'radio' ]
        ).grid()
        ttk.Radiobutton(
            group,
            text = 'Radio 3',
            value = 3,
            variable = self.wvars[ 'radio' ]
        ).grid()
        group = ttk.LabelFrame(
            parent,
            text = 'Complex Widgets'
        )
        group.grid( **MYAPP._gridcfg_label )
        # combo box example
        cbox = ttk.Combobox(
            group,
            values = ( 'Option 1', 'Option 2', 'Option 3' )
        )
        cbox.set( 'Combobox' )
        cbox.grid( **cfg )
        # progress bar examples
        bar = ttk.Progressbar( group )
        bar.grid( **cfg )
        bar.start()
        # use .stop() to stop, and .step() to manually update
        bar = ttk.Progressbar( group, mode = 'indeterminate' )
        bar.grid( **cfg )
        bar.start()
        # scale example
        ttk.Scale( group ).grid( **cfg )
        # scroll bar example
        ttk.Scrollbar( group, orient = tk.HORIZONTAL ).grid( **cfg )
        # tree view example
        #tv = ttk.Treeview( group ).grid( **cfg )
    #=========================================================================
    def _create_menu( self ):
        """ Builds the File/Help menu bar and attaches it to the toplevel """
        self.menubar = tk.Menu( self )
        menu = tk.Menu( self.menubar, tearoff = 0 )
        self.menubar.add_cascade( label = 'File', menu = menu )
        menu.add_command( label = 'Hello', command = self._cmd_hello )
        menu = tk.Menu( self.menubar, tearoff = 0 )
        self.menubar.add_cascade( label = 'Help', menu = menu )
        menu.add_command( label = 'About', command = self._cmd_about )
        self.master.config( menu = self.menubar )
    #=========================================================================
    def _create_statusbar( self ):
        """ Builds the two-field status bar at the bottom of the frame """
        self.statusBar = StatusBar( self, 2, ( 'Status', 'Status 2' ) )
        self.statusBar.setStatus( 0, 'Status 1' )
    #=========================================================================
    def _create_tabs( self, parent ):
        """ Builds the notebook and its four demo tabs """
        #style = ttk.Style()
        #style.configure( 'Page.TFrame', background = 'white' )
        #style.configure( 'TLabelframe', background = 'white' )
        #style.configure( 'TLabelframe.Label', background = 'white' )
        book = ttk.Notebook( parent )
        book.columnconfigure( 0, weight = 1 )
        tab = ttk.Frame( parent, style = 'Page.TFrame' )
        tab.columnconfigure( 0, weight = 1 )
        self._create_example_1( tab )
        book.add( tab, text = 'Basic Tab' )
        tab = ttk.Frame( parent, style = 'Page.TFrame' )
        tab.columnconfigure( 0, weight = 1 )
        self._create_example_2( tab )
        book.add( tab, text = 'Demo Tab' )
        tab = ttk.Frame( parent, style = 'Page.TFrame' )
        tab.columnconfigure( 0, weight = 1 )
        # Table.set takes ( x = column, y = row )
        t = Table( tab, rows = 8, columns = 4 )
        t.grid()
        t.set( 0, 0, '0,0' )
        t.set( 0, 1, '0,1' )
        t.set( 1, 4, '1,4' )
        t.set( 2, 7, '2,7' )
        t.set( 3, 0, 'a cell with a lot of text' )
        t.set( 3, 1, 'another cell with a lot of text' )
        book.add( tab, text = 'Table Tab' )
        tab = ttk.Frame( parent, style = 'Page.TFrame' )
        tab.columnconfigure( 0, weight = 1 )
        tv = ttk.Treeview( tab, columns = ( 'col1', 'col2' ) )
        tv.xview()
        tv.yview()
        tv.heading( '#0', text = 'Col 0' )
        tv.heading( 'col1', text = 'Col 1' )
        # NOTE(review): these lambdas use `sys`, which is only imported inside
        # the __main__ guard at the bottom of the file; importing this module
        # elsewhere and clicking would raise NameError — consider a top-level
        # import.
        tv.heading( 'col2', text = 'Col 2',
            command = lambda: sys.stdout.write('Sort by col2\n') )
        tv.column( '#0', stretch = False )
        tv.column( 'col1', stretch = True )
        tv.column( 'col2', stretch = False )
        tv.grid( **MYAPP._gridcfg_stick )
        tv.insert( '', 'end', 'r1_key', text = 'Row 1',
            values = ( 'Value 1,1', 'Value 1,2' ),
            tags = ( 'style_tag', 'behavior_tag' ) )
        tv.tag_configure( 'style_tag', background = '#EEEEEE' )
        tv.tag_bind( 'behavior_tag', '<1>',
            lambda e: sys.stdout.write('Clicked %s\n'%str(tv.focus())) )
        r2_id = tv.insert( '', 'end', 'r2_key', text = 'Row 2',
            values = ( 'Value 2,1', 'Value 2,2' ) )
        tv.insert( 'r2_key', 'end', text = 'Row 2 A' )
        tv.insert( r2_id, 'end', text = 'Row 2 B' )
        book.add( tab, text = 'Tree Tab' )
        # reference snippet (unused at runtime) showing how to wire scrollbars
        scrollbar_howto = """
winDirSel = tk.Toplevel()
winDirSel.title('Select Test Directory...')
tvwDirSel = ttk.Treeview(winDirSel,
height=10,padding=3,
show='tree')
lblTestDir = tk.Label(winDirSel, relief=tk.SUNKEN,
justify=tk.LEFT, anchor=tk.W,
textvariable=ctrlTestDir,width=80)
scbHDirSel = ttk.Scrollbar(winDirSel,
orient=tk.HORIZONTAL,
command=tvwDirSel.xview)
scbVDirSel = ttk.Scrollbar(winDirSel,
orient=tk.VERTICAL,
command=tvwDirSel.yview)
tvwDirSel.configure(xscrollcommand=scbHDirSel.set,
yscrollcommand=scbVDirSel.set)
lblTestDir.grid(row=0,column=0,sticky=tk.EW)
tvwDirSel.grid(row=1,column=0,sticky=tk.NSEW)
scbVDirSel.grid(row=1,column=1,sticky=tk.NS)
scbHDirSel.grid(row=2,column=0,sticky=tk.EW)
winDirSel.rowconfigure(1,weight=1)
winDirSel.columnconfigure(0,weight=1)
"""
        book.grid( **MYAPP._gridcfg_stick )
    #=========================================================================
    def _create_widgets( self ):
        """ Builds the root frame's interface """
        self._create_menu()
        self._create_tabs( self )
        self._create_statusbar()
    #=========================================================================
    def _cmd_about( self ):
        """ Displays the application about info """
        messagebox.showinfo(
            'About %s' % MYAPP._title,
            '%s\nVersion: %s\nDate: %s\nAuthor: %s' % (
                MYAPP._title,
                MYAPP._version,
                MYAPP._date,
                MYAPP._author
            )
        )
    #=========================================================================
    def _cmd_hello( self ):
        """ Example of using a message box """
        messagebox.showinfo( 'Hello', 'Well, hello there!' )
#=============================================================================
class Table( ttk.Frame ):
    """ Lightweight table widget: a grid of label-bearing cell frames """
    # anything that needs to be extra sticky can use this
    _gcfg_stick = {
        'sticky' : ( tk.N + tk.E + tk.S + tk.W )
    }
    #=========================================================================
    def __init__( self, parent, rows = 2, columns = 2 ):
        """ Initializes the table widget """
        # call the parent constructor
        ttk.Frame.__init__( self, parent )
        # register the style shared by every cell label
        ttk.Style().configure(
            'TableCell.TLabel',
            background = 'white',
            padding = 1
        )
        # matrix[ row ][ column ] -> cell frame (each carries a .label)
        self.matrix = self._create_table( self, rows, columns )
    #=========================================================================
    def set( self, x, y, value ):
        """ Sets the string of a given table cell (x = column, y = row) """
        self.matrix[ y ][ x ].label.configure( text = value )
    #=========================================================================
    def _create_table( self, parent, rows, columns ):
        """ Builds every row; returns them as a list of cell lists """
        return [
            self._create_row( parent, row_index, columns )
            for row_index in range( rows )
        ]
    #=========================================================================
    def _create_row( self, parent, row, columns ):
        """ Builds and grids one row of cells; returns the cell list """
        cells = []
        for col_index in range( columns ):
            cell = self._create_cell( parent )
            cell.grid( row = row, column = col_index, **Table._gcfg_stick )
            cells.append( cell )
        return cells
    #=========================================================================
    def _create_cell( self, parent ):
        """ Builds one cell: a frame wrapping a styled label """
        frame = ttk.Frame(
            parent,
            padding = ( 0, 0, 1, 1 )
        )
        frame.label = ttk.Label(
            frame,
            style = 'TableCell.TLabel'
        )
        frame.label.grid( **Table._gcfg_stick )
        frame.columnconfigure( 0, weight = 1 )
        return frame
#=============================================================================
class StatusBar( ttk.Frame ):
    """ A specialized frame for displaying a status bar """
    #=========================================================================
    def __init__( self, parent, count = 1, initial = () ):
        """ Initializes the status bar widget

        parent  : containing widget
        count   : number of label fields to create
        initial : optional per-field initial text
        """
        # call the parent constructor
        ttk.Frame.__init__( self, parent )
        # initialize some internal memory
        self.fields = []        # ttk.Label per field
        self.values = []        # tk.StringVar backing each label
        # build the status bar label fields
        column = 0
        for index in range( count ):
            # insert separators between label fields
            if index > 0:
                ttk.Separator( self, orient = tk.VERTICAL ).grid(
                    row = 0,
                    column = column,
                    sticky = ( tk.N + tk.S )
                )
                column += 1
            # create a modifiable text variable for this label
            tvar = tk.StringVar()
            if len( initial ) > index:
                tvar.set( initial[ index ] )
            self.values.append( tvar )
            # create a standard label to display the status
            label = ttk.Label( self, textvariable = tvar )
            label.grid( row = 0, column = column, padx = 4, pady = 2 )
            self.fields.append( label )
            column += 1
        # add a size grip area to the corner
        # (labels + separators occupy columns 0 .. 2*count-2, so the grip
        # takes the next free column)
        if count > 0:
            column = count + ( count - 1 )
        else:
            column = 0
        ttk.Sizegrip( self ).grid( row = 0, column = column, sticky = tk.SE )
        self.columnconfigure( column, weight = 1 )
        # make sure the widget fills the column
        self.grid( sticky = ( tk.E + tk.W ) )
    #=========================================================================
    def setStatus( self, index, text ):
        """ Sets the text displayed in a status bar field

        Returns True when the field exists, False otherwise.
        """
        # check the requested index
        if index < len( self.values ):
            # set the text in the label
            self.values[ index ].set( text )
            # return success
            return True
        # no field available for the requested index
        return False
#=============================================================================
def main( argv ):
    """ script execution entry point

    argv : command-line argument list (currently unused)
    Returns the shell exit status (0 = success).
    """
    # initialize and start the user interface
    ui = MYAPP()
    ui.mainloop()
    # return success
    return 0
#=============================================================================
# run the application when executed as a script
if __name__ == "__main__":
    import sys
    sys.exit( main( sys.argv ) )
|
package dataTypes;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * A matrix represented as a Vector of row DoubleVectors (both project types).
 * Element-wise operations delegate to the Vector base class, which combines
 * rows pairwise via the *Values overrides below.
 */
public class Matrix extends Vector<DoubleVector> {
    // Per-row sizes consumed by generateValue() when rows are generated;
    // null for matrices that wrap an existing list.
    private final List<Integer> vectorsDimensions;

    /**
     * Creates an empty matrix.
     * NOTE(review): vectorsDimensions is null here, so generateValue() would
     * NPE if ever invoked for this instance — confirm the Vector base class
     * does not call it for empty construction.
     */
    public Matrix() {
        super();
        vectorsDimensions = null;
    }

    /** Wraps an existing list of row vectors (same null caveat as above). */
    public Matrix(List<DoubleVector> list) {
        super(list);
        vectorsDimensions = null;
    }

    /** Creates `size` rows; row i gets vectorsDimensions.get(i) elements. */
    public Matrix(int size, List<Integer> vectorsDimensions) {
        super();
        this.vectorsDimensions = new ArrayList<>(vectorsDimensions);
        generateList(size);
    }

    /** Creates a rectangular matSize x vectorSize matrix. */
    public Matrix(int matSize, int vectorSize) {
        super();
        vectorsDimensions = createSameList(matSize, vectorSize);
        generateList(matSize);
    }

    // builds a list of `size` copies of `value`
    private List<Integer> createSameList(int size, int value) {
        return IntStream.range(0, size)
                .mapToObj(e -> value)
                .collect(Collectors.toList());
    }

    /** Row-wise combination via Vector.multiply / multiplyValues. */
    public Matrix multiply(Matrix matrix) {
        return new Matrix(super.multiply(matrix));
    }

    /** Row-wise combination via Vector.add / addValues. */
    public Matrix add(Matrix matrix) {
        return new Matrix(super.add(matrix));
    }

    /** Row-wise combination via Vector.subtract / subtractValues. */
    public Matrix subtract(Matrix matrix) {
        return new Matrix(super.subtract(matrix));
    }

    @Override
    protected DoubleVector generateValue(int index) {
        // requires vectorsDimensions != null (see constructor notes)
        return new DoubleVector(vectorsDimensions.get(index));
    }

    @Override
    protected DoubleVector multiplyValues(DoubleVector v1, DoubleVector v2) {
        return v1.multiply(v2);
    }

    @Override
    protected DoubleVector addValues(DoubleVector v1, DoubleVector v2) {
        return v1.add(v2);
    }

    @Override
    protected DoubleVector subtractValues(DoubleVector v1, DoubleVector v2) {
        return v1.subtract(v2);
    }

    /** Matrix-vector product: element i is dot(row_i, vector). */
    public DoubleVector multiplyByVector(DoubleVector vector) {
        return new DoubleVector(stream()
                .map(matVector -> matVector.multiply(vector).sum())
                .collect(Collectors.toList()));
    }

    /** Transpose: column j of this matrix becomes row j of the result. */
    public Matrix transpose() {
        return new Matrix(get(0).lengthStream()//won't work if not all vectors are the same lengths
                .mapToObj(innerIndex -> new DoubleVector(lengthStream()
                        .mapToObj(outerIndex -> get(outerIndex).get(innerIndex))
                        .collect(Collectors.toList())))
                .collect(Collectors.toList()));
    }

    /** Scales every row by the scalar. */
    public Matrix scale(Double scalar) {
        return new Matrix(stream()
                .map(vector -> vector.scale(scalar))
                .collect(Collectors.toList()));
    }

    /** Adds the vector to every row (delegates to Vector.addValue). */
    public Matrix addVector(DoubleVector vector) {
        return new Matrix(super.addValue(vector));
    }
}
|
// Problem 1. Format with placeholders
// Installs String.prototype.format(options), which replaces every
// occurrence of "#{key}" with options[key], then prints args[1] formatted
// with the JSON object parsed from args[0].
function solve(args) {
    String.prototype.format = function (options) {
        var result = this;
        Object.keys(options).forEach(function (key) {
            var placeholder = new RegExp('#{' + key + '}', 'g');
            result = result.replace(placeholder, options[key]);
        });
        return result;
    };
    console.log(args[1].format(JSON.parse(args[0])));
}
// Problem 2. HTML binding
// Parses options from args[0], then rewrites data-bind-* attributes in
// args[1]: data-bind-content="key" injects options[key] as element content,
// any other data-bind-<attr>="key" adds <attr>="options[key]" to the tag.
function solve(args) {
    var options = JSON.parse(args[0]);
    // BUG FIX: the original used replace(/'/, '"') without the g flag, so
    // only the FIRST single quote was normalized and single-quoted
    // data-bind attributes never matched the regex below.
    var inputStr = args[1].replace(/'/g, '"');
    String.prototype.bind = function (parameters) {
        var result = this;
        var currentMatch;
        // BUG FIX: `var` added — regExAttr leaked into the global scope.
        var regExAttr = new RegExp('data-bind-(.*?)="(.*?)"', 'gmi');
        while (currentMatch = regExAttr.exec(result)) {
            var arr;
            // insertion point: just before the tag's closing '>' (or '/>')
            var index = result.indexOf('>');
            if (result[index - 1] === '/') {
                index--;
            }
            if (currentMatch[1] !== 'content') {
                // bind as attribute: insert `attr="value"` before the '>'
                arr = result.split('');
                arr.splice(index, 0, " " + currentMatch[1] + '="' + parameters[currentMatch[2]] + '"');
                result = arr.join('');
            } else {
                // bind as content: insert the text right after the '>'
                arr = result.split('');
                arr.splice(index + 1, 0, parameters[currentMatch[2]]);
                result = arr.join('');
            }
        }
        return result;
    };
    return inputStr.bind(options);
}
# compute mean
# (assumes `nums` is a non-empty list of numbers defined earlier in the file)
total = 0
for num in nums:
    total += num
mean = total / len(nums)
# compute median
sorted_nums = sorted(nums)
if len(sorted_nums) % 2 == 0:
    # even count: average the two middle values
    median = (sorted_nums[len(nums) // 2] + sorted_nums[len(nums) // 2 - 1]) / 2
else:
    median = sorted_nums[len(nums) // 2]
# compute mode
from collections import Counter
counter = Counter(nums)
# BUG FIX: `max(counter.values())` returned the highest *count*, not the most
# frequent *value*; most_common(1) yields [(value, count)], so take [0][0].
mode = counter.most_common(1)[0][0]
# print the results
print('Mean:', mean)
print('Median:', median)
print('Mode:', mode)
import React from 'react';
import { useStaticQuery, graphql } from 'gatsby';
import BigList from '../Common/BigList/BigList';
import BigListItem from '../Common/BigList/BigListItem';
// Static page query: every project (allProjectYaml) sorted by its `order`
// field, with id/title/type/category, the generated slug, and a
// 1000px-wide image per node.
const query = graphql`
  query {
    work: allProjectYaml(sort: {fields: order}) {
      nodes {
        id
        title
        type
        category
        fields {
          slug
        }
        image {
          childImageSharp {
            fixed(width: 1000) {
              src
            }
          }
        }
      }
    }
  }
`;

// Renders the project list.
//   category : when truthy, only items whose `category` matches are shown
//   limit    : when truthy, caps how many items are rendered
const WorkList = ({ category = false, limit = false }) => {
  const { nodes: work } = useStaticQuery(query).work;
  // keep an item when no category filter is active, or when it matches
  const filterWork = (item) => {
    if (!category) {
      return true;
    }
    return item.category === category;
  }
  // a falsy limit means "show everything"
  let _limit = limit || work.length;
  return (
    <BigList>
      {
        work.filter(filterWork).slice(0, _limit).map(({ id, title, type, fields, image }) => (
          <BigListItem
            key={ id }
            to={ fields.slug }
            image={ image.childImageSharp.fixed.src }
            text={ title }
            label={ type }
            noSmallImage/>
        ))
      }
    </BigList>
  )
}

export default WorkList;
|
def extract_topics(metadata):
    """Count topic occurrences across metadata strings.

    Each entry looks like "<first-field> :: <topic> :: <topic> ...": every
    "::"-separated field after the first is treated as a topic (whitespace
    stripped).  Returns a dict mapping topic -> occurrence count.
    """
    frequency = {}
    for entry in metadata:
        parts = entry.split("::")
        for raw_topic in parts[1:]:
            name = raw_topic.strip()
            frequency[name] = frequency.get(name, 0) + 1
    return frequency
// Resolves a card's effect message and damage for the given status/language.
function processCardEffect(status, language) {
    // localized default ("Asleep") effect text
    const translations = {
        en: "Your opponent’s Active Pokémon is now Asleep.",
        fr: "Le Pokémon Actif de votre adversaire est maintenant Endormi.",
        es: "El Pokémon Activo de tu rival pasa a estar Dormido.",
        it: "Il Pokémon attivo del tuo avversario viene addormentato.",
        pt: "O Pokémon Ativo do seu oponente agora está Adormecido.",
        de: "Das Aktive Pokémon deines Gegners schläft jetzt."
    };
    const totalDamage = 20;
    // unknown languages fall back to English
    let effectMessage = translations[language] || translations.en;
    // an explicit status overrides the canned text
    // NOTE(review): the override is English-only, discarding localization for
    // any status other than "None" — confirm that is intended.
    if (status !== "None") {
        effectMessage = `Your opponent’s Active Pokémon is now ${status}.`;
    }
    return { effectMessage, totalDamage };
}
<filename>make_palettes.h
#ifndef MAKE_PALETTES_H
#define MAKE_PALETTES_H
// Palette generators: each retargets the global `targetPalette` with
// randomized colours.
// NOTE(review): CHSV / CRGBPalette16 / random8 and the globals targetPalette
// and thishue are declared in the including sketch (presumably a FastLED
// project) — confirm before reusing this header elsewhere.

// Fill all 16 entries with random hues, saturation 192-255, value 128-255.
void SetupRandom16Palette() {
  for (int i = 0; i < 16; i++) targetPalette[i] = CHSV(random8(), random8(192,255), random8(128,255));
} // SetupRandom16Palette()

// Build the palette from 4 fully random anchor colours.
void SetupRandom4Palette() {
  targetPalette = CRGBPalette16(CHSV(random8(), 255, random8(128,255)), CHSV(random8(), 255, random8(128,255)), CHSV(random8(), 192, random8(128,255)), CHSV(random8(), 255, 255));
} // SetupRandom4Palette()

// 4 anchor colours clustered within 32 hue steps of a newly picked hue.
void SetupSimilar4Palette() {
  thishue=random8(); // This one picks a random hue for you.
  targetPalette = CRGBPalette16(CHSV(thishue+random8(32), 255, random8(128,255)),
                                CHSV(thishue+random8(32), 255, random8(192,255)),
                                CHSV(thishue+random8(32), 192, random8(192,255)),
                                CHSV(thishue+random8(32), 255, random8(128,255)));
} // SetupSimilar4Palette()

// Same as SetupSimilar4Palette() but keeps the caller's current `thishue`.
void SetupMySimilar4Palette() { // This one allows you to select your own hue in advance.
  targetPalette = CRGBPalette16(CHSV(thishue+random8(32), 255, random8(128,255)),
                                CHSV(thishue+random8(32), 255, random8(192,255)),
                                CHSV(thishue+random8(32), 192, random8(192,255)),
                                CHSV(thishue+random8(32), 255, random8(128,255)));
} // SetupMySimilar4Palette()
#endif
|
import logging
# module-level logger for this dataset helper
log = logging.getLogger('deepthought.datasets')

# Helper function to generate hdf5 subsets
def get_dataset(hdf5name, selectors=None, sources=('features', 'targets', 'subjects')):
    """Load a metadata-selected subset of an HDF5 dataset into memory.

    hdf5name  : path to the HDF5 file; "<hdf5name>.meta.pklz" must hold the
                per-trial metadata list loaded via fs_util
    selectors : dict of metadata key -> accepted value(s) used to pick
                trials; None selects everything
    sources   : data sources to load from the file
    Returns (H5PYDataset loaded in memory, metadata list for selected trials).
    """
    if selectors is None:
        selectors = {}
    # load metadata
    import deepthought.util.fs_util as fs_util
    base_meta = fs_util.load(hdf5name + '.meta.pklz')
    # build lookup structure
    from deepthought.datasets.selection import DatasetMetaDB
    metadb = DatasetMetaDB(base_meta, selectors.keys())
    # get selected trial IDs
    selected_trial_ids = metadb.select(selectors)
    log.debug('selectors: {}'.format(selectors))
    log.debug('selected trials: {}'.format(selected_trial_ids))
    log.debug('selected sources: {}'.format(sources))
    # load data and generate metadata
    from fuel.datasets.hdf5 import H5PYDataset
    hdf5 = H5PYDataset(hdf5name,
                       which_sets=('all',), subset=selected_trial_ids,
                       load_in_memory=True, sources=sources
                       )
    meta = [base_meta[i] for i in selected_trial_ids]
    log.debug('number of examples: {}'.format(hdf5.num_examples))
    return hdf5, meta
|
<reponame>direkshan-digital/light-eventuate-4j
package com.networknt.eventuate.common;
/**
 * Base interface for event sourcing events
 *
 * <p>Each aggregate typically defines an interface that is the base interface for all of its event classes.
 * For example:
 *
 * <pre class="code">
 * {@literal @}EventEntity(entity="io.eventuate.example.banking.domain.Account")
 * public interface AccountEvent extends Event {
 * }
 * </pre>
 *
 * and
 *
 * <pre class="code">
 * public class AccountDebitedEvent implements AccountEvent {
 *   private BigDecimal amount;
 *   private String transactionId;
 *   ...
 * }
 * </pre>
 */
public interface Event {
}
|
# download pre-processed dataset (NELA-17 based)
# whole-type data (253 MB)
archive="data-processed-nela-17.tar.gz"
# BUG FIX: the archive was previously removed unconditionally, destroying the
# 253 MB download even when extraction failed; chain the steps so the cleanup
# only runs after a successful download AND extraction.
wget "http://milabfile.snu.ac.kr:16000/detecting-incongruity/${archive}" \
  && tar xzvf "${archive}" \
  && rm -- "${archive}"
<filename>li-apache-kafka-clients/src/test/java/com/linkedin/kafka/clients/consumer/MockLiKafkaConsumer.java
/*
* Copyright 2019 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License").
See License in the project root for license information.
*/
package com.linkedin.kafka.clients.consumer;
import java.time.Duration;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.MockConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.clients.consumer.OffsetCommitCallback;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
// Mock LiKafkaConsumer with raw byte key and value
/**
 * Mock {@link LiKafkaConsumer} with raw byte[] keys and values.
 *
 * <p>Nearly every call is forwarded to an embedded Kafka {@link MockConsumer}
 * (exposed through {@link #getDelegate()} so tests can feed it records and
 * control its behavior).  The Li-specific offset operations —
 * {@code seekToCommitted}, {@code committedSafeOffset}, {@code safeOffset} and
 * {@code safeOffsets} — are not implemented and throw
 * {@link UnsupportedOperationException}.
 */
public class MockLiKafkaConsumer implements LiKafkaConsumer<byte[], byte[]> {
  // backing Kafka mock that all supported calls delegate to
  private MockConsumer<byte[], byte[]> _delegate;

  public MockLiKafkaConsumer(OffsetResetStrategy offsetResetStrategy) {
    _delegate = new MockConsumer<>(offsetResetStrategy);
  }

  @Override
  public Set<TopicPartition> assignment() {
    return _delegate.assignment();
  }

  @Override
  public Set<String> subscription() {
    return _delegate.subscription();
  }

  @Override
  public void subscribe(Collection<String> topics) {
    _delegate.subscribe(topics);
  }

  @Override
  public void subscribe(Collection<String> topics, ConsumerRebalanceListener callback) {
    _delegate.subscribe(topics, callback);
  }

  @Override
  public void assign(Collection<TopicPartition> partitions) {
    _delegate.assign(partitions);
  }

  @Override
  public void subscribe(Pattern pattern, ConsumerRebalanceListener callback) {
    _delegate.subscribe(pattern, callback);
  }

  @Override
  public void subscribe(Pattern pattern) {
    _delegate.subscribe(pattern);
  }

  @Override
  public void unsubscribe() {
    _delegate.unsubscribe();
  }

  @Override
  public ConsumerRecords<byte[], byte[]> poll(long timeout) {
    return _delegate.poll(timeout);
  }

  @Override
  public ConsumerRecords<byte[], byte[]> poll(Duration timeout) {
    return _delegate.poll(timeout);
  }

  @Override
  public void commitSync() {
    _delegate.commitSync();
  }

  @Override
  public void commitSync(Duration timeout) {
    _delegate.commitSync(timeout);
  }

  @Override
  public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets) {
    _delegate.commitSync(offsets);
  }

  @Override
  public void commitSync(Map<TopicPartition, OffsetAndMetadata> offsets, Duration timeout) {
    _delegate.commitSync(offsets, timeout);
  }

  @Override
  public void commitAsync() {
    _delegate.commitAsync();
  }

  @Override
  public void commitAsync(OffsetCommitCallback callback) {
    _delegate.commitAsync(callback);
  }

  @Override
  public void commitAsync(Map<TopicPartition, OffsetAndMetadata> offsets, OffsetCommitCallback callback) {
    _delegate.commitAsync(offsets, callback);
  }

  @Override
  public void seek(TopicPartition partition, long offset) {
    _delegate.seek(partition, offset);
  }

  @Override
  public void seekToBeginning(Collection<TopicPartition> partitions) {
    _delegate.seekToBeginning(partitions);
  }

  @Override
  public void seekToEnd(Collection<TopicPartition> partitions) {
    _delegate.seekToEnd(partitions);
  }

  // Li-specific operation: not supported by the underlying MockConsumer.
  @Override
  public void seekToCommitted(Collection<TopicPartition> partitions) {
    throw new UnsupportedOperationException("Not implemented yet");
  }

  @Override
  public long position(TopicPartition partition) {
    return _delegate.position(partition);
  }

  @Override
  public long position(TopicPartition partition, Duration timeout) {
    return _delegate.position(partition, timeout);
  }

  @Override
  public OffsetAndMetadata committed(TopicPartition partition) {
    return _delegate.committed(partition);
  }

  @Override
  public OffsetAndMetadata committed(TopicPartition partition, Duration timeout) {
    return _delegate.committed(partition, timeout);
  }

  // Li-specific operation: not supported by the underlying MockConsumer.
  @Override
  public Long committedSafeOffset(TopicPartition tp) {
    throw new UnsupportedOperationException("Not implemented yet");
  }

  @Override
  public Map<MetricName, ? extends Metric> metrics() {
    return _delegate.metrics();
  }

  @Override
  public List<PartitionInfo> partitionsFor(String topic) {
    return _delegate.partitionsFor(topic);
  }

  @Override
  public List<PartitionInfo> partitionsFor(String topic, Duration timeout) {
    return _delegate.partitionsFor(topic, timeout);
  }

  @Override
  public Map<String, List<PartitionInfo>> listTopics() {
    return _delegate.listTopics();
  }

  @Override
  public Map<String, List<PartitionInfo>> listTopics(Duration timeout) {
    return _delegate.listTopics(timeout);
  }

  @Override
  public Set<TopicPartition> paused() {
    return _delegate.paused();
  }

  @Override
  public void pause(Collection<TopicPartition> partitions) {
    _delegate.pause(partitions);
  }

  @Override
  public void resume(Collection<TopicPartition> partitions) {
    _delegate.resume(partitions);
  }

  @Override
  public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> timestampsToSearch) {
    return _delegate.offsetsForTimes(timestampsToSearch);
  }

  @Override
  public Map<TopicPartition, OffsetAndTimestamp> offsetsForTimes(Map<TopicPartition, Long> timestampsToSearch, Duration timeout) {
    return _delegate.offsetsForTimes(timestampsToSearch, timeout);
  }

  @Override
  public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions) {
    return _delegate.beginningOffsets(partitions);
  }

  @Override
  public Map<TopicPartition, Long> beginningOffsets(Collection<TopicPartition> partitions, Duration timeout) {
    return _delegate.beginningOffsets(partitions, timeout);
  }

  @Override
  public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> partitions) {
    return _delegate.endOffsets(partitions);
  }

  @Override
  public Map<TopicPartition, Long> endOffsets(Collection<TopicPartition> partitions, Duration timeout) {
    return _delegate.endOffsets(partitions, timeout);
  }

  // Li-specific operation: not supported by the underlying MockConsumer.
  @Override
  public Long safeOffset(TopicPartition tp, long messageOffset) {
    throw new UnsupportedOperationException("Not implemented yet");
  }

  // Li-specific operation: not supported by the underlying MockConsumer.
  @Override
  public Long safeOffset(TopicPartition tp) {
    throw new UnsupportedOperationException("Not implemented yet");
  }

  // Li-specific operation: not supported by the underlying MockConsumer.
  @Override
  public Map<TopicPartition, Long> safeOffsets() {
    throw new UnsupportedOperationException("Not implemented yet");
  }

  @Override
  public void close() {
    _delegate.close();
  }

  @Override
  public void close(long timeout, TimeUnit timeUnit) {
    _delegate.close(timeout, timeUnit);
  }

  @Override
  public void close(Duration timeout) {
    _delegate.close(timeout);
  }

  @Override
  public void wakeup() {
    _delegate.wakeup();
  }

  /** Gives tests direct access to the backing MockConsumer. */
  public MockConsumer<byte[], byte[]> getDelegate() {
    return _delegate;
  }
}
|
package tools.claimr.reactnativeclient;
import android.location.GnssClock;
import android.location.GnssMeasurement;
import android.os.Build;
import android.os.SystemClock;
import androidx.annotation.RequiresApi;
/**
 * Helpers for formatting raw GNSS measurement log files: a comment header
 * describing the columns, and one "Raw," CSV line per measurement.
 */
public class GnssLoggerUtil {
  private static final String COMMENT_START = "# ";
  private static final char LINE_END = '\n';
  private static final String VERSION_TAG = "Version: v2.0.0.1";

  /**
   * Builds the comment header written at the top of a raw GNSS log: version
   * and device info plus the column list matching
   * {@link #gnssMeasurementToFileLine}.
   * NOTE(review): CarrierFrequencyHz appears twice in the column list, and
   * the Raw line below also writes the carrier frequency at both positions —
   * confirm the consuming format expects this duplication.
   */
  static String gnssMeasurementsFileHeader() {
    return COMMENT_START + LINE_END +
        COMMENT_START + "Header Description:" + LINE_END +
        COMMENT_START + LINE_END +
        COMMENT_START + VERSION_TAG +
        String.format(" Platform: %s", Build.VERSION.RELEASE) +
        String.format(" Manufacturer: %s", Build.MANUFACTURER) +
        String.format(" Model: %s", Build.MODEL) +
        LINE_END +
        COMMENT_START + LINE_END +
        COMMENT_START +
        "Raw,ElapsedRealtimeMillis,TimeNanos,LeapSecond,TimeUncertaintyNanos,FullBiasNanos,"
            + "BiasNanos,BiasUncertaintyNanos,DriftNanosPerSecond,DriftUncertaintyNanosPerSecond,"
            + "HardwareClockDiscontinuityCount,Svid,TimeOffsetNanos,State,ReceivedSvTimeNanos,"
            + "ReceivedSvTimeUncertaintyNanos,Cn0DbHz,PseudorangeRateMetersPerSecond,"
            + "PseudorangeRateUncertaintyMetersPerSecond,"
            + "AccumulatedDeltaRangeState,AccumulatedDeltaRangeMeters,"
            + "AccumulatedDeltaRangeUncertaintyMeters,CarrierFrequencyHz,CarrierCycles,"
            + "CarrierPhase,CarrierPhaseUncertainty,MultipathIndicator,SnrInDb,"
            + "ConstellationType,AgcDb,CarrierFrequencyHz" +
        LINE_END +
        COMMENT_START + LINE_END;
  }

  /**
   * Create a GNSS Raw measurements line from the internal GNSS data representation objects.
   *
   * @param clock The {@link GnssClock} of this measurement.
   * @param measurement The {@link GnssMeasurement} of this measurement.
   * @return A string formatted as a single line usable for RAW GNSS measurement files.
   */
  @RequiresApi(api = Build.VERSION_CODES.N)
  static String gnssMeasurementToFileLine(GnssClock clock, GnssMeasurement measurement) {
    // Optional fields are emitted as "" when their has*() accessor is false.
    // NOTE(review): unlike the other optional clock fields, getFullBiasNanos()
    // is read without a hasFullBiasNanos() guard — confirm it is always set by
    // the time this is called.
    return String.format(
        "Raw,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s",
        SystemClock.elapsedRealtime(),
        clock.getTimeNanos(),
        clock.hasLeapSecond() ? clock.getLeapSecond() : "",
        clock.hasTimeUncertaintyNanos() ? clock.getTimeUncertaintyNanos() : "",
        clock.getFullBiasNanos(),
        clock.hasBiasNanos() ? clock.getBiasNanos() : "",
        clock.hasBiasUncertaintyNanos() ? clock.getBiasUncertaintyNanos() : "",
        clock.hasDriftNanosPerSecond() ? clock.getDriftNanosPerSecond() : "",
        clock.hasDriftUncertaintyNanosPerSecond()
            ? clock.getDriftUncertaintyNanosPerSecond()
            : "",
        clock.getHardwareClockDiscontinuityCount(),
        measurement.getSvid(),
        measurement.getTimeOffsetNanos(),
        measurement.getState(),
        measurement.getReceivedSvTimeNanos(),
        measurement.getReceivedSvTimeUncertaintyNanos(),
        measurement.getCn0DbHz(),
        measurement.getPseudorangeRateMetersPerSecond(),
        measurement.getPseudorangeRateUncertaintyMetersPerSecond(),
        measurement.getAccumulatedDeltaRangeState(),
        measurement.getAccumulatedDeltaRangeMeters(),
        measurement.getAccumulatedDeltaRangeUncertaintyMeters(),
        measurement.hasCarrierFrequencyHz() ? measurement.getCarrierFrequencyHz() : "",
        measurement.hasCarrierCycles() ? measurement.getCarrierCycles() : "",
        measurement.hasCarrierPhase() ? measurement.getCarrierPhase() : "",
        measurement.hasCarrierPhaseUncertainty()
            ? measurement.getCarrierPhaseUncertainty()
            : "",
        measurement.getMultipathIndicator(),
        measurement.hasSnrInDb() ? measurement.getSnrInDb() : "",
        measurement.getConstellationType(),
        // AGC requires API O in addition to the has*() check
        Build.VERSION.SDK_INT >= Build.VERSION_CODES.O
            && measurement.hasAutomaticGainControlLevelDb()
            ? measurement.getAutomaticGainControlLevelDb()
            : "",
        measurement.hasCarrierFrequencyHz() ? measurement.getCarrierFrequencyHz() : "");
  }
}
|
#!/bin/bash
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# FIX: removed a stray duplicate "#!/bin/bash" that appeared mid-file.
set -eo pipefail

# Start the releasetool reporter
python3 -m pip install gcp-releasetool
python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script

# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
python3 -m pip install --upgrade twine wheel setuptools

# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1

# Move into the package, build the distribution and upload.
# SECURITY FIX: pass the PyPI password via the TWINE_PASSWORD environment
# variable (read natively by twine) instead of --password on the command
# line, where it is visible to other processes via `ps` and in shell traces.
TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
export TWINE_PASSWORD
cd github/python-bigquery-reservation
python3 setup.py sdist bdist_wheel
twine upload --username gcloudpypi dist/*
|
#!/bin/bash
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# Download the SAW binary release and install it into the given directory.
set -xe

# Print usage and exit non-zero; the script requires exactly two directories.
usage() {
    echo "install_saw.sh download_dir install_dir"
    exit 1
}

if [ "$#" -ne "2" ]; then
    usage
fi

DOWNLOAD_DIR=$1
INSTALL_DIR=$2

mkdir -p "$DOWNLOAD_DIR"
cd "$DOWNLOAD_DIR"

# download saw binaries
# --fail makes curl exit non-zero on an HTTP error (e.g. 404) instead of
# saving the error page as saw.tar.gz, which would only fail later in tar
# with a confusing message.
curl --retry 3 --fail https://s3-us-west-2.amazonaws.com/s2n-public-test-dependencies/saw-0.4.0.99-2019-12-10-Ubuntu14.04-64.tar.gz --output saw.tar.gz
mkdir -p saw && tar -xzf saw.tar.gz --strip-components=1 -C saw
mkdir -p "$INSTALL_DIR" && mv saw/* "$INSTALL_DIR"

# Sanity-check the toolchain: both commands fail the script (set -e) if the
# install is broken.
clang --version
"$INSTALL_DIR"/bin/saw --version
|
class Prot:
    """Protocol constants for game-outcome messages."""

    class STATES:
        """End-of-game state tokens (plain strings, compared by value)."""

        playerWon = "playerWon"
        enemyWon = "enemyWon"


def determineOutcome(state):
    """Translate a game state token into a numeric score.

    Returns 1 for a player win, 0 for an enemy win, and -1 for any
    unrecognized state.
    """
    # Dict dispatch instead of an if/elif chain; .get() supplies the
    # "unknown state" fallback. (Also removes the stray trailing "|"
    # artifact that made the original line a syntax error.)
    outcomes = {
        Prot.STATES.playerWon: 1,
        Prot.STATES.enemyWon: 0,
    }
    return outcomes.get(state, -1)
// libbat: parallel_io/mpi_send_recv.h (repo amelvill-umich/libbat)
#pragma once
#include <memory>
#include <mpi.h>
#include "abstract_array.h"
#include "borrowed_array.h"
#include "owned_array.h"
// Non-blocking MPI send. The ArrayHandle keeps the payload bytes alive until
// the caller observes completion via complete().
struct ISend {
    ArrayHandle<uint8_t> data;               // byte view of the outgoing payload
    MPI_Request request = MPI_REQUEST_NULL;  // pending MPI request handle
    int count;                               // element count passed to MPI
    MPI_Datatype send_type;                  // MPI datatype of each element
    int dest;                                // destination rank
    int tag;                                 // message tag
    MPI_Comm comm;                           // communicator used for the send

    ISend() = default;

    // Send `count` elements of type T starting at `t`. When `shared_data` is
    // true the caller's buffer is borrowed (the caller must keep it alive for
    // the duration of the send); otherwise the bytes go into an OwnedArray
    // (presumably a copy — confirm OwnedArray's (ptr, size) ctor semantics).
    template <typename T>
    ISend(const T *t,
          int count,
          MPI_Datatype send_type,
          int dest,
          int tag,
          MPI_Comm comm,
          bool shared_data);

    // Send from an existing type-erased byte array handle.
    ISend(const ArrayHandle<uint8_t> &data,
          int count,
          MPI_Datatype send_type,
          int dest,
          int tag,
          MPI_Comm comm);

    // Completion test; returns true once the send has finished.
    // NOTE(review): presumably a non-blocking MPI_Test on `request` — confirm
    // in the implementation file.
    bool complete();

private:
    void start_send(int count, MPI_Datatype send_type, int dest, int tag, MPI_Comm comm);
};
// Non-blocking MPI receive. The ArrayHandle owns or borrows the destination
// buffer; the caller polls complete() to learn when data has arrived.
struct IRecv {
    ArrayHandle<uint8_t> data;               // byte view of the receive buffer
    MPI_Request request = MPI_REQUEST_NULL;  // pending MPI request handle
    int count;                               // element count passed to MPI
    MPI_Datatype recv_type;                  // MPI datatype of each element
    int src;                                 // source rank
    int tag;                                 // message tag
    MPI_Comm comm;                           // communicator used for the receive

    IRecv() = default;

    // Allocate an internal buffer sized for `count` elements of T and start a
    // receive into it (see the definition below).
    template <typename T>
    static IRecv recv(int count, MPI_Datatype recv_type, int src, int tag, MPI_Comm comm);

    // Receive into the caller's buffer `t` (borrowed: `t` must stay alive
    // until the receive completes).
    template <typename T>
    IRecv(T *t, int count, MPI_Datatype recv_type, int src, int tag, MPI_Comm comm);

    // Receive into an existing type-erased byte array handle.
    IRecv(ArrayHandle<uint8_t> &data,
          int count,
          MPI_Datatype recv_type,
          int src,
          int tag,
          MPI_Comm comm);

    // Completion test; returns true once the receive has finished.
    bool complete();

private:
    void start_recv(int count, MPI_Datatype recv_type, int src, int tag, MPI_Comm comm);
};
// Type-erase the caller's typed buffer into a byte array and kick off the
// non-blocking send.
template <typename T>
ISend::ISend(const T *t,
             int count,
             MPI_Datatype send_type,
             int dest,
             int tag,
             MPI_Comm comm,
             bool shared_data)
{
    if (shared_data) {
        // Borrow: wrap the caller's pointer without copying; the caller
        // guarantees the buffer outlives the send.
        data = std::make_shared<BorrowedArray<uint8_t>>(reinterpret_cast<const uint8_t *>(t),
                                                        count * sizeof(T));
    } else {
        // Own: hand the bytes to an OwnedArray so this ISend controls the
        // buffer lifetime independently of the caller.
        data = std::make_shared<OwnedArray<uint8_t>>(reinterpret_cast<const uint8_t *>(t),
                                                     count * sizeof(T));
    }
    start_send(count, send_type, dest, tag, comm);
}
// Allocate an owned buffer of count * sizeof(T) bytes and start a receive
// into it. The buffer lives as long as the returned IRecv's data handle.
template <typename T>
IRecv IRecv::recv(int count, MPI_Datatype recv_type, int src, int tag, MPI_Comm comm)
{
    // Upcast OwnedArray -> AbstractArray to match the IRecv ctor parameter.
    // NOTE(review): this is a plain upcast, so static_pointer_cast would
    // suffice; dynamic_pointer_cast works but implies RTTI overhead.
    auto data = std::dynamic_pointer_cast<AbstractArray<uint8_t>>(
        std::make_shared<OwnedArray<uint8_t>>(count * sizeof(T)));
    return IRecv(data, count, recv_type, src, tag, comm);
}
// Receive directly into the caller's typed buffer: borrow `t` as bytes (no
// copy; `t` must remain valid until the receive completes) and start the
// non-blocking receive.
template <typename T>
IRecv::IRecv(T *t, int count, MPI_Datatype recv_type, int src, int tag, MPI_Comm comm)
    : data(std::make_shared<BorrowedArray<uint8_t>>(reinterpret_cast<uint8_t *>(t),
                                                    count * sizeof(T)))
{
    start_recv(count, recv_type, src, tag, comm);
}
|
import React from 'react';
import Home from "./views/Home"
import Bridges from "./views/Bridges";
import Bridge from "./views/Bridge";
import Map from "./views/Map";
import Schedule from "./views/Schedule";
import { Route, HashRouter as Router} from "react-router-dom";
import './App.scss';
// Root component: declares the application's client-side route table.
// HashRouter keeps routing state in the URL fragment, so the app works from
// static hosting without server-side rewrite rules.
function App() {
  // NOTE(review): there is no <Switch> here, so react-router would render
  // every matching <Route>; the paths below look mutually exclusive
  // ("/bridges" vs "/bridge/:bridgeId" are distinct prefixes) — confirm if
  // new routes are added.
  return (
    <Router>
      <Route exact path="/" component={Home} />
      <Route path="/bridges" component={Bridges} />
      <Route path="/map" component={Map} />
      <Route path="/schedule" component={Schedule} />
      <Route path="/bridge/:bridgeId" component={Bridge} />
    </Router>
  );
}
export default App;
|
#!/bin/bash
# GNU GPL v3
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
# Plugin identity, shown by -v/--version and -h/--help.
PROGRAM="check_laborange_login";
VERSION="0.2";
AUTHOR="Régis Leroy regis.leroy@makina-corpus.com";
# Short tag prefixed to every Nagios status line.
NICK="LOGINLAB"
# Standard Nagios plugin exit codes.
OK=0;
WARNING=1;
CRITICAL=2;
UNKNOWN=3;
PENDING=4;
print_version() {
  # Three-line banner: program name, version/author, separator rule.
  printf '%s\n' "$PROGRAM"
  printf 'Version: %s Author: %s\n' "$VERSION" "$AUTHOR"
  printf '%s\n' "-------------------------------------------------------------------------------------"
}
print_help() {
  # Full help: version banner, blank-line-padded description, then usage.
  print_version
  printf "\n%s : Check we can connect with a user login on Lab'Orange\n\n" "$PROGRAM"
  print_usage
}
print_usage() {
  # Command-line usage. Emitted as one heredoc so the text reads exactly as
  # it prints; $PROGRAM is expanded by the unquoted delimiter.
  cat <<USAGE
USAGE: $PROGRAM [OPTIONS]

-h|--help : show program help
-v|--version : show program version
-u : Host url (full with http://)
-a : HTTP Auth pair user
-p : HTTP Auth pair password
-c : Critical thresold (default 10)
-w : Warning thresold (default 5)

Thresolds are computed in seconds on the time needed to load home page after login.
Bad login will always be critical.
USAGE
}
#Default
# Default target and monitoring test account (overridable with -u).
# NOTE(review): SECURITY — the monitoring credentials are hard-coded in this
# script; consider loading them from a file readable only by the nagios user.
URL="http://dev.laborange.com"
LOGIN="monitoring"
PASSWORD="RuedCiWruj1"
# To check in resulting page content
USERNAME="monitoring"
# Scratch files: wget cookie jar and the saved page bodies for each step.
COOKIE="/tmp/nagios_laborange_conn_cookie.txt"
BODY="/tmp/nagios_laborange_conn_page-content.txt"
BODYSHOP="/tmp/nagios_laborange_conn_page-contentshop.txt"
BODYFAILURE="/tmp/nagios_laborange_conn_page-content-failure.txt"
BODYSHOPFAILURE="/tmp/nagios_laborange_conn_page-contentshop-failure.txt"
# Warning/critical thresholds in seconds (overridable with -w / -c).
CTHRESOLD=10
WTHRESOLD=5
# Parse args
# NOTE(review): ARGS is computed but never used — options are parsed manually
# from "$1" below, so clustered short options (e.g. -vh) are not supported.
ARGS=`getopt vhc:w:u:a:p: $*`;
# Consume positional parameters pairwise (flag, then its value) until empty.
while test -n "$1"; do
case "$1" in
--help|-h)
shift;
print_help;
exit $UNKNOWN;
;;
--version|-v)
shift;
print_version ;
exit $UNKNOWN;
;;
-u)
shift;
URL=$1;
shift;
;;
-a)
shift;
AUTHUSER=$1;
shift;
;;
-p)
shift;
AUTHPASSWORD=$1;
shift;
;;
-c)
shift;
CTHRESOLD=$1;
shift;
;;
-w)
shift;
WTHRESOLD=$1;
shift;
;;
*)
# Unknown flag: report as UNKNOWN so Nagios flags a config error.
echo
echo "====================================="
echo "$NICK UNKNOWN - Unknown argument: $1";
echo "====================================="
echo
print_help;
exit $UNKNOWN;
;;
esac;
done
# Check tools
# Resolve the external tools this plugin depends on; abort with UNKNOWN when
# one is missing so Nagios reports a host configuration problem, not a site
# problem. `command -v` replaces the deprecated `which`, and the old unquoted
# `[ ! ${WGET} ]` tests (which broke on paths with spaces) are gone.
for TOOL in wget grep tr cut sed; do
  if ! command -v "$TOOL" >/dev/null 2>&1; then
    echo "$NICK UNKNOWN - You do not have the $TOOL utility?"
    exit $UNKNOWN
  fi
done
WGET=$(command -v wget)
GREP=$(command -v grep)
TR=$(command -v tr)
CUT=$(command -v cut)
SED=$(command -v sed)
# Add HTTP basic-auth credentials to every wget call when -a was given.
if [ -n "${AUTHUSER}" ]; then
  WGET="${WGET} --user=${AUTHUSER} --password=${AUTHPASSWORD}"
fi
# Build urls & commands
# The portal login form lives on the /home page; the shop is the same site on
# the "boutique." subdomain and shares the session cookie.
LOGIN_URL=${URL}"/home";
HOME_URL=${URL}"/home";
SHOP_BASEURL=`echo ${URL} | sed 's_http://_http://boutique._'`;
SHOP_URL=${SHOP_BASEURL}"/";
LOGOUT_URL=${URL}"/user/logout"
POST_LOGIN_URL=${URL}"/home?destination=home"
# All commands share one cookie jar (${COOKIE}) so the session persists
# across steps; each saves its page body to a dedicated file for grepping.
# 1st page command
COMMAND1="${WGET} -q --cookies=on --keep-session-cookies --save-cookies=${COOKIE} -O ${BODY} ${LOGIN_URL}"
# COMMAND2 is the login POST command, build later as we need to extract some content from COMMAND1 result
# After login post page GET command
COMMAND3="${WGET} -q --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${LOGIN_URL} -O ${BODY} ${HOME_URL}"
# Same as After login post page GET command but with another save file (record failure)
COMMAND3BIS="${WGET} -q --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${LOGIN_URL} -O ${BODYFAILURE} ${HOME_URL}"
# Shop GET command
COMMAND4="${WGET} -q --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${HOME_URL} -O ${BODYSHOP} ${SHOP_URL}"
# Same as SHOP GET command but with another save file (record failure)
COMMAND4BIS="${WGET} -q --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${HOME_URL} -O ${BODYSHOPFAILURE} ${SHOP_URL}"
# Logout command
COMMAND5="${WGET} -q --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${HOME_URL} -O - ${LOGOUT_URL}"
# Let's go ########################################################
# Empty cookies and old files
rm -f ${COOKIE};
rm -f ${BODY};
rm -f ${BODYSHOP};
rm -f ${BODYFAILURE};
rm -f ${BODYSHOPFAILURE};
# Timestamps are unix-epoch milliseconds: nanoseconds truncated to 13 chars.
# @see http://serverfault.com/questions/151109/how-do-i-get-current-unix-time-in-milliseconds-using-bash
TIMESTART=`date +%s%N|${CUT} -b1-13`;
# get login page ###############
# store result in BODY file ####
#echo ${COMMAND1};
${COMMAND1} >/dev/null;
TIMEINIT=`date +%s%N|${CUT} -b1-13`;
# Extract the Drupal form_build_id hidden value that sits just before the
# user_login_block marker; it must be posted back with the credentials.
FORMID=`${GREP} -B1 "name=\"form_id\" value=\"user_login_block\"" ${BODY}|${GREP} "form_build_id"|${TR} -s " " |${CUT} -d "=" -f 4|${CUT} -d "\"" -f 2`;
#echo $FORMID
# Post login form ##############
POSTDATA="name=${LOGIN}&pass=${PASSWORD}&form_id=user_login_block&op=Se+connecter&form_build_id=${FORMID}"
COMMAND2="${WGET} -q -O - --cookies=on --keep-session-cookies --load-cookies=${COOKIE} --save-cookies=${COOKIE} --referer=${LOGIN_URL} --post-data=${POSTDATA} ${POST_LOGIN_URL}"
#echo ${COMMAND2};
${COMMAND2} >/dev/null;
TIMELOG=`date +%s%N|${CUT} -b1-13`;
# Take the HOME redirect #######
# store result in BODY file ####
#echo ${COMMAND3};
${COMMAND3} >/dev/null;
TIMEEND=`date +%s%N|${CUT} -b1-13`;
# Visit Shop #######################
# store result in BODYSHOP file ####
#echo ${COMMAND4};
${COMMAND4} >/dev/null;
TIMESHOP=`date +%s%N|${CUT} -b1-13`;
# Logout #######################
#echo ${COMMAND5}
${COMMAND5} >/dev/null;
# Visit Shop again (failure) ##############
# store result in BODYSHOPFAILURE file ####
#echo ${COMMAND4BIS};
${COMMAND4BIS} >/dev/null;
# Visit portal again (failure) ########
# store result in BODYFAILURE file ####
#echo ${COMMAND3BIS};
${COMMAND3BIS} >/dev/null;
# Per-phase elapsed times, in milliseconds.
((TIMEFIRST= ${TIMEINIT}-${TIMESTART}));
((TIMELOGIN=${TIMELOG}-${TIMEINIT}));
((TIMEUSER=${TIMEEND}-${TIMEINIT}));
((TIMECONNECTED=${TIMEEND}-${TIMELOG}));
((TIMELOADSHOP=${TIMESHOP}-${TIMEEND}));
# Thresholds were given in seconds; comparisons below are in milliseconds.
let CTHRESOLDMS=${CTHRESOLD}*1000;
let WTHRESOLDMS=${WTHRESOLD}*1000;
# Human-readable timing summary appended to every status line.
HUMAN=" (connected page load: ${TIMECONNECTED}ms, shop page load: ${TIMELOADSHOP}ms, first page:${TIMEFIRST}ms, form post: ${TIMELOGIN}ms user feeling:${TIMEUSER}ms) ";
#'label'=value[UOM];[warn];[crit];[min];[max]
PERFPARSE="page_load=${TIMECONNECTED}ms;${WTHRESOLDMS}ms;${CTHRESOLDMS}ms;${TIMECONNECTED}ms;${TIMECONNECTED}ms;shop_page_load=${TIMELOADSHOP}ms;${WTHRESOLDMS}ms;${CTHRESOLDMS}ms;${TIMELOADSHOP}ms;${TIMELOADSHOP}ms;;;;;first_page=${TIMEFIRST}ms;;;;;form_post=${TIMELOGIN}ms;;;;;user_time_feeling=${TIMEUSER}ms;;;;;";
### CRIT checks ##################
# Each check greps the saved page bodies; only the pipeline's exit status
# ($?) is used — the SUCCESS/FAILURE variables themselves are never read.
# ensure redirect after login post was a connected success
SUCCESS=`${GREP} "user-menu-toggle" ${BODY}|${GREP} -i "${USERNAME}"`
if [ ! "$?" == "0" ]; then
echo "$NICK CRITICAL - Failed to get nickname \"${USERNAME}\" in resulting home page. Seems the login failed."${HUMAN}"|"${PERFPARSE};
exit $CRITICAL;
fi
# ensure shop visit was a success
# ("outique" matches both "Boutique" and "boutique" alongside grep -i)
SUCCESS=`${GREP} "<h1 class=\"site-name element-invisible\">" ${BODYSHOP}| ${GREP} -i "outique"`
if [ ! "$?" == "0" ]; then
echo "$NICK CRITICAL - Boutique url does not return 'boutique' in hidden branding site name. Seems the login failed on shop site."${HUMAN}"|"${PERFPARSE};
exit $CRITICAL;
fi
# Check time
if [ ${TIMECONNECTED} -gt ${CTHRESOLDMS} ]; then
echo "$NICK CRITICAL - Connected page is too long to load! "${HUMAN}"|"${PERFPARSE};
exit $CRITICAL;
fi
if [ ${TIMELOADSHOP} -gt ${CTHRESOLDMS} ]; then
echo "$NICK CRITICAL - Shop Connected page is too long to load! "${HUMAN}"|"${PERFPARSE};
exit $CRITICAL;
fi
### WARN checks ##################
# ensure shop visit was a success
SUCCESS=`${GREP} "user-menu-toggle" ${BODYSHOP}|${GREP} "${USERNAME}"`
if [ ! "$?" == "0" ]; then
echo "$NICK WARNING - Failed to get nickname \"${USERNAME}\" in resulting shop home page. Seems the login failed on shop site."${HUMAN}"|"${PERFPARSE};
exit $WARNING;
fi
# Check time
if [ ${TIMECONNECTED} -gt ${WTHRESOLDMS} ]; then
echo "$NICK WARNING - Connected page is too long to load! "${HUMAN}"|"${PERFPARSE};
exit $WARNING;
fi
if [ ${TIMELOADSHOP} -gt ${WTHRESOLDMS} ]; then
echo "$NICK WARNING - Shop Connected page is too long to load! "${HUMAN}"|"${PERFPARSE};
exit $WARNING;
fi
# ensure logout command was ok for portal -> anonymous page
FAILURE=`${GREP} "user-menu-toggle" ${BODYFAILURE}`
if [ "$?" == "0" ]; then
echo "$NICK WARNING - Failed to logout from portal site. user-menu-toggle still on resulting page."${HUMAN}"|"${PERFPARSE};
exit $WARNING;
fi
# ensure logout command was ok for shop -> redirected to portal page
FAILURE=`${GREP} "<h1 class=\"site-name element-invisible\">" ${BODYSHOPFAILURE}| ${GREP} -i "boutique"`
if [ "$?" == "0" ]; then
echo "$NICK WARNING - Boutique url does is still returning 'boutique' in hidden branding site name. Seems the logout failed on shop site."${HUMAN}"|"${PERFPARSE};
exit $WARNING;
fi
### OK result ####################
echo "$NICK OK - "${HUMAN}"|"${PERFPARSE};
exit $OK;
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.rsc;
import java.io.IOException;
import java.net.URI;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicInteger;
import io.netty.util.concurrent.ImmediateEventExecutor;
import io.netty.util.concurrent.Promise;
import org.apache.livy.LivyClient;
import org.apache.livy.LivyClientFactory;
import org.apache.livy.rsc.rpc.RpcServer;
/**
 * Factory for RSC clients.
 */
public final class RSCClientFactory implements LivyClientFactory {

  // Number of live clients sharing the singleton RpcServer below.
  private final AtomicInteger refCount = new AtomicInteger();

  // Lazily created RPC server shared by all locally launched contexts;
  // torn down again when the last client calls unref().
  private RpcServer server = null;

  // interactive session child processes number
  private static AtomicInteger iscpn = new AtomicInteger();

  public static AtomicInteger childProcesses() {
    return iscpn;
  }

  /**
   * Creates a local Livy client if the URI has the "rsc" scheme.
   * <p>
   * If the URI contains user information, host and port, the library will try to connect to an
   * existing RSC instance with the provided information, and most of the provided configuration
   * will be ignored.
   * <p>
   * Otherwise, a new Spark context will be started with the given configuration.
   */
  @Override
  public LivyClient createClient(URI uri, Properties config) {
    if (!"rsc".equals(uri.getScheme())) {
      // Not our scheme; returning null lets another factory handle the URI.
      return null;
    }

    RSCConf lconf = new RSCConf(config);

    boolean needsServer = false;
    try {
      Promise<ContextInfo> info;
      Process driverProcess = null;
      if (uri.getUserInfo() != null && uri.getHost() != null && uri.getPort() > 0) {
        // Connect to an already-running context described by the URI.
        info = createContextInfo(uri);
      } else {
        // Launch a new driver process; this path needs the shared RpcServer.
        needsServer = true;
        ref(lconf);
        DriverProcessInfo processInfo = ContextLauncher.create(this, lconf);
        info = processInfo.getContextInfo();
        driverProcess = processInfo.getDriverProcess();
      }
      return new RSCClient(lconf, info, driverProcess);
    } catch (Exception e) {
      if (needsServer) {
        // Undo the reference taken above so the server can shut down.
        unref();
      }
      throw Utils.propagate(e);
    }
  }

  RpcServer getServer() {
    return server;
  }

  // Increment the client reference count, starting the shared RpcServer on
  // the first reference.
  private synchronized void ref(RSCConf config) throws IOException {
    if (refCount.get() != 0) {
      refCount.incrementAndGet();
      return;
    }

    Utils.checkState(server == null, "Server already running but ref count is 0.");
    // NOTE(review): checkState above already guarantees server == null here,
    // so this if is always true; kept for defensiveness.
    if (server == null) {
      try {
        server = new RpcServer(config);
      } catch (InterruptedException ie) {
        throw Utils.propagate(ie);
      }
    }

    refCount.incrementAndGet();
  }

  // Decrement the reference count, closing the server on the last reference.
  synchronized void unref() {
    if (refCount.decrementAndGet() == 0) {
      server.close();
      server = null;
    }
  }

  private static Promise<ContextInfo> createContextInfo(final URI uri) {
    // userInfo is "user:secret"; limit=2 so the secret may itself contain ':'.
    String[] userInfo = uri.getUserInfo().split(":", 2);
    ImmediateEventExecutor executor = ImmediateEventExecutor.INSTANCE;
    Promise<ContextInfo> promise = executor.newPromise();
    promise.setSuccess(new ContextInfo(uri.getHost(), uri.getPort(), userInfo[0], userInfo[1]));
    return promise;
  }
}
|
// (repository metadata marker removed by dataset extraction: gh_stars=0)
/*
* BackoffLock.java
*
* Created on January 20, 2006, 11:02 PM
*
* From "Multiprocessor Synchronization and Concurrent Data Structures",
* by <NAME> and <NAME>.
* Copyright 2006 Elsevier Inc. All rights reserved.
*/
package tamp.ch07.Spin.spin;
/**
* Exponential backoff lock
*
* @author <NAME>
*/
import java.util.Random;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
/**
 * Test-and-test-and-set spin lock with randomized exponential backoff.
 *
 * <p>Threads spin on the cached lock state and, after each failed attempt to
 * acquire, back off for a bounded delay to reduce contention.
 */
public class BackoffLock implements Lock {
    /** Initial backoff limit. */
    private static final int MIN_DELAY = 32;
    /** Maximum backoff limit. */
    private static final int MAX_DELAY = 1024;

    /** Lock state: {@code true} while some thread holds the lock. */
    private final AtomicBoolean state = new AtomicBoolean(false);

    /**
     * Acquires the lock, spinning (with backoff) until it is free.
     * Not responsive to interruption, but preserves the interrupt status.
     */
    public void lock() {
        // Backoff state is per-acquisition, so every lock() call starts over
        // at MIN_DELAY. (The previous unused instance fields `backoff` and
        // `random` were dead code and have been removed.)
        Backoff backoff = new Backoff(MIN_DELAY, MAX_DELAY);
        boolean interrupted = false;
        try {
            while (true) {
                // Test-and-test-and-set: spin on the cheap read until the
                // lock looks free before attempting the atomic swap.
                while (state.get()) {
                    // spin
                }
                if (!state.getAndSet(true)) { // try to acquire lock
                    return;
                }
                // Lost the race: back off before retrying.
                try {
                    backoff.backoff();
                } catch (InterruptedException ex) {
                    // Remember the interrupt instead of swallowing it; it is
                    // restored in the finally block below.
                    interrupted = true;
                }
            }
        } finally {
            if (interrupted) {
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Releases the lock. No ownership check is performed. */
    public void unlock() {
        state.set(false);
    }

    // Any class implementing Lock must provide these methods; they are not
    // supported by this spin lock.
    public Condition newCondition() {
        throw new java.lang.UnsupportedOperationException();
    }

    public boolean tryLock(long time,
                           TimeUnit unit)
            throws InterruptedException {
        throw new java.lang.UnsupportedOperationException();
    }

    public boolean tryLock() {
        throw new java.lang.UnsupportedOperationException();
    }

    public void lockInterruptibly() throws InterruptedException {
        throw new java.lang.UnsupportedOperationException();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.