text stringlengths 1 1.05M |
|---|
<!DOCTYPE html>
<html>
<head>
<title>My Webpage</title>
</head>
<body>
<header>My Header</header>
<p>This is my first website! It's pretty cool.</p>
</body>
</html> |
// Returns the first falsey element of `arr` (0, '', null, undefined, NaN,
// or false), or undefined when every element is truthy.
function findFirstFalsey(arr) {
  return arr.find((element) => !element);
}
#!/usr/bin/env bash
# -----------------------------------------------------------------------------
# usage
# -----------------------------------------------------------------------------
# Print the help/usage text for bootstrap.sh on stdout.
# `read -r -d ''` slurps the whole heredoc into _usage_string; it returns
# non-zero because the NUL delimiter never appears, which is expected here.
_usage_function() {
  read -r -d '' _usage_string <<EOF
Usage:
./bootstrap.sh [-h|--help]
./bootstrap.sh [-n|--name <name>]
[-e|--email <email>]
[-g|--github <github>]
[-i|--irssi <irssi>]
Options:
-h, --help
print this help message
-n, --name <name>
set full name (defaults to "$USER")
-e, --email <email>
set email address (defaults to "$USER@$HOSTNAME")
-g, --github <github>
set GitHub username (defaults to "$USER")
-i, --irssi <irssi>
set irssi username (defaults to "$USER")
EOF
  echo "$_usage_string"
}
# Parse command-line options. Each value-taking flag consumes two positional
# parameters (the flag and its value); unknown flags print usage and abort.
while [[ $# -gt 0 ]]; do
  case "$1" in
    -h|--help)
      _usage_function
      exit 0
      ;;
    -n|--name)
      _name="$2"
      # shift past argument and value
      shift
      shift
      ;;
    -e|--email)
      _email="$2"
      shift
      shift
      ;;
    -g|--github)
      _github="$2"
      shift
      shift
      ;;
    -i|--irssi)
      _irssi="$2"
      shift
      shift
      ;;
    *)
      # unknown option
      _usage_function
      exit 1
      ;;
  esac
done

# -----------------------------------------------------------------------------
# settings — fall back to the current user/host when a flag was not given
# -----------------------------------------------------------------------------

# e.g. Andy Weidenbaum (for git/hg)
name="${_name:-$USER}"
# e.g. atweiden@ioiojo.com (for git/hg)
email="${_email:-$USER@$HOSTNAME}"
# e.g. atweiden (for github)
github="${_github:-$USER}"
# e.g. atweiden (for irc)
irssi="${_irssi:-$USER}"

# -----------------------------------------------------------------------------
# dirs
# -----------------------------------------------------------------------------

# Absolute path of the directory containing this script.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Create standard home directories (no-op when they already exist).
mkdir -p "$HOME/.src" \
  "$HOME/Desktop" \
  "$HOME/Documents" \
  "$HOME/Downloads" \
  "$HOME/Graphics" \
  "$HOME/Music" \
  "$HOME/Projects"

# -----------------------------------------------------------------------------
# rsync — copy the dotfiles from this repo into $HOME
# -----------------------------------------------------------------------------

# rsync options
_rsync_opts=()
# exclude repo-internal files that must not land in $HOME
_rsync_opts+=('--exclude=.git'
  '--exclude=.gitkeep'
  '--exclude=.hg'
  '--exclude=.subgit'
  '--exclude=doc'
  '--exclude=resources'
  '--exclude=bootstrap.sh'
  '--exclude=fetch-pgp-keys.sh'
  '--exclude=macos.sh'
  '--exclude=README.txt'
  '--exclude=UNLICENSE')
# copy directories recursively
_rsync_opts+=('--recursive')
# copy symlinks as symlinks
_rsync_opts+=('--links')
# preserve permissions
_rsync_opts+=('--perms')
# back up any overwritten files in ~/.local/share/dotfiles
_rsync_opts+=('--backup'
  "--backup-dir=$HOME/.local/share/dotfiles")
rsync "${_rsync_opts[@]}" "$DIR/" "$HOME"

# -----------------------------------------------------------------------------
# vim — install plugin manager and plugins on first run only
# -----------------------------------------------------------------------------

_packager_src='https://github.com/kristijanhusak/vim-packager'
_packager_dst="$HOME/.vim/pack/packager/opt/vim-packager"
if ! [[ -d "$_packager_dst" ]]; then
  echo -n 'Installing vim plugin manager (kristijanhusak/vim-packager)... '
  git clone \
    --quiet \
    "$_packager_src" \
    "$_packager_dst"
  echo 'done.'
  echo -n 'Installing vim plugins... '
  # Run vim headlessly; plugin install quits vim itself via on_finish.
  vim \
    -c 'call PackagerSetup()' \
    -c 'call packager#install({ "on_finish": "quitall" })' > \
    /dev/null 2>&1
  echo 'done.'
fi

# -----------------------------------------------------------------------------
# git/hg — substitute personal details into the copied config templates
# gsed is GNU sed (e.g. installed via Homebrew on macOS)
# -----------------------------------------------------------------------------

gsed -i "s#yourname#$name#" "$HOME/.config/git/config"
gsed -i "s#youremail#$email#g" "$HOME/.config/git/config"
gsed -i "s#githubusername#$github#" "$HOME/.config/git/config"
gsed -i "s#yourname#$name#" "$HOME/.config/hg/hgrc"
gsed -i "s#youremail#$email#" "$HOME/.config/hg/hgrc"
gsed -i "s#githubusername#$github#" "$HOME/.ssh/config"

# -----------------------------------------------------------------------------
# irssi
# -----------------------------------------------------------------------------

gsed -i "s#yourname#$irssi#" "$HOME/.config/irssi/config"

# -----------------------------------------------------------------------------
# iTerm2
# -----------------------------------------------------------------------------

# use $USER instead of $name because we’re configuring $HOME
gsed -i "s#yourname#$USER#" "$HOME/.config/iterm2/seoul256.json"

# -----------------------------------------------------------------------------
# permissions — gnupg and ssh refuse to work with loose permissions
# -----------------------------------------------------------------------------

chmod 700 "$HOME/.gnupg"
chmod 700 "$HOME/.ssh"

# vim: set filetype=sh foldmethod=marker foldlevel=0 nowrap:
|
#!/bin/bash
# Tag/update helper for a set of AOSP-fork, kernel, and independent git repos.
# Usage: ./script [build_number]
#   with build_number : tag every repo as $aosp_version.$build_number and push
#   without arguments : rebase each repo onto its upstream tag and force-push
set -o nounset

# When non-empty, delete this tag locally and on origin instead of tagging.
DELETE_TAG=
build_number=
if [[ $# -eq 1 ]]; then
  build_number=$1
elif [[ $# -ne 0 ]]; then
  exit 1
fi

branch=pie
aosp_version=PQ3A.190505.002
# The real AOSP build id; may differ from $aosp_version, in which case
# platform_build's core/build_id.mk gets rewritten below.
aosp_version_real=PQ3A.190505.002
aosp_tag=android-9.0.0_r37

# Repositories forked from AOSP (commented-out entries are currently disabled).
aosp_forks=(
  device_common
  device_google_crosshatch
  device_google_crosshatch-sepolicy
  device_google_marlin
  device_google_muskie
  device_google_taimen
  device_google_wahoo
  #device_linaro_hikey
  kernel_configs
  platform_art
  platform_bionic
  platform_bootable_recovery
  platform_build
  platform_build_soong
  #platform_external_clang
  platform_external_conscrypt
  #platform_external_llvm
  #platform_external_sqlite
  platform_frameworks_av
  platform_frameworks_base
  #platform_frameworks_ex
  platform_frameworks_native
  platform_frameworks_opt_net_wifi
  platform_libcore
  platform_manifest
  platform_packages_apps_Bluetooth
  platform_packages_apps_Camera2
  platform_packages_apps_Contacts
  platform_packages_apps_ExactCalculator
  #platform_packages_apps_Gallery2
  platform_packages_apps_Launcher3
  platform_packages_apps_Music
  platform_packages_apps_Nfc
  platform_packages_apps_PackageInstaller
  #platform_packages_apps_QuickSearchBox
  platform_packages_apps_Settings
  platform_packages_inputmethods_LatinIME
  platform_packages_providers_DownloadProvider
  platform_packages_services_Telephony
  #platform_prebuilts_clang_host_linux-x86
  platform_system_bt
  platform_system_core
  platform_system_extras
  platform_system_netd
  platform_system_sepolicy
)

# Kernel repos mapped to the upstream tag each one is rebased onto.
declare -A kernels=(
  [google_marlin]=android-9.0.0_r0.74 # May 2019
  [google_wahoo]=android-9.0.0_r0.75 # May 2019
  [google_crosshatch]=android-9.0.0_r0.77 # May 2019
  #[linaro_hikey]=dc721a4ac71d
)

# Repos with no AOSP upstream; they are only tagged or force-pushed.
independent=(
  android-prepare-vendor
  branding
  chromium_build
  chromium_patches
  hardened_malloc
  platform_external_Auditor
  platform_external_chromium
  #platform_external_Etar-Calendar
  #platform_external_F-Droid
  #platform_external_offline-calendar
  #platform_external_talkback
  #platform_packages_apps_Backup
  #platform_packages_apps_F-Droid_privileged-extension
  #platform_packages_apps_PdfViewer
  platform_packages_apps_Updater
  script
  vendor_linaro
)
# Process each AOSP fork: delete a tag, cut a signed release tag, or
# rebase onto the upstream AOSP tag, depending on the variables set above.
for repo in "${aosp_forks[@]}"; do
  echo -e "\n>>> $(tput setaf 3)Handling $repo$(tput sgr0)"
  cd $repo || exit 1
  git checkout $branch || exit 1
  if [[ -n $DELETE_TAG ]]; then
    # Tag-deletion mode: remove locally and on origin, then skip the rest.
    git tag -d $DELETE_TAG
    git push origin :refs/tags/$DELETE_TAG
    cd .. || exit 1
    continue
  fi
  if [[ -n $build_number ]]; then
    # Release mode: special-case two repos before tagging.
    if [[ $repo == platform_manifest ]]; then
      # Pin the manifest to the release tag instead of the branch head.
      git checkout -B tmp || exit 1
      sed -i s%refs/heads/$branch%refs/tags/$aosp_version.$build_number% default.xml || exit 1
      git commit default.xml -m $aosp_version.$build_number || exit 1
    elif [[ $aosp_version != $aosp_version_real && $repo == platform_build ]]; then
      # Rewrite the build id when the published version differs from AOSP's.
      git checkout -B tmp || exit 1
      sed -i s/$aosp_version_real/$aosp_version/ core/build_id.mk
      git commit core/build_id.mk -m $aosp_version.$build_number || exit 1
    fi
    # Signed (-s) tag, then push only the tag to origin.
    git tag -s $aosp_version.$build_number -m $aosp_version.$build_number || exit 1
    git push origin $aosp_version.$build_number || exit 1
    if [[ $repo == platform_manifest ]]; then
      # Drop the temporary branch used for the pinned manifest commit.
      git checkout $branch || exit 1
      git branch -D tmp || exit 1
    fi
  else
    # Update mode: rebase our branch onto the upstream AOSP tag.
    git fetch upstream --tags || exit 1
    git pull --rebase upstream $aosp_tag || exit 1
    git push -f || exit 1
  fi
  cd .. || exit 1
done
# Process each kernel repo. In update mode, marlin/wahoo maintain a separate
# "$branch-stable-base" branch that tracks upstream; the working branch is
# then rebased on top of it.
for kernel in ${!kernels[@]}; do
  echo -e "\n>>> $(tput setaf 3)Handling kernel_$kernel$(tput sgr0)"
  cd kernel_$kernel || exit 1
  git checkout $branch || exit 1
  if [[ -n $DELETE_TAG ]]; then
    # Tag-deletion mode.
    git tag -d $DELETE_TAG
    git push origin :refs/tags/$DELETE_TAG
    cd .. || exit 1
    continue
  fi
  if [[ -n $build_number ]]; then
    # Release mode: signed tag, push tag only.
    git tag -s $aosp_version.$build_number -m $aosp_version.$build_number || exit 1
    git push origin $aosp_version.$build_number || exit 1
  else
    git fetch upstream --tags || exit 1
    kernel_tag=${kernels[$kernel]}
    # Skip kernels with no upstream tag configured.
    if [[ -z $kernel_tag ]]; then
      cd .. || exit 1
      continue
    fi
    if [[ $kernel == google_marlin || $kernel == google_wahoo ]]; then
      git checkout $branch-stable-base || exit 1
    fi
    git rebase $kernel_tag || exit 1
    git push -f || exit 1
    if [[ $kernel == google_marlin || $kernel == google_wahoo ]]; then
      # Re-stack the working branch on the freshly rebased stable base.
      git checkout $branch || exit 1
      git rebase $branch-stable-base || exit 1
      git push -f || exit 1
    fi
  fi
  cd .. || exit 1
done
# Process repos with no AOSP upstream: delete a tag, cut a signed tag, or
# just force-push the branch as-is.
for repo in ${independent[@]}; do
  echo -e "\n>>> $(tput setaf 3)Handling $repo$(tput sgr0)"
  cd $repo || exit 1
  git checkout $branch || exit 1
  if [[ -n $DELETE_TAG ]]; then
    git tag -d $DELETE_TAG
    git push origin :refs/tags/$DELETE_TAG
    cd .. || exit 1
    continue
  fi
  if [[ -n $build_number ]]; then
    git tag -s $aosp_version.$build_number -m $aosp_version.$build_number || exit 1
    git push origin $aosp_version.$build_number || exit 1
  else
    git push -f || exit 1
  fi
  cd .. || exit 1
done
|
import java.util.*;
/**
 * Checks whether the brackets in a string are balanced: every opener
 * {@code ([{} has a matching closer of the same type in the correct order.
 * Non-bracket characters are ignored.
 */
public class BalancedParentheses {
    /**
     * Returns true iff all brackets in {@code exp} are properly nested and
     * matched. Uses Deque/ArrayDeque instead of the legacy synchronized
     * java.util.Stack.
     *
     * @param exp expression to check (may be empty; empty is balanced)
     * @return true when balanced, false otherwise
     */
    public static boolean isBalanced(String exp)
    {
        Deque<Character> stack = new ArrayDeque<Character>();
        for (int i = 0; i < exp.length(); i++)
        {
            char c = exp.charAt(i);
            if (c == '[' || c == '{' || c == '(')
            {
                stack.push(c);
            }
            else if (c == ']' || c == '}' || c == ')')
            {
                // A closer with no opener, or a mismatched opener, fails.
                if (stack.isEmpty() || !matches(stack.pop(), c))
                {
                    return false;
                }
            }
        }
        // Balanced only if no unclosed openers remain.
        return stack.isEmpty();
    }

    /**
     * Returns true iff {@code a} is the opening bracket matching closing
     * bracket {@code b}.
     */
    public static boolean matches(char a, char b)
    {
        return (a == '[' && b == ']')
            || (a == '{' && b == '}')
            || (a == '(' && b == ')');
    }

    /** Demo entry point: checks a sample expression and prints the verdict. */
    public static void main(String[] args)
    {
        String exp = "[]{}()";
        if (isBalanced(exp))
            System.out.println(exp + " is Balanced");
        else
            System.out.println(exp + " is Unbalanced");
    }
}
<filename>src/containers/Main/VrtConversion.tsx
import React, { useEffect, useState } from 'react';
import styled from 'styled-components';
import BigNumber from 'bignumber.js';
import MainLayout from 'containers/Layout/MainLayout';
import { useWeb3React } from '@web3-react/core';
import { Row, Col } from 'antd';
import LoadingSpinner from '../../components/Basic/LoadingSpinner';
import useRefresh from '../../hooks/useRefresh';
import * as constants from '../../utilities/constants';
import { getVrtConverterProxyAddress } from '../../utilities/addressHelpers';
import Convert from '../../components/VrtConversion/Convert';
import Withdraw from '../../components/VrtConversion/Withdraw';
import TabContainer from '../../components/Basic/TabContainer';
import {
useVrtConverterProxy,
useVrtToken,
useXvsVestingProxy,
useToken,
} from '../../hooks/useContract';
// Page-level styled wrapper for the VRT conversion screen.
// The CSS template literal below is runtime styling and is kept verbatim.
const VrtConversionWrapper = styled.div`
margin: 16px;
display: flex;
color: #fff;
.vrt-conversion-container {
width: 100%;
}
.vrt-conversion-tab-container {
border-radius: 8px;
}
.title {
font-size: 40px;
line-height: 47px;
margin-top: 20px;
margin-bottom: 40px;
text-align: center;
}
`;
// 10^decimals scaling factors that convert raw on-chain integer amounts to
// human-readable token amounts.
const VRT_DECIMAL = new BigNumber(10).pow(constants.CONTRACT_TOKEN_ADDRESS.vrt.decimals);
// NOTE(review): XVS_DECIMAL is unused in this file as shown — confirm before removing.
const XVS_DECIMAL = new BigNumber(10).pow(constants.CONTRACT_TOKEN_ADDRESS.xvs.decimals);
// The converter contract stores conversionRatio with 18 decimals.
const CONVERSION_RATIO_DECIMAL = new BigNumber(10).pow(18);
// VRT→XVS conversion page: polls contract state on each fast refresh and
// renders Convert/Withdraw tabs.
export default () => {
  // contract data
  const [withdrawableAmount, setWithdrawableAmount] = useState(new BigNumber(0));
  const [conversionRatio, setConversionRatio] = useState(new BigNumber(0));
  const [conversionEndTime, setConversionEndTime] = useState(new BigNumber(0));
  const [userVrtBalance, setUserVrtBalance] = useState(new BigNumber(0));
  // user's allowance to VRT converter contract (true when allowance > 0)
  const [userEnabled, setUserEnabled] = useState(false);
  // UI
  const [loading, setLoading] = useState(true);
  // account
  const { account } = useWeb3React();
  const { fastRefresh } = useRefresh();
  // contracts
  const vrtConverterContract = useVrtConverterProxy();
  const xvsVestingContract = useXvsVestingProxy();
  const vrtTokenContract = useVrtToken();
  const xvsTokenContract = useToken('xvs');

  // Refresh contract state on every fast-refresh tick; `mounted` guards
  // against state updates after unmount.
  useEffect(() => {
    let mounted = true;
    const update = async () => {
      if (account) {
        try {
          const { totalWithdrawableAmount: totalWithdrawableAmountTemp } =
            await xvsVestingContract.methods.getWithdrawableAmount(account).call();
          // NOTE(review): scaled by VRT_DECIMAL — presumably vesting amounts
          // share VRT's decimals; confirm against the vesting contract.
          setWithdrawableAmount(new BigNumber(totalWithdrawableAmountTemp).div(VRT_DECIMAL));
        } catch (e) {
          // getWithdrawableAmount reverts when the account has no vestings.
          console.log('no vestings');
        }
      }
      // NOTE(review): five promises are awaited but only four results are
      // destructured — the XVS balance of the vesting contract is fetched
      // and discarded; confirm whether it is still needed.
      const [conversionRatioTemp, conversionEndTimeTemp, userVrtBalanceTemp, userVrtAllowanceTemp] =
        await Promise.all([
          vrtConverterContract.methods.conversionRatio().call(),
          vrtConverterContract.methods.conversionEndTime().call(),
          account ? vrtTokenContract.methods.balanceOf(account).call() : Promise.resolve(0),
          account
            ? vrtTokenContract.methods.allowance(account, getVrtConverterProxyAddress()).call()
            : Promise.resolve(0),
          xvsTokenContract.methods.balanceOf(xvsVestingContract.options.address).call(),
        ]);
      if (mounted) {
        setLoading(false);
        setConversionRatio(new BigNumber(conversionRatioTemp).div(CONVERSION_RATIO_DECIMAL));
        setConversionEndTime(new BigNumber(conversionEndTimeTemp)); // in seconds
        setUserVrtBalance(new BigNumber(userVrtBalanceTemp).div(VRT_DECIMAL));
        setUserEnabled(new BigNumber(userVrtAllowanceTemp).gt(0));
      }
    };
    update();
    return () => {
      mounted = false;
    };
  }, [fastRefresh, account]);

  return (
    <MainLayout title="Convert VRT">
      {loading ? (
        <LoadingSpinner />
      ) : (
        <VrtConversionWrapper>
          <Row className="vrt-conversion-container">
            <Col
              xl={{ span: 8, offset: 8 }}
              lg={{ span: 12, offset: 6 }}
              md={{ span: 12, offset: 6 }}
              sm={{ span: 24 }}
              xs={{ span: 24 }}
            >
              <div className="container">
                <TabContainer
                  className="vrt-conversion-tab-container"
                  titles={['Convert', 'Withdraw']}
                >
                  <Convert
                    userVrtBalance={userVrtBalance}
                    userEnabled={userEnabled}
                    conversionEndTime={conversionEndTime}
                    conversionRatio={conversionRatio}
                    handleClickConvert={async convertAmount => {
                      try {
                        if (!userEnabled && account) {
                          // approve user's VRT allowance to converter
                          // (max uint256 so approval is one-time)
                          await vrtTokenContract.methods
                            .approve(
                              vrtConverterContract.options.address,
                              new BigNumber(2).pow(256).minus(1).toFixed(),
                            )
                            .send({
                              from: account,
                            });
                        } else {
                          await vrtConverterContract.methods
                            .convert(convertAmount.times(VRT_DECIMAL).toFixed())
                            .send({
                              from: account,
                            });
                        }
                      } catch (e) {
                        console.log('>> convert error', e);
                      }
                    }}
                    account={account || ''}
                  />
                  <Withdraw
                    withdrawableAmount={withdrawableAmount}
                    handleClickWithdraw={async () => {
                      try {
                        await xvsVestingContract.methods.withdraw().send({
                          from: account,
                        });
                      } catch (e) {
                        console.log('>> withdraw error', e);
                      }
                    }}
                    account={account || ''}
                  />
                </TabContainer>
              </div>
            </Col>
          </Row>
        </VrtConversionWrapper>
      )}
    </MainLayout>
  );
};
|
# Create the containers (influxdb data volume, influxdb, grafana, jmxtrans)
docker create --name influx-data -v /data/jmx tutum/influxdb
docker run -d --volumes-from influx-data -p 8083:8083 -p 8086:8086 --expose 2003 --expose 8084 -e PRE_CREATE=grafana -e GRAPHITE_DB="grafana" -e GRAPHITE_BINDING=':2003' -e GRAPHITE_PROTOCOL="tcp" --name influxdb tutum/influxdb
docker run -d --link influxdb:influxdb -p 3000:3000 tutum/grafana
docker run -d -v `pwd`/json-files:/var/lib/jmxtrans -P jmxtrans/jmxtrans
docker pull registry.alauda.cn/library/telegraf
# Copy the monitor jar to the target host
scp openbridge-monitor.jar root@192.168.0.176:/opt/open-falcon
# Run the monitor test example (JMX remoting enabled, no auth/SSL, port 10001)
java -Djava.rmi.server.hostname=192.168.0.179 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=10001 -jar openbridge-monitor.jar
java -Djava.rmi.server.hostname=192.168.0.176 -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.port=10001 -jar openbridge-monitor.jar
# influxdb web UI
http://192.168.0.179:8083/
# Query every measurement, returning 1 record from each
select * from /.*/ limit 1
# Alternatively, list all measurements
show measurements
# Test inserting a data point
curl -XPOST 'http://localhost:8086/write?db=grafana' -d 'cpu,host=server01,region=uswest load=42 1434055562000000000'
|
# Usage: <script> <sample-name>
# Prepares the per-sample working directories under mapping/ and runs GATK
# indel realignment on the sample's two duplicate BAM files.
echo "$1"
# Guard every cd: continuing in the wrong directory would create files and
# run GATK against the wrong paths.
cd mapping || exit 1

#### test directory for this sample
if [ ! -d "$1" ]; then
  mkdir "$1"
fi
cd "$1" || exit 1

### directory for the first duplicate of the sample
if [ ! -d "1" ]; then
  mkdir 1
fi
cd 1 || exit 1

### the first duplicate's BAM must already exist
if [ ! -f "$1_1_cutNs.bam" ]; then
  echo "File $1_1_cutNs.bam doesn't exist for sample $1_1" >> ../../../error_log.txt
  exit 1
fi

### the second duplicate's BAM must already exist
if [ ! -f "../2/$1_2_cutNs.bam" ]; then
  echo "File $1_2_cutNs.bam doesn't exist for sample $1_2" >> ../../../error_log.txt
  exit 1
fi

pwd

### indel realignment
### Two passing modes on the fly...
### http://gatkforums.broadinstitute.org/discussion/5274/tumour-normal-exome-analysis-for-non-human-data
### https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_indels_RealignerTargetCreator.php
### Minimize the mismatches across all the reads
### Determining (small) suspicious intervals which are likely in need of realignment (RealignerTargetCreator)
### Running the realigner over those intervals (IndelRealigner tool)
### input: bam output file from reassingMapping Quality.
echo "Create interval target realignment"
java -jar /usr/local/Modules/modulefiles/tools/gatk/3.4.0/GenomeAnalysisTK.jar -T RealignerTargetCreator -R /storage/home/public/genomes/homo_sapiens/STAR/GRCh38.p2.genome.fa -I "$1_1_cutNs.bam" -I "../2/$1_2_cutNs.bam" -o "$1.intervalListFromRTC.intervals"

echo "Realignment"
java -Xmx8g -jar /usr/local/Modules/modulefiles/tools/gatk/3.4.0/GenomeAnalysisTK.jar -T IndelRealigner -S Silent -I "$1_1_cutNs.bam" -I "../2/$1_2_cutNs.bam" -R /storage/home/public/genomes/homo_sapiens/STAR/GRCh38.p2.genome.fa -targetIntervals "$1.intervalListFromRTC.intervals" --nWayOut '.ir.bam'

# Move the second duplicate's realigned output into its own directory.
mv "$1_2_cutNs.ir.bai" ../2
mv "$1_2_cutNs.ir.bam" ../2
cd ../..
|
package main
import (
"fmt"
"log"
"sort"
"strconv"
"strings"
"github.com/dmies/adventOfGo/filehandler"
)
// PackageDimensions defines the length, width and height of a package,
// all in whole feet (Advent of Code day 2 input units).
type PackageDimensions struct {
	length int
	width  int
	height int
}
// getSurface returns the wrapping paper needed for the package: the full
// surface area of the box plus slack equal to the area of its smallest face.
func (p PackageDimensions) getSurface() int {
	// Areas of the three distinct faces.
	faceLW := p.length * p.width
	faceWH := p.width * p.height
	faceHL := p.height * p.length
	// Sort the face areas so the smallest one ends up first.
	faces := []int{faceLW, faceWH, faceHL}
	sort.Ints(faces)
	return 2*(faceLW+faceWH+faceHL) + faces[0]
}
// getWrap returns the ribbon needed to wrap the package: the perimeter of
// its smallest face, i.e. the two shortest edges summed and doubled.
func (p PackageDimensions) getWrap() int {
	edges := []int{p.length, p.width, p.height}
	sort.Ints(edges)
	return 2 * (edges[0] + edges[1])
}
// getBow returns the ribbon needed for the bow, equal to the cubic volume
// of the package.
func (p PackageDimensions) getBow() int {
	volume := p.length * p.width * p.height
	return volume
}
// Parse parses a String and returns the matching PackageDimensions
func Parse(input string) (PackageDimensions, error) {
dimensions := strings.Split(input, "x")
length, err := strconv.Atoi(dimensions[0])
if err != nil {
return PackageDimensions{}, err
}
width, err := strconv.Atoi(dimensions[1])
if err != nil {
return PackageDimensions{}, err
}
height, err := strconv.Atoi(dimensions[2])
if err != nil {
return PackageDimensions{}, err
}
return PackageDimensions{length, width, height}, nil
}
// GetTotalSquareFeetOfWrappingPaper parsses the input and returns the total square feet of wrapping paper for the packages
func GetTotalSquareFeetOfWrappingPaper(input []string) (int, error) {
result := 0
for _, line := range input {
dimensions, err := Parse(line)
if err != nil {
return -1, err
}
result += dimensions.getSurface()
}
return result, nil
}
// GetTotalFeetOfRibbon parsses the input and returns the total feet of ribbon for the packages
func GetTotalFeetOfRibbon(input []string) (int, error) {
result := 0
for _, line := range input {
dimensions, err := Parse(line)
if err != nil {
return -1, err
}
result += dimensions.getWrap() + dimensions.getBow()
}
return result, nil
}
// main loads the puzzle input (one "LxWxH" line per package) and prints the
// answers for both parts of the day-2 puzzle.
func main() {
	input, err := filehandler.ImportStringList("./input.txt")
	if err != nil {
		log.Fatal(err)
	}
	solution1, err := GetTotalSquareFeetOfWrappingPaper(input)
	if err != nil {
		log.Fatalf("Couldn't calculate total square feet of wrapping paper: %v", err)
	}
	fmt.Printf("day 02, part1 %v\n", solution1)
	solution2, err := GetTotalFeetOfRibbon(input)
	if err != nil {
		log.Fatalf("Couldn't calculate total feet of ribbon: %v", err)
	}
	fmt.Printf("day 02, part2 %v\n", solution2)
}
|
import { Router } from "express";
const router = Router();
import response from "../../assets/response";
import status from "../../assets/status";
import textPack from "../../assets/textPack.json";
import Performance from "../../assets/tests/performance";
import logger from "../../assets/logger";
// GET / — self-test endpoint. Probes the translator endpoint and the storage
// upload/access/delete endpoints in sequence, timing each with Performance
// watchpoints, and returns the collected results as JSON.
router.get("/", async (req, res) => {
  const performanceLog = new Performance(req.baseUrl);
  // One result slot per probed endpoint, filled in as each test completes.
  let testResults = {
    translator: {
      statusCode: 0,
      timeToRespond: 0,
      classifiedAs: "",
    },
    upload: {
      statusCode: 0,
      timeToRespond: 0,
      classifiedAs: "",
    },
    access: {
      statusCode: 0,
      timeToRespond: 0,
      classifiedAs: "",
    },
    delete: {
      statusCode: 0,
      timeToRespond: 0,
      classifiedAs: "",
    },
  };
  try {
    // status() resolves or rejects with the same result shape, so both
    // branches record whatever came back.
    performanceLog.watchpoint("translatorTest");
    await status(`/translator?text=testando essa API&from=pt&to=en`)
      .get()
      .then((test) => {
        testResults.translator = test;
      })
      .catch((test) => {
        testResults.translator = test;
      });
    performanceLog.watchpointEnd("translatorTest");

    // The upload test also yields the uploaded filename, which the access
    // and delete tests below reuse; copy fields individually so the extra
    // `filename` property is not stored in the results.
    let testImageName = "";
    performanceLog.watchpoint("uploadTest");
    await status(`/storage/upload`)
      .post()
      .then((test) => {
        testImageName = test.filename;
        testResults.upload.statusCode = test.statusCode;
        testResults.upload.timeToRespond = test.timeToRespond;
        testResults.upload.classifiedAs = test.classifiedAs;
      })
      .catch((test) => {
        testResults.upload.statusCode = test.statusCode;
        testResults.upload.timeToRespond = test.timeToRespond;
        testResults.upload.classifiedAs = test.classifiedAs;
      });
    performanceLog.watchpointEnd("uploadTest");

    performanceLog.watchpoint("accessTest");
    await status(`/storage/access?filename=${testImageName}`)
      .get()
      .then((test) => {
        testResults.access = test;
      })
      .catch((test) => {
        testResults.access = test;
      });
    performanceLog.watchpointEnd("accessTest");

    // Delete the file uploaded by the upload test to leave storage clean.
    performanceLog.watchpoint("deleteTest");
    await status(`/storage/delete?filename=${testImageName}`)
      .del()
      .then((test) => {
        testResults.delete = test;
      })
      .catch((test) => {
        testResults.delete = test;
      });
    performanceLog.watchpointEnd("deleteTest");

    performanceLog.finish();
    return res.json(
      response(false, textPack.status.responseOK, {
        storage: {
          access: testResults.access,
          delete: testResults.delete,
          upload: testResults.upload,
        },
        translator: testResults.translator,
      })
    );
  } catch (err) {
    logger.error(err.message);
    return res
      .status(500)
      .json(response(true, textPack.standards.responseError));
  }
});

export default router;
|
<reponame>tanshuai/reference-wallet<gh_stars>10-100
# pyre-ignore-all-errors
# Copyright (c) The Diem Core Contributors
# SPDX-License-Identifier: Apache-2.0
from copy import deepcopy
from datetime import datetime
from typing import Optional
from tests.wallet_tests.resources.seeds import prototypes
from wallet.storage import (
Account,
Transaction,
User,
)
from diem_utils.types.currencies import DiemCurrency
from wallet.types import TransactionType, TransactionStatus, RegistrationStatus
class OneUser:
    """Test seed that stores a single ``User`` (with an ``Account``) in the DB.

    Optionally funds the account with one completed incoming external
    transaction so tests can start from a non-zero balance.
    """

    @staticmethod
    def run(
        db_session,
        account_amount: Optional[int] = None,
        account_currency: Optional[DiemCurrency] = None,
        registration_status: Optional[RegistrationStatus] = RegistrationStatus.Approved,
        account_name: str = "fake_account",
        username: Optional[str] = None,
    ) -> User:
        user = deepcopy(prototypes.user)
        if username:
            user.username = username
            # Bug fix: the original lines were `f"{<NAME>"` — a template
            # redaction artifact that is a Python syntax error. Derive
            # deterministic names from the username instead.
            # NOTE(review): exact original values are unrecoverable from this
            # file; confirm against the upstream repository if they matter.
            user.first_name = f"{username}_first_name"
            user.last_name = f"{username}_last_name"
        user.registration_status = registration_status
        user.account = Account(name=account_name)
        db_session.add(user)
        db_session.commit()
        if account_amount and account_currency:
            # Seed one completed incoming deposit so the account has funds.
            user_income = Transaction(
                created_timestamp=datetime.now(),
                amount=account_amount,
                currency=account_currency,
                type=TransactionType.EXTERNAL,
                status=TransactionStatus.COMPLETED,
                source_address="na",
                destination_id=user.account.id,
            )
            user.account.received_transactions.append(user_income)
            db_session.commit()
        return user
|
def check_cache(cache_info, current_mtime, max_age):
    """Decide whether cached data must be reloaded based on its age.

    Args:
        cache_info: dict that may hold 'mtime' (last load time) and 'data'
            (the cached payload). Mutated in place: 'mtime' is updated
            whenever a reload is needed.
        current_mtime: current modification time, same units as 'mtime'.
        max_age: maximum allowed age before a reload is required.

    Returns:
        (reloaded, data): ``reloaded`` is True when the caller must refresh
        the data; ``data`` is the currently cached payload, or None when the
        cache was empty.
    """
    reloaded = False
    if 'mtime' in cache_info and 'data' in cache_info:
        cached_mtime = cache_info['mtime']
        if current_mtime - cached_mtime > max_age:
            # Data is stale: record the new mtime; caller must reload 'data'.
            cache_info['mtime'] = current_mtime
            reloaded = True
        else:
            # Data is still valid.
            reloaded = False
    else:
        # Cache is empty: record the mtime; caller must load for the first time.
        cache_info['mtime'] = current_mtime
        reloaded = True
    # Bug fix: the original returned cache_info['data'], which raises KeyError
    # on the empty-cache path where 'data' was never set. Use .get so an empty
    # cache yields None alongside reloaded=True.
    return reloaded, cache_info.get('data')
<filename>src/emails/emails.repository.ts<gh_stars>0
import {EntityRepository, Repository} from "typeorm";
import {Emails} from "./entities/Emails";
// Custom TypeORM repository for the Emails entity. Intentionally empty:
// it inherits all generic CRUD operations from Repository<Emails> and
// exists so custom query methods can be added later.
@EntityRepository(Emails)
export class EmailsRepository extends Repository<Emails>{
}
|
#!/bin/bash
# Copyright (C) 2020 Private Internet Access, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Directory containing this script, used to invoke sibling helper scripts
# (get_token.sh, get_region.sh).
parent_folder=$(dirname $(realpath $0))

# Check if terminal allows output, if yes, define colors for output
if test -t 1; then
  ncolors=$(tput colors)
  if test -n "$ncolors" && test $ncolors -ge 8; then
    GREEN='\033[0;32m'
    RED='\033[0;31m'
    NC='\033[0m' # No Color
  else
    GREEN=''
    RED=''
    NC='' # No Color
  fi
fi

# Regexes used for validating numeric user input below
intCheck='^[0-9]+$'
floatCheck='^[0-9]+([.][0-9]+)?$'

# Only allow script to run as root
if [ "$(whoami)" != "root" ]; then
  echo -e "${RED}This script needs to be run as root. Try again with 'sudo $0'${NC}"
  exit 1
fi

# Erase previous authentication token if present
rm -f /opt/piavpn-manual/token /opt/piavpn-manual/latencyList
# Retry login if no token is generated. The outer loop repeats the whole
# credential prompt + token generation until a token file appears.
while :; do
  # Prompt for and validate the PIA username (format p#######).
  while :; do
    # Check for in-line definition of $PIA_USER
    if [[ ! $PIA_USER || $PIA_USER = "" ]]; then
      echo
      read -p "PIA username (p#######): " PIA_USER
    fi
    # Confirm format of PIA_USER input: first char must be p/P, the
    # remaining seven must be digits.
    unPrefix=$( echo ${PIA_USER:0:1} )
    unSuffix=$( echo ${PIA_USER:1} )
    if [[ -z "$PIA_USER" ]]; then
      echo -e "\n${RED}You must provide input.${NC}"
    elif [[ ${#PIA_USER} != 8 ]]; then
      echo -e "\n${RED}A PIA username is always 8 characters long.${NC}"
    elif [[ $unPrefix != "P" ]] && [[ $unPrefix != "p" ]]; then
      echo -e "\n${RED}A PIA username must start with \"p\".${NC}"
    elif ! [[ $unSuffix =~ $intCheck ]]; then
      echo -e "\n${RED}Username formatting is always p#######!${NC}"
    else
      echo -e "\n${GREEN}PIA_USER=$PIA_USER${NC}"
      break
    fi
    # Invalid: clear and re-prompt.
    PIA_USER=""
  done
  export PIA_USER

  # Prompt for and validate the PIA password (minimum 8 characters).
  while :; do
    # Check for in-line definition of $PIA_PASS
    if [[ ! $PIA_PASS || $PIA_PASS = "" ]]; then
      echo
      echo -n "PIA password: "
      read -rs PIA_PASS
      echo
    fi
    # Confirm format of PIA_PASS input
    if [[ -z "$PIA_PASS" ]]; then
      echo -e "\n${RED}You must provide input.${NC}"
    elif [[ ${#PIA_PASS} -lt 8 ]]; then
      echo -e "\n${RED}A PIA password is always a minimum of 8 characters long.${NC}"
    else
      echo -e "\n${GREEN}PIA_PASS input received.${NC}"
      echo
      break
    fi
    PIA_PASS=""
  done
  export PIA_PASS

  # Confirm credentials and generate token via the sibling helper script.
  $parent_folder/get_token.sh
  tokenLocation="/opt/piavpn-manual/token"
  # If the script failed to generate an authentication token, offer a retry
  # or exit early.
  if [ ! -f "$tokenLocation" ]; then
    read -p "Do you want to try again ([N]o/[y]es): " tryAgain
    if ! echo ${tryAgain:0:1} | grep -iq y; then
      exit 1
    fi
    # Clear credentials so the loops above re-prompt.
    PIA_USER=""
    PIA_PASS=""
  else
    # Token is the first line of the token file; consume and remove the file.
    PIA_TOKEN=$( awk 'NR == 1' /opt/piavpn-manual/token )
    export PIA_TOKEN
    rm -f /opt/piavpn-manual/token
    break
  fi
done
# Check for in-line definition of PIA_PF and prompt for input.
# Anything not starting with y/Y normalizes to "false".
if [[ ! $PIA_PF || $PIA_PF = "" ]]; then
  echo -n "Do you want a forwarding port assigned ([N]o/[y]es): "
  read portForwarding
  echo
  if echo ${portForwarding:0:1} | grep -iq y; then
    PIA_PF="true"
  fi
fi
if [[ $PIA_PF != "true" ]]; then
  PIA_PF="false"
fi
export PIA_PF
echo -e ${GREEN}PIA_PF=$PIA_PF${NC}
echo

# Check for in-line definition of DISABLE_IPV6 and prompt for input.
# Default is to disable IPv6 unless the answer starts with n/N.
if [[ ! $DISABLE_IPV6 || $DISABLE_IPV6 = "" ]]; then
  echo "Having active IPv6 connections might compromise security by allowing"
  echo "split tunnel connections that run outside the VPN tunnel."
  echo -n "Do you want to disable IPv6? (Y/n): "
  read DISABLE_IPV6
  echo
fi
if echo ${DISABLE_IPV6:0:1} | grep -iq n; then
  echo -e ${RED}"IPv6 settings have not been altered.
"${NC}
else
  echo -e "The variable ${GREEN}DISABLE_IPV6=$DISABLE_IPV6${NC}, does not start with 'n' for 'no'.
${GREEN}Defaulting to yes.${NC}
"
  # Disable IPv6 system-wide (reversible with the commands echoed below).
  sysctl -w net.ipv6.conf.all.disable_ipv6=1
  sysctl -w net.ipv6.conf.default.disable_ipv6=1
  echo
  echo -e "${RED}IPv6 has been disabled${NC}, you can ${GREEN}enable it again with: "
  echo "sysctl -w net.ipv6.conf.all.disable_ipv6=0"
  echo "sysctl -w net.ipv6.conf.default.disable_ipv6=0"
  echo -e ${NC}
fi

# Input validation and check for conflicting declarations of AUTOCONNECT and
# PREFERRED_REGION. If both variables are set, AUTOCONNECT has superiority
# and PREFERRED_REGION is ignored. selectServer ends up as:
#   "ask" — prompt the user; "yes" — manual selection; "no" — auto-connect.
if [[ ! $AUTOCONNECT ]]; then
  echo AUTOCONNECT was not declared.
  echo
  selectServer="ask"
elif echo ${AUTOCONNECT:0:1} | grep -iq f; then
  # Normalize any f…-prefixed value to the literal "false".
  if [[ $AUTOCONNECT != "false" ]]; then
    echo -e "The variable ${GREEN}AUTOCONNECT=$AUTOCONNECT${NC}, starts with 'f' for 'false'."
    AUTOCONNECT="false"
    echo -e "Updated ${GREEN}AUTOCONNECT=$AUTOCONNECT${NC}"
    echo
  fi
  selectServer="yes"
else
  # Normalize anything else to the literal "true".
  if [[ $AUTOCONNECT != "true" ]]; then
    echo -e "The variable ${GREEN}AUTOCONNECT=$AUTOCONNECT${NC}, does not start with 'f' for 'false'."
    AUTOCONNECT="true"
    echo -e "Updated ${GREEN}AUTOCONNECT=$AUTOCONNECT${NC}"
    echo
  fi
  if [[ ! $PREFERRED_REGION ]]; then
    echo -e "${GREEN}AUTOCONNECT=true${NC}"
    echo
  else
    echo
    echo AUTOCONNECT supercedes in-line definitions of PREFERRED_REGION.
    echo -e "${RED}PREFERRED_REGION=$PREFERRED_REGION will be ignored.${NC}
"
    PREFERRED_REGION=""
  fi
  selectServer="no"
fi
# Prompt the user to specify a server or auto-connect to the lowest latency.
# Loop until either a valid PREFERRED_REGION is chosen/validated or the user
# opts for auto-connect (both paths `break` out).
while :; do
  if [[ ! $PREFERRED_REGION || $PREFERRED_REGION = "" ]]; then
    # If autoconnect is not set, prompt the user to specify a server or auto-connect to the lowest latency
    if [[ $selectServer = "ask" ]]; then
      echo -n "Do you want to manually select a server, instead of auto-connecting to the
server with the lowest latency ([N]o/[y]es): "
      read selectServer
      echo
    fi
    # Call the region script with input to create an ordered list based upon latency
    # When $PREFERRED_REGION is set to none, get_region.sh will generate a list of servers
    # that meet the latency requirements specified by $MAX_LATENCY.
    # When $VPN_PROTOCOL is set to no, get_region.sh will sort that list of servers
    # to allow for numeric selection, or an easy manual review of options.
    if echo ${selectServer:0:1} | grep -iq y; then
      # This sets the maximum allowed latency in seconds.
      # All servers that respond slower than this will be ignored.
      if [[ ! $MAX_LATENCY || $MAX_LATENCY = "" ]]; then
        echo -n "With no input, the maximum allowed latency will be set to 0.05s (50ms).
If your connection has high latency, you may need to increase this value.
For example, you can try 0.2 for 200ms allowed latency.
"
      else
        latencyInput=$MAX_LATENCY
      fi
      # Assure that input is numeric and properly formatted.
      # $floatCheck / $intCheck are regexes expected to be defined earlier
      # in this script (outside this section).
      MAX_LATENCY=0.05 # default
      while :; do
        if [[ ! $latencyInput || $latencyInput = "" ]]; then
          read -p "Custom latency (no input required for 50ms): " latencyInput
          echo
        fi
        # Prefix with 0 so bare fractional input like ".2" becomes "0.2".
        customLatency=0
        customLatency+=$latencyInput
        if [[ -z "$latencyInput" ]]; then
          break
        elif [[ $latencyInput = 0 ]]; then
          echo -e "${RED}Latency input must not be zero.${NC}\n"
        elif ! [[ $customLatency =~ $floatCheck ]]; then
          echo -e "${RED}Latency input must be numeric.${NC}\n"
        elif [[ $latencyInput =~ $intCheck ]]; then
          MAX_LATENCY=$latencyInput
          break
        else
          MAX_LATENCY=$customLatency
          break
        fi
        latencyInput=""
      done
      export MAX_LATENCY
      echo -e "${GREEN}MAX_LATENCY=$MAX_LATENCY${NC}"
      PREFERRED_REGION="none"
      export PREFERRED_REGION
      VPN_PROTOCOL="no"
      export VPN_PROTOCOL
      VPN_PROTOCOL=no $parent_folder/get_region.sh
      if [ -s /opt/piavpn-manual/latencyList ]; then
        # Output the ordered list of servers that meet the latency specification $MAX_LATENCY
        # NOTE(review): "Orderd" typo in the user-facing text below — fix separately.
        echo -e "Orderd list of servers with latency less than ${GREEN}$MAX_LATENCY${NC} seconds:"
        i=0
        while read line; do
          i=$((i+1))
          time=$( awk 'NR == '$i' {print $1}' /opt/piavpn-manual/latencyList )
          id=$( awk 'NR == '$i' {print $2}' /opt/piavpn-manual/latencyList )
          ip=$( awk 'NR == '$i' {print $3}' /opt/piavpn-manual/latencyList )
          location1=$( awk 'NR == '$i' {print $4}' /opt/piavpn-manual/latencyList )
          location2=$( awk 'NR == '$i' {print $5}' /opt/piavpn-manual/latencyList )
          location3=$( awk 'NR == '$i' {print $6}' /opt/piavpn-manual/latencyList )
          location4=$( awk 'NR == '$i' {print $7}' /opt/piavpn-manual/latencyList )
          location=$location1" "$location2" "$location3" "$location4
          printf "%3s : %-8s %-15s %17s" $i $time $ip $id
          echo " - "$location
        done < /opt/piavpn-manual/latencyList
        echo
        # Receive input to specify the server to connect to manually
        while :; do
          read -p "Input the number of the server you want to connect to ([1]-[$i]) : " serverSelection
          if [[ -z "$serverSelection" ]]; then
            echo -e "\n${RED}You must provide input.${NC}\n"
          elif ! [[ $serverSelection =~ $intCheck ]]; then
            echo -e "\n${RED}You must enter a number.${NC}\n"
          elif [[ $serverSelection -lt 1 ]]; then
            echo -e "\n${RED}You must enter a number greater than 1.${NC}\n"
          elif [[ $serverSelection -gt $i ]]; then
            echo -e "\n${RED}You must enter a number between 1 and $i.${NC}\n"
          else
            PREFERRED_REGION=$( awk 'NR == '$serverSelection' {print $2}' /opt/piavpn-manual/latencyList )
            echo
            # NOTE(review): unquoted echo arguments here are fragile — quote separately.
            echo -e ${GREEN}PREFERRED_REGION=$PREFERRED_REGION${NC}
            break
          fi
        done
        # Write the serverID for use when connecting, and display the serverName for user confirmation
        export PREFERRED_REGION
        echo
        break
      else
        # get_region.sh produced no latency list: no server met the requirement.
        exit 1
      fi
    else
      echo -e ${GREEN}You will auto-connect to the server with the lowest latency.${NC}
      echo
      break
    fi
  else
    # Validate in-line declaration of PREFERRED_REGION; if invalid remove input to initiate prompts
    echo Region input is : $PREFERRED_REGION
    export PREFERRED_REGION
    VPN_PROTOCOL=no $parent_folder/get_region.sh
    if [[ $? != 1 ]]; then
      break
    fi
    PREFERRED_REGION=""
  fi
done
# Default VPN_PROTOCOL to "none" so the case below prompts interactively.
if [[ ! $VPN_PROTOCOL ]]; then
  VPN_PROTOCOL="none"
fi
# This section asks for user connection preferences
case $VPN_PROTOCOL in
  openvpn)
    # Bare "openvpn" means the standard UDP flavour.
    VPN_PROTOCOL="openvpn_udp_standard"
    ;;
  wireguard | openvpn_udp_standard | openvpn_udp_strong | openvpn_tcp_standard | openvpn_tcp_strong)
    # Already a fully-qualified protocol name; nothing to do.
    ;;
  none | *)
    echo -n "Connection method ([W]ireguard/[o]penvpn): "
    read -r connection_method
    echo
    VPN_PROTOCOL="wireguard"
    if echo "${connection_method:0:1}" | grep -iq o; then
      echo -n "Connection method ([U]dp/[t]cp): "
      read -r protocolInput
      echo
      protocol="udp"
      if echo "${protocolInput:0:1}" | grep -iq t; then
        protocol="tcp"
      fi
      echo "Higher levels of encryption trade performance for security. "
      echo -n "Do you want to use strong encryption ([N]o/[y]es): "
      read -r strongEncryption
      echo
      encryption="standard"
      if echo "${strongEncryption:0:1}" | grep -iq y; then
        encryption="strong"
      fi
      VPN_PROTOCOL="openvpn_${protocol}_${encryption}"
    fi
    ;;
esac
export VPN_PROTOCOL
# Quote the expansion (the original left it unquoted with a dangling
# double quote spanning two lines).
echo -e "${GREEN}VPN_PROTOCOL=$VPN_PROTOCOL${NC}\n"
# Check for the required presence of resolvconf for setting DNS on wireguard connections
setDNS="yes"
if ! command -v resolvconf &>/dev/null && [ "$VPN_PROTOCOL" == wireguard ]; then
  echo -e "${RED}The resolvconf package could not be found."
  echo "This script can not set DNS for you and you will"
  echo -e "need to invoke DNS protection some other way.${NC}"
  echo
  setDNS="no"
fi
# Check for in-line definition of PIA_DNS and prompt for input
if [[ $setDNS = "yes" ]]; then
  if [[ ! $PIA_DNS || $PIA_DNS = "" ]]; then
    echo "Using third party DNS could allow DNS monitoring."
    echo -n "Do you want to force PIA DNS ([Y]es/[n]o): "
    read -r setDNS
    echo
    PIA_DNS="true"
    if echo "${setDNS:0:1}" | grep -iq n; then
      PIA_DNS="false"
    fi
  fi
elif [[ $PIA_DNS != "true" || $setDNS = "no" ]]; then
  PIA_DNS="false"
fi
export PIA_DNS
echo -e "${GREEN}PIA_DNS=$PIA_DNS${NC}"
CONNECTION_READY="true"
export CONNECTION_READY
# Hand off to the region script, which performs the actual connection.
$parent_folder/get_region.sh
|
<filename>src/main/java/malte0811/controlengineering/blockentity/bus/LineAccessBlockEntity.java
package malte0811.controlengineering.blockentity.bus;
import blusunrize.immersiveengineering.api.wires.ConnectionPoint;
import blusunrize.immersiveengineering.api.wires.LocalWireNetwork;
import blusunrize.immersiveengineering.api.wires.WireType;
import blusunrize.immersiveengineering.api.wires.redstone.IRedstoneConnector;
import blusunrize.immersiveengineering.api.wires.redstone.RedstoneNetworkHandler;
import com.google.common.collect.ImmutableList;
import malte0811.controlengineering.bus.BusLine;
import malte0811.controlengineering.bus.BusState;
import malte0811.controlengineering.bus.IBusConnector;
import malte0811.controlengineering.bus.LocalBusHandler;
import net.minecraft.core.BlockPos;
import net.minecraft.core.Vec3i;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.resources.ResourceLocation;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collection;
import java.util.Objects;
/**
 * Block entity with two wire connection points that bridges one bus line
 * onto a redstone wire network: the redstone net's values are emitted onto
 * the selected bus line, and the selected line's bus state is folded back
 * into the redstone signals.
 */
public class LineAccessBlockEntity extends DualConnectorBlockEntity implements IBusConnector, IRedstoneConnector {
    // Connection-point ids from DualConnectorBlockEntity: the "min" point
    // carries redstone, the "max" point carries the bus.
    private static final int REDSTONE_ID = MIN_ID;
    private static final int BUS_ID = MAX_ID;
    // Index of the bus line exposed on the redstone side; persisted in NBT.
    public int selectedLine;
    // Last line state forwarded to the redstone net; lets onBusUpdated skip
    // redundant redstone updates when the selected line did not change.
    private BusLine lastLineToRS = new BusLine();

    public LineAccessBlockEntity(BlockEntityType<?> type, BlockPos pos, BlockState state) {
        super(type, pos, state);
    }

    @Override
    public void load(@Nonnull CompoundTag nbt) {
        super.load(nbt);
        selectedLine = nbt.getInt("selectedLine");
    }

    @Override
    public void saveAdditional(@Nonnull CompoundTag nbt) {
        super.saveAdditional(nbt);
        nbt.putInt("selectedLine", selectedLine);
    }

    /*BUS*/
    /** Pushes the selected bus line into the redstone net when it changed. */
    @Override
    public void onBusUpdated(ConnectionPoint updatedPoint) {
        RedstoneNetworkHandler rsHandler = getRSNet();
        // Query the bus state excluding our own contribution, so the value we
        // emit does not feed back into the change comparison.
        BusLine lineToRS = getBusNet().getStateWithout(getBusPoint(), this).getLine(selectedLine);
        if (rsHandler != null && !this.lastLineToRS.equals(lineToRS)) {
            rsHandler.updateValues();
            this.lastLineToRS = lineToRS;
        }
    }

    /** Emits the redstone net's values (excluding our own point) on the selected line. */
    @Override
    public BusState getEmittedState(ConnectionPoint checkedPoint) {
        BusState ret = BusState.EMPTY;
        var rs = getRSNet();
        if (rs != null) {
            ret = ret.withLine(selectedLine, BusLine.fromRSState(rs.getValuesExcluding(getRedstonePoint())));
        }
        return ret;
    }

    @Override
    public LocalWireNetwork getLocalNet(int cpIndex) {
        return super.getLocalNet(cpIndex);
    }

    /**
     * Allows at most one real wire per point; the bus point defers to the
     * IBusConnector default, the redstone point accepts only redstone wires.
     */
    @Override
    public boolean canConnectCable(WireType wireType, ConnectionPoint connectionPoint, Vec3i offset) {
        if (countRealWiresAt(connectionPoint) > 0) {
            return false;
        }
        if (connectionPoint.index() == BUS_ID) {
            return IBusConnector.super.canConnectCable(wireType, connectionPoint, offset);
        } else {
            return wireType.getCategory().equals(WireType.REDSTONE_CATEGORY);
        }
    }

    /*REDSTONE*/
    /** Requests a bus update whenever the redstone net at our point changes. */
    @Override
    public void onChange(ConnectionPoint cp, RedstoneNetworkHandler handler) {
        //TODO more intelligent behavior?
        if (getRedstonePoint().equals(cp)) {
            getBusNet().requestUpdate();
        }
    }

    /** Folds the selected bus line into the redstone array (per-channel max). */
    @Override
    public void updateInput(byte[] signals, ConnectionPoint cp) {
        if (getRedstonePoint().equals(cp)) {
            BusLine line = getBusNet().getStateWithout(getBusPoint(), this).getLine(selectedLine);
            for (int i = 0; i < signals.length; ++i) {
                signals[i] = (byte) Math.max(line.getRSValue(i), signals[i]);
            }
        }
    }

    /** Redstone handler of the local net at the redstone point, or null if absent. */
    @Nullable
    private RedstoneNetworkHandler getRSNet() {
        return getLocalNet(REDSTONE_ID)
                .getHandler(RedstoneNetworkHandler.ID, RedstoneNetworkHandler.class);
    }

    /** Bus handler at the bus point; must exist once the entity is networked. */
    private LocalBusHandler getBusNet() {
        return Objects.requireNonNull(getBusHandler(getBusPoint()));
    }

    @Override
    public Collection<ResourceLocation> getRequestedHandlers() {
        return ImmutableList.of(LocalBusHandler.NAME, RedstoneNetworkHandler.ID);
    }

    @Override
    public boolean isBusPoint(ConnectionPoint cp) {
        return getBusPoint().equals(cp);
    }

    private ConnectionPoint getRedstonePoint() {
        return minPoint;
    }

    private ConnectionPoint getBusPoint() {
        return maxPoint;
    }
}
|
from functools import wraps
class Proxy:
    """Transparent proxy that forwards attribute access to a wrapped object.

    Attribute lookups that are not found on the proxy itself fall through
    ``__getattr__`` to the wrapped object; callables are routed through
    ``_wrapper`` so the call is dispatched by name on the wrapped object.
    """

    def __init__(self, wrapped):
        # Object all unknown attribute access is delegated to.
        self._wrapped = wrapped

    def _wrapper(self, func):
        """Wrap ``func`` so calls are dispatched to the wrapped object's
        method of the same name, preserving the function's metadata."""
        @wraps(func)
        def wrapper(*args, **kwargs):
            return getattr(self._wrapped, func.__name__)(*args, **kwargs)
        return wrapper

    def __getattr__(self, name):
        """Delegate unknown attributes to the wrapped object.

        The original class defined ``_wrapper`` but never used it, so any
        access such as ``proxy.greet(...)`` raised AttributeError.  Plain
        attributes are returned as-is; callables are wrapped.
        """
        attr = getattr(self._wrapped, name)
        if callable(attr):
            return self._wrapper(attr)
        return attr
# Test the implementation
class Wrapped:
    # Simple target whose method the proxy should forward to.
    def greet(self, name):
        return f"Hello, {name}!"

# NOTE(review): this demo requires Proxy to forward attribute access
# (e.g. via __getattr__) to the wrapped object — confirm Proxy actually
# implements that delegation, otherwise this call raises AttributeError.
wrapped_obj = Wrapped()
proxy_obj = Proxy(wrapped_obj)
result = proxy_obj.greet("Alice")
print(result) # Expected output: "Hello, Alice!"
#!/bin/bash
# Evaluate a hierarchical seq2seq model on both ALFRED validation splits.
# Usage: ./<script> <model_dir>   (model_dir holds best_seen.pth)

# X display required by the simulator.
export DISPLAY=":0"
# $(pwd) replaces the legacy backtick substitution.
export ALFRED_ROOT="$(pwd)"
source activate alfred

# First CLI argument: directory containing the trained checkpoint.
model_dir=$1

for split in valid_seen valid_unseen
do
  # NOTE(review): both splits evaluate best_seen.pth — confirm whether
  # valid_unseen should use a best_unseen checkpoint instead.
  python -u models/eval/eval_seq2seq.py \
    --model_path "${model_dir}/best_seen.pth" \
    --eval_split "$split" \
    --data data/json_feat_2.1.0 \
    --model models.model.seq2seq_hierarchical \
    --gpu \
    --num_threads 3 \
    --eval_type hierarchical \
    | tee "${model_dir}/eval_hierarchical_${split}.out"
done
|
<gh_stars>0
/**
 * Mapped type mirroring an interface's own members.
 *
 * NOTE(review): despite the original "add index signature" wording, this is
 * an identity mapped type — it copies the properties of O but does not add
 * an index signature ([key: string]: ...); confirm the intended contract.
 */
export declare type Indexify<O extends object> = {
    [P in keyof O]: O[P];
};
/**
 * Returns the given object typed through the Indexify mapped type.
 */
export declare const indexify: <O extends object>(object: O) => Indexify<O>;
|
import IO.UserFileIO;
import java.util.ArrayList;
/**
* Created by IntelliJ IDEA.
* User: swyna
* Date: Jun 3, 2011
* Time: 1:46:50 AM
* To change this template use File | Settings | File Templates.
*/
/**
 * Static user registry backed by {@link IO.UserFileIO}.
 *
 * Each record is a String[4]: {username, password, wins, games played}.
 * NOTE(review): passwords are stored and compared in plain text.
 */
public class Userlist {
    // Lazily loaded user records; populated on first use via load().
    private static ArrayList<String[]> users;
    // Index within `users` of the most recently validated user.
    private static int currentUser;

    /**
     * Validates a username/password pair.
     *
     * @return 1 on success (the user becomes the current user),
     *         -1 if the username exists but the password is wrong,
     *         0 if the username is unknown.
     */
    public static int validate(String username, String password) {
        if (users == null) {
            load();
        }
        for (String[] i : users) {
            if (i[0].equals(username)) {
                if (i[1].equals(password)) {
                    currentUser = users.indexOf(i);
                    return 1;
                } else {
                    return -1;
                }
            }
        }
        return 0;
    }

    /**
     * Adds a new user with zeroed win/game counters and persists the list.
     *
     * @return 1 when created, -1 when the username is already present.
     */
    public static int add(String username, String password) {
        if (users == null) {
            load();
        }
        if (validate(username, password) == 0) {
            users.add(new String[]{username, password, "0", "0"});
            System.out.println("User " + username + " created");
            save();
            return 1;
        } else {
            System.out.println("Error adding user " + username + "");
            return -1;
        }
    }

    /** Persists the user list to disk. */
    public static void save() {
        new UserFileIO().save(users);
        System.out.println("Userlist saved");
    }

    /** Records a win for the current user: increments wins and games, then saves. */
    public static void addSuccess() {
        // Integer.parseInt avoids the needless boxing of Integer.valueOf.
        users.get(currentUser)[2] = String.valueOf(Integer.parseInt(users.get(currentUser)[2]) + 1);
        users.get(currentUser)[3] = String.valueOf(Integer.parseInt(users.get(currentUser)[3]) + 1);
        save();
    }

    /** Records a loss for the current user: increments games only, then saves. */
    public static void addLoss() {
        users.get(currentUser)[3] = String.valueOf(Integer.parseInt(users.get(currentUser)[3]) + 1);
        save();
    }

    /** @return the current user's name, wins and games played, in that order. */
    public static ArrayList<String> getUserData() {
        ArrayList<String> ret = new ArrayList<String>();
        ret.add((users.get(currentUser)[0]));
        ret.add((users.get(currentUser)[2]));
        ret.add((users.get(currentUser)[3]));
        return ret;
    }

    /** Loads the user list from disk. */
    public static void load() {
        users = new UserFileIO().load();
        // Fixed log typo: "loeaded" -> "loaded".
        System.out.println("Userlist loaded");
    }
}
|
import * as AssertionError from "assertion-error";
import * as ts from "ts-morph";
import { Expression } from "./Expression";
export class ClassProperty {
  constructor(private _node: ts.ClassInstancePropertyTypes) {}

  /** Provides the initializer for the property if there is one. */
  get initializer(): Expression | undefined {
    let value: ts.Expression | undefined;
    // Parameter properties and regular property declarations expose
    // getInitializer() behind different type guards.
    if (ts.TypeGuards.isParameterDeclaration(this._node)) {
      value = this._node.getInitializer();
    } else if (ts.TypeGuards.isPropertyDeclaration(this._node)) {
      value = this._node.getInitializer();
    }
    return value ? new Expression(value) : undefined;
  }

  /** Asserts the property has an initializer and returns the expression. */
  hasInitializer(msg = "Expected property to have initializer."): Expression {
    const { initializer } = this;
    if (!initializer) {
      // Passing this method as the third argument trims library frames
      // from the assertion's stack trace.
      throw new AssertionError(msg, undefined, this.hasInitializer);
    }
    return initializer;
  }
}
|
<reponame>hangmann/Temperature-Management-and-Prediction<gh_stars>1-10
package view;
import javax.media.opengl.GL;
/**
* User: christoph
* Date: 2/24/12
* Time: 12:49 PM
*/
public class V_TemperatureGrid {
private int mSubdivision, mProportionalSize;
private int mSensorGridWidth, mSensorGridHeight;
private int mGridWidth, mGridHeight;
private int mMinTemp, mMaxTemp;
double[][] mGrid;
public V_TemperatureGrid(int min_temp, int max_temp, int pSubdivision, int pGridWidth, int pGridHeight) {
mSubdivision = pSubdivision;
mProportionalSize = Math.min(pGridHeight, pGridWidth) * mSubdivision;
mSensorGridWidth = pGridWidth;
mSensorGridHeight = pGridHeight;
mMinTemp = min_temp;
mMaxTemp = max_temp;
mGridWidth = (mSensorGridWidth + 1) * mSubdivision + 1;
mGridHeight = (mSensorGridHeight + 1) * mSubdivision + 1;
mGrid = new double[mGridHeight][mGridWidth];
}
public void drawGrid2d(GL gl) {
gl.glBegin(GL.GL_QUADS);
for (int i = 0; i < mGridHeight - 1; i++) {
for (int j = 0; j < mGridWidth - 1; j++) {
gl.glColor3dv(tempToColor(mGrid[i][j]), 0);
gl.glVertex2d(j, i);
gl.glColor3dv(tempToColor(mGrid[i][j + 1]), 0);
gl.glVertex2d(j + 1, i);
gl.glColor3dv(tempToColor(mGrid[i + 1][j + 1]), 0);
gl.glVertex2d(j + 1, i + 1);
gl.glColor3dv(tempToColor(mGrid[i + 1][j]), 0);
gl.glVertex2d(j, i + 1);
}
}
gl.glEnd();
}
public void drawGrid3d(GL gl) {
for (int i = 0; i < mGridHeight - 1; i++) {
for (int j = 0; j < mGridWidth - 1; j++) {
gl.glBegin(GL.GL_QUADS);
gl.glPolygonMode(GL.GL_FRONT_AND_BACK, GL.GL_FILL);
gl.glColor3dv(tempToColor(mGrid[i][j]), 0);
gl.glVertex3d(j, i, mGrid[i][j]);
gl.glColor3dv(tempToColor(mGrid[i][j + 1]), 0);
gl.glVertex3d(j + 1, i, mGrid[i][j + 1]);
gl.glColor3dv(tempToColor(mGrid[i + 1][j + 1]), 0);
gl.glVertex3d(j + 1, i + 1, mGrid[i + 1][j + 1]);
gl.glColor3dv(tempToColor(mGrid[i + 1][j]), 0);
gl.glVertex3d(j, i + 1, mGrid[i + 1][j]);
gl.glEnd();
gl.glBegin(GL.GL_LINE_LOOP);
gl.glColor3d(0, 0, 0);
gl.glVertex3d(j, i, mGrid[i][j] + 0.0);
gl.glVertex3d(j + 1, i, mGrid[i][j + 1] + 0.0);
gl.glVertex3d(j + 1, i + 1, mGrid[i + 1][j + 1] + 0.0);
gl.glVertex3d(j, i + 1, mGrid[i + 1][j] + 0.0);
gl.glVertex3d(j, i, mGrid[i][j] + 0.0);
gl.glEnd();
}
}
}
public void setTemperature(double[] pTemp) {
int currX, currY;
int currSensorNumber;
resetGrid();
for (int i = 0; i < mSensorGridHeight; i++) {
for (int j = 0; j < mSensorGridWidth; j++) {
currX = (j + 1) * mSubdivision;
currY = (i + 1) * mSubdivision;
currSensorNumber = i * mSensorGridWidth + j;
//mGrid[currY][currX] = pTemp[currSensorNumber];
proportionalAdd(pTemp[currSensorNumber], currX, currY);
}
}
}
private void proportionalAdd(double pTemp, int pX, int pY) {
double distance;
int currX, currY;
for (int i = -mProportionalSize; i <= mProportionalSize; i++) {
for (int j = -mProportionalSize; j <= mProportionalSize; j++) {
currX = pX + j;
currY = pY + i;
// check if current point is outside grid
if (currX >= 0 && currY >= 0 && currX < mGridWidth && currY < mGridHeight) {
// calculate distance to current sensor
distance = Math.sqrt(i * i + j * j);
//distance = Math.sqrt(i * i + j * j + mGrid[currY][currX] * mGrid[currY][currX]);
if (distance <= mProportionalSize) {
//mGrid[currY][currX] = Math.max(mGrid[currY][currX], gauss(distance, pTemp));
mGrid[currY][currX] = mGrid[currY][currX] + gauss(distance, pTemp - mGrid[currY][currX]);
}
}
}
}
}
private double gauss(double pDistance, double pMaxDisplacement) {
// using gaussian distribution to calculate displacement
return pMaxDisplacement * Math.exp(-0.5 * pDistance * pDistance / (mProportionalSize * mProportionalSize / 12));
}
public int getGridWith() {
return mGridWidth;
}
public int getGridHeight() {
return mGridHeight;
}
private double[] tempToColor(double pTemp) {
double key = (pTemp - mMinTemp) * 891 / (mMaxTemp - mMinTemp);
double[] color = new double[3];
if (key >= 0 && key < 255) {
color[0] = 0;
color[1] = key / 255;
color[2] = 1;
} else if (key >= 255 && key < 510) {
color[0] = (key - 255) / 255;
color[1] = 1;
color[2] = (255 - (key - 255)) / 255;
} else if (key >= 510 && key < 765) {
color[0] = 1;
color[1] = (255 - (key - 510)) / 255;
color[2] = 0;
} else if (key >= 765 && key <= 891) {
color[0] = (255 - (key - 765)) / 255;
color[1] = 0;
color[2] = 0;
} else {
color[0] = 0;
color[1] = 0;
color[2] = 0;
}
return color;
}
private void resetGrid() {
for (int i = 0; i < mGridHeight; i++) {
for (int j = 0; j < mGridWidth; j++) {
mGrid[i][j] = 0;
}
}
}
}
|
<reponame>longshine/calibre-web<gh_stars>1-10
package lx.calibre.repository;
import java.util.Collection;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.metamodel.SingularAttribute;
import org.hibernate.criterion.MatchMode;
import org.springframework.data.jpa.domain.Specification;
/**
* 常用的查询条件。
*/
/**
 * Commonly used query criteria: static factories building reusable
 * Spring Data JPA {@link Specification} instances.
 */
public class Specifications {
    /**
     * Field equals the given value.
     *
     * @param attribute entity attribute
     * @param value value to compare against
     * @return specification testing attribute equality
     */
    public static <X, T> Specification<X> equal(final SingularAttribute<? super X, T> attribute, final T value) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.equal(root.get(attribute), value);
            }
        };
    }

    /**
     * Field is null.
     *
     * @param attribute entity attribute
     * @return specification testing the attribute for null
     */
    public static <X, T> Specification<X> isNull(final SingularAttribute<? super X, T> attribute) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.isNull(root.get(attribute));
            }
        };
    }

    /**
     * Field is not null.
     *
     * @param attribute entity attribute
     * @return specification testing the attribute for non-null
     */
    public static <X, T> Specification<X> isNotNull(final SingularAttribute<? super X, T> attribute) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.isNotNull(root.get(attribute));
            }
        };
    }

    /**
     * Field is <code>true</code>.
     *
     * @param attribute entity attribute
     * @return specification testing the boolean attribute for true
     */
    public static <X> Specification<X> isTrue(final SingularAttribute<? super X, Boolean> attribute) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.isTrue(root.get(attribute));
            }
        };
    }

    /**
     * Field is <code>false</code>.
     *
     * @param attribute entity attribute
     * @return specification testing the boolean attribute for false
     */
    public static <X> Specification<X> isFalse(final SingularAttribute<? super X, Boolean> attribute) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.isFalse(root.get(attribute));
            }
        };
    }

    /**
     * Field content matches a pattern, using the default CONTAINS
     * strategy <code>('%' + pattern + '%')</code>.
     *
     * @param attribute entity attribute
     * @param pattern pattern to match
     * @return specification performing the LIKE comparison
     */
    public static <X> Specification<X> like(SingularAttribute<? super X, String> attribute, String pattern) {
        return like(attribute, pattern, MatchMode.ANYWHERE);
    }

    /**
     * Field content matches a pattern.
     *
     * @param attribute entity attribute
     * @param pattern pattern to match
     * @param mode matching strategy {@link MatchMode}
     * @return specification performing the LIKE comparison
     */
    public static <X> Specification<X> like(final SingularAttribute<? super X, String> attribute, final String pattern, final MatchMode mode) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                return cb.like(root.get(attribute), mode.toMatchString(pattern));
            }
        };
    }

    /**
     * Field content matches any of a collection of patterns, using the
     * default CONTAINS strategy <code>('%' + pattern + '%')</code>.
     *
     * @param attribute entity attribute
     * @param patterns patterns; matching any one of them suffices
     * @return specification OR-ing the LIKE comparisons
     */
    public static <X> Specification<X> like(SingularAttribute<? super X, String> attribute, Collection<String> patterns) {
        return like(attribute, patterns, MatchMode.ANYWHERE);
    }

    /**
     * Field content matches any of a collection of patterns.
     *
     * @param attribute entity attribute
     * @param patterns patterns; matching any one of them suffices
     * @param mode matching strategy {@link MatchMode}
     * @return specification OR-ing the LIKE comparisons
     */
    public static <X> Specification<X> like(final SingularAttribute<? super X, String> attribute, final Collection<String> patterns,
            final MatchMode mode) {
        return new Specification<X>() {
            @Override
            public Predicate toPredicate(Root<X> root, CriteriaQuery<?> query, CriteriaBuilder cb) {
                Expression<String> exp = root.get(attribute);
                Predicate[] predicates = new Predicate[patterns.size()];
                int i = 0;
                for (String s : patterns) {
                    predicates[i++] = cb.like(exp, mode.toMatchString(s));
                }
                return cb.or(predicates);
            }
        };
    }
}
|
<filename>InteractiveProgramming/pong.py
# SimpleGUI PONG!
import simplegui
class Sprite:
    """A sprite is a video element, so it needs some things
    like position, size, color (basic in pong), and if it
    moves, a velocity."""

    def __init__(self):
        self._x_pos = 0
        self._y_pos = 0
        self._x_dim = 0
        self._y_dim = 0
        self._x_vel = 0
        self._y_vel = 0
        self._shape = 'point'
        self._color = "Red"
        # Stroke width used by the drawing subclasses.  This attribute was
        # never initialised anywhere, so Rectangle.draw raised
        # AttributeError on self._line_width.
        self._line_width = 1

    def position(self, x=None, y=None):
        """Get, and optionally set, the position as [x, y].

        x and y were previously required arguments tested with a bare
        truthiness check, which broke read-only calls (Polygon.draw calls
        self.position() with no arguments) and silently ignored a
        legitimate coordinate of 0.
        """
        if x is not None:
            self._x_pos = x
        if y is not None:
            self._y_pos = y
        return [self._x_pos, self._y_pos]

    def size(self, h=None, w=None):
        """Get/set the size; returns [width, height]."""
        self.width(w)
        self.height(h)
        return [self.width(), self.height()]

    def color(self, c=None):
        """Get/set the sprite colour."""
        if c:
            self._color = c
        return self._color

    def height(self, y=None):
        """Get/set the height."""
        # NOTE(review): `if y:` treats an explicit 0 as "no change"; kept
        # because a zero-sized sprite is not meaningful in this game.
        if y:
            self._y_dim = y
        return self._y_dim

    def width(self, x=None):
        """Get/set the width (0 likewise treated as "no change")."""
        if x:
            self._x_dim = x
        return self._x_dim

    def move(self):
        """Advance the position by one tick of velocity."""
        self._x_pos = self._x_pos + self._x_vel
        self._y_pos = self._y_pos + self._y_vel

    def top(self):
        """Corner opposite the position: [x + width, y + height]."""
        return [self._x_pos + self._x_dim,
                self._y_pos + self._y_dim]

    def bounce(self, x, y):
        """Bouncing off a plane will reverse one of the
        velocities."""
        if x:
            # The plane is a horizontal plane: reverse vertical velocity.
            self._y_vel *= -1
        if y:
            # The plane is a vertical plane: reverse horizontal velocity.
            self._x_vel *= -1

    def accelerate(self, magnitude, direction):
        """Accelerate the sprite by adding a force and direction
        to the current velocity.
        magnitude = pixels per second
        direction = degrees where 0 = north
        due to simplegui's mirrored directions, we have to subtract
        the direction in degrees from 180 first
        """
        from math import sin, cos, radians
        self._x_vel += cos(radians(180 - direction)) * magnitude
        self._y_vel += sin(radians(180 - direction)) * magnitude
class Rectangle(Sprite):
    """Axis-aligned rectangle sprite."""

    def __init__(self):
        # Initialise base sprite state (position, velocity, line width);
        # the original skipped this, leaving those attributes unset.
        super().__init__()
        self._shape = 'rectangle'

    def draw(self, canvas):
        """Draw the rectangle as a four-point polygon on the canvas."""
        return canvas.draw_polygon(
            [[self._x_pos, self._y_pos],
             [self._x_pos, self._y_pos + self._y_dim],
             [self._x_pos + self._x_dim, self._y_pos + self._y_dim],
             [self._x_pos + self._x_dim, self._y_pos]],
            self._line_width,
            self._color)

    def dim(self):
        """dimensions of the rectangle is two points. Here we return
        x0, y0, x1, y1 so that the intersect function can compare them."""
        return [self._x_pos,
                self._y_pos,
                self._x_pos + self._x_dim,
                self._y_pos + self._y_dim]

    def intersect(self, sprite):
        """True when this rectangle overlaps another rectangular sprite.

        The original returned the opposite boolean and compared the
        y-extents with inverted operators, so it reported overlapping
        rectangles as False; this is the standard AABB overlap test.
        """
        me = self.dim()
        it = sprite.dim()
        return (me[0] <= it[2] and me[2] >= it[0] and
                me[1] <= it[3] and me[3] >= it[1])
class Polygon(Sprite):
    """Sprite defined by a list of vertex offsets in self._points."""

    def __init__(self):
        super().__init__()
        self._shape = 'polygon'
        # Vertex offsets relative to the sprite position; the caller
        # populates this before draw() is used.
        self._points = []

    def draw(self, canvas):
        """polygons have five or more points

        The original multiplied the point list by the position list
        (a TypeError) and wrapped the points in an extra list; translate
        each vertex by the sprite position instead and pass the flat
        point list to simplegui.
        """
        x, y = self.position()
        points = [[px + x, py + y] for px, py in self._points]
        canvas.draw_polygon(
            points,
            self._line_width,
            self._color)
class Circle(Sprite):
    """Sprite drawn as a circle around its position point."""

    def __init__(self):
        # Was missing: base attributes (including _line_width) were unset.
        super().__init__()
        self._shape = 'circle'
        self._radius = 1

    def draw(self, canvas):
        """circles are defined by one point and a radius"""
        canvas.draw_circle([self._x_pos, self._y_pos], self._radius,
                           self._line_width, self._color)
class One(Rectangle):
    """First paddle: a 10x40 white rectangle."""

    def __init__(self):
        # Was missing: rectangle/sprite state was never initialised.
        super().__init__()
        self._color = 'white'
        self.size(40, 10)
class Two(Rectangle):
    """Second paddle; mirrors One.  The original definition was left
    unfinished (its __init__ had no body), so it is completed the same
    way One is built."""

    def __init__(self):
        super().__init__()
        self._color = 'white'
        self.size(40, 10)


def tick():
    """Handler to handle the time tick.

    NOTE(review): the original body ended mid-statement (`polygon.`).
    Bouncing off the left wall is the most plausible intent given
    Sprite.bounce's plane semantics — confirm against the game design.
    """
    # `moving` is expected to be defined elsewhere in this file.
    for polygon in moving:
        polygon.move()
        [px, py] = polygon.position()
        if px < 0:
            # Left edge acts as a vertical plane: reverse x velocity.
            polygon.bounce(False, True)
def draw(canvas):
    """Frame draw handler: render every sprite in the global polygon list."""
    global polygons
    for shape in polygons:
        shape.draw(canvas)
# Create a frame and assign callbacks to event handlers
frame = simplegui.create_frame("Home", 600, 600)
# NOTE(review): `click` is not defined anywhere in this file — both button
# registrations below will raise NameError; confirm the intended handlers
# (likely separate start/reset callbacks).
frame.add_button("Start", click)
frame.add_button("Reset", click)
frame.set_draw_handler(draw)

# Start the frame animation
frame.start()
|
import SwiftUI
struct ContentView: View {
    // Operands entered by the user.
    @State private var number1: Double = 0
    @State private var number2: Double = 0
    // Index into `operators` of the currently selected operation.
    @State private var operatorIndex = 0

    let operators = ["+", "-", "*", "/"]

    var body: some View {
        VStack {
            TextField("Number 1", value: $number1, formatter: NumberFormatter())
            TextField("Number 2", value: $number2, formatter: NumberFormatter())
            Picker(selection: $operatorIndex, label: Text("Operator")) {
                ForEach(0..<operators.count, id: \.self) {
                    Text(self.operators[$0])
                }
            }.pickerStyle(SegmentedPickerStyle())
            Button(action: {
                let result = self.calculate()
                print("Result: \(result)")
            }) {
                Text("Calculate")
            }
        }
    }

    /// Applies the selected arithmetic operator to number1 and number2.
    ///
    /// Note: Double division by zero yields infinity/NaN, it does not trap.
    func calculate() -> Double {
        // `operator` is a reserved keyword in Swift, so the original
        // `let operator = ...` did not compile; use a plain identifier.
        let op = operators[operatorIndex]
        switch op {
        case "+":
            return number1 + number2
        case "-":
            return number1 - number2
        case "*":
            return number1 * number2
        case "/":
            return number1 / number2
        default:
            fatalError("Unknown operator \(op)")
        }
    }
}
set -x

# RLTest environment prefix (defaults to open-source redis).
env_prefix=oss
# Module artifact extension: .so for oss builds, .zip otherwise.
module_suffix=so
if [ -n "$1" ]
then
  env_prefix=$1
fi
if [ "$env_prefix" != "oss" ]
then
  module_suffix=zip
fi
# Drop the env argument; remaining args are forwarded to RLTest via $@.
shift
echo "no cluster on "$env_prefix
# NOTE(review): the non-cluster run always loads redisgears.so even when
# module_suffix is zip — confirm whether it should use $module_suffix too.
RLTest --clear-logs --module ../redisgears.so --env $env_prefix $@
echo "cluster mode, 1 shard"
RLTest --clear-logs --module ../redisgears.$module_suffix --env $env_prefix-cluster --shards-count 1 $@
echo "cluster mode, 2 shards"
RLTest --clear-logs --module ../redisgears.$module_suffix --env $env_prefix-cluster --shards-count 2 $@
echo "cluster mode, 3 shards"
RLTest --clear-logs --module ../redisgears.$module_suffix --env $env_prefix-cluster --shards-count 3 $@
|
<reponame>jogoes/caradverts
package testutil
import java.time.LocalDate
import java.util.UUID
import java.util.concurrent.ThreadLocalRandom
import model.{CarAdvert, FuelType}
/** Test helper producing randomised [[CarAdvert]] instances. */
object CarAdvertFactory {
  // Epoch-day bounds for generated dates.
  val minDay = LocalDate.of(1970, 1, 1).toEpochDay
  val maxDay = LocalDate.of(2050, 12, 31).toEpochDay

  /** Uniform random Int in the inclusive range [min, maxInclusive]. */
  def nextInt(min: Int, maxInclusive: Int) = ThreadLocalRandom.current().nextInt(maxInclusive + 1 - min) + min

  /** Random date between minDay and maxDay. */
  def randomDate = LocalDate.ofEpochDay(ThreadLocalRandom.current().nextLong(minDay, maxDay))

  def randomPrice = nextInt(0, 10000)

  def randomMileage = nextInt(0, 5000000)

  // Used car: carries the extra mileage and date fields — presumably
  // mileage and first-registration date; confirm against model.CarAdvert.
  def usedCarAdvert(name: String, fuelType: FuelType): CarAdvert = CarAdvert(UUID.randomUUID(), name, fuelType, randomPrice, randomMileage, randomDate)

  /** New car advert with a freshly generated id. */
  def newCarAdvert(name: String, fuelType: FuelType): CarAdvert = newCarAdvert(UUID.randomUUID(), name, fuelType)

  /** New car advert with the given id. */
  def newCarAdvert(id: UUID, name: String, fuelType: FuelType): CarAdvert = CarAdvert(id, name, fuelType, randomPrice)
}
|
<reponame>UlissesMattos/ExeLogicaCWI<filename>MedidaDeDados/script.js<gh_stars>0
/**
 * Formats a byte count using binary (1024-based) units.
 *
 * The original script used `return` at the top level (a SyntaxError),
 * compared against integer literals far beyond Number.MAX_SAFE_INTEGER
 * (silently imprecise), and repeated long /1024 division chains.  A unit
 * ladder replaces all of that; values are clamped at the largest unit.
 *
 * @param {number} bytes - non-negative byte count
 * @returns {string} formatted value, e.g. "8.28 MB", or the original
 *   error message for invalid input
 */
function formatBytes(bytes) {
  const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
  if (typeof bytes !== 'number' || !isFinite(bytes) || bytes < 0) {
    // Same user-facing message the original printed for invalid input.
    return 'Digite um número válido!';
  }
  let value = bytes;
  let unit = 0;
  // Each step up the ladder divides by 1024.
  while (value >= 1024 && unit < units.length - 1) {
    value /= 1024;
    unit += 1;
  }
  return `${value.toFixed(2)} ${units[unit]}`;
}

var valor = 8678677;
console.log(formatBytes(valor));
/**
 * Builds a random password of the requested length from a fixed set of
 * letters, digits and punctuation.
 *
 * @param {number} [length=8] - number of characters to generate
 * @returns {string} the generated password
 */
function generatePassword(length=8) {
  const charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*()";
  const picked = [];
  for (let i = 0; i < length; i += 1) {
    const index = Math.floor(Math.random() * charset.length);
    picked.push(charset.charAt(index));
  }
  return picked.join("");
}
const result = generatePassword();
console.log(result);
class DataStructure:
    """Container that collects elements and reports their common type."""

    def __init__(self):
        # Elements are stored in insertion order.
        self.data_elements = []

    def add_element(self, element):
        """Append one element to the structure."""
        self.data_elements.append(element)

    @property
    def datatype(self):
        """Name of the single shared element type, or a descriptive fallback."""
        if not self.data_elements:
            return "No data"
        unique_types = {type(item) for item in self.data_elements}
        if len(unique_types) == 1:
            return unique_types.pop().__name__
        return "Mixed data types"
import asyncio
class TaskRunner:
    """Runs a collection of awaitables concurrently and collects results."""

    def __init__(self, tasks):
        # Awaitables to execute; result order matches this order.
        self.tasks = tasks

    async def _run(self, coros):
        """Await every coroutine concurrently via asyncio.gather."""
        return await asyncio.gather(*coros)

    async def run_tasks(self):
        """Execute the stored tasks and return their results in order."""
        return await self._run(self.tasks)
# Example usage
async def task1():
    await asyncio.sleep(1)
    return "Task 1 completed"

async def task2():
    await asyncio.sleep(2)
    return "Task 2 completed"

async def task3():
    await asyncio.sleep(0.5)
    return "Task 3 completed"

async def main():
    # All three tasks run concurrently; results keep submission order, so
    # this prints the three completion strings for task1, task2, task3.
    tasks = [task1(), task2(), task3()]
    task_runner = TaskRunner(tasks)
    results = await task_runner.run_tasks()
    print(results)

asyncio.run(main())
<gh_stars>0
#!/bin/env ruby
# encoding: utf-8
require 'redmine'
# Dispatcher only exists on Rails 2.x; Rails 3+ uses the
# ActionDispatch callback branch below instead.
require 'dispatcher' unless Rails::VERSION::MAJOR >= 3
require 'wiki_controller_patch'
require 'wiki_page_patch'
require_dependency 'redmine_wikicipher/hooks'
require_dependency 'redmine_wikicipher/macros'
require_dependency 'redmine/wiki_formatting/textile/wikicipher_helper'

# Plugin registration metadata shown in Redmine's plugin list.
Redmine::Plugin.register :redmine_wikicipher do
  name 'Redmine Wikicipher plugin'
  author '<NAME>'
  author_url 'mailto:<EMAIL>'
  description 'This plugin adds the ability to encrypt section of text'
  version '0.0.10'
  url 'https://github.com/keeps/redmine_wikicipher'
end

if Rails::VERSION::MAJOR >= 3
  # Rails 3+: re-apply the controller/model patches on each code reload.
  ActionDispatch::Callbacks.to_prepare do
    require_dependency 'wiki_controller'
    require_dependency 'wiki_page'
    WikiController.send(:include, WikiControllerPatch)
    WikiPage.send(:include, WikiPagePatch)
  end
else
  # Rails 2.x: keep passwords and wiki text out of the request logs.
  ApplicationController.class_eval do
    filter_parameter_logging :password, :text
  end
  # Rails 2.x equivalent of the to_prepare hook above.
  Dispatcher.to_prepare do
    require_dependency 'wiki_controller'
    require_dependency 'wiki_page'
    WikiController.send(:include, WikiControllerPatch)
    WikiPage.send(:include, WikiPagePatch)
  end
end
|
/* ********************************************************
* This file provides the funciton prototypes for
* configuration and use of pwm module which is present on
* tm4c129encpdt
*
* Author: <NAME>
* Date created: 13th Dec 2020
* Last modified: 8th Jan 2021
*
* ********************************************************/
#ifndef _PWM_H
#define _PWM_H

/* Fixed-width integer types (uint8_t, uint32_t) used by the prototypes
 * below; the original header relied on the includer providing them. */
#include <stdint.h>

/* ********************************************************************
 * This function initializes the pwm unit and led
 *
 * param: period   time period of the pwm in clock cycles.
 *
 * param: duty     duty cycle in percentage for the pwm.
 *
 * return: void
 *
 * brief: This function configures and enables the pwm module and the
 * gpio module required to send the pwm output to led on port F pin 4.
 * port F pin 4 is configured as a tertiary function pin which is driven
 * by the pwm module.
 *
 * ********************************************************************/
void PWMLedInit(uint32_t period, uint8_t duty);

/* ********************************************************************
 * This function updates the duty cycle of pwm
 *
 * param: duty     duty cycle in percentage for the pwm.
 *
 * return: void
 *
 * brief: Updates the compare register to update the duty cycle of the
 * pwm signal.
 *
 * ********************************************************************/
void PWMLedDutyUpdate(uint8_t duty);

/* ********************************************************************
 * This function returns the current duty cycle
 *
 * param: void
 *
 * return: duty    returns the duty cycle of the pwm signal
 *
 * brief: Calculates and returns the current duty cycle value from
 * compare and load registers.
 *
 * ********************************************************************/
uint8_t PWMGetDuty(void);

#endif
|
#!/bin/bash -e
# Run selected example notebooks end-to-end via nbconvert.

# used pip packages
pip_packages="pillow jupyter numpy matplotlib torch torchvision webdataset pyyaml"
# Notebook paths below are relative to this directory (consumed by the
# sourced test template).
target_dir=./docs/examples

# populate epilog and prolog with variants to enable/disable conda
# every test will be executed for below configs
prolog=(enable_conda)
epilog=(disable_conda)

test_body() {
    test_files=(
        "custom_operations/python_operator.ipynb"
        "use_cases/webdataset-externalsource.ipynb"
    )
    # Quote the array expansion and the loop variable so paths containing
    # whitespace are not word-split (the previous version was unquoted).
    for f in "${test_files[@]}"; do
        jupyter nbconvert --to notebook --inplace --execute \
            --ExecutePreprocessor.kernel_name=python${PYVER:0:1} \
            --ExecutePreprocessor.timeout=300 "$f"
    done
}

pushd ../..
source ./qa/test_template.sh
popd
|
<gh_stars>0
import {Bud, factory} from '@repo/test-kit/bud'
describe('bud.alias', function () {
let bud: Bud
beforeAll(async () => {
bud = await factory()
})
it('is a function', () => {
expect(bud.alias).toBeInstanceOf(Function)
})
it('is configurable by bud.alias', async () => {
bud.alias({'@foo': 'bar'})
await bud.api.processQueue()
await bud.build.make()
const filteredAlias = await bud.hooks.filterAsync(
'build.resolve.alias',
)
expect(filteredAlias).toEqual({
'@foo': bud.path('project', 'bar'),
})
expect(bud.build.config.resolve.alias).toEqual({
'@foo': bud.path('project', 'bar'),
})
})
})
|
import mock from "jest-mock";
import expect from "expect";

// Verifies that history.listen does NOT invoke the listener synchronously
// on subscription, then cleanly unsubscribes.
export default function (history, done) {
  const listener = mock.fn();
  const unlisten = history.listen(listener);

  expect(listener).not.toHaveBeenCalled();

  unlisten();
  done();
}
|
# Copyright 2018 The Simons Foundation, Inc. - All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import netket as nk

# NOTE(review): this script uses the pre-3.0 netket API
# (nk.hilbert.Boson(graph=...), nk.variational.Vmc) -- confirm the pinned
# netket version before running.

# 1D periodic lattice with 12 sites.
g = nk.graph.Hypercube(length=12, n_dim=1, pbc=True)
# Boson Hilbert space: at most 3 bosons per site, 12 bosons in total.
hi = nk.hilbert.Boson(graph=g, n_max=3, n_bosons=12)
# Bose-Hubbard Hamiltonian with on-site interaction U = 4.
ha = nk.operator.BoseHubbard(U=4.0, hilbert=hi)
# Symmetric Jastrow ansatz with small random initial parameters.
ma = nk.machine.JastrowSymm(hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Hamiltonian-driven Metropolis sampler.
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
# Stochastic gradient descent optimization.
op = nk.optimizer.Sgd(learning_rate=0.1)
# Variational Monte Carlo driver using stochastic reconfiguration ("Sr").
vmc = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=5e-3,
    use_iterative=False,
    method="Sr",
)
# Optimize for 4000 iterations; results go to files with prefix "test".
vmc.run(n_iter=4000, out="test")
|
<?php
// Submit a urlencoded POST to a form endpoint and echo the response body.
$data = array("name" => "John", "age" => "20");
$url = "https://example.com/form";

$options = array(
    'http' => array(
        'header' => "Content-type: application/x-www-form-urlencoded\r\n",
        'method' => 'POST',
        'content' => http_build_query($data)
    )
);

$context = stream_context_create($options);
$result = file_get_contents($url, false, $context);

// file_get_contents() returns false on failure; the previous version
// silently echoed nothing in that case.
if ($result === false) {
    fwrite(STDERR, "POST request to $url failed\n");
    exit(1);
}
echo $result;
?> |
#!/bin/bash
# Dictionary attack against a password-protected 7-Zip archive.
# Usage: ./script <archive.7z> <wordlist>

yellow=$(tput setaf 3)
red=$(tput setaf 1)
green=$(tput setaf 2)
reset=$(tput sgr0)
cracked=1

figlet cracker

ctrlc() {
    echo -e "\n${red}Ctrl-C caught. Quitting!${reset}"
    exit 1
}
trap ctrlc INT

if [ $# -ne 2 ]; then
    echo "${red}Usage: $0 7zipfile wordlist $reset"
    exit 1
fi

# IFS= and -r preserve leading/trailing spaces and backslashes in candidate
# passwords; all expansions are quoted so special characters survive.
while IFS= read -r word; do
    echo -ne "\r${yellow}Trying Password: $word $reset"
    # -aoa: overwrite existing files without prompting.
    if 7z x -p"$word" "$1" -aoa &>/dev/null; then
        cracked=0
        echo -e "\n\n${green}Password is: $word $reset"
        break
    fi
done < "$2"

if [ $cracked -eq 1 ]; then
    echo "${red}Couldn't crack the password! Try a different dictionary. ${reset}"
fi
|
<reponame>ilaborie/catnip<gh_stars>1-10
import { html, TemplateResult } from "lit-html";
import { Constant, InstructionInstance } from "../models/input";

/**
 * Render the constant pool inside a collapsible <details> element.
 */
export const renderConstantPool = (constants: string): TemplateResult => {
  return html`
    <details>
      <summary>Constant Pool</summary>
      <div class="constants">${constants}</div>
    </details>
  `;
};
// FIXME Frame, stack, locals, ... current step
/**
 * Render a method's instruction list, highlighting the instruction at
 * `position` with the "selected" class.
 */
export const renderMethodCode = (
  code: InstructionInstance[],
  position: number
): TemplateResult => {
  // CSS classes for one instruction: its type (if any) plus "selected"
  // when it is the current step.
  const classes = (inst: InstructionInstance) =>
    [
      inst.instruction.type ? `inst-${inst.instruction.type}` : "",
      position === inst.position ? "selected" : ""
    ]
      .filter(s => s !== "")
      .join(" ");
  // BUG FIX: the per-item template previously ended with a stray </ul>,
  // emitting one bogus closing tag per instruction (malformed markup).
  return html`
    <ul class="code">
      ${code.map(
        inst => html`
          <li class="${classes(inst)}">
            <div class="position">${inst.position}</div>
            <div class="code">${inst.instruction.code}</div>
            <div class="args">${inst.args}</div>
          </li>`
      )}
    </ul>
  `;
};
|
from __init__ import *
from copy import copy
from bs4 import BeautifulSoup
import jieba
import argparse
# Command-line configuration for the preprocessing run.
parser = argparse.ArgumentParser()
parser.add_argument("--tp", default="train", help="the type of generated data type")
parser.add_argument("--data_dir", default="../DuReader/data/preprocessed/", help="train/dev/test based directory path")
args = parser.parse_args()
class Process:
    """Convert raw DuReader search/zhidao JSON lines into network-ready records.

    Opens one output file per source ("SEARCH", "ZHIDAO") plus a combined
    "TOTAL" file and writes one JSON object per line.  `json`, `logger`
    and `vocabulary` are presumably provided by the star import from
    __init__ -- TODO confirm.
    """

    def __init__(self, tp):
        # tp selects the split: "train", "dev", "test" or "test2".
        self.writers = {
            "SEARCH": open("search.{}.net.json".format(tp), "w"),
            "ZHIDAO": open("zhidao.{}.net.json".format(tp), "w"),
            "TOTAL": open("total.{}.net.json".format(tp), "w"),
        }
        # Monotonically increasing id assigned to each test passage.
        self.passage_id = 0
        self.start(tp)
        self.close()

    def start(self, tp):
        """Stream both source files, convert each line, and write the results."""
        res = []  # NOTE(review): never read below -- looks like leftover scaffolding
        for _file, _mode in zip([args.data_dir + "{tp}set/search.{tp}.json".format(tp=tp), args.data_dir + "{tp}set/zhidao.{tp}.json".format(tp=tp)], ["SEARCH", "ZHIDAO"]):
            with open(_file) as r:
                for i, line in enumerate(r, 1):
                    # NOTE(review): for a tp outside these four values `d` is
                    # unbound on first use -- confirm tp is validated upstream.
                    if tp in ["test", "test2"]: d = self.test_process(json.loads(line))
                    elif tp in ["train", "dev"]: d = self.train_process(json.loads(line))
                    if d is None: continue
                    if tp not in ["test", "test2"]:
                        # train/dev: one record per input line.
                        [self.writers[_m].write(json.dumps(d, ensure_ascii=False) + "\n") for _m in [_mode, "TOTAL"]]
                    else:
                        #res += d
                        # test: one record per document of the question.
                        [[self.writers[_m].write(json.dumps(_d, ensure_ascii=False) + "\n") for _m in [_mode, "TOTAL"]] for _d in d]
                    if False and i >= 101: break  # debug limiter (disabled)
                    if i % 100 == 0: print(i, tp, _mode)

    def train_process(self, data):
        """Build one training record; return None when the line is unusable."""
        if not data["match_scores"]: return
        answer_spans = data["answer_spans"][0] # Actually it only ones
        try: doc = data["documents"][data["answer_docs"][0]]
        except: logger.info("error doc"); return
        base_format = {
            "question_type": data["question_type"], # YES_NO / ENTITY / DESCRIPTION$
            "question_id": data["question_id"], # ID(int)$
            "fact_or_opinion": data["fact_or_opinion"],# FACT / OPINION$
            "segmented_p": doc["segmented_paragraphs"][doc["most_related_para"]], # Now, only one paragraph, shape [para_len]$
            "match_scores": data["match_scores"],
            "answer_spans": data["answer_spans"],
            "segmented_q": data["segmented_question"]}
        # Map tokens to character-level and word-level vocabulary ids.
        base_format["char_paragraph"] = [vocabulary.getCharID(word, True) for word in base_format["segmented_p"]]
        base_format["char_question"] = [vocabulary.getCharID(word, True) for word in base_format["segmented_q"]]
        base_format["segmented_paragraph"] = [vocabulary.getVocabID(v) for v in base_format["segmented_p"]]
        base_format["segmented_question"] = [vocabulary.getVocabID(v) for v in base_format["segmented_q"]]
        return base_format

    def test_process(self, data):
        """Build one record per document for a test question; return the list."""
        res = []
        format = {
            "question_id": data["question_id"],
            "question_type": data["question_type"],
            "segmented_q": data["segmented_question"],
            "segmented_question": [vocabulary.getVocabID(v) for v in data["segmented_question"]],
            "char_question": [vocabulary.getCharID(word, True) for word in data["segmented_question"]],
        }
        for doc in data["documents"]:
            p = []
            for para in doc["segmented_paragraphs"]:
                # Paragraphs that look like raw HTML get stripped and re-segmented.
                if len(para) and para[0] == "<": para = list(jieba.cut(BeautifulSoup("".join(para), "html.parser").text))
                p += para
            format["segmented_p"] = p
            format["segmented_paragraph"] = [vocabulary.getVocabID(v) for v in p]
            format["char_paragraph"] = [vocabulary.getCharID(word, True) for word in p]
            format["passage_id"] = self.passage_id
            self.passage_id += 1
            # Shallow copy so each appended record keeps its own field values.
            f = copy(format)
            res.append(f)
        return res

    def close(self):
        # Flush and close all three output files.
        for writer in self.writers.values():
            writer.close()
if __name__ == "__main__":
    # Build the network-ready dataset for the split selected via --tp.
    Process(args.tp)
|
# -*- encoding: utf-8 -*-
# this is required because of the use of eval interacting badly with require_relative
require 'razor/acceptance/utils'
confine :except, :roles => %w{master dashboard database frictionless}

test_name 'C791 Set Node Power State with invalid path for JSON file'
step 'https://testrail.ops.puppetlabs.net/index.php?/cases/view/791'

reset_database

# Pick a random JSON path that we then guarantee does not exist, so the
# command under test must fail with a file-not-found error.
file = '/tmp/' + Dir::Tmpname.make_tmpname(['create-policy-', '.json'], nil)

step 'Ensure the temporary file is absolutely not present'
on agents, "rm -f #{file}"

step "create a node that we can set the power state of later"
json = {'installed' => true, 'hw-info' => {'net0' => '00:0c:29:08:06:e0'}}
razor agents, 'register-node', json do |node, text|
  _, nodeid = text.match(/name: (node\d+)/).to_a
  refute_nil nodeid, 'failed to extract node ID from output'
  # NOTE(review): this string is assigned but never used below (the command
  # is deliberately pointed at the missing JSON file); looks like leftover
  # scaffolding -- confirm and remove if so.
  json = '{"name" => nodeid, "to" => on}'
  razor node, 'set-node-desired-power-state', %W{--json #{file}}, exit: 1 do |node, text|
    assert_match %r{Error: File /tmp/.*\.json not found}, text
  end
end
|
#!/bin/sh
# Install libdb4.8 (Berkeley DB).

export LC_ALL=C
set -e

# Require exactly one mandatory positional argument: the build base dir.
if [ -z "${1}" ]; then
  echo "Usage: ./install_db4.sh <base-dir> [<extra-bdb-configure-flag> ...]"
  echo
  echo "Must specify a single argument: the directory in which db4 will be built."
  echo "This is probably \`pwd\` if you're at the root of the ubicoin repository."
  exit 1
fi
# Print the physical (symlink-resolved) absolute path of directory "$1".
# The command substitution runs cd in a subshell so the caller's working
# directory is untouched.
expand_path() {
  printf '%s\n' "$(cd "${1}" && pwd -P)"
}
# Install prefix for db4; shift consumes <base-dir>, leaving any remaining
# args as extra ./configure flags.
# NOTE(review): ${1} is unquoted here, so a base dir containing spaces
# word-splits -- confirm and quote if supported paths may contain spaces.
BDB_PREFIX="$(expand_path ${1})/db4"; shift;
BDB_VERSION='db-4.8.30.NC'
BDB_HASH='12edc0df75bf9abd7f82f821795bcee50f42cb2e5f76a6a281b85732798364ef'
BDB_URL="https://download.oracle.com/berkeley-db/${BDB_VERSION}.tar.gz"
# Succeed iff command "$1" is available.  Uses the POSIX-specified
# `command -v` builtin instead of the external, non-portable `which`,
# which is absent on some minimal systems.
check_exists() {
  command -v "$1" >/dev/null 2>&1
}
sha256_check() {
    # Args: <sha256_hash> <filename>
    #
    # Verify <filename> against <sha256_hash> using whichever checksum tool
    # this platform provides: GNU sha256sum, BSD sha256, or Perl shasum.
    if check_exists sha256sum; then
        echo "${1} ${2}" | sha256sum -c
    elif check_exists sha256; then
        if [ "$(uname)" = "FreeBSD" ]; then
            # FreeBSD's sha256 takes the expected hash via -c directly.
            sha256 -c "${1}" "${2}"
        else
            echo "${1} ${2}" | sha256 -c
        fi
    else
        echo "${1} ${2}" | shasum -a 256 -c
    fi
}
http_get() {
    # Args: <url> <filename> <sha256_hash>
    #
    # It's acceptable that we don't require SSL here because we manually verify
    # content hashes below.
    #
    # Skips the download when the file already exists, then always verifies
    # the checksum (so a stale/partial file still fails loudly).
    if [ -f "${2}" ]; then
        echo "File ${2} already exists; not downloading again"
    elif check_exists curl; then
        curl --insecure "${1}" -o "${2}"
    else
        wget --no-check-certificate "${1}" -O "${2}"
    fi
    sha256_check "${3}" "${2}"
}
# Fetch, verify, patch, configure, build and install db4 under BDB_PREFIX.
mkdir -p "${BDB_PREFIX}"
http_get "${BDB_URL}" "${BDB_VERSION}.tar.gz" "${BDB_HASH}"
tar -xzvf ${BDB_VERSION}.tar.gz -C "$BDB_PREFIX"
cd "${BDB_PREFIX}/${BDB_VERSION}/"

# Apply a patch necessary when building with clang and c++11 (see https://community.oracle.com/thread/3952592)
CLANG_CXX11_PATCH_URL='https://gist.githubusercontent.com/LnL7/5153b251fd525fe15de69b67e63a6075/raw/7778e9364679093a32dec2908656738e16b6bdcb/clang.patch'
CLANG_CXX11_PATCH_HASH='7a9a47b03fd5fb93a16ef42235fa9512db9b0829cfc3bdf90edd3ec1f44d637c'
http_get "${CLANG_CXX11_PATCH_URL}" clang.patch "${CLANG_CXX11_PATCH_HASH}"
patch -p2 < clang.patch

cd build_unix/
# Any remaining script arguments are forwarded as extra configure flags.
"${BDB_PREFIX}/${BDB_VERSION}/dist/configure" \
  --enable-cxx --disable-shared --disable-replication --with-pic --prefix="${BDB_PREFIX}" \
  "${@}"
make install

echo
echo "db4 build complete."
echo
echo 'When compiling ubicoind, run `./configure` in the following way:'
echo
echo "  export BDB_PREFIX='${BDB_PREFIX}'"
echo '  ./configure BDB_LIBS="-L${BDB_PREFIX}/lib -ldb_cxx-4.8" BDB_CFLAGS="-I${BDB_PREFIX}/include" ...'
package tests.bibliotecaUFMA;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import bibliotecaUFMA.DataControlClass;
import bibliotecaUFMA.biblioteca;
class DataControlTest {

    DataControlClass control; // unit under test
    biblioteca b;             // library fixture passed to every load/save call

    @BeforeEach
    void setUp() throws Exception {
        control = new DataControlClass();
        b = new biblioteca();
    }

    // All of the tests below expect a successful run: carregaDados must
    // return non-null and every other operation must return true.
    @Test
    void testCarregaDados() {
        assertNotNull(control.carregaDados(b));
        //fail("Not yet implemented");
    }

    @Test
    void testSalvaDados() {
        // There is a note about an error in this piece of code in the
        // DataControlClass source file.
        assertTrue(control.salvaDados(b));
        //fail("Not yet implemented");
    }

    @Test
    void testCarregaFuncionarios() {
        assertTrue(control.carregaFuncionarios());
        //fail("Not yet implemented");
    }

    @Test
    void testCarregarLivros() {
        assertTrue(control.carregarLivros());
        //fail("Not yet implemented");
    }

    @Test
    void testCarregaUsuarios() {
        assertTrue(control.carregaUsuarios());
        //fail("Not yet implemented");
    }

    @Test
    void testSalvarFuncionarios() {
        assertTrue(control.salvarFuncionarios(b));
        //fail("Not yet implemented");
    }

    @Test
    void testSalvarLivros() {
        assertTrue(control.salvarLivros(b));
        //fail("Not yet implemented");
    }

    @Test
    void testSalvarUsuarios() {
        assertTrue(control.salvarUsuarios(b));
        //fail("Not yet implemented");
    }
}
|
# platform = multi_platform_all
# packages = sudo
# Test fixture: deliberately seeds /etc/sudoers and /etc/sudoers.d/foo
# with rules using "!" negation operators so the associated check has
# something to flag.  Note the first ">" truncates /etc/sudoers entirely.
# (The header keys suggest a ComplianceAsCode test scenario -- TODO confirm.)
echo 'nobody ALL=/bin/ls, (!bob alice) /bin/dog !arg, /bin/cat' > /etc/sudoers
echo 'jen ALL, !SERVERS = ALL' >> /etc/sudoers
echo 'jen !fred ALL, !SERVERS = /bin/sh' >> /etc/sudoers
echo 'nobody ALL=/bin/ls, (bob !alice) /bin/dog, /bin/cat !arg' > /etc/sudoers.d/foo
|
/// Produce a zero-filled image buffer at twice the given physical size.
///
/// `physical_size` is `(width, height)` in pixels; the returned buffer has
/// room for `(2 * width) * (2 * height)` bytes (one byte per pixel).
fn process_image(physical_size: (u32, u32)) -> Vec<u8> {
    // Do the arithmetic in usize: the previous version doubled through an
    // i32 intermediate, which can overflow (panic in debug, wrap in
    // release) for large u32 dimensions.
    let new_width = physical_size.0 as usize * 2;
    let new_height = physical_size.1 as usize * 2;
    vec![0; new_width * new_height]
}
<reponame>nilslice/crates.io
ALTER TABLE users DROP CONSTRAINT IF EXISTS users_email_key; |
/*
* Copyright (C) 2018 SoftBank Robotics Europe
* See COPYING for the license
*/
package com.softbankrobotics.sample.returntomapframe.localization.gotoorigin;
import android.media.MediaPlayer;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.airbnb.lottie.LottieAnimationView;
import com.softbankrobotics.sample.returntomapframe.R;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RawRes;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentActivity;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import butterknife.Unbinder;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
/**
* The go to origin Fragment.
*/
public class GoToOriginFragment extends Fragment {

    @NonNull
    private static final String TAG = "GoToOriginFragment";

    // Screen and machine are injected via newInstance(); they stay null if
    // the system recreates the fragment -- every use is null-checked below.
    @Nullable
    private GoToOriginScreen screen;
    @Nullable
    private GoToOriginMachine machine;

    // ButterKnife binding handle, released in onDestroyView().
    @Nullable
    private Unbinder unbinder;

    @BindView(R.id.startGoToButton)
    Button startGoToButton;
    @BindView(R.id.infoTextView)
    TextView infoTextView;
    @BindView(R.id.warningImage)
    ImageView warningImage;
    @BindView(R.id.successImage)
    ImageView successImage;
    @BindView(R.id.progressAnimationView)
    LottieAnimationView progressAnimationView;

    // Subscription to the machine's state stream; disposed in onPause().
    @Nullable
    private Disposable disposable;
    // Player for error/success sounds; released in onPause()/playSound().
    @Nullable
    private MediaPlayer mediaPlayer;

    /** Inflates the layout and binds the views. */
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_go_to_origin, container, false);
        unbinder = ButterKnife.bind(this, view);
        return view;
    }

    /** Hides everything, then re-renders from the machine's state stream. */
    @Override
    public void onResume() {
        super.onResume();
        infoTextView.setVisibility(View.INVISIBLE);
        startGoToButton.setVisibility(View.INVISIBLE);
        warningImage.setVisibility(View.INVISIBLE);
        successImage.setVisibility(View.INVISIBLE);
        progressAnimationView.setVisibility(View.INVISIBLE);
        if (machine != null) {
            // State updates are delivered on the main thread so the handler
            // can touch views directly.
            disposable = machine.goToOriginState()
                    .subscribeOn(Schedulers.io())
                    .observeOn(AndroidSchedulers.mainThread())
                    .subscribe(this::onGoToOriginStateChanged);
        }
    }

    /** Stops observing the machine and releases the media player. */
    @Override
    public void onPause() {
        if (disposable != null && !disposable.isDisposed()) {
            disposable.dispose();
        }
        if (mediaPlayer != null) {
            mediaPlayer.release();
            mediaPlayer = null;
        }
        super.onPause();
    }

    /** Releases the ButterKnife binding. */
    @Override
    public void onDestroyView() {
        if (unbinder != null) {
            unbinder.unbind();
        }
        super.onDestroyView();
    }

    /** Forwards the button press to the state machine. */
    @OnClick(R.id.startGoToButton)
    public void onClickStartGoTo() {
        if (machine != null) {
            machine.post(GoToOriginEvent.START_GO_TO_ORIGIN);
        }
    }

    /** Creates the fragment wired to the given screen and state machine. */
    @NonNull
    static GoToOriginFragment newInstance(@NonNull GoToOriginScreen screen, @NonNull GoToOriginMachine machine) {
        GoToOriginFragment fragment = new GoToOriginFragment();
        fragment.screen = screen;
        fragment.machine = machine;
        return fragment;
    }

    /** Replaces any playing sound with the given raw sound resource. */
    private void playSound(@RawRes int soundResId) {
        if (mediaPlayer != null) {
            mediaPlayer.release();
        }
        FragmentActivity activity = getActivity();
        if (activity != null) {
            mediaPlayer = MediaPlayer.create(activity, soundResId);
            mediaPlayer.start();
        }
    }

    /**
     * Renders one machine state: toggles the per-state view visibilities
     * and plays the error/success sound where applicable.
     */
    private void onGoToOriginStateChanged(@NonNull GoToOriginState goToOriginState) {
        Log.d(TAG, "onGoToOriginStateChanged: " + goToOriginState);
        switch (goToOriginState) {
            case IDLE:
                infoTextView.setVisibility(View.INVISIBLE);
                startGoToButton.setVisibility(View.INVISIBLE);
                warningImage.setVisibility(View.GONE);
                successImage.setVisibility(View.INVISIBLE);
                progressAnimationView.setVisibility(View.INVISIBLE);
                break;
            case BRIEFING:
                infoTextView.setVisibility(View.VISIBLE);
                startGoToButton.setVisibility(View.VISIBLE);
                warningImage.setVisibility(View.GONE);
                successImage.setVisibility(View.INVISIBLE);
                infoTextView.setText(R.string.go_to_origin_briefing_text);
                progressAnimationView.setVisibility(View.INVISIBLE);
                break;
            case MOVING:
                infoTextView.setVisibility(View.VISIBLE);
                startGoToButton.setVisibility(View.INVISIBLE);
                warningImage.setVisibility(View.GONE);
                successImage.setVisibility(View.INVISIBLE);
                infoTextView.setText(R.string.go_to_origin_moving_text);
                progressAnimationView.setVisibility(View.VISIBLE);
                break;
            case ERROR:
                infoTextView.setVisibility(View.VISIBLE);
                startGoToButton.setVisibility(View.VISIBLE);
                warningImage.setVisibility(View.VISIBLE);
                successImage.setVisibility(View.INVISIBLE);
                infoTextView.setText(R.string.error_text);
                progressAnimationView.setVisibility(View.INVISIBLE);
                playSound(R.raw.error);
                break;
            case SUCCESS:
                infoTextView.setVisibility(View.VISIBLE);
                startGoToButton.setVisibility(View.INVISIBLE);
                warningImage.setVisibility(View.GONE);
                successImage.setVisibility(View.VISIBLE);
                infoTextView.setText(R.string.success_text);
                progressAnimationView.setVisibility(View.INVISIBLE);
                playSound(R.raw.success);
                break;
            case END:
                if (screen != null) {
                    screen.onGoToOriginEnd();
                }
                break;
        }
    }
}
|
def sortList(lst):
    """Sort ``lst`` in ascending order in place and return it.

    Uses the built-in sort (Timsort, O(n log n)) instead of the original
    O(n^2) pairwise-swap loop; same contract: mutates the list and returns
    the same list object.
    """
    lst.sort()
    return lst
<gh_stars>0
import java.util.HashSet;
import java.util.Scanner;
import java.util.Set;
public class Problem
{
    /**
     * Brute-force check: do any two distinct elements of {@code numbers}
     * sum to {@code k}?  O(n^2) time, O(1) space.
     */
    public static boolean containsPairEqualToK(int[] numbers, int k)
    {
        for (int i = 0; numbers.length - 1 > i; ++i)
        {
            for (int j = i + 1; numbers.length > j; ++j)
            {
                if (numbers[i] + numbers[j] == k)
                {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Single-pass variant: for each element, remember the complement
     * {@code k - number}; a later element equal to a stored complement
     * completes a pair.  O(n) time, O(n) space.
     */
    public static boolean containsPairEqualToKOnePass(int[] numbers, int k)
    {
        Set<Integer> complements = new HashSet<>();
        for (int number : numbers)
        {
            // A previous element is waiting for exactly this value.
            if (complements.contains(number))
                return true;
            complements.add(k - number);
        }
        return false;
    }

    /** Reads the numbers and target from stdin and reports the result. */
    public static void main(String[] args)
    {
        Scanner sc = new Scanner(System.in);
        System.out.println("How many numbers would you like to enter?");
        int[] numbers = new int[sc.nextInt()];
        for (int i = 0; numbers.length > i; ++i)
        {
            System.out.println("Please enter number #" + (i + 1) + ":");
            numbers[i] = sc.nextInt();
        }
        // Fixed garbled prompt ("What is should the sum of two pairs be?").
        System.out.println("What should the sum of the pair be?");
        int k = sc.nextInt();
        sc.close();
        if (containsPairEqualToKOnePass(numbers, k))
        {
            System.out.println("The entered numbers contain a pair which adds up to " + k + ".");
        }
        else
        {
            System.out.println("The entered numbers don't contain a pair which adds up to " + k + ".");
        }
    }
}
<filename>sudokuSolver.cpp
#include <iostream>
#include <cstdlib>
#include <cstdio>
#include <cstring>
#include <cmath>
using namespace std;
// Forward declarations -------------------------------------------------
bool populateBoard(signed char* board, int* mask, string input);
short checkHorizontal(signed char* board, char pos);
short checkVertical(signed char* board, char pos);
short check3x3Region(signed char* board, char pos);
signed char getNumberFromBitmask(short mask);
void setValue(int* mask, signed char* board, char value, char pos);
void removeValue(int* mask,signed char* board, char pos);
void printBoard(signed char* board);
void printBitmask(char size, int value);

// Global 9x9 board (row-major, values 0..9 where 0 = empty) and per-square
// masks holding three packed 9-bit "digit already present" fields (see the
// ORIENTATION shift offsets below).
signed char* board;
int* mask;

// One bit per sudoku digit, used inside each 9-bit mask field.
enum NUMBERS
{
    N1 = 1,
    N2 = 2,
    N3 = 4,
    N4 = 8,
    N5 = 16,
    N6 = 32,
    N7 = 64,
    N8 = 128,
    N9 = 256
};

// High bit of a square's mask marks its value as fixed (given or deduced).
const int LOCKED = 0x80000000;

// Bit offsets of the three 9-bit fields inside each square's mask.
enum ORIENTATION
{
    HORIZONTAL = 0,
    VERTICAL = 9,
    REGION3X3 = 18
};
// Entry point: parse the "-b <81-char board>" argument, place all forced
// numbers, then solve the rest by backtracking over the candidate masks.
int main(int argc, char* argv[])
{
    board = new signed char[81];
    mask = new int[81];
    string input;
    // NOTE(review): argv[i][1] assumes every argument has at least two
    // characters; a one-character argument reads past its terminator --
    // confirm expected invocation is always "-b <board>".
    for(int i = 0; i < argc; i++)
    {
        printf("Parsing arg: %s\n",argv[i]);
        if(argv[i][1] == 'b')
        {
            input = argv[i+1];
            printf("Parsing arg: %s\n",argv[i+1]);
        }
    }
    if (!(populateBoard(board,mask, input)))
    {
        printf("Invalid input.\n");
        return 0;
    }
    printBoard(board);
    //analyze board for numbers that are safe to place down
    for(int i = 0; i < 81; i++)
    {
        //skip checks for numbers already on the board
        if(board[i] > 0)
            continue;
        //retrieve bitmasks for current position
        short numbersVert = (short)((mask[i] >> VERTICAL)&0x000001FF);
        short numbersHori = (short)((mask[i] >> HORIZONTAL)&0x000001FF);
        short numbers3x3R = (short)((mask[i] >> REGION3X3)&0x000001FF);
        short fullCheck = numbersVert|numbersHori|numbers3x3R;
        // Bits set in fullCheckInv are the digits still possible here.
        short fullCheckInv = ~fullCheck & 0x01FF;
        //check if current placement have only one number to choose
        if( (fullCheckInv == N1)||
            (fullCheckInv == N2)||
            (fullCheckInv == N3)||
            (fullCheckInv == N4)||
            (fullCheckInv == N5)||
            (fullCheckInv == N6)||
            (fullCheckInv == N7)||
            (fullCheckInv == N8)||
            (fullCheckInv == N9))
        {
            //place down found number and lock it
            signed char val = getNumberFromBitmask(fullCheckInv);
            setValue(mask,board,val ,i);
            mask[i] = mask[i]|LOCKED;
            //printout
            printf("Placing %d in pos %d\t | ",board[i],i);
            printBitmask(9,numbers3x3R);printf(" | ");
            printBitmask(9,numbersHori);printf(" | ");
            printBitmask(9,numbersVert);printf(" | ");
            printBitmask(9,fullCheck);
            printf("\n");
            //start analyzing from first square after a new number has been placed down
            i = -1;
        }
    }
    printBoard(board);
    // Backtracking pass: iteration is +1 while moving forward and -1 while
    // undoing placements that led to a dead end.
    char iteration = 1;
    for(int n = 0; n < 81; n = n+iteration)
    {
        //locked numbers can be skipped, just make sure to not skip outside of board
        if((mask[n]&LOCKED) == LOCKED)
        {
            if((iteration == -1) && (n == 0))
                iteration = 1;
            continue;
        }
        //if a 9 is found when reversing it can be removed and we can continue the reverse move
        if((board[n] == 9) && (iteration == -1))
        {
            removeValue(mask, board,n);
            continue;
        }
        //take the number in the square and iterate it once
        for(signed char number = board[n]+1; number <= 9; number++)
        {
            //if reversing, remove the current number from the square
            if(iteration == -1)
                removeValue(mask, board, n);
            //get the proper bitpattern for current number
            NUMBERS flag = (NUMBERS)pow(2,number-1);
            //check that current number can fit in square
            short numbersVert = (short)((mask[n] >> VERTICAL)&0x000001FF);
            short numbersHori = (short)((mask[n] >> HORIZONTAL)&0x000001FF);
            short numbers3x3R = (short)((mask[n] >> REGION3X3)&0x000001FF);
            short fullCheck = numbersVert|numbersHori|numbers3x3R;
            if((fullCheck&flag) != flag)
            {
                //the number fits, place it, update bitmasks and start iterate forward again
                setValue(mask, board, number, n);
                iteration = 1;
                break;
            }
            //no numbers fit and we need to continue to reverse
            if(number == 9)
            {
                //printBoard(board);
                removeValue(mask, board,n);
                iteration = -1;
            }
        }
    }
    printBoard(board);
}
// Fill the board/mask arrays from an 81-character string ('1'..'9' for
// givens, '_' or '.' for empty squares).  Returns false on malformed input.
bool populateBoard(signed char* board, int* mask, string input)
{
    //initalize arrays
    for(int n = 0; n < 81; n++)
    {
        mask[n] = 0;
        board[n] = 0;
    }
    // Guard against short input: the previous version indexed past the end
    // of the string.
    if(input.length() < 81)
        return false;
    //enter input values into board and mask arrays
    for(int i = 0; i < 81; i++)
    {
        printf("%c ",input[i]);
        //empty values are skipped
        if(input[i] == '_'|| input[i] == '.')
            continue;
        // Parse the digit directly.  The previous code called
        // atoi(&digit) on a single, non-terminated char, which reads past
        // it (undefined behavior), and compared the result against NULL.
        if(input[i] < '1' || input[i] > '9')
            return false;
        signed char converted = (signed char)(input[i] - '0');
        printf("%d\n",converted);
        setValue(mask, board, converted, i);
        mask[i] = mask[i]|LOCKED;
    }
    return true;
}
// Pretty-print the 9x9 board; empty squares render as blanks.
void printBoard(signed char* board)
{
    printf("\n");
    for(int y = 0; y < 9; y++)
    {
        printf("- - - - - - - - - - - - - - - - - - -\n");
        for(int x = 0; x < 9; x++)
        {
            if(board[x+y*9] > 0)
                printf("| %d ",board[x+y*9]);
            else
                printf("| ");
        }
        printf("|\n");
    }
    printf("- - - - - - - - - - - - - - - - - - -\n");
}
// Map a digit bitmask back to a digit: returns the 1-based index of the
// highest set bit of `mask` (0 when no bit is set).
signed char getNumberFromBitmask(short mask)
{
    signed char bits = 0;
    for(; mask != 0; mask >>= 1)
        ++bits;
    return bits;
}
// Print the low `size` bits of `value`, most-significant bit first.
void printBitmask(char size, int value)
{
    for(int bit = size - 1; bit >= 0; bit--)
        printf("%d", (value >> bit) & (int)1);
}
// Place `value` at `pos` and mark its digit bit in the HORIZONTAL field of
// every square in pos's row, the VERTICAL field of every square in its
// column, and the REGION3X3 field of every square in its 3x3 region.
void setValue(int* mask, signed char* board, char value, char pos)
{
    int valueMask = (int)pow(2,value-1);
    board[pos] = value;
    // Row pass: firstPos is the row's leftmost square.
    char firstPos = pos - pos%9;
    char lastPos = firstPos + 9;
    for(int i = firstPos; i < lastPos; i++)
        mask[i] = mask[i]|(valueMask << HORIZONTAL);
    // Column pass: firstPos is the column's topmost square.
    firstPos = pos%9;
    lastPos = firstPos + 72;
    for(int n = firstPos; n <= lastPos; n = n+9)
        mask[n] = mask[n]|(valueMask << VERTICAL);
    // Region pass: firstPos is the region's top-left square.
    firstPos = pos - pos%3 - (((pos - pos%3)/9)%3)*9;
    lastPos = firstPos + 3*9;
    for(int t = firstPos; t < lastPos; t+=9)
    {
        mask[t] = mask[t]|(valueMask << REGION3X3);
        mask[t+1] = mask[t+1]|(valueMask << REGION3X3);
        mask[t+2] = mask[t+2]|(valueMask << REGION3X3);
    }
}
// Undo setValue for the value currently at `pos`: clear the square and
// toggle its digit bit back off in the row/column/region mask fields.
// NOTE(review): XOR toggles rather than clears, so this relies on each
// bit having been set exactly once per field -- which holds while every
// placement was first validated against the masks; confirm no other code
// path sets these bits.
void removeValue(int* mask, signed char* board, char pos)
{
    signed char value = board[pos];
    board[pos] = 0;
    int valueMask = (int)pow(2,value-1);
    // Row pass.
    char firstPos = pos - pos%9;
    char lastPos = firstPos + 9;
    for(int i = firstPos; i < lastPos; i++)
        mask[i] = mask[i]^(valueMask << HORIZONTAL);
    // Column pass.
    firstPos = pos%9;
    lastPos = firstPos + 72;
    for(int n = firstPos; n <= lastPos; n = n+9)
        mask[n] = mask[n]^(valueMask << VERTICAL);
    // Region pass.
    firstPos = pos - pos%3 - (((pos - pos%3)/9)%3)*9;
    lastPos = firstPos + 3*9;
    for(int t = firstPos; t < lastPos; t+=9)
    {
        mask[t] = mask[t]^(valueMask << REGION3X3);
        mask[t+1] = mask[t+1]^(valueMask << REGION3X3);
        mask[t+2] = mask[t+2]^(valueMask << REGION3X3);
    }
}
// -------------------- DEPRECATED --------------------
// (BUG FIX: the old banner comment ended in a backslash, which line-spliced
// the next source line into the comment and swallowed this function's
// declaration line.)
//
// Return a bitmask of the digits already present in pos's row.
short checkHorizontal(signed char* board, char pos)
{
    short numbers = 0;
    char firstPos = pos - pos%9;
    char lastPos = firstPos + 9;
    for(int i = firstPos; i < lastPos; i++)
        if(board[i] > 0)
        {
            short number = (short)pow(2,board[i]-1);
            //printf("Number found horizontal: %d\n",number);
            numbers = numbers | number;
        }
    return numbers;
}
// Deprecated: superseded by the incremental masks in setValue/removeValue.
// Return a bitmask of the digits already present in pos's column.
short checkVertical(signed char* board, char pos)
{
    short found = 0;
    int cell = pos % 9; // topmost square of the column
    for(int step = 0; step < 9; ++step, cell += 9)
    {
        if(board[cell] > 0)
            found = found | (short)pow(2, board[cell] - 1);
    }
    return found;
}
// Deprecated: superseded by the incremental masks in setValue/removeValue.
// Return a bitmask of the digits already present in pos's 3x3 region.
short check3x3Region(signed char* board, char pos)
{
    short found = 0;
    // Index of the region's top-left square.
    int start = pos - pos%3 - (((pos - pos%3)/9)%3)*9;
    for(int row = 0; row < 3; ++row)
    {
        for(int col = 0; col < 3; ++col)
        {
            signed char v = board[start + row*9 + col];
            if(v > 0)
                found = found | (short)pow(2, v - 1);
        }
    }
    return found;
}
package com.intercpter;
public class Log {

    // Emits a marker line; presumably invoked as a "before" advice by an
    // interceptor elsewhere in the project -- TODO confirm the wiring.
    public void before(){
        System.out.println("Log start !");
    }
}
|
<gh_stars>0
package com.jiulong.eureka.service.impl;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixProperty;
import org.springframework.stereotype.Component;
import java.util.Map;
/**
* 测试熔断
*
*/
@Component
public class StoreIntegration {
@HystrixCommand(fallbackMethod = "defaultStores", commandProperties = {
@HystrixProperty(name="execution.isolation.strategy", value="SEMAPHORE")
})
public String getStores(Map<String, Object> parameters) {
new RuntimeException("runtime exception");
return "getStores";
}
public String defaultStores(Map<String, Object> parameters) {
return "defaultStores";
}
} |
<gh_stars>1-10
-- Build view_medgen_uid: a distinct MedGenUID -> ConceptID mapping
-- materialized from medgen_pubmed.  log/utf8_unicode/create_index are
-- project-defined stored procedures.
call log('view_medgen_uid', 'MedGenUID:ConceptID');

select ' MedGen PubMed table is 80M+ rows, this will take a few minutes ' as fyi;

drop table if exists view_medgen_uid;
CREATE TABLE view_medgen_uid
select distinct UID as MedGenUID,
CUI as ConceptID from medgen_pubmed;

-- Normalize collation, then index both lookup columns.
call utf8_unicode('view_medgen_uid');
call create_index('view_medgen_uid','MedGenUID');
call create_index('view_medgen_uid','ConceptID');
-- ###################################################
call log('view_medgen_uid', 'end');
|
<filename>test/desktopbrowsers.test.js
// Verifies that the library under test translates desktop mouse events on
// a bound element into synthetic touch events.  NOTE(review): the cases
// are order-dependent -- the mousedown fired in the first test arms the
// move/up handling exercised by the later ones; confirm before reordering.
describe("desktopbrowsers",function(){
    var $item;
    var item;

    before(function(){
        // Fresh target element for the whole suite.
        $(document.body).append("<div id='desktopbrowsers'></div>");
        $item=$("#desktopbrowsers");
        item=$item.get(0);
    });

    // Dispatch a real DOM MouseEvent of type `evt` at client coords (x, y).
    function fakeMouseEvent(evt,target,x,y){
        var mousedownEvent = document.createEvent ("MouseEvent");
        mousedownEvent.initMouseEvent (evt, true, true, window, 0,
            x,y,x,y,
            0,0,0,0,
            0, null);
        target.dispatchEvent (mousedownEvent);
    }

    after(function(){
        $("#desktopbrowsers").remove();
    })

    it("mouse click should trigger a touchstart",function(done){
        $item.one("touchstart",function(){done();});
        fakeMouseEvent("mousedown",item,1,1);
    });

    it("should trigger touchmove from mousemove",function(done){
        $item.one("touchmove",function(){
            done();
        });
        fakeMouseEvent("mousemove",item,2,2);
    });

    it("should NOT trigger touchmove from mousemove with no movement",function(done){
        // The pointer is already at (1,1); a move to the same point must
        // not produce a touchmove within the grace period.
        var bad=true;
        $item.one("touchmove",function(){
            bad=false;
        });
        setTimeout(function(){
            expect(bad).to.be.true
            done();
        },100);
        fakeMouseEvent("mousemove",item,1,1);
    });

    it("should trigger touchend from mouseup event",function(done){
        $item.one("touchend",function(){done();});
        fakeMouseEvent("mouseup",item,1,1);
    });

    it("should NOT trigger touchmove from mousemove with no mousedown",function(done){
        // After the mouseup above the "button" is released, so moves must
        // not synthesize touchmove events.
        var bad=true;
        $item.one("touchmove",function(){
            bad=false;
        });
        setTimeout(function(){
            expect(bad).to.be.true
            done();
        },100);
        fakeMouseEvent("mousemove",item,1,1);
    });
});
<reponame>Gisson/jkargs
import ist.meic.pa.annotations.KeywordArgs;

// Extends KeyPlaces with a "visited" counter settable by keyword argument.
public class KeyVisited extends KeyPlaces {

    int visited; // defaults to 0 via the annotation below

    // The constructor body is intentionally empty: the KeywordArgs
    // annotation processor presumably generates the keyword handling
    // ("visited=0" default; "second" inherited/passed through) -- TODO
    // confirm against the processor's documentation.
    @KeywordArgs("visited=0,second")
    public KeyVisited(Object... args) {}

    public String toString() {
        return String.format("visited: %s, places: %s, %s, %s",
                visited, first, second, third);
    }
}
|
// To check if a library is compiled with CocoaPods you
// can use the `COCOAPODS` macro definition which is
// defined in the xcconfigs so it is available in
// headers also when they are imported in the client
// project.
// NOTE(review): this header is generated by CocoaPods (`pod install`);
// change the Podfile and regenerate rather than editing it by hand.

// AFNetworking
#define COCOAPODS_POD_AVAILABLE_AFNetworking
#define COCOAPODS_VERSION_MAJOR_AFNetworking 2
#define COCOAPODS_VERSION_MINOR_AFNetworking 5
#define COCOAPODS_VERSION_PATCH_AFNetworking 4

// AFNetworking/NSURLConnection
#define COCOAPODS_POD_AVAILABLE_AFNetworking_NSURLConnection
#define COCOAPODS_VERSION_MAJOR_AFNetworking_NSURLConnection 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_NSURLConnection 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_NSURLConnection 4

// AFNetworking/NSURLSession
#define COCOAPODS_POD_AVAILABLE_AFNetworking_NSURLSession
#define COCOAPODS_VERSION_MAJOR_AFNetworking_NSURLSession 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_NSURLSession 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_NSURLSession 4

// AFNetworking/Reachability
#define COCOAPODS_POD_AVAILABLE_AFNetworking_Reachability
#define COCOAPODS_VERSION_MAJOR_AFNetworking_Reachability 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_Reachability 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_Reachability 4

// AFNetworking/Security
#define COCOAPODS_POD_AVAILABLE_AFNetworking_Security
#define COCOAPODS_VERSION_MAJOR_AFNetworking_Security 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_Security 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_Security 4

// AFNetworking/Serialization
#define COCOAPODS_POD_AVAILABLE_AFNetworking_Serialization
#define COCOAPODS_VERSION_MAJOR_AFNetworking_Serialization 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_Serialization 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_Serialization 4

// AFNetworking/UIKit
#define COCOAPODS_POD_AVAILABLE_AFNetworking_UIKit
#define COCOAPODS_VERSION_MAJOR_AFNetworking_UIKit 2
#define COCOAPODS_VERSION_MINOR_AFNetworking_UIKit 5
#define COCOAPODS_VERSION_PATCH_AFNetworking_UIKit 4

// Bolts/Tasks
#define COCOAPODS_POD_AVAILABLE_Bolts_Tasks
#define COCOAPODS_VERSION_MAJOR_Bolts_Tasks 1
#define COCOAPODS_VERSION_MINOR_Bolts_Tasks 2
#define COCOAPODS_VERSION_PATCH_Bolts_Tasks 1

// GUITabPagerViewController
#define COCOAPODS_POD_AVAILABLE_GUITabPagerViewController
#define COCOAPODS_VERSION_MAJOR_GUITabPagerViewController 0
#define COCOAPODS_VERSION_MINOR_GUITabPagerViewController 0
#define COCOAPODS_VERSION_PATCH_GUITabPagerViewController 7

// JTCalendar
#define COCOAPODS_POD_AVAILABLE_JTCalendar
#define COCOAPODS_VERSION_MAJOR_JTCalendar 2
#define COCOAPODS_VERSION_MINOR_JTCalendar 1
#define COCOAPODS_VERSION_PATCH_JTCalendar 1

// KVNProgress
#define COCOAPODS_POD_AVAILABLE_KVNProgress
#define COCOAPODS_VERSION_MAJOR_KVNProgress 2
#define COCOAPODS_VERSION_MINOR_KVNProgress 2
#define COCOAPODS_VERSION_PATCH_KVNProgress 1

// MBCalendarKit
#define COCOAPODS_POD_AVAILABLE_MBCalendarKit
#define COCOAPODS_VERSION_MAJOR_MBCalendarKit 3
#define COCOAPODS_VERSION_MINOR_MBCalendarKit 0
#define COCOAPODS_VERSION_PATCH_MBCalendarKit 4
// Parse
#define COCOAPODS_POD_AVAILABLE_Parse
#define COCOAPODS_VERSION_MAJOR_Parse 1
#define COCOAPODS_VERSION_MINOR_Parse 8
#define COCOAPODS_VERSION_PATCH_Parse 2
// RDImageViewerController
#define COCOAPODS_POD_AVAILABLE_RDImageViewerController
#define COCOAPODS_VERSION_MAJOR_RDImageViewerController 0
#define COCOAPODS_VERSION_MINOR_RDImageViewerController 3
#define COCOAPODS_VERSION_PATCH_RDImageViewerController 7
// SDWebImage
#define COCOAPODS_POD_AVAILABLE_SDWebImage
#define COCOAPODS_VERSION_MAJOR_SDWebImage 3
#define COCOAPODS_VERSION_MINOR_SDWebImage 7
#define COCOAPODS_VERSION_PATCH_SDWebImage 3
// SDWebImage/Core
#define COCOAPODS_POD_AVAILABLE_SDWebImage_Core
#define COCOAPODS_VERSION_MAJOR_SDWebImage_Core 3
#define COCOAPODS_VERSION_MINOR_SDWebImage_Core 7
#define COCOAPODS_VERSION_PATCH_SDWebImage_Core 3
// XCDYouTubeKit
#define COCOAPODS_POD_AVAILABLE_XCDYouTubeKit
#define COCOAPODS_VERSION_MAJOR_XCDYouTubeKit 2
#define COCOAPODS_VERSION_MINOR_XCDYouTubeKit 2
#define COCOAPODS_VERSION_PATCH_XCDYouTubeKit 0
|
def odd_elements(arr):
    """Return a new list containing the odd numbers of arr, in order."""
    return [value for value in arr if value % 2 != 0]

odd_elements([2, 4, 3, 5, 6, 7, 8, 9, 10])
// repo: Eldius/minecraft-manager-go
package config
import (
"fmt"
"os"
"path/filepath"
"github.com/mitchellh/go-homedir"
"github.com/spf13/viper"
)
// init eagerly creates the workspace folder so later code can assume it
// exists; the MkdirAll error is deliberately ignored (best effort).
func init() {
	_ = os.MkdirAll(GetWorkspaceFolder(), os.ModePerm)
}
// GetWorkspaceFolder returns the workspace folder
// (~/.minecraft-manager/workspace). If the user's home directory cannot be
// resolved, the error is printed and the process exits with status 1.
func GetWorkspaceFolder() string {
	userHome, dirErr := homedir.Dir()
	if dirErr != nil {
		fmt.Println(dirErr)
		os.Exit(1)
	}
	workspace := filepath.Join(userHome, folderName, "workspace")
	return workspace
}
// GetAnsibleRoleGitURL returns the Git URL used to fetch the Ansible
// installation role, read from the viper configuration.
func GetAnsibleRoleGitURL() string {
	const key = "minemanager.ansible.role.git-url"
	return viper.GetString(key)
}
// GetMojangVersionsURL returns the URL used to fetch Minecraft versions,
// read from the viper configuration.
func GetMojangVersionsURL() string {
	const key = "minemanager.mojang.versions-url"
	return viper.GetString(key)
}
|
// file: app/src/main/java/com/example/wesense_wearos/beans/Combine_u_ut.java
package com.example.wesense_wearos.beans;
/**
 * Simple value holder pairing a {@link User} with a {@link User_Task}.
 */
public class Combine_u_ut {

    private User u;       // the user half of the pair
    private User_Task ut; // the task half of the pair

    public Combine_u_ut(User u, User_Task ut) {
        this.u = u;
        this.ut = ut;
    }

    public User getU() {
        return u;
    }

    public void setU(User u) {
        this.u = u;
    }

    public User_Task getUt() {
        return ut;
    }

    public void setUt(User_Task ut) {
        this.ut = ut;
    }

    @Override
    public String toString() {
        return "Combine_u_ut{" + "u=" + u + ", ut=" + ut + '}';
    }
}
|
require 'net/http'
require 'json'

# Fetch the endpoint and collect the 'string' field of every returned object.
endpoint = URI('http://example.com/api/endpoint')
body = Net::HTTP.get(endpoint)

# Parse the JSON response body.
parsed = JSON.parse(body)

# Extract the 'string' value from each object.
strings = parsed.map { |record| record['string'] }

puts strings
// repo: infamousSs/zod
package com.infamous.zod.ftp.model;
import java.util.Objects;
import lombok.Getter;
/**
 * Immutable value object holding a validated FTP username.
 *
 * Validation rules: non-null, non-empty, at least 4 characters long, and
 * must not contain any of the characters '@', '/' or '\'.
 */
public class FTPUserName {

    /** Characters that may never appear in a username. */
    private static final String FORBIDDEN_CHARS = "@/\\";

    private @Getter final String m_username;

    /**
     * Creates a validated username.
     *
     * @param userName candidate username
     * @throws IllegalArgumentException if the name is null/empty, shorter
     *         than 4 characters, or contains a forbidden character
     */
    public FTPUserName(String userName) {
        if (userName == null || userName.isEmpty()) {
            throw new IllegalArgumentException("Username is mandatory");
        }
        if (userName.length() < 4) {
            throw new IllegalArgumentException("Username must be at least 4 character");
        }
        for (int i = 0, len = userName.length(); i < len; i++) {
            char c = userName.charAt(i);
            if (FORBIDDEN_CHARS.indexOf(c) >= 0) {
                // Bug fix: the original backslash message was truncated
                // ("...can not contain '\" with no closing quote). Building
                // the message from the offending character fixes that while
                // keeping the '@' and '/' messages byte-identical.
                throw new IllegalArgumentException("Username can not contain '" + c + "'");
            }
        }
        this.m_username = userName;
    }

    /** Two usernames are equal iff their string values are equal. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        FTPUserName that = (FTPUserName) o;
        return Objects.equals(m_username, that.m_username);
    }

    @Override
    public int hashCode() {
        return Objects.hash(m_username);
    }
}
|
// +build daemon
package utils
import (
"os"
"syscall"
)
// IsFileOwner checks whether the current user is the owner of the given file.
func IsFileOwner(f string) bool {
if fileInfo, err := os.Stat(f); err == nil && fileInfo != nil {
if stat, ok := fileInfo.Sys().(*syscall.Stat_t); ok && int(stat.Uid) == os.Getuid() {
return true
}
}
return false
}
|
#!/bin/bash
# Build the crate for the wasm32 target and post-process the wasm-pack
# output so the generated JS imports `update_message` from ../index.js.

# Fail fast on any error, unset variable, or pipeline failure; the original
# script kept going (and ran the sed-like rewrite) even after a failed build.
set -euo pipefail

cargo build --target wasm32-unknown-unknown
wasm-pack build

#### hack !!!!
echo "---------------------"
echo " UGLY HACK ... (due to incorrect usage of webpack or wasm_bindgen?)"
echo "---------------------"
cd pkg
mv sandbox_bg.js sandbox_bg.js.tmp
# Prepend the import line; a command group avoids the extra tmp.txt file
# the original created and removed.
{
  echo "import {update_message} from '../index.js';"
  cat sandbox_bg.js.tmp
} > sandbox_bg.js
rm sandbox_bg.js.tmp
cd ..
|
# Global JSONAPI::Resources configuration.
JSONAPI.configure do |config|
  # Keying: serialize attribute keys as camelCase in JSON payloads.
  config.json_key_format = :camelized_key

  # Pagination: offset-based paginator; 10 records per page by default,
  # clients may request at most 20.
  config.default_paginator = :offset
  config.default_page_size = 10
  config.maximum_page_size = 20

  # Caching: reuse the Rails cache store for resource caching.
  config.resource_cache = Rails.cache

  # Metadata: expose the total record count as top-level meta key "count".
  config.top_level_meta_include_record_count = true
  config.top_level_meta_record_count_key = :count
end
|
#include "global.h"
#include "LowLevelWindow_X11.h"
#include "RageLog.h"
#include "RageException.h"
#include "archutils/Unix/X11Helper.h"
#include "PrefsManager.h" // XXX
#include "RageDisplay.h" // VideoModeParams
#include "DisplayResolutions.h"
#include "LocalizedString.h"
#include "RageDisplay_OGL_Helpers.h"
using namespace RageDisplay_Legacy_Helpers;
using namespace X11Helper;
#include <stack>
#include <math.h> // ceil()
#define GLX_GLXEXT_PROTOTYPES
#include <GL/glx.h> // All sorts of stuff...
#include <X11/Xlib.h>
#include <X11/Xatom.h>
#include <X11/extensions/Xrandr.h>
#if defined(HAVE_LIBXTST)
#include <X11/extensions/XTest.h>
#endif
static GLXContext g_pContext = NULL;
static GLXContext g_pBackgroundContext = NULL;
static Window g_AltWindow = None;
static Rotation g_OldRotation;
static int g_iOldSize;
XRRScreenConfiguration *g_pScreenConfig = NULL;
static LocalizedString FAILED_CONNECTION_XSERVER( "LowLevelWindow_X11", "Failed to establish a connection with the X server" );
// Opens the X connection, logs server/GLX vendor information, and captures
// the current XRandR screen configuration so it can be restored at exit.
LowLevelWindow_X11::LowLevelWindow_X11()
{
	if( !OpenXConnection() )
		RageException::Throw( "%s", FAILED_CONNECTION_XSERVER.GetValue().c_str() );

	const int iScreen = DefaultScreen( Dpy );

	// Decompose the packed vendor release number into
	// major.minor.revision.patch for logging.
	int iXServerVersion = XVendorRelease( Dpy ); /* eg. 40201001 */
	int iMajor = iXServerVersion / 10000000; iXServerVersion %= 10000000;
	int iMinor = iXServerVersion / 100000; iXServerVersion %= 100000;
	int iRevision = iXServerVersion / 1000; iXServerVersion %= 1000;
	int iPatch = iXServerVersion;

	LOG->Info( "Display: %s (screen %i)", DisplayString(Dpy), iScreen );
	LOG->Info( "X server vendor: %s [%i.%i.%i.%i]", XServerVendor( Dpy ), iMajor, iMinor, iRevision, iPatch );
	LOG->Info( "Server GLX vendor: %s [%s]", glXQueryServerString( Dpy, iScreen, GLX_VENDOR ), glXQueryServerString( Dpy, iScreen, GLX_VERSION ) );
	LOG->Info( "Client GLX vendor: %s [%s]", glXGetClientString( Dpy, GLX_VENDOR ), glXGetClientString( Dpy, GLX_VERSION ) );

	m_bWasWindowed = true;

	// Remember the current resolution/rotation so the destructor can restore it.
	g_pScreenConfig = XRRGetScreenInfo( Dpy, RootWindow(Dpy, DefaultScreen(Dpy)) );
	g_iOldSize = XRRConfigCurrentConfiguration( g_pScreenConfig, &g_OldRotation );
}
// Restores the original screen configuration if we left fullscreen active,
// tears down both GL contexts, destroys the windows, and closes the X
// connection.
LowLevelWindow_X11::~LowLevelWindow_X11()
{
	// Reset the display
	if( !m_bWasWindowed )
	{
		XRRSetScreenConfig( Dpy, g_pScreenConfig, RootWindow(Dpy, DefaultScreen(Dpy)), g_iOldSize, g_OldRotation, CurrentTime );
		XUngrabKeyboard( Dpy, CurrentTime );
	}
	if( g_pContext )
	{
		glXDestroyContext( Dpy, g_pContext );
		g_pContext = NULL;
	}
	if( g_pBackgroundContext )
	{
		glXDestroyContext( Dpy, g_pBackgroundContext );
		g_pBackgroundContext = NULL;
	}
	XRRFreeScreenConfigInfo( g_pScreenConfig );
	g_pScreenConfig = NULL;
	// NOTE(review): Win/g_AltWindow may still be None if TryVideoMode never
	// created them; XDestroyWindow(None) would then generate an X error —
	// confirm whether construction guarantees TryVideoMode runs first.
	XDestroyWindow( Dpy, Win );
	Win = None;
	XDestroyWindow( Dpy, g_AltWindow );
	g_AltWindow = None;

	CloseXConnection();
}
// Resolves a GL extension entry point by name via GLX.
void *LowLevelWindow_X11::GetProcAddress( RString s )
{
	// XXX: We should check whether glXGetProcAddress or
	// glXGetProcAddressARB is available/not NULL, and go by that,
	// instead of assuming like this.
	return (void*) glXGetProcAddressARB( (const GLubyte*) s.c_str() );
}
/* Applies the requested video mode. Creates a new window/GL context when the
 * depth or windowed/fullscreen state changes (setting bNewDeviceOut), switches
 * the XRandR mode for fullscreen, and sizes/positions the window. Returns ""
 * on success or a human-readable error string. */
RString LowLevelWindow_X11::TryVideoMode( const VideoModeParams &p, bool &bNewDeviceOut )
{
#if defined(UNIX)
	/* nVidia cards:
	 * This only works the first time we set up a window; after that, the
	 * drivers appear to cache the value, so you have to actually restart
	 * the program to change it again. */
	static char buf[128];
	strcpy( buf, "__GL_SYNC_TO_VBLANK=" );
	strcat( buf, p.vsync?"1":"0" );
	putenv( buf );
#endif

	if( g_pContext == NULL || p.bpp != CurrentParams.bpp || m_bWasWindowed != p.windowed )
	{
		// Different depth, or we didn't make a window before. New context.
		bNewDeviceOut = true;

		int visAttribs[32];
		int i = 0;
		ASSERT( p.bpp == 16 || p.bpp == 32 );

		// Request an RGB visual matching the desired depth (8-8-8 or 5-6-5).
		if( p.bpp == 32 )
		{
			visAttribs[i++] = GLX_RED_SIZE;		visAttribs[i++] = 8;
			visAttribs[i++] = GLX_GREEN_SIZE;	visAttribs[i++] = 8;
			visAttribs[i++] = GLX_BLUE_SIZE;	visAttribs[i++] = 8;
		}
		else
		{
			visAttribs[i++] = GLX_RED_SIZE;		visAttribs[i++] = 5;
			visAttribs[i++] = GLX_GREEN_SIZE;	visAttribs[i++] = 6;
			visAttribs[i++] = GLX_BLUE_SIZE;	visAttribs[i++] = 5;
		}

		visAttribs[i++] = GLX_DEPTH_SIZE;	visAttribs[i++] = 16;
		visAttribs[i++] = GLX_RGBA;
		visAttribs[i++] = GLX_DOUBLEBUFFER;

		visAttribs[i++] = None;

		XVisualInfo *xvi = glXChooseVisual( Dpy, DefaultScreen(Dpy), visAttribs );
		if( xvi == NULL )
			return "No visual available for that depth.";

		// I get strange behavior if I add override redirect after creating the window.
		// So, let's recreate the window when changing that state.
		if( !MakeWindow(Win, xvi->screen, xvi->depth, xvi->visual, p.width, p.height, !p.windowed) )
			return "Failed to create the window.";

		if( !MakeWindow(g_AltWindow, xvi->screen, xvi->depth, xvi->visual, p.width, p.height, !p.windowed) )
			FAIL_M( "Failed to create the alt window." ); // Should this be fatal?

		char *szWindowTitle = const_cast<char *>( p.sWindowTitle.c_str() );
		XChangeProperty( Dpy, Win, XA_WM_NAME, XA_STRING, 8, PropModeReplace,
				reinterpret_cast<unsigned char*>(szWindowTitle), strlen(szWindowTitle) );

		// Tear down any previous contexts before creating the new pair.
		if( g_pContext )
			glXDestroyContext( Dpy, g_pContext );
		if( g_pBackgroundContext )
			glXDestroyContext( Dpy, g_pBackgroundContext );
		g_pContext = glXCreateContext( Dpy, xvi, NULL, True );
		// Background context shares objects with the main one (threaded rendering).
		g_pBackgroundContext = glXCreateContext( Dpy, xvi, g_pContext, True );

		glXMakeCurrent( Dpy, Win, g_pContext );

		// Map the window, ensuring we get the MapNotify event
		XWindowAttributes winAttrib;
		XGetWindowAttributes( Dpy, Win, &winAttrib );
		XSelectInput( Dpy, Win, winAttrib.your_event_mask | StructureNotifyMask );
		XMapWindow( Dpy, Win );

		// Wait until we actually have a mapped window before trying to
		// use it!
		XEvent event;
		do
		{
			XNextEvent( Dpy, &event );
		} while (event.type != MapNotify);

		// Set the event mask back to what it was
		XSelectInput( Dpy, Win, winAttrib.your_event_mask );
	}
	else
	{
		// We're remodeling the existing window, and not touching the context.
		bNewDeviceOut = false;
	}

	if( !p.windowed )
	{
		if( m_bWasWindowed )
		{
			// If the user changed the resolution while StepMania was windowed we overwrite the resolution to restore with it at exit.
			g_iOldSize = XRRConfigCurrentConfiguration( g_pScreenConfig, &g_OldRotation );
			m_bWasWindowed = false;
		}

		// Find a matching mode.
		int iSizesXct;
		XRRScreenSize *pSizesX = XRRSizes( Dpy, DefaultScreen(Dpy), &iSizesXct );
		ASSERT_M( iSizesXct != 0, "Couldn't get resolution list from X server" );

		int iSizeMatch = -1;

		for( int i = 0; i < iSizesXct; ++i )
		{
			if( pSizesX[i].width == p.width && pSizesX[i].height == p.height )
			{
				iSizeMatch = i;
				break;
			}
		}

		// Bug fix: previously a nonexistent resolution fell through and
		// passed -1 to XRRSetScreenConfig. Report it as an error instead.
		if( iSizeMatch == -1 )
			return "No matching fullscreen resolution found.";

		// Set this mode.
		// XXX: This doesn't handle if the config has changed since we queried it (see man Xrandr)
		XRRSetScreenConfig( Dpy, g_pScreenConfig, RootWindow(Dpy, DefaultScreen(Dpy)), iSizeMatch, 1, CurrentTime );

		XRaiseWindow( Dpy, Win );

		// We want to prevent the WM from catching anything that comes from the keyboard.
		// We should do this every time on fullscreen and not only we entering from windowed mode because we could lose focus at resolution change and that will leave the user input locked.
		XGrabKeyboard( Dpy, Win, True, GrabModeAsync, GrabModeAsync, CurrentTime );
	}
	else
	{
		if( !m_bWasWindowed )
		{
			// Restore the mode that was active before we went fullscreen.
			XRRSetScreenConfig( Dpy, g_pScreenConfig, RootWindow(Dpy, DefaultScreen(Dpy)), g_iOldSize, g_OldRotation, CurrentTime );
			// In windowed mode, we actually want the WM to function normally.
			// Release any previous grab.
			XUngrabKeyboard( Dpy, CurrentTime );
			m_bWasWindowed = true;
		}
	}

	// NOTE: nVidia's implementation of this is broken by default.
	// The only ways around this are mucking with xorg.conf or querying
	// nvidia-settings with "$ nvidia-settings -t -q RefreshRate".
	int rate = XRRConfigCurrentRate( g_pScreenConfig );

	// Make a window fixed size, don't let resize it or maximize it.
	// Do this before resizing the window so that pane-style WMs (Ion,
	// ratpoison) don't resize us back inappropriately.
	{
		XSizeHints hints;

		hints.flags = PMinSize|PMaxSize|PWinGravity;
		hints.min_width = hints.max_width = p.width;
		hints.min_height = hints.max_height = p.height;
		hints.win_gravity = CenterGravity;

		XSetWMNormalHints( Dpy, Win, &hints );
	}

	/* Workaround for metacity and compiz: if the window have the same
	 * resolution or higher than the screen, it gets automaximized even
	 * when the window is set to not let it happen. This happens when
	 * changing from fullscreen to window mode and our screen resolution
	 * is bigger. */
	{
		XEvent xev;
		Atom wm_state = XInternAtom(Dpy, "_NET_WM_STATE", False);
		Atom maximized_vert = XInternAtom(Dpy, "_NET_WM_STATE_MAXIMIZED_VERT", False);
		Atom maximized_horz = XInternAtom(Dpy, "_NET_WM_STATE_MAXIMIZED_HORZ", False);

		// _NET_WM_STATE ClientMessage with action 1 (add): explicitly ask the
		// WM to drop the maximized state in both directions.
		memset(&xev, 0, sizeof(xev));
		xev.type = ClientMessage;
		xev.xclient.window = Win;
		xev.xclient.message_type = wm_state;
		xev.xclient.format = 32;
		xev.xclient.data.l[0] = 1;
		xev.xclient.data.l[1] = maximized_vert;
		xev.xclient.data.l[2] = 0;
		XSendEvent(Dpy, DefaultRootWindow(Dpy), False, SubstructureNotifyMask, &xev);

		xev.xclient.data.l[1] = maximized_horz;
		XSendEvent(Dpy, DefaultRootWindow(Dpy), False, SubstructureNotifyMask, &xev);

		// This one is needed for compiz, if the window reaches out of bounds of the screen it becames destroyed, only the window, the program is left running.
		// Commented out per the patch at http://ssc.ajworld.net/sm-ssc/bugtracker/view.php?id=398
		//XMoveWindow( Dpy, Win, 0, 0 );
	}

	// Resize the window.
	XResizeWindow( Dpy, Win, p.width, p.height );

	CurrentParams = p;
	CurrentParams.rate = rate;

	return ""; // Success
}
// Logs whether the current GLX context uses direct rendering.
void LowLevelWindow_X11::LogDebugInformation() const
{
	LOG->Info( "Direct rendering: %s", glXIsDirect( Dpy, glXGetCurrentContext() )? "yes":"no" );
}
// Reports whether rendering is indirect (software); when it is, sError is
// filled with a user-facing explanation.
bool LowLevelWindow_X11::IsSoftwareRenderer( RString &sError )
{
	const bool bDirect = glXIsDirect( Dpy, glXGetCurrentContext() );
	if( !bDirect )
		sError = "Direct rendering is not available.";
	return !bDirect;
}
// Presents the back buffer; optionally fakes user activity so X screen
// blankers/XScreenSaver don't kick in mid-game.
void LowLevelWindow_X11::SwapBuffers()
{
	glXSwapBuffers( Dpy, Win );

	if( PREFSMAN->m_bDisableScreenSaver )
	{
		// Disable the screensaver.
#if defined(HAVE_LIBXTST)
		// This causes flicker.
		// XForceScreenSaver( Dpy, ScreenSaverReset );

		/* Instead, send a null relative mouse motion, to trick X into thinking
		 * there has been user activity.
		 *
		 * This also handles XScreenSaver; XForceScreenSaver only handles the
		 * internal X11 screen blanker.
		 *
		 * This will delay the X blanker, DPMS and XScreenSaver from activating,
		 * and will disable the blanker and XScreenSaver if they're already active
		 * (unless XSS is locked). For some reason, it doesn't un-blank DPMS if
		 * it's already active.
		 */
		XLockDisplay( Dpy );

		int event_base, error_base, major, minor;
		// Only fake the motion when the XTest extension is actually present.
		if( XTestQueryExtension( Dpy, &event_base, &error_base, &major, &minor ) )
		{
			XTestFakeRelativeMotionEvent( Dpy, 0, 0, 0 );
			XSync( Dpy, False );
		}

		XUnlockDisplay( Dpy );
#endif
	}
}
// Fills 'out' with every resolution XRandR reports for the default screen.
void LowLevelWindow_X11::GetDisplayResolutions( DisplayResolutions &out ) const
{
	int iSizesXct;
	XRRScreenSize *pSizesX = XRRSizes( Dpy, DefaultScreen( Dpy ), &iSizesXct );
	ASSERT_M( iSizesXct != 0, "Couldn't get resolution list from X server" );

	for( int i = 0; i < iSizesXct; ++i )
	{
		DisplayResolution res = { pSizesX[i].width, pSizesX[i].height, true };
		out.insert( res );
	}
}
// Threaded rendering is available iff the shared background context was
// created successfully in TryVideoMode.
bool LowLevelWindow_X11::SupportsThreadedRendering()
{
	return g_pBackgroundContext != NULL;
}
/* Off-screen render target backed by a GLX Pbuffer: rendering happens in a
 * dedicated shared context into the Pbuffer, then is copied into a GL
 * texture in FinishRenderingTo(). */
class RenderTarget_X11: public RenderTarget
{
public:
	RenderTarget_X11( LowLevelWindow_X11 *pWind );
	~RenderTarget_X11();

	void Create( const RenderTargetParam &param, int &iTextureWidthOut, int &iTextureHeightOut );
	unsigned GetTexture() const { return m_iTexHandle; }

	void StartRenderingTo();
	void FinishRenderingTo();

	// Copying from the Pbuffer to the texture flips Y.
	virtual bool InvertY() const { return true; }

private:
	int m_iWidth, m_iHeight;      // requested render size (not the pow2 texture size)
	LowLevelWindow_X11 *m_pWind;
	GLXPbuffer m_iPbuffer;        // off-screen drawable
	GLXContext m_pPbufferContext; // context that renders into the Pbuffer
	unsigned int m_iTexHandle;    // destination GL texture
	GLXContext m_pOldContext;     // saved across Start/FinishRenderingTo
	GLXDrawable m_pOldDrawable;
};
// Initializes all handles to "not created"; actual resources are allocated
// in Create().
RenderTarget_X11::RenderTarget_X11( LowLevelWindow_X11 *pWind ) :
	m_pWind( pWind ),
	m_iPbuffer( 0 ),
	m_pPbufferContext( NULL ),
	m_iTexHandle( 0 ),
	m_pOldContext( NULL ),
	m_pOldDrawable( 0 )
{
}
// Releases the Pbuffer context, the Pbuffer, and the GL texture; each is
// skipped if it was never created.
RenderTarget_X11::~RenderTarget_X11()
{
	if( m_pPbufferContext )
		glXDestroyContext( Dpy, m_pPbufferContext );
	if( m_iPbuffer )
		glXDestroyPbuffer( Dpy, m_iPbuffer );
	if( m_iTexHandle )
		glDeleteTextures( 1, reinterpret_cast<GLuint*>(&m_iTexHandle) );
}
/* Note that although the texture size may need to be a power of 2,
 * the Pbuffer does not. */
void RenderTarget_X11::Create( const RenderTargetParam &param, int &iTextureWidthOut, int &iTextureHeightOut )
{
	//ASSERT( param.iWidth == power_of_two(param.iWidth) && param.iHeight == power_of_two(param.iHeight) );

	m_iWidth = param.iWidth;
	m_iHeight = param.iHeight;

	/* NOTE: int casts on GLX_DONT_CARE are for -Werror=narrowing */
	int pConfigAttribs[] =
	{
		GLX_DRAWABLE_TYPE, GLX_PBUFFER_BIT,
		GLX_RENDER_TYPE, GLX_RGBA_BIT,

		GLX_RED_SIZE, 8,
		GLX_GREEN_SIZE, 8,
		GLX_BLUE_SIZE, 8,
		GLX_ALPHA_SIZE, param.bWithAlpha? 8: (int) GLX_DONT_CARE,

		GLX_DOUBLEBUFFER, False,
		GLX_DEPTH_SIZE, param.bWithDepthBuffer? 16: (int) GLX_DONT_CARE,
		None
	};
	int iConfigs;
	GLXFBConfig *pConfigs = glXChooseFBConfig( Dpy, DefaultScreen(Dpy), pConfigAttribs, &iConfigs );
	ASSERT( pConfigs );

	const int pPbufferAttribs[] =
	{
		GLX_PBUFFER_WIDTH, param.iWidth,
		GLX_PBUFFER_HEIGHT, param.iHeight,
		None
	};

	// Take the first FBConfig for which a Pbuffer can actually be created;
	// the Pbuffer context shares objects with the main context (g_pContext).
	for( int i = 0; i < iConfigs; ++i )
	{
		m_iPbuffer = glXCreatePbuffer( Dpy, pConfigs[i], pPbufferAttribs );
		if( m_iPbuffer == 0 )
			continue;

		XVisualInfo *pVisual = glXGetVisualFromFBConfig( Dpy, pConfigs[i] );
		m_pPbufferContext = glXCreateContext( Dpy, pVisual, g_pContext, True );
		ASSERT( m_pPbufferContext );
		XFree( pVisual );
		break;
	}

	ASSERT( m_iPbuffer );

	// allocate OpenGL texture resource
	glGenTextures( 1, reinterpret_cast<GLuint*>(&m_iTexHandle) );
	glBindTexture( GL_TEXTURE_2D, m_iTexHandle );

	LOG->Trace( "n %i, %ix%i", m_iTexHandle, param.iWidth, param.iHeight );
	// Drain any stale GL errors so the check after glTexImage2D is accurate.
	while( glGetError() != GL_NO_ERROR )
		;

	// The texture itself is rounded up to powers of two; callers receive the
	// actual texture dimensions through the out parameters.
	int iTextureWidth = power_of_two( param.iWidth );
	int iTextureHeight = power_of_two( param.iHeight );
	iTextureWidthOut = iTextureWidth;
	iTextureHeightOut = iTextureHeight;

	glTexImage2D( GL_TEXTURE_2D, 0, param.bWithAlpha? GL_RGBA8:GL_RGB8,
			iTextureWidth, iTextureHeight, 0, param.bWithAlpha? GL_RGBA:GL_RGB, GL_UNSIGNED_BYTE, NULL );
	GLenum error = glGetError();
	ASSERT_M( error == GL_NO_ERROR, GLToString(error) );

	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
	glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
}
// Redirects rendering to the Pbuffer, saving the current context/drawable
// so FinishRenderingTo() can restore them.
void RenderTarget_X11::StartRenderingTo()
{
	m_pOldContext = glXGetCurrentContext();
	m_pOldDrawable = glXGetCurrentDrawable();
	glXMakeCurrent( Dpy, m_iPbuffer, m_pPbufferContext );

	glViewport( 0, 0, m_iWidth, m_iHeight );
}
// Copies the Pbuffer contents into the texture, then restores the
// previously-active context/drawable saved by StartRenderingTo().
void RenderTarget_X11::FinishRenderingTo()
{
	glFlush();

	glBindTexture( GL_TEXTURE_2D, m_iTexHandle );

	// Drain any stale GL errors so the check after the copy is accurate.
	while( glGetError() != GL_NO_ERROR )
		;

	glCopyTexSubImage2D( GL_TEXTURE_2D, 0, 0, 0, 0, 0, m_iWidth, m_iHeight );
	GLenum error = glGetError();
	ASSERT_M( error == GL_NO_ERROR, GLToString(error) );

	glBindTexture( GL_TEXTURE_2D, 0 );

	glXMakeCurrent( Dpy, m_pOldDrawable, m_pOldContext );
	m_pOldContext = NULL;
	m_pOldDrawable = 0;
}
// Render-to-texture uses GLX Pbuffers, which require server GLX >= 1.3.
bool LowLevelWindow_X11::SupportsRenderToTexture() const
{
	// Server must support pbuffers:
	const int iScreen = DefaultScreen( Dpy );
	const char *szVersion = glXQueryServerString( Dpy, iScreen, GLX_VERSION );
	// Bug fix: glXQueryServerString can return NULL (e.g. no GLX on the
	// server); the original passed that straight to strtof, which is
	// undefined behavior.
	if( szVersion == NULL )
		return false;
	float fVersion = strtof( szVersion, NULL );
	if( fVersion < 1.3f )
		return false;

	return true;
}
// Factory for the Pbuffer-backed render target defined above.
RenderTarget *LowLevelWindow_X11::CreateRenderTarget()
{
	return new RenderTarget_X11( this );
}
void LowLevelWindow_X11::BeginConcurrentRenderingMainThread()
{
	/* Move the main thread, which is going to be loading textures, etc.
	 * but not rendering, to an undisplayed window. This results in
	 * smoother rendering. */
	bool b = glXMakeCurrent( Dpy, g_AltWindow, g_pContext );
	ASSERT(b);
}
// Main thread resumes rendering on the visible window with the main context.
void LowLevelWindow_X11::EndConcurrentRenderingMainThread()
{
	bool b = glXMakeCurrent( Dpy, Win, g_pContext );
	ASSERT(b);
}
// Render thread binds the visible window with the shared background context.
void LowLevelWindow_X11::BeginConcurrentRendering()
{
	bool b = glXMakeCurrent( Dpy, Win, g_pBackgroundContext );
	ASSERT(b);
}
// Render thread releases its context binding entirely.
void LowLevelWindow_X11::EndConcurrentRendering()
{
	bool b = glXMakeCurrent( Dpy, None, NULL );
	ASSERT(b);
}
/*
* (c) 2005 <NAME>
* All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, and/or sell copies of the Software, and to permit persons to
* whom the Software is furnished to do so, provided that the above
* copyright notice(s) and this permission notice appear in all copies of
* the Software and that both the above copyright notice(s) and this
* permission notice appear in supporting documentation.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
* THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS
* INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT
* OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
* OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
* OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*/
|
// repo: wnbx/snail
package com.acgist.snail.downloader.http;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import org.junit.jupiter.api.Test;
import com.acgist.snail.context.ProtocolContext;
import com.acgist.snail.context.exception.DownloadException;
import com.acgist.snail.protocol.http.HttpProtocol;
import com.acgist.snail.utils.FileUtils;
import com.acgist.snail.utils.Performance;
/**
 * Tests for the HTTP downloader (the second test requires network access
 * to the Apache Tomcat mirror).
 */
public class HttpDownloaderTest extends Performance {

    @Test
    public void testHttpDownloaderBuild() throws DownloadException {
        final String url = "https://mirrors.bfsu.edu.cn/apache/tomcat/tomcat-9/v9.0.41/bin/apache-tomcat-9.0.41.zip";
        // Register the HTTP protocol so the context can build a task for the URL.
        ProtocolContext.getInstance().register(HttpProtocol.getInstance()).available(true);
        final var taskSession = HttpProtocol.getInstance().buildTaskSession(url);
        final var downloader = taskSession.buildDownloader();
        // downloader.run(); // do not actually download here
        assertNotNull(downloader);
        // taskSession.delete();
    }

    @Test
    public void testHttpDownloader() throws DownloadException {
        // Skip the slow, network-bound download when costly tests are disabled.
        if(SKIP_COSTED) {
            this.log("跳过testHttpDownloader测试");
            return;
        }
        final String url = "https://mirrors.bfsu.edu.cn/apache/tomcat/tomcat-9/v9.0.41/bin/apache-tomcat-9.0.41.zip";
        ProtocolContext.getInstance().register(HttpProtocol.getInstance()).available(true);
        final var taskSession = HttpProtocol.getInstance().buildTaskSession(url);
        final var downloader = taskSession.buildDownloader();
        downloader.run();
        // Verify the file landed on disk, then clean up the file and the task.
        final var file = new File(taskSession.getFile());
        assertTrue(file.exists());
        FileUtils.delete(taskSession.getFile());
        taskSession.delete();
    }
}
|
package quarksjob
import (
"context"
"fmt"
"path/filepath"
"github.com/pkg/errors"
batchv1 "k8s.io/api/batch/v1"
corev1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
crc "sigs.k8s.io/controller-runtime/pkg/client"
qjv1a1 "code.cloudfoundry.org/quarks-job/pkg/kube/apis/quarksjob/v1alpha1"
"code.cloudfoundry.org/quarks-utils/pkg/config"
"code.cloudfoundry.org/quarks-utils/pkg/ctxlog"
"code.cloudfoundry.org/quarks-utils/pkg/names"
"code.cloudfoundry.org/quarks-utils/pkg/podref"
vss "code.cloudfoundry.org/quarks-utils/pkg/versionedsecretstore"
)
const (
	// mountPath is where each container's output emptyDir volume is mounted.
	mountPath = "/mnt/quarks/"

	// EnvNamespace is the namespace in which the jobs run, used by
	// persist-output to create the secrets
	EnvNamespace = "NAMESPACE"
)
// setOwnerReferenceFunc sets an owner reference on object; injected so it
// can be stubbed in tests.
type setOwnerReferenceFunc func(owner, object metav1.Object, scheme *runtime.Scheme) error

// NewJobCreator returns a new job creator wired with the given client,
// scheme, owner-reference setter, operator config and secret store.
func NewJobCreator(client crc.Client, scheme *runtime.Scheme, f setOwnerReferenceFunc, config *config.Config, store vss.VersionedSecretStore) JobCreator {
	return jobCreatorImpl{
		client:            client,
		scheme:            scheme,
		setOwnerReference: f,
		config:            config,
		store:             store,
	}
}
// JobCreator is the interface that wraps the basic Create method.
type JobCreator interface {
	Create(ctx context.Context, qJob qjv1a1.QuarksJob) (retry bool, err error)
}

// jobCreatorImpl is the default JobCreator implementation.
type jobCreatorImpl struct {
	client            crc.Client               // k8s API client used to create Jobs and fetch references
	scheme            *runtime.Scheme          // scheme used for owner references
	setOwnerReference setOwnerReferenceFunc    // injected owner-reference setter
	config            *config.Config           // operator configuration
	store             vss.VersionedSecretStore // resolves versioned secret references
}
// Create satisfies the JobCreator interface. It builds a k8s Job from the
// QuarksJob's pod template, adding an "output-persist" sidecar and one
// emptyDir volume per container. It returns retry=true when a referenced
// ConfigMap/Secret is not present yet, so the caller can requeue.
func (j jobCreatorImpl) Create(ctx context.Context, qJob qjv1a1.QuarksJob) (bool, error) {
	namespace := qJob.Namespace
	template := qJob.Spec.Template.DeepCopy()

	serviceAccount, err := j.getServiceAccountName(ctx, namespace)
	if err != nil {
		return false, err
	}
	serviceAccountVolume, serviceAccountVolumeMount, err := j.serviceAccountMount(ctx, namespace, serviceAccount)
	if err != nil {
		return false, err
	}

	// Set serviceaccount to the container
	template.Spec.Template.Spec.Volumes = append(template.Spec.Template.Spec.Volumes, *serviceAccountVolume)

	ctxlog.Debugf(ctx, "Add persist output container, using DOCKER_IMAGE_TAG=%s", config.GetOperatorDockerImage())

	// Create a container for persisting output
	outputPersistContainer := corev1.Container{
		Name:            "output-persist",
		Image:           config.GetOperatorDockerImage(),
		ImagePullPolicy: config.GetOperatorImagePullPolicy(),
		Args:            []string{"persist-output"},
		Env: []corev1.EnvVar{
			{
				Name:  EnvNamespace,
				Value: namespace,
			},
		},
		VolumeMounts: []corev1.VolumeMount{*serviceAccountVolumeMount},
	}

	// Loop through containers and add quarks logging volume specs.
	for containerIndex, container := range template.Spec.Template.Spec.Containers {
		// Add pod volume specs to the pod
		podVolumeSpec := corev1.Volume{
			Name:         names.Sanitize(fmt.Sprintf("%s%s", "output-", container.Name)),
			VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}},
		}
		template.Spec.Template.Spec.Volumes = append(template.Spec.Template.Spec.Volumes, podVolumeSpec)

		// Add container volume specs to container
		containerVolumeMountSpec := corev1.VolumeMount{
			Name:      names.Sanitize(fmt.Sprintf("%s%s", "output-", container.Name)),
			MountPath: mountPath,
		}
		template.Spec.Template.Spec.Containers[containerIndex].VolumeMounts = append(template.Spec.Template.Spec.Containers[containerIndex].VolumeMounts, containerVolumeMountSpec)

		// Add container volume spec to output persist container
		// (same volume, mounted under a per-container subdirectory).
		containerVolumeMountSpec.MountPath = filepath.Join(mountPath, container.Name)
		outputPersistContainer.VolumeMounts = append(outputPersistContainer.VolumeMounts, containerVolumeMountSpec)
	}

	// Add output persist container to the pod template
	template.Spec.Template.Spec.Containers = append(template.Spec.Template.Spec.Containers, outputPersistContainer)

	if template.Spec.Template.Labels == nil {
		template.Spec.Template.Labels = map[string]string{}
	}
	template.Spec.Template.Labels[qjv1a1.LabelQJobName] = qJob.Name

	if err := j.store.SetSecretReferences(ctx, qJob.Namespace, &template.Spec.Template.Spec); err != nil {
		return false, err
	}

	// Validate quarks job configmap and secrets references
	err = j.validateReferences(ctx, qJob)
	if err != nil {
		if apierrors.IsNotFound(err) {
			// Requeue the job without error.
			return true, nil
		}
		return false, err
	}

	// Create k8s job
	name, err := names.JobName(qJob.Name)
	if err != nil {
		return false, errors.Wrapf(err, "could not generate job name for qJob '%s'", qJob.GetNamespacedName())
	}
	job := &batchv1.Job{
		ObjectMeta: metav1.ObjectMeta{
			Name:      name,
			Namespace: qJob.Namespace,
			Labels:    map[string]string{qjv1a1.LabelQJobName: qJob.Name},
		},
		Spec: template.Spec,
	}
	if err := j.setOwnerReference(&qJob, job, j.scheme); err != nil {
		return false, ctxlog.WithEvent(&qJob, "SetOwnerReferenceError").Errorf(ctx, "failed to set owner reference on job for '%s': %s", qJob.GetNamespacedName(), err)
	}
	if err := j.client.Create(ctx, job); err != nil {
		if apierrors.IsAlreadyExists(err) {
			ctxlog.WithEvent(&qJob, "AlreadyRunning").Infof(ctx, "Skip '%s': already running", qJob.GetNamespacedName())
			// Don't requeue the job.
			return false, nil
		}
		return true, err
	}

	return false, nil
}
// validateReferences checks that every ConfigMap and Secret referenced by
// the QuarksJob's pod template exists; it returns the (possibly NotFound)
// Get error unchanged so the caller can decide whether to requeue.
func (j jobCreatorImpl) validateReferences(ctx context.Context, qJob qjv1a1.QuarksJob) error {
	configMaps := podref.GetConfMapRefFromPod(qJob.Spec.Template.Spec.Template.Spec)
	configMap := &corev1.ConfigMap{}
	for configMapName := range configMaps {
		if err := j.client.Get(ctx, crc.ObjectKey{Name: configMapName, Namespace: qJob.Namespace}, configMap); err != nil {
			if apierrors.IsNotFound(err) {
				ctxlog.Debugf(ctx, "Skip create job '%s' due to configMap '%s' not found", qJob.GetNamespacedName(), configMapName)
			}
			return err
		}
	}

	secrets := podref.GetSecretRefFromPodSpec(qJob.Spec.Template.Spec.Template.Spec)
	secret := &corev1.Secret{}
	for secretName := range secrets {
		if err := j.client.Get(ctx, crc.ObjectKey{Name: secretName, Namespace: qJob.Namespace}, secret); err != nil {
			if apierrors.IsNotFound(err) {
				ctxlog.Debugf(ctx, "Skip create job '%s' due to secret '%s' not found", qJob.GetNamespacedName(), secretName)
			}
			return err
		}
	}

	return nil
}
|
// repo: diegoperezl/cf4j
package es.upm.etsisi.cf4j.util.optimization;
import es.upm.etsisi.cf4j.data.DataModel;
import es.upm.etsisi.cf4j.qualityMeasure.QualityMeasure;
import es.upm.etsisi.cf4j.recommender.Recommender;
import org.apache.commons.math3.util.Pair;
import java.lang.reflect.InvocationTargetException;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.*;
/**
* Utility class to perform a grid search over a Recommender instance. The Recommender class used
* during the grid search must contain a constructor with the signature
* Recommender::&lt;init&gt;(DataModel, Map&lt;String, Object&gt;) that initializes the Recommender
* using the attributes defined in the Map object. The parameters used in the search process, i.e.
* the development set, must be defined in a ParamsGrid instance. The grid search is executed in
* such a way that it minimizes (by default) or maximizes a QualityMeasure instance over the test
* set of the DataModel instance. If the QualityMeasure requires parameters to work, it must contain
* a constructor with the signature QualityMeasure::&lt;init&gt;(Recommender, Map&lt;String, Object&gt;)
* that initializes the QualityMeasure using the attributes defined in the Map object.
*/
public class GridSearch extends RandomSearch {
/**
* GridSearch constructor. Runs the search over the entire development set defined in the
* ParamsGrid (i.e. an exhaustive grid search, delegated to RandomSearch with the full set size).
*
* @param datamodel DataModel instance
* @param grid ParamsGrid instance containing the development set
* @param recommenderClass Recommender class to be evaluated. This class must contain a
* constructor with the signature Recommender.&lt;init&gt;(DataModel, Map&lt;String, Object&gt;)
* @param qualityMeasureClass QualityMeasure class used to evaluate the Recommender
*/
public GridSearch(
DataModel datamodel,
ParamsGrid grid,
Class<? extends Recommender> recommenderClass,
Class<? extends QualityMeasure> qualityMeasureClass) {
super(datamodel, grid, recommenderClass, qualityMeasureClass, grid.getDevelopmentSetSize());
}
/**
* GridSearch constructor for quality measures that require extra parameters. Runs the search
* over the entire development set defined in the ParamsGrid.
*
* @param datamodel DataModel instance
* @param grid ParamsGrid instance containing the development set
* @param recommenderClass Recommender class to be evaluated. This class must contain a
* constructor with the signature Recommender.&lt;init&gt;(DataModel, Map&lt;String,
* Object&gt;)
* @param qualityMeasureClass QualityMeasure class used to evaluate the Recommender. This class
* must contain a constructor with the signature QualityMeasure.&lt;init&gt;(Recommender,
* Map&lt;String, Object&gt;)
* @param qualityMeasureParams Map object containing the quality measure parameters names (keys)
* and values (value)
*/
public GridSearch(
DataModel datamodel,
ParamsGrid grid,
Class<? extends Recommender> recommenderClass,
Class<? extends QualityMeasure> qualityMeasureClass,
Map<String, Object> qualityMeasureParams) {
super(datamodel, grid, recommenderClass, qualityMeasureClass, qualityMeasureParams, grid.getDevelopmentSetSize());
}
}
|
public class StringComparison {

    /**
     * Returns the characters of {@code first} that do not occur anywhere in {@code second},
     * preserving their original order (case-sensitive; duplicates are kept).
     *
     * @param first  string whose characters are filtered
     * @param second string acting as the exclusion set
     * @return the surviving characters joined into a String
     */
    static String charsNotIn(String first, String second) {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder result = new StringBuilder();
        for (int i = 0; i < first.length(); i++) {
            char c = first.charAt(i);
            // indexOf < 0 means the character never occurs in `second`
            // (replaces the original hand-rolled inner scan loop).
            if (second.indexOf(c) < 0) {
                result.append(c);
            }
        }
        return result.toString();
    }

    public static void main(String[] args) {
        String str1 = "Stackoverflow";
        String str2 = "Overflow";
        // Prints "Stack": every other character of str1 occurs in "Overflow".
        System.out.println(charsNotIn(str1, str2));
    }
}
<filename>client/nuxt-web/mi/node_modules/videojs-contrib-media-sources/test/html.test.js
import document from 'global/document';
import window from 'global/window';
import QUnit from 'qunit';
import sinon from 'sinon';
import videojs from 'video.js';
import HtmlMediaSource from '../src/html-media-source';
import {
gopsSafeToAlignWith,
updateGopBuffer,
removeGopBuffer
} from '../src/virtual-source-buffer';
// we disable this because browserify needs to include these files
// but the exports are not important
/* eslint-disable no-unused-vars */
import {MediaSource, URL} from '../src/videojs-contrib-media-sources.js';
/* eslint-enable no-unused-vars */
// Shared fixture for the HTML media-source tests: builds a real player around a
// <video> element and swaps window.MediaSource for a fake EventTarget-based stub
// so no test touches a native MSE implementation.
QUnit.module('videojs-contrib-media-sources - HTML', {
beforeEach() {
this.fixture = document.getElementById('qunit-fixture');
this.video = document.createElement('video');
this.fixture.appendChild(this.video);
this.source = document.createElement('source');
this.player = videojs(this.video);
// add a fake source so that we can get this.player_ on sourceopen
this.url = 'fake.ts';
this.source.src = this.url;
this.video.appendChild(this.source);
// Mock the environment's timers because certain things - particularly
// player readiness - are asynchronous in video.js 5.
this.clock = sinon.useFakeTimers();
// remember the real implementation so afterEach can restore it
this.oldMediaSource = window.MediaSource || window.WebKitMediaSource;
// fake MediaSource: records added buffers, never talks to the browser
window.MediaSource = videojs.extend(videojs.EventTarget, {
constructor() {
this.isNative = true;
this.sourceBuffers = [];
this.duration = NaN;
},
addSourceBuffer(type) {
let buffer = new (videojs.extend(videojs.EventTarget, {
type,
appendBuffer() {}
}))();
this.sourceBuffers.push(buffer);
return buffer;
}
});
// the stub claims to support every MIME type
window.MediaSource.isTypeSupported = function(mime) {
return true;
};
window.WebKitMediaSource = window.MediaSource;
},
afterEach() {
// restore timers, dispose the player, and put the real MediaSource back
this.clock.restore();
this.player.dispose();
window.MediaSource = this.oldMediaSource;
window.WebKitMediaSource = window.MediaSource;
}
});
// Sanity check: the wrapper must be built on top of the (stubbed) native MediaSource.
QUnit.test('constructs a native MediaSource', function() {
QUnit.ok(
new videojs.MediaSource().nativeMediaSource_.isNative,
'constructed a MediaSource'
);
});
// Build a WebWorker-style 'data' message such as the transmuxer would post.
// `extraObject`'s own keys are merged into (and may override) the segment.
const createDataMessage = function(type, typedArray, extraObject) {
  const segment = {
    type,
    data: typedArray.buffer,
    initSegment: {
      data: typedArray.buffer,
      byteOffset: typedArray.byteOffset,
      byteLength: typedArray.byteLength
    }
  };

  // merge caller-supplied overrides into the segment
  Object.assign(segment, extraObject || {});

  return {
    data: {
      action: 'data',
      segment,
      byteOffset: typedArray.byteOffset,
      byteLength: typedArray.byteLength
    }
  };
};
// Create a WebWorker-style message that signals the transmuxer is done
const doneMessage = {
data: {
action: 'done'
}
};
// send fake data to the transmuxer to trigger the creation of the
// native source buffers; messages are delivered via transmuxer_.onmessage
// exactly as if they had been posted by the worker
const initializeNativeSourceBuffers = function(sourceBuffer) {
// initialize an audio source buffer
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
// initialize a video source buffer
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
// instruct the transmuxer to flush the "data" it has buffered so
// far
sourceBuffer.transmuxer_.onmessage(doneMessage);
};
// Feeding mp2t data should make the wrapper create native mp4 audio/video
// buffers (with default codecs) behind a single virtual source buffer.
QUnit.test('creates mp4 source buffers for mp2t segments', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(
mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.4d400d"',
'video buffer has the default codec'
);
QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
QUnit.equal(
mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.2"',
'audio buffer has the default codec'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
QUnit.ok(sourceBuffer.transmuxer_, 'created a transmuxer');
});
// Closing the media source must terminate the transmuxer web worker,
// otherwise the worker leaks.
QUnit.test(
'the terminate is called on the transmuxer when the media source is killed',
function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
let terminates = 0;
// stub the worker so terminate() calls can be counted
sourceBuffer.transmuxer_ = {
terminate() {
terminates++;
}
};
mediaSource.trigger('sourceclose');
QUnit.equal(terminates, 1, 'called terminate on transmux web worker');
});
// Live streams: the wrapper reports Infinity while keeping a finite duration
// on the underlying native MediaSource.
QUnit.test('duration is faked when playing a live stream', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
mediaSource.duration = Infinity;
mediaSource.nativeMediaSource_.duration = 100;
QUnit.equal(mediaSource.nativeMediaSource_.duration, 100,
'native duration was not set to infinity');
QUnit.equal(mediaSource.duration, Infinity,
'the MediaSource wrapper pretends it has an infinite duration');
});
// VOD: the wrapper defers to whatever duration the native MediaSource holds.
QUnit.test(
'duration uses the underlying MediaSource\'s duration when not live', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
mediaSource.duration = 100;
mediaSource.nativeMediaSource_.duration = 120;
QUnit.equal(mediaSource.duration, 120,
'the MediaSource wrapper returns the native duration');
});
QUnit.test('abort on the fake source buffer calls abort on the real ones', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
let messages = [];
let aborts = 0;
initializeNativeSourceBuffers(sourceBuffer);
sourceBuffer.transmuxer_.postMessage = function(message) {
messages.push(message);
};
sourceBuffer.bufferUpdating_ = true;
sourceBuffer.videoBuffer_.abort = function() {
aborts++;
};
sourceBuffer.audioBuffer_.abort = function() {
aborts++;
};
sourceBuffer.abort();
QUnit.equal(aborts, 2, 'called abort on both');
QUnit.equal(
sourceBuffer.bufferUpdating_,
false,
'set updating to false'
);
QUnit.equal(messages.length, 1, 'has one message');
QUnit.equal(messages[0].action, 'reset', 'reset called on transmuxer');
});
QUnit.test(
'calling remove deletes cues and invokes remove on any extant source buffers',
function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
let removedCue = [];
let removes = 0;
initializeNativeSourceBuffers(sourceBuffer);
sourceBuffer.inbandTextTracks_ = {
CC1: {
removeCue(cue) {
removedCue.push(cue);
this.cues.splice(this.cues.indexOf(cue), 1);
},
cues: [
{startTime: 10, endTime: 20, text: 'delete me'},
{startTime: 0, endTime: 2, text: 'save me'}
]
}
};
mediaSource.videoBuffer_.remove = function(start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
mediaSource.audioBuffer_.remove = function(start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
sourceBuffer.remove(3, 10);
QUnit.equal(removes, 2, 'called remove on both sourceBuffers');
QUnit.equal(
sourceBuffer.inbandTextTracks_.CC1.cues.length,
1,
'one cue remains after remove'
);
QUnit.equal(
removedCue[0].text,
'delete me',
'the cue that overlapped the remove region was removed'
);
});
QUnit.test(
'calling remove property handles absence of cues (null)',
function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
initializeNativeSourceBuffers(sourceBuffer);
sourceBuffer.inbandTextTracks_ = {
CC1: {
cues: null
}
};
mediaSource.videoBuffer_.remove = function(start, end) {
// pass
};
mediaSource.audioBuffer_.remove = function(start, end) {
// pass
};
// this call should not raise an exception
sourceBuffer.remove(3, 10);
QUnit.equal(
sourceBuffer.inbandTextTracks_.CC1.cues,
null,
'cues are still null'
);
});
QUnit.test('removing doesn\'t happen with audio disabled', function() {
let mediaSource = new videojs.MediaSource();
let muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
// creating this audio buffer disables audio in the muxed one
let audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');
let removedCue = [];
let removes = 0;
initializeNativeSourceBuffers(muxedBuffer);
muxedBuffer.inbandTextTracks_ = {
CC1: {
removeCue(cue) {
removedCue.push(cue);
this.cues.splice(this.cues.indexOf(cue), 1);
},
cues: [
{startTime: 10, endTime: 20, text: 'delete me'},
{startTime: 0, endTime: 2, text: 'save me'}
]
}
};
mediaSource.videoBuffer_.remove = function(start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
mediaSource.audioBuffer_.remove = function(start, end) {
if (start === 3 && end === 10) {
removes++;
}
};
muxedBuffer.remove(3, 10);
QUnit.equal(removes, 1, 'called remove on only one source buffer');
QUnit.equal(muxedBuffer.inbandTextTracks_.CC1.cues.length,
1,
'one cue remains after remove');
QUnit.equal(removedCue[0].text,
'delete me',
'the cue that overlapped the remove region was removed');
});
// readyState is a straight pass-through to the native implementation.
QUnit.test('readyState delegates to the native implementation', function() {
let mediaSource = new HtmlMediaSource();
QUnit.equal(
mediaSource.readyState,
mediaSource.nativeMediaSource_.readyState,
'readyStates are equal'
);
mediaSource.nativeMediaSource_.readyState = 'nonsense stuff';
QUnit.equal(
mediaSource.readyState,
mediaSource.nativeMediaSource_.readyState,
'readyStates are equal'
);
});
// addSeekableRange_ is only meaningful for live (infinite-duration) media.
QUnit.test('addSeekableRange_ throws an error for media with known duration', function() {
let mediaSource = new videojs.MediaSource();
mediaSource.duration = 100;
QUnit.throws(function() {
mediaSource.addSeekableRange_(0, 100);
}, 'cannot add seekable range');
});
// Growing the seekable range raises the native duration while the emulated
// duration stays Infinity; a smaller range must not shrink the native duration.
QUnit.test('addSeekableRange_ adds to the native MediaSource duration', function() {
let mediaSource = new videojs.MediaSource();
mediaSource.duration = Infinity;
mediaSource.addSeekableRange_(120, 240);
QUnit.equal(mediaSource.nativeMediaSource_.duration, 240, 'set native duration');
QUnit.equal(mediaSource.duration, Infinity, 'emulated duration');
mediaSource.addSeekableRange_(120, 220);
QUnit.equal(mediaSource.nativeMediaSource_.duration, 240, 'ignored the smaller range');
QUnit.equal(mediaSource.duration, Infinity, 'emulated duration');
});
// An exception thrown while appending to a native buffer should surface as a
// player 'error' event rather than escaping uncaught.
QUnit.test('appendBuffer error triggers on the player', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
let error = false;
mediaSource.player_ = this.player;
initializeNativeSourceBuffers(sourceBuffer);
// force the native append to fail
sourceBuffer.videoBuffer_.appendBuffer = () => {
throw new Error();
};
this.player.on('error', () => error = true);
// send fake data to the source buffer from the transmuxer to append to native buffer
// initializeNativeSourceBuffers does the same thing to trigger the creation of
// native source buffers.
let fakeTransmuxerMessage = initializeNativeSourceBuffers;
fakeTransmuxerMessage(sourceBuffer);
this.clock.tick(1);
QUnit.ok(error, 'error triggered on player');
});
QUnit.test('transmuxes mp2t segments', function() {
let mp2tSegments = [];
let mp4Segments = [];
let data = new Uint8Array(1);
let mediaSource;
let sourceBuffer;
mediaSource = new videojs.MediaSource();
sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.transmuxer_.postMessage = function(segment) {
if (segment.action === 'push') {
let buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
mp2tSegments.push(buffer);
}
};
sourceBuffer.concatAndAppendSegments_ = function(segmentObj, destinationBuffer) {
mp4Segments.push(segmentObj);
};
sourceBuffer.appendBuffer(data);
QUnit.equal(mp2tSegments.length, 1, 'transmuxed one segment');
QUnit.equal(mp2tSegments[0].length, 1, 'did not alter the segment');
QUnit.equal(mp2tSegments[0][0], data[0], 'did not alter the segment');
// an init segment
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1)));
// a media segment
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', new Uint8Array(1)));
// Segments are concatenated
QUnit.equal(
mp4Segments.length,
0,
'segments are not appended until after the `done` message'
);
// send `done` message
sourceBuffer.transmuxer_.onmessage(doneMessage);
// Segments are concatenated
QUnit.equal(mp4Segments.length, 2, 'appended the segments');
});
QUnit.test(
'handles typed-arrays that are subsets of their underlying buffer',
function() {
let mp2tSegments = [];
let mp4Segments = [];
let dataBuffer = new Uint8Array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
let data = dataBuffer.subarray(5, 7);
let mediaSource;
let sourceBuffer;
mediaSource = new videojs.MediaSource();
sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.transmuxer_.postMessage = function(segment) {
if (segment.action === 'push') {
let buffer = new Uint8Array(segment.data, segment.byteOffset, segment.byteLength);
mp2tSegments.push(buffer);
}
};
sourceBuffer.concatAndAppendSegments_ = function(segmentObj, destinationBuffer) {
mp4Segments.push(segmentObj.segments[0]);
};
sourceBuffer.appendBuffer(data);
QUnit.equal(mp2tSegments.length, 1, 'emitted the fragment');
QUnit.equal(
mp2tSegments[0].length,
2,
'correctly handled a typed-array that is a subset'
);
QUnit.equal(mp2tSegments[0][0], 5, 'fragment contains the correct first byte');
QUnit.equal(mp2tSegments[0][1], 6, 'fragment contains the correct second byte');
// an init segment
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data));
// Segments are concatenated
QUnit.equal(
mp4Segments.length,
0,
'segments are not appended until after the `done` message'
);
// send `done` message
sourceBuffer.transmuxer_.onmessage(doneMessage);
// Segments are concatenated
QUnit.equal(mp4Segments.length, 1, 'emitted the fragment');
QUnit.equal(
mp4Segments[0].length,
2,
'correctly handled a typed-array that is a subset'
);
QUnit.equal(mp4Segments[0][0], 5, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[0][1], 6, 'fragment contains the correct second byte');
});
QUnit.test(
'only appends audio init segment for first segment or on audio/media changes',
function() {
let mp4Segments = [];
let initBuffer = new Uint8Array([0, 1]);
let dataBuffer = new Uint8Array([2, 3]);
let mediaSource;
let sourceBuffer;
mediaSource = new videojs.MediaSource();
sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
sourceBuffer.audioDisabled_ = false;
mediaSource.player_ = this.player;
mediaSource.url_ = this.url;
mediaSource.trigger('sourceopen');
sourceBuffer.concatAndAppendSegments_ = function(segmentObj, destinationBuffer) {
let segment = segmentObj.segments.reduce((seg, arr) => seg.concat(Array.from(arr)),
[]);
mp4Segments.push(segment);
};
QUnit.ok(sourceBuffer.appendAudioInitSegment_, 'will append init segment next');
// an init segment
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
// Segments are concatenated
QUnit.equal(
mp4Segments.length,
0,
'segments are not appended until after the `done` message'
);
// send `done` message
sourceBuffer.transmuxer_.onmessage(doneMessage);
// Segments are concatenated
QUnit.equal(mp4Segments.length, 1, 'emitted the fragment');
// Contains init segment on first segment
QUnit.equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
QUnit.ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
dataBuffer = new Uint8Array([4, 5]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 2, 'emitted the fragment');
// does not contain init segment on next segment
QUnit.equal(mp4Segments[1][0], 4, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[1][1], 5, 'fragment contains the correct second byte');
// audio track change
this.player.audioTracks().trigger('change');
sourceBuffer.audioDisabled_ = false;
QUnit.ok(sourceBuffer.appendAudioInitSegment_, 'audio change sets appendAudioInitSegment_');
dataBuffer = new Uint8Array([6, 7]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 3, 'emitted the fragment');
// contains init segment after audio track change
QUnit.equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
QUnit.ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
dataBuffer = new Uint8Array([8, 9]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 4, 'emitted the fragment');
// does not contain init segment in next segment
QUnit.equal(mp4Segments[3][0], 8, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[3][1], 9, 'fragment contains the correct second byte');
QUnit.ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
// rendition switch
this.player.trigger('mediachange');
QUnit.ok(sourceBuffer.appendAudioInitSegment_, 'media change sets appendAudioInitSegment_');
dataBuffer = new Uint8Array([10, 11]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 5, 'emitted the fragment');
// contains init segment after audio track change
QUnit.equal(mp4Segments[4][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[4][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[4][2], 10, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[4][3], 11, 'fragment contains the correct fourth byte');
QUnit.ok(!sourceBuffer.appendAudioInitSegment_, 'will not append init segment next');
});
QUnit.test(
'appends video init segment for every segment',
function() {
let mp4Segments = [];
let initBuffer = new Uint8Array([0, 1]);
let dataBuffer = new Uint8Array([2, 3]);
let mediaSource;
let sourceBuffer;
mediaSource = new videojs.MediaSource();
sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
mediaSource.player_ = this.player;
mediaSource.url_ = this.url;
mediaSource.trigger('sourceopen');
sourceBuffer.concatAndAppendSegments_ = function(segmentObj, destinationBuffer) {
let segment = segmentObj.segments.reduce((seg, arr) => seg.concat(Array.from(arr)),
[]);
mp4Segments.push(segment);
};
// an init segment
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
// Segments are concatenated
QUnit.equal(
mp4Segments.length,
0,
'segments are not appended until after the `done` message'
);
// send `done` message
sourceBuffer.transmuxer_.onmessage(doneMessage);
// Segments are concatenated
QUnit.equal(mp4Segments.length, 1, 'emitted the fragment');
// Contains init segment on first segment
QUnit.equal(mp4Segments[0][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[0][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[0][2], 2, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[0][3], 3, 'fragment contains the correct fourth byte');
dataBuffer = new Uint8Array([4, 5]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 2, 'emitted the fragment');
QUnit.equal(mp4Segments[1][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[1][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[1][2], 4, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[1][3], 5, 'fragment contains the correct fourth byte');
dataBuffer = new Uint8Array([6, 7]);
sourceBuffer.transmuxer_.onmessage(createDataMessage('video', dataBuffer, {
initSegment: {
data: initBuffer.buffer,
byteOffset: initBuffer.byteOffset,
byteLength: initBuffer.byteLength
}
}));
sourceBuffer.transmuxer_.onmessage(doneMessage);
QUnit.equal(mp4Segments.length, 3, 'emitted the fragment');
// contains init segment after audio track change
QUnit.equal(mp4Segments[2][0], 0, 'fragment contains the correct first byte');
QUnit.equal(mp4Segments[2][1], 1, 'fragment contains the correct second byte');
QUnit.equal(mp4Segments[2][2], 6, 'fragment contains the correct third byte');
QUnit.equal(mp4Segments[2][3], 7, 'fragment contains the correct fourth byte');
});
QUnit.test('handles empty codec string value', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs=""');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(
mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.4d400d"',
'video buffer has the default codec'
);
QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
QUnit.equal(
mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.2"',
'audio buffer has the default codec'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
});
QUnit.test('can create an audio buffer by itself', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.2"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(!mediaSource.videoBuffer_, 'did not create a video buffer');
QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
QUnit.equal(
mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.2"',
'audio buffer has the default codec'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
});
QUnit.test('can create an video buffer by itself', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.4d400d"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(!mediaSource.audioBuffer_, 'did not create an audio buffer');
QUnit.ok(mediaSource.videoBuffer_, 'created an video buffer');
QUnit.equal(
mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.4d400d"',
'video buffer has the codec that was passed'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
});
QUnit.test('handles invalid codec string', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="nope"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(
mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.4d400d"',
'video buffer has the default codec'
);
QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
QUnit.equal(
mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.2"',
'audio buffer has the default codec'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
});
QUnit.test('handles codec strings in reverse order', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5,avc1.64001f"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(
mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.64001f"',
'video buffer has the passed codec'
);
QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
QUnit.equal(
mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.5"',
'audio buffer has the passed codec'
);
QUnit.equal(mediaSource.sourceBuffers.length, 1, 'created one virtual buffer');
QUnit.equal(
mediaSource.sourceBuffers[0],
sourceBuffer,
'returned the virtual buffer'
);
QUnit.ok(sourceBuffer.transmuxer_, 'created a transmuxer');
});
QUnit.test('forwards codec strings to native buffers when specified', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.64001f"',
'passed the video codec along');
QUnit.ok(mediaSource.audioBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.5"',
'passed the audio codec along');
});
QUnit.test('parses old-school apple codec strings to the modern standard', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer =
mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.100.31,mp4a.40.5"');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.64001f"',
'passed the video codec along');
QUnit.ok(mediaSource.audioBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.5"',
'passed the audio codec along');
});
QUnit.test('specifies reasonable codecs if none are specified', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
initializeNativeSourceBuffers(sourceBuffer);
QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.videoBuffer_.type,
'video/mp4;codecs="avc1.4d400d"',
'passed the video codec along');
QUnit.ok(mediaSource.audioBuffer_, 'created a video buffer');
QUnit.equal(mediaSource.audioBuffer_.type,
'audio/mp4;codecs="mp4a.40.2"',
'passed the audio codec along');
});
// The virtual buffer reports `updating` while EITHER native buffer is updating.
QUnit.test('virtual buffers are updating if either native buffer is', function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
initializeNativeSourceBuffers(sourceBuffer);
mediaSource.videoBuffer_.updating = true;
mediaSource.audioBuffer_.updating = false;
QUnit.equal(sourceBuffer.updating, true, 'virtual buffer is updating');
mediaSource.audioBuffer_.updating = true;
QUnit.equal(sourceBuffer.updating, true, 'virtual buffer is updating');
mediaSource.videoBuffer_.updating = false;
QUnit.equal(sourceBuffer.updating, true, 'virtual buffer is updating');
mediaSource.audioBuffer_.updating = false;
QUnit.equal(sourceBuffer.updating, false, 'virtual buffer is not updating');
});
QUnit.test(
'virtual buffers have a position buffered if both native buffers do',
function() {
let mediaSource = new videojs.MediaSource();
let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
initializeNativeSourceBuffers(sourceBuffer);
mediaSource.videoBuffer_.buffered = videojs.createTimeRanges([
[0, 10],
[20, 30]
]);
mediaSource.audioBuffer_.buffered = videojs.createTimeRanges([
[0, 7],
[11, 15],
[16, 40]
]);
QUnit.equal(sourceBuffer.buffered.length, 2, 'two buffered ranges');
QUnit.equal(sourceBuffer.buffered.start(0), 0, 'first starts at zero');
QUnit.equal(sourceBuffer.buffered.end(0), 7, 'first ends at seven');
QUnit.equal(sourceBuffer.buffered.start(1), 20, 'second starts at twenty');
QUnit.equal(sourceBuffer.buffered.end(1), 30, 'second ends at 30');
});
// When a separate audio-only buffer exists, audio is disabled on the muxed
// buffer, so the muxed buffer's `buffered` should reflect video only while
// the audio buffer reports its own ranges.
QUnit.test('disabled audio does not affect buffered property', function() {
  let mediaSource = new videojs.MediaSource();
  let muxedBuffer = mediaSource.addSourceBuffer('video/mp2t');
  // creating a separate audio buffer disables audio on the muxed one
  let audioBuffer = mediaSource.addSourceBuffer('audio/mp2t; codecs="mp4a.40.2"');

  initializeNativeSourceBuffers(muxedBuffer);

  mediaSource.videoBuffer_.buffered = videojs.createTimeRanges([[1, 10]]);
  mediaSource.audioBuffer_.buffered = videojs.createTimeRanges([[2, 11]]);

  QUnit.equal(audioBuffer.buffered.length, 1, 'one buffered range');
  QUnit.equal(audioBuffer.buffered.start(0), 2, 'starts at two');
  QUnit.equal(audioBuffer.buffered.end(0), 11, 'ends at eleven');
  QUnit.equal(muxedBuffer.buffered.length, 1, 'one buffered range');
  QUnit.equal(muxedBuffer.buffered.start(0), 1, 'starts at one');
  QUnit.equal(muxedBuffer.buffered.end(0), 10, 'ends at ten');
});
// Assigning `timestampOffset` must forward a `setTimestampOffset` message to
// the transmuxer (which uses it as the baseMediaDecodeTime).
QUnit.test('sets transmuxer baseMediaDecodeTime on appends', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let resets = [];

  // capture messages sent to the transmuxer instead of posting them
  sourceBuffer.transmuxer_.postMessage = function(message) {
    if (message.action === 'setTimestampOffset') {
      resets.push(message.timestampOffset);
    }
  };

  sourceBuffer.timestampOffset = 42;

  QUnit.equal(
    resets.length,
    1,
    'reset called'
  );
  QUnit.equal(
    resets[0],
    42,
    'set the baseMediaDecodeTime based on timestampOffset'
  );
});
// updatestart fires on the FIRST native buffer to start; update/updateend
// fire only after the LAST native buffer finishes, so each aggregated event
// is seen exactly once per append cycle.
QUnit.test('aggregates source buffer update events', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let updates = 0;
  let updateends = 0;
  let updatestarts = 0;

  initializeNativeSourceBuffers(sourceBuffer);

  mediaSource.player_ = this.player;

  sourceBuffer.addEventListener('updatestart', function() {
    updatestarts++;
  });
  sourceBuffer.addEventListener('update', function() {
    updates++;
  });
  sourceBuffer.addEventListener('updateend', function() {
    updateends++;
  });

  QUnit.equal(updatestarts, 0, 'no updatestarts before a `done` message is received');
  QUnit.equal(updates, 0, 'no updates before a `done` message is received');
  QUnit.equal(updateends, 0, 'no updateends before a `done` message is received');

  // the video buffer begins updating first:
  sourceBuffer.videoBuffer_.updating = true;
  sourceBuffer.audioBuffer_.updating = false;
  sourceBuffer.videoBuffer_.trigger('updatestart');
  QUnit.equal(updatestarts, 1, 'aggregated updatestart');
  sourceBuffer.audioBuffer_.updating = true;
  // the second updatestart must not re-fire the aggregated event
  sourceBuffer.audioBuffer_.trigger('updatestart');
  QUnit.equal(updatestarts, 1, 'aggregated updatestart');

  // the audio buffer finishes first:
  sourceBuffer.audioBuffer_.updating = false;
  sourceBuffer.videoBuffer_.updating = true;
  sourceBuffer.audioBuffer_.trigger('update');
  QUnit.equal(updates, 0, 'waited for the second update');
  sourceBuffer.videoBuffer_.updating = false;
  sourceBuffer.videoBuffer_.trigger('update');
  QUnit.equal(updates, 1, 'aggregated update');

  // audio finishes first:
  sourceBuffer.videoBuffer_.updating = true;
  sourceBuffer.audioBuffer_.updating = false;
  sourceBuffer.audioBuffer_.trigger('updateend');
  QUnit.equal(updateends, 0, 'waited for the second updateend');
  sourceBuffer.videoBuffer_.updating = false;
  sourceBuffer.videoBuffer_.trigger('updateend');
  QUnit.equal(updateends, 1, 'aggregated updateend');
});
// In-band 608 captions coming out of the transmuxer become cues on a remote
// "captions" text track, with cue times shifted by timestampOffset; a
// 'hls-608' usage event is emitted on the tech.
QUnit.test('translates caption events into WebVTT cues', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let types = [];
  let hls608 = 0;

  // minimal player stub: records the kind of each track created and collects
  // cues added to it
  mediaSource.player_ = {
    addRemoteTextTrack(options) {
      types.push(options.kind);
      return {
        track: {
          kind: options.kind,
          label: options.label,
          cues: [],
          addCue(cue) {
            this.cues.push(cue);
          }
        }
      };
    },
    textTracks() {
      return {
        getTrackById() {}
      };
    },
    remoteTextTracks() {
    },
    tech_: new videojs.EventTarget()
  };
  mediaSource.player_.tech_.on('usage', (event) => {
    if (event.name === 'hls-608') {
      hls608++;
    }
  });

  sourceBuffer.timestampOffset = 10;
  sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
    captions: [{
      startTime: 1,
      endTime: 3,
      text: 'This is an in-band caption in CC1',
      stream: 'CC1'
    }],
    captionStreams: {CC1: true}
  }));
  sourceBuffer.transmuxer_.onmessage(doneMessage);

  let cues = sourceBuffer.inbandTextTracks_.CC1.cues;

  QUnit.equal(hls608, 1, 'one hls-608 event was triggered');
  QUnit.equal(types.length, 1, 'created one text track');
  QUnit.equal(types[0], 'captions', 'the type was captions');
  QUnit.equal(cues.length, 1, 'created one cue');
  QUnit.equal(cues[0].text, 'This is an in-band caption in CC1', 'included the text');
  // caption times (1..3) shifted by the 10s timestampOffset
  QUnit.equal(cues[0].startTime, 11, 'started at eleven');
  QUnit.equal(cues[0].endTime, 13, 'ended at thirteen');
});
// When the player already exposes text tracks whose ids match the caption
// stream names (CC1, CC2), caption cues go onto those tracks and no new
// remote tracks are created.
QUnit.test('captions use existing tracks with id equal to CC#', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let addTrackCalled = 0;
  // pre-existing tracks keyed by id; each collects cues added to it
  let tracks = {
    CC1: {
      kind: 'captions',
      label: 'CC1',
      id: 'CC1',
      cues: [],
      addCue(cue) {
        this.cues.push(cue);
      }
    },
    CC2: {
      kind: 'captions',
      label: 'CC2',
      id: 'CC2',
      cues: [],
      addCue(cue) {
        this.cues.push(cue);
      }
    }
  };

  mediaSource.player_ = {
    addRemoteTextTrack(options) {
      addTrackCalled++;
    },
    textTracks() {
      return {
        getTrackById(id) {
          return tracks[id];
        }
      };
    },
    remoteTextTracks() {
    },
    tech_: new videojs.EventTarget()
  };

  sourceBuffer.timestampOffset = 10;
  sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
    captions: [{
      stream: 'CC1',
      startTime: 1,
      endTime: 3,
      text: 'This is an in-band caption in CC1'
    }, {
      stream: 'CC2',
      startTime: 1,
      endTime: 3,
      text: 'This is an in-band caption in CC2'
    }],
    captionStreams: {CC1: true, CC2: true}
  }));
  sourceBuffer.transmuxer_.onmessage(doneMessage);

  // removed unused local `cues` (assertions read tracks.CC1/CC2 directly)
  QUnit.equal(addTrackCalled, 0, 'no tracks were created');
  QUnit.equal(tracks.CC1.cues.length, 1, 'CC1 contains 1 cue');
  QUnit.equal(tracks.CC2.cues.length, 1, 'CC2 contains 1 cue');
  QUnit.equal(tracks.CC1.cues[0].text, 'This is an in-band caption in CC1', 'CC1 contains the right cue');
  QUnit.equal(tracks.CC2.cues[0].text, 'This is an in-band caption in CC2', 'CC2 contains the right cue');
});
// ID3 metadata from the transmuxer becomes cues on a "metadata" text track.
// Each cue ends at the start of the next cue; the final cue's end is
// Number.MAX_VALUE while the duration is Infinity and collapses to the real
// duration once the source ends.
QUnit.test('translates metadata events into WebVTT cues', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');

  mediaSource.duration = Infinity;
  mediaSource.nativeMediaSource_.duration = 60;

  let types = [];
  // two tags at cueTime 2 and one at cueTime 12 (shifted by the 10s offset
  // below, they surface at 12 and 22)
  let metadata = [{
    cueTime: 2,
    frames: [{
      url: 'This is a url tag'
    }, {
      value: 'This is a text tag'
    }]
  }, {
    cueTime: 12,
    frames: [{
      data: 'This is a priv tag'
    }]
  }];

  metadata.dispatchType = 0x10;

  mediaSource.player_ = {
    addRemoteTextTrack(options) {
      types.push(options.kind);
      return {
        track: {
          kind: options.kind,
          label: options.label,
          cues: [],
          addCue(cue) {
            this.cues.push(cue);
          }
        }
      };
    },
    remoteTextTracks() {
    }
  };

  sourceBuffer.timestampOffset = 10;

  sourceBuffer.transmuxer_.onmessage(createDataMessage('video', new Uint8Array(1), {
    metadata
  }));
  sourceBuffer.transmuxer_.onmessage(doneMessage);

  QUnit.equal(
    sourceBuffer.metadataTrack_.inBandMetadataTrackDispatchType,
    16,
    'in-band metadata track dispatch type correctly set'
  );

  let cues = sourceBuffer.metadataTrack_.cues;

  QUnit.equal(types.length, 1, 'created one text track');
  QUnit.equal(types[0], 'metadata', 'the type was metadata');
  QUnit.equal(cues.length, 3, 'created three cues');
  QUnit.equal(cues[0].text, 'This is a url tag', 'included the text');
  QUnit.equal(cues[0].startTime, 12, 'started at twelve');
  QUnit.equal(cues[0].endTime, 22, 'ended at StartTime of next cue(22)');
  QUnit.equal(cues[1].text, 'This is a text tag', 'included the text');
  QUnit.equal(cues[1].startTime, 12, 'started at twelve');
  QUnit.equal(cues[1].endTime, 22, 'ended at the startTime of next cue(22)');
  QUnit.equal(cues[2].text, 'This is a priv tag', 'included the text');
  QUnit.equal(cues[2].startTime, 22, 'started at twenty two');
  QUnit.equal(cues[2].endTime, Number.MAX_VALUE, 'ended at the maximum value');

  // once the stream ends the open-ended cue is clamped to the real duration
  mediaSource.duration = 100;
  mediaSource.trigger('sourceended');
  QUnit.equal(cues[2].endTime, mediaSource.duration, 'sourceended is fired');
});
// mp4 content is natively supported, so addSourceBuffer should hand back the
// browser's own buffers instead of creating virtual wrappers.
QUnit.test('does not wrap mp4 source buffers', function() {
  const ms = new videojs.MediaSource();

  ms.addSourceBuffer('video/mp4;codecs=avc1.4d400d');
  ms.addSourceBuffer('audio/mp4;codecs=mp4a.40.2');

  QUnit.equal(
    ms.sourceBuffers.length,
    ms.nativeMediaSource_.sourceBuffers.length,
    'did not need virtual buffers'
  );
  QUnit.equal(ms.sourceBuffers.length, 2, 'created native buffers');
});
QUnit.test('can get activeSourceBuffers', function() {
  const ms = new videojs.MediaSource();

  // although activeSourceBuffers should technically be a SourceBufferList, we are
  // returning it as an array, and users may expect it to behave as such
  QUnit.ok(Array.isArray(ms.activeSourceBuffers));
});
// updateActiveSourceBuffers_ must be re-run on audio track list changes
// (addtrack / removetrack / change), not on source buffer creation.
QUnit.test('active source buffers are updated on each buffer\'s updateend',
function() {
  let mediaSource = new videojs.MediaSource();
  let updateCallCount = 0;
  let sourceBuffer;

  // count invocations instead of running the real update logic
  mediaSource.updateActiveSourceBuffers_ = () => {
    updateCallCount++;
  };

  sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  mediaSource.player_ = this.player;
  mediaSource.url_ = this.url;
  mediaSource.trigger('sourceopen');

  QUnit.equal(updateCallCount, 0,
    'active source buffers not updated on adding source buffer');

  mediaSource.player_.audioTracks().trigger('addtrack');

  QUnit.equal(updateCallCount, 1,
    'active source buffers updated after addtrack');

  sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');

  QUnit.equal(updateCallCount, 1,
    'active source buffers not updated on adding second source buffer');

  mediaSource.player_.audioTracks().trigger('removetrack');

  QUnit.equal(updateCallCount, 2,
    'active source buffers updated after removetrack');

  mediaSource.player_.audioTracks().trigger('change');

  QUnit.equal(updateCallCount, 3,
    'active source buffers updated after change');
});
// With the main audio track enabled, only the combined (audio+video) buffer
// should be active; the audio-only buffer is excluded.
QUnit.test('combined buffer is the only active buffer when main track enabled',
function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBufferAudio;
  let sourceBufferCombined;
  let audioTracks = [{
    enabled: true,
    kind: 'main',
    label: 'main'
  }, {
    enabled: false,
    kind: 'alternative',
    label: 'English (UK)'
  }];

  this.player.audioTracks = () => audioTracks;

  mediaSource.player_ = this.player;

  // NOTE(review): 'video/m2pt' looks like a transposition of 'video/mp2t';
  // it is used consistently in these activeSourceBuffers tests -- confirm
  // whether the codec_ flags set below make the MIME string irrelevant here.
  sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferCombined.videoCodec_ = true;
  sourceBufferCombined.audioCodec_ = true;

  sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferAudio.videoCodec_ = false;
  sourceBufferAudio.audioCodec_ = true;

  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 1,
    'active source buffers starts with one source buffer');
  QUnit.equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined,
    'active source buffers starts with combined source buffer');
});
// With an alternative audio track enabled, both the combined buffer and the
// audio-only buffer are active, in creation order.
QUnit.test('combined & audio buffers are active when alternative track enabled',
function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBufferAudio;
  let sourceBufferCombined;
  let audioTracks = [{
    enabled: false,
    kind: 'main',
    label: 'main'
  }, {
    enabled: true,
    kind: 'alternative',
    label: 'English (UK)'
  }];

  this.player.audioTracks = () => audioTracks;

  mediaSource.player_ = this.player;

  sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferCombined.videoCodec_ = true;
  sourceBufferCombined.audioCodec_ = true;

  sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferAudio.videoCodec_ = false;
  sourceBufferAudio.audioCodec_ = true;

  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 2,
    'active source buffers includes both source buffers');

  // maintains same order as source buffers were created
  QUnit.equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined,
    'active source buffers starts with combined source buffer');
  QUnit.equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio,
    'active source buffers ends with audio source buffer');
});
// When one buffer is video-only and the other audio-only, both stay active
// regardless of which audio track is enabled.
QUnit.test('video only & audio only buffers are always active',
function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBufferAudio;
  let sourceBufferCombined;
  let audioTracks = [{
    enabled: false,
    kind: 'main',
    label: 'main'
  }, {
    enabled: true,
    kind: 'alternative',
    label: 'English (UK)'
  }];

  this.player.audioTracks = () => audioTracks;

  mediaSource.player_ = this.player;

  // video-only buffer
  sourceBufferCombined = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferCombined.videoCodec_ = true;
  sourceBufferCombined.audioCodec_ = false;

  // audio-only buffer
  sourceBufferAudio = mediaSource.addSourceBuffer('video/m2pt');
  sourceBufferAudio.videoCodec_ = false;
  sourceBufferAudio.audioCodec_ = true;

  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 2,
    'active source buffers includes both source buffers');

  // maintains same order as source buffers were created
  QUnit.equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined,
    'active source buffers starts with combined source buffer');
  QUnit.equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio,
    'active source buffers ends with audio source buffer');

  // flip which track is enabled; the active set must not change
  audioTracks[0].enabled = true;
  audioTracks[1].enabled = false;

  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 2,
    'active source buffers includes both source buffers');

  // maintains same order as source buffers were created
  QUnit.equal(mediaSource.activeSourceBuffers[0], sourceBufferCombined,
    'active source buffers starts with combined source buffer');
  QUnit.equal(mediaSource.activeSourceBuffers[1], sourceBufferAudio,
    'active source buffers ends with audio source buffer');
});
// A lone source buffer is always active; whether its audio is disabled is
// decided by whether it actually carries an audio codec.
QUnit.test('Single buffer always active. Audio disabled depends on audio codec',
function() {
  let mediaSource = new videojs.MediaSource();
  let audioTracks = [{
    enabled: true,
    kind: 'main',
    label: 'main'
  }];

  this.player.audioTracks = () => audioTracks;

  mediaSource.player_ = this.player;

  let sourceBuffer = mediaSource.addSourceBuffer('video/m2pt');

  // video only
  sourceBuffer.videoCodec_ = true;
  sourceBuffer.audioCodec_ = false;
  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
  QUnit.ok(mediaSource.activeSourceBuffers[0].audioDisabled_,
    'audio is disabled on video only active sourceBuffer');

  // audio only
  sourceBuffer.videoCodec_ = false;
  sourceBuffer.audioCodec_ = true;
  mediaSource.updateActiveSourceBuffers_();

  QUnit.equal(mediaSource.activeSourceBuffers.length, 1, 'sourceBuffer is active');
  QUnit.notOk(mediaSource.activeSourceBuffers[0].audioDisabled_,
    'audio not disabled on audio only active sourceBuffer');
});
// Appending video segments whose data message carries track `info` must
// re-emit that info through a `videoinfo` event on the media source.
// (fixes the test title, which misspelled the event name as "videooinfo",
// and the copy-pasted "audio segment" comments)
QUnit.test('video segments with info trigger videoinfo event', function() {
  let data = new Uint8Array(1);
  let infoEvents = [];
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let info = {width: 100};
  let newinfo = {width: 225};

  mediaSource.on('videoinfo', (e) => infoEvents.push(e));

  // send a video segment with info, then send done
  sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, {info}));
  sourceBuffer.transmuxer_.onmessage(doneMessage);
  QUnit.equal(infoEvents.length, 1, 'video info should trigger');
  QUnit.deepEqual(infoEvents[0].info, info, 'video info = muxed info');

  // send a second video segment with updated info, then send done
  sourceBuffer.transmuxer_.onmessage(createDataMessage('video', data, {info: newinfo}));
  sourceBuffer.transmuxer_.onmessage(doneMessage);
  QUnit.equal(infoEvents.length, 2, 'video info should trigger');
  QUnit.deepEqual(infoEvents[1].info, newinfo, 'video info = muxed info');
});
// Appending audio segments whose data message carries track `info` must
// re-emit that info through an `audioinfo` event on the media source.
QUnit.test('audio segments with info trigger audioinfo event', function() {
  let data = new Uint8Array(1);
  let infoEvents = [];
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer = mediaSource.addSourceBuffer('video/mp2t');
  let info = {width: 100};
  let newinfo = {width: 225};

  mediaSource.on('audioinfo', (e) => infoEvents.push(e));

  // send an audio segment with info, then send done
  sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, {info}));
  sourceBuffer.transmuxer_.onmessage(doneMessage);
  QUnit.equal(infoEvents.length, 1, 'audio info should trigger');
  QUnit.deepEqual(infoEvents[0].info, info, 'audio info = muxed info');

  // send an audio segment with info, then send done
  sourceBuffer.transmuxer_.onmessage(createDataMessage('audio', data, {info: newinfo}));
  sourceBuffer.transmuxer_.onmessage(doneMessage);
  QUnit.equal(infoEvents.length, 2, 'audio info should trigger');
  QUnit.deepEqual(infoEvents[1].info, newinfo, 'audio info = muxed info');
});
// Creating a second VirtualSourceBuffer forces immediate creation of the
// native buffers (using the first buffer's codecs) and disables audio on the
// first, muxed buffer.
QUnit.test('creates native SourceBuffers immediately if a second ' +
'VirtualSourceBuffer is created', function() {
  let mediaSource = new videojs.MediaSource();
  let sourceBuffer =
    mediaSource.addSourceBuffer('video/mp2t; codecs="avc1.64001f,mp4a.40.5"');
  let sourceBuffer2 =
    mediaSource.addSourceBuffer('video/mp2t; codecs="mp4a.40.5"');

  QUnit.ok(mediaSource.videoBuffer_, 'created a video buffer');
  QUnit.equal(
    mediaSource.videoBuffer_.type,
    'video/mp4;codecs="avc1.64001f"',
    'video buffer has the specified codec'
  );
  QUnit.ok(mediaSource.audioBuffer_, 'created an audio buffer');
  QUnit.equal(
    mediaSource.audioBuffer_.type,
    'audio/mp4;codecs="mp4a.40.5"',
    'audio buffer has the specified codec'
  );
  QUnit.equal(mediaSource.sourceBuffers.length, 2, 'created two virtual buffers');
  QUnit.equal(
    mediaSource.sourceBuffers[0],
    sourceBuffer,
    'returned the virtual buffer');
  QUnit.equal(
    mediaSource.sourceBuffers[1],
    sourceBuffer2,
    'returned the virtual buffer');
  QUnit.equal(
    sourceBuffer.audioDisabled_,
    true,
    'first source buffer\'s audio is automatically disabled');
  QUnit.ok(
    sourceBuffer2.audioBuffer_,
    'second source buffer has an audio source buffer');
});
QUnit.module('VirtualSourceBuffer - Isolated Functions');

QUnit.test('gopsSafeToAlignWith returns correct list', function() {
  // gopsSafeToAlignWith uses a 3 second safetyNet so that gops very close to the playhead
  // are not considered safe to append to
  const safetyNet = 3;
  // convert seconds to 90kHz clock ticks, rounding up
  const pts = (time) => Math.ceil(time * 90000);
  let mapping = 0;
  let currentTime = 0;
  let buffer = [];
  let player;
  let actual;
  let expected;

  expected = [];
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected, 'empty array when player is undefined');

  player = { currentTime: () => currentTime };
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected, 'empty array when buffer is empty');

  // every gop strictly beyond currentTime + safetyNet is kept
  buffer = expected = [
    { pts: pts(currentTime + safetyNet + 1) },
    { pts: pts(currentTime + safetyNet + 2) },
    { pts: pts(currentTime + safetyNet + 3) }
  ];
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected,
    'entire buffer considered safe when all gops come after currentTime + safetyNet');

  // a gop exactly at the safetyNet boundary is dropped
  buffer = [
    { pts: pts(currentTime + safetyNet) },
    { pts: pts(currentTime + safetyNet + 1) },
    { pts: pts(currentTime + safetyNet + 2) }
  ];
  expected = [
    { pts: pts(currentTime + safetyNet + 1) },
    { pts: pts(currentTime + safetyNet + 2) }
  ];
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected, 'safetyNet comparison is not inclusive');

  // the mapping shifts currentTime into the gop timeline
  currentTime = 10;
  mapping = -5;
  buffer = [
    { pts: pts(currentTime - mapping + safetyNet - 2) },
    { pts: pts(currentTime - mapping + safetyNet - 1) },
    { pts: pts(currentTime - mapping + safetyNet) },
    { pts: pts(currentTime - mapping + safetyNet + 1) },
    { pts: pts(currentTime - mapping + safetyNet + 2) }
  ];
  expected = [
    { pts: pts(currentTime - mapping + safetyNet + 1) },
    { pts: pts(currentTime - mapping + safetyNet + 2) }
  ];
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected, 'uses mapping to shift currentTime');

  currentTime = 20;
  expected = [];
  actual = gopsSafeToAlignWith(buffer, player, mapping);
  QUnit.deepEqual(actual, expected,
    'empty array when no gops in buffer come after currentTime');
});
// updateGopBuffer merges a new gop list into the existing buffer: replace
// mode discards the buffer; append mode slices the buffer at the first gop
// whose pts is >= the first new gop, then appends the new gops.
QUnit.test('updateGopBuffer correctly processes new gop information', function() {
  let buffer = [];
  let gops = [];
  let replace = true;
  let actual;
  let expected;

  buffer = expected = [{ pts: 100 }, { pts: 200 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected, 'returns buffer when no new gops');

  gops = expected = [{ pts: 300 }, { pts: 400 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected, 'returns only new gops when replace is true');

  replace = false;

  buffer = [];
  gops = [{ pts: 100 }];
  expected = [{ pts: 100 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected, 'appends new gops to empty buffer');

  buffer = [{ pts: 100 }, { pts: 200 }];
  gops = [{ pts: 300 }, { pts: 400 }];
  expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected, 'appends new gops at end of buffer when no overlap');

  buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  gops = [{ pts: 250 }, { pts: 300 }, { pts: 350 }];
  expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }, { pts: 300 }, { pts: 350 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected,
    'slices buffer at point of overlap and appends new gops');

  buffer = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 400 }];
  gops = [{ pts: 200 }, { pts: 300 }, { pts: 350 }];
  expected = [{ pts: 100 }, { pts: 200 }, { pts: 300 }, { pts: 350 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected, 'overlap slice is inclusive');

  buffer = [{ pts: 300 }, { pts: 400 }, { pts: 500 }, { pts: 600 }];
  gops = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
  expected = [{ pts: 100 }, { pts: 200 }, { pts: 250 }];
  actual = updateGopBuffer(buffer, gops, replace);
  QUnit.deepEqual(actual, expected,
    'completely replaces buffer with new gops when all gops come before buffer');
});
// removeGopBuffer drops the gops that a [start, end] removal range (in
// seconds, shifted by `mapping`) intersects, clamping the range to the
// buffer's extent. All cases below operate on the same six-gop buffer at
// times 10, 11, 12, 15, 18, 20.
// (fixes the repeated assertion-message typo 'begining' -> 'beginning' and
// factors the hand-written identical gop arrays through a local builder)
QUnit.test('removeGopBuffer correctly removes range from buffer', function() {
  // convert seconds to 90kHz clock ticks, rounding up
  const pts = (time) => Math.ceil(time * 90000);
  // all cases in this test use the same, constant timestamp mapping
  let mapping = -5;
  // build a gop list from times in seconds, applying the mapping
  const gopList = (...times) => times.map((time) => ({ pts: pts(time - mapping) }));
  let buffer = [];
  let start = 0;
  let end = 0;
  let actual;
  let expected;

  expected = [];
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'returns empty array when buffer empty');

  start = 0;
  end = 8;
  buffer = expected = gopList(10, 11, 12, 15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected,
    'no removal when remove range comes before start of buffer');

  start = 22;
  end = 30;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(10, 11, 12, 15, 18);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected,
    'removes last gop when remove range is after end of buffer');

  start = 0;
  end = 10;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(11, 12, 15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps start range to beginning of buffer');

  start = 0;
  end = 12;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps start range to beginning of buffer');

  start = 0;
  end = 14;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps start range to beginning of buffer');

  start = 15;
  end = 30;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(10, 11, 12);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps end range to end of buffer');

  start = 17;
  end = 30;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(10, 11, 12);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps end range to end of buffer');

  start = 20;
  end = 30;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = gopList(10, 11, 12, 15, 18);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'clamps end range to end of buffer');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 12;
  end = 15;
  expected = gopList(10, 11, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 12;
  end = 14;
  expected = gopList(10, 11, 15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 13;
  end = 14;
  expected = gopList(10, 11, 15, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 13;
  end = 15;
  expected = gopList(10, 11, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 12;
  end = 17;
  expected = gopList(10, 11, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  buffer = gopList(10, 11, 12, 15, 18, 20);
  start = 13;
  end = 16;
  expected = gopList(10, 11, 18, 20);
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected, 'removes gops that remove range intersects with');

  start = 10;
  end = 20;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = [];
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected,
    'removes entire buffer when buffer inside remove range');

  start = 0;
  end = 30;
  buffer = gopList(10, 11, 12, 15, 18, 20);
  expected = [];
  actual = removeGopBuffer(buffer, start, end, mapping);
  QUnit.deepEqual(actual, expected,
    'removes entire buffer when buffer inside remove range');
});
|
// In-call manager audio/vibration configuration.
// (comments translated from Vietnamese)
export default {
  autoCheckPermissions: true,
  ringBack: "incallmanager_ringback.mp3", // ringback tone file name
  ringTone: "incallmanager_ringtone.mp3", // ringtone file name
  busyTone: "_DTMF_", // busy tone file name
  hangupTone: "incallmanager_busytone.mp3", // hangup tone file name
  vibrateRingingPattern: [2000, 1000], // ringing vibration pattern (ms)
  vibrateAlertPattern: [1000], // alert vibration pattern (ms)
  // if hangup is pressed while there is no connection, retry the action
  // automatically once the connection is restored
  autoEndCallWhenReconnect: true,
  endCallWhenDisconnect: true, // end the call when the connection is lost
  // NOTE(review): key is misspelled ("permistion") but kept as-is for
  // compatibility with existing callers
  permistionRationale: {},
  permissionDeniedCallback: () => {}
};
}; |
<!DOCTYPE html>
<html lang="en">
<head>
  <!-- declare the encoding explicitly so the page never falls back to the
       browser's locale default -->
  <meta charset="utf-8" />
  <title>Name and Email Form</title>
</head>
<body>
  <form>
    <h1>Name and Email Form</h1>
    <div>
      <label for="name">Name:</label>
      <input type="text" name="name" id="name" />
    </div>
    <div>
      <label for="email">Email:</label>
      <input type="email" name="email" id="email" />
    </div>
    <div>
      <button type="submit">Submit</button>
    </div>
  </form>
</body>
</html>
/*Shop Management System Project Source Code
Developed using concepts of file handling and oops concepts
Username: Admin
Password: <PASSWORD>
*/
#include<iostream>
#include<fstream>
#include<conio.h>
#include<process.h> // exit() fuction
#include<string.h> // strcmp() and strcpy() function
#include<stdio.h>
#include<iomanip> //to use setw manipulator
#include<iostream>
using namespace std; // gets() and puts()
// Base class holding the shop's running profit total, shared across all
// products via a single static counter.
class earn
{
protected:
    static int profit;  // accumulated profit total (incremented by manager::output)
    int profitx;        // per-product profit, computed when a product is displayed
    int salary;         // NOTE(review): never read or written in the visible code -- confirm
public:
    // Print the accumulated monthly profit banner.
    void dis()
    {
        cout<<"\n###################################################\n\n";
        cout<<"\nTOTAL MONTHLY PROFIT: "<<profit<<"$"<<endl;
        cout<<"\n###################################################\n\n";
    }
};
// out-of-class storage for the static member (zero-initialized)
int earn::profit;
// Product record persisted in manager.dat; derives from earn so displaying
// a product can fold its profit into the shared total.
class manager:public earn // 1st class for manager for products and details
{
private:
    int pcode;                 // Product code (primary key for modify/delete/search)
    char pname[20];            // Product name
    long long int cost_price;  // Product cost price
    long long int sell_price;  // Product sell price
    int pquantity;             // Product quantity in stock
public:
    void input();      // read one product record from stdin
    void output();     // print one product record (accumulates profit)
    void readfile();   // list every record in manager.dat
    void writefile();  // append new records to manager.dat
    void modifile();   // replace a record (by product code) in manager.dat
    void deletefile(); // remove a record (by product code) from manager.dat
    void searchfile(); // look up a record in manager.dat
};
// Function 1.1 -- read one product record from standard input.
// Fix: the original `cin >> pname` extraction was unbounded and could
// overflow the 20-character pname buffer; setw limits it to 19 chars + NUL.
void manager :: input()
{
    cout<<"\nEnter the Product Code: ";
    cin>>pcode;
    cout<<"Enter the Product Name: ";
    cin>>setw(20)>>pname;  // bounded extraction into char[20]
    cout<<"Enter the cost price of Product: ";
    cin>>cost_price;
    cout<<"Enter the Sell price of Product: ";
    cin>>sell_price;
    cout<<"Enter the Quantity of Product: ";
    cin>>pquantity;
    cout<<"\n--------------------------------";
}
// Function 1.2 -- print one product record.
// Side effect: adds this product's profit into the shared earn::profit
// counter, so displaying the same record twice counts its profit twice.
void manager :: output()
{
cout<<"\n Product Code: "<<pcode;
cout<<"\n Product Name : "<<pname;
cout<<"\n Product Cost Price: "<<cost_price;
cout<<"\n Product Sell Price: "<<sell_price;
cout<<"\n Product Quantity: "<<pquantity;
profitx=(sell_price-cost_price)*pquantity;
cout<<"\n Profit on the Product: "<<profitx;
profit+=profitx;
cout<<"\n--------------------------------";
}
// Function 1.3 -- display every product record stored in manager.dat.
// Side effect: each c.output() call folds that product's profit into the
// shared earn::profit counter.
void manager :: readfile()
{
manager c;
// Bug fix: ios::app is an output-only open flag and has no meaning on an
// ifstream; it was dropped (input streams need only ios::in | ios::binary).
ifstream ifile("manager.dat",ios::binary|ios::in);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Total Products in the Shop!!\n";
while(ifile.read((char*)&c,sizeof(manager)))
{
c.output();
}
ifile.close();
}
}
// Function 1.4 -- prompt for n product records and append them to manager.dat.
void manager :: writefile()
{
int i,n;
cout<<"!!Enter total Number of Products to Insert: !!\n";
cin>>n;
ofstream ofile("manager.dat",ios::binary|ios::out|ios::app);
// One record is read and written per iteration. The previous version
// buffered all records in a variable-length array (manager c[n]), which is
// not standard C++ and needlessly held every record in memory.
for(i=0;i<n;i++)
{
manager c;
c.input();
ofile.write((char*)&c,sizeof(manager));
}
ofile.close();
}
// Function 1.5 -- replace one product record, identified by product code.
// Rewrites the whole file through temp.dat: matching records are replaced by
// the freshly entered one, all others are copied through unchanged.
// NOTE(review): when no record matches, temp.dat (a full copy) is left on
// disk; also the user must re-enter every field, not just the changed ones.
void manager :: modifile()
{
manager c1,c2;
int f=0;
ifstream ifile("manager.dat",ios::binary|ios::in);
ofstream ofile("temp.dat",ios::binary|ios::out);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Product Code That Need To Be Modified!!\n";
c1.input();
while(ifile.read((char*)&c2,sizeof(c2)))
{
if(c1.pcode==c2.pcode)
{
f=1;
ofile.write((char*)&c1,sizeof(c1));
}
else
ofile.write((char*)&c2,sizeof(c2));
}
ifile.close();
ofile.close();
if(f==1)
{
// Swap the rewritten file into place only when a record matched.
remove("manager.dat");
rename("temp.dat","manager.dat");
}
else
cout<<"!!Information Does Not Exist!!";
}
}
// Function 1.6 -- delete one product record, identified by product code.
// Copies every non-matching record into temp.dat, then swaps the files;
// the deleted record is displayed (which also perturbs earn::profit via
// output()'s side effect).
void manager :: deletefile()
{
manager c;
int f=0,PC;
ifstream ifile("manager.dat",ios::binary|ios::in);
ofstream ofile("temp.dat",ios::binary|ios::out);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Any Product Code That Need To Be Deleted!!: \n";
cin>>PC;
while(ifile.read((char*)&c,sizeof(c)))
{
if(c.pcode==PC)
{
f=1;
c.output();
}
else
ofile.write((char*)&c,sizeof(c));
}
ifile.close();
ofile.close();
if(f==1)
{
remove("manager.dat");
rename("temp.dat","manager.dat");
cout<<"\n\n!!Record Delete!!";
}
else
cout<<"!!Information Does Not Exist!!";
}
}
void manager :: searchfile() // Function 1.7
{
int PC,f=0;
manager c;
ifstream ifile("manager.dat",ios::binary|ios::in);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Any Product Code That Need To Be Seached!!: \n";
cin>>PC;
while(ifile.read((char*)&c,sizeof(c))&&(f==0))
{
if(PC==c.pcode)
{
c.output();
f=1;
}
}
ifile.close();
if(f==0)
cout<<"!!Information Does Not Exist!!";
}
}
// Employee roster operations; records are stored as raw binary structs in
// employee.dat. Inherits the shared profit counter and salary from earn.
class employee:public earn // 2nd class for employee details
{
private:
int c_code; //Employee code
char c_name[20]; //Employee name
char phnumber[20]; //Employee phone number
char address[100]; //Employee address
char doj[10]; //Employee Date of joining
//salary of employee
public:
void input(); //Adding data of employees
void output(); //displaying data of employees
void readfile(); //reading the employee.dat file
void writefile(); //writing into the employee.dat file
void modifile(); //modify the employee.dat file
void deletefile(); //delete into the employee.dat file
void searchfile(); //search onto the employee.dat file
};
// Function 1.1 -- prompt on stdin for one employee record and fill the members
// (including the inherited salary field).
// NOTE(review): cin>> on the char arrays stops at whitespace and performs no
// bounds checking; multi-word addresses will not be captured as one field.
void employee :: input()
{
cout<<"\nEnter Employee Code: ";
cin>>c_code;
cout<<"Enter Employee Name: ";
cin>>c_name;
cout<<"Enter Employee Phone Number: ";
cin>>phnumber;
cout<<"Enter Employee Address: ";
cin>>address;
cout<<"Enter D.O.J: ";
cin>>doj;
cout<<"Enter the Salary of the employee: ";
cin>>salary;
cout<<"\n--------------------------------";
}
// Function 1.2 -- print one employee record.
// Side effect: subtracts this employee's salary from the shared earn::profit
// counter, so displaying a record repeatedly lowers the reported profit
// each time.
void employee :: output()
{
cout<<"\nEmployee Code: "<<c_code;
cout<<"\nEmployee Name: "<<c_name;
cout<<"\nEmployee Phone Number: "<<phnumber;
cout<<"\nEmployee Address: "<<address;
// Bug fix: the display label was copy/pasted from input() and read
// "Enter D.O.J:" on an output screen.
cout<<"\nD.O.J: "<<doj;
cout<<"\nSalary of the employee: "<<salary;
profit=(profit-salary);
cout<<"\n--------------------------------";
}
void employee :: readfile() // Function 1.3
{
ifstream ifile("employee.dat",ios::binary|ios::in);
employee c;
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Total Number Of Employee Entered!!\n";
while(ifile.read((char*)&c,sizeof(employee)))
{
c.output();
}
ifile.close();
}
}
// Function 1.4 -- prompt for n employee records and append them to employee.dat.
void employee :: writefile()
{
int i,n;
cout<<"!!Enter total Number of Employees!!: \n";
cin>>n;
ofstream ofile("employee.dat",ios::binary|ios::out|ios::app);
// One record is read and written per iteration. The previous version used
// a non-standard variable-length array (employee c[n]) to buffer them all.
for(i=0;i<n;i++)
{
employee c;
c.input();
ofile.write((char*)&c,sizeof(employee));
}
ofile.close();
}
// Function 1.5 -- replace one employee record, identified by employee code.
// Rewrites the whole file through temp.dat: matching records are replaced by
// the freshly entered one, all others are copied through unchanged.
void employee :: modifile()
{
employee c1,c2;
int f=0;
ifstream ifile("employee.dat",ios::binary|ios::in);
ofstream ofile("temp.dat",ios::binary|ios::out);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Employee Number That Need To Be Modified!!\n";
c1.input();
while(ifile.read((char*)&c2,sizeof(c2)))
{
if(c1.c_code==c2.c_code)
{
f=1;
ofile.write((char*)&c1,sizeof(c1));
}
else
ofile.write((char*)&c2,sizeof(c2));
}
ifile.close();
ofile.close();
if(f==1)
{
remove("employee.dat");
// Bug fix: the rebuilt file was renamed to "croma.dat", which left
// employee.dat deleted and the data stranded under the wrong name.
rename("temp.dat","employee.dat");
}
else
cout<<"!!Information Does Not Exist!!";
}
}
// Function 1.6 -- delete one employee record, identified by employee code.
// Copies every non-matching record into temp.dat, then swaps the files; the
// deleted record is displayed (which also subtracts its salary from
// earn::profit via output()'s side effect).
void employee :: deletefile()
{
employee c;
int f=0,CC;
ifstream ifile("employee.dat",ios::binary|ios::in);
ofstream ofile("temp.dat",ios::binary|ios::out);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Any Employee Code That Need To Be Deleted!!: \n";
cin>>CC;
while(ifile.read((char*)&c,sizeof(employee)))
{
if(c.c_code==CC)
{
f=1;
c.output();
}
else
ofile.write((char*)&c,sizeof(employee));
}
ifile.close();
ofile.close();
if(f==1)
{
remove("employee.dat");
rename("temp.dat","employee.dat");
cout<<"\n\n!!Record Delete!!";
}
else
cout<<"!!Information Does Not Exist!!";
}
}
// Function 1.7 -- look up a single employee by code and display the first
// matching record; the (f==0) guard stops the scan after the first hit.
void employee :: searchfile()
{
int CC,f=0;
employee c;
ifstream ifile("employee.dat",ios::binary|ios::in);
if(!ifile)
cout<<"!!File Does Not Exist!!";
else
{
cout<<"!!Enter Any employee Code That Need To Be Seached!!";
cin>>CC;
while(ifile.read((char*)&c,sizeof(c))&&(f==0))
{
if(CC==c.c_code)
{
c.output();
f=1;
}
}
ifile.close();
if(f==0)
cout<<endl<<"!!Information Does Not Exist!!";
}
}
// Program entry point: splash screen, password-gated login, then nested menu
// loops for product management, employee management and profit display.
// NOTE(review): control flow relies on goto jumps into loops and on switch
// cases without break -- see inline notes before restructuring anything.
int main()
{
// manager`s object
manager c1;
// employee`s object
employee C1;
earn e1;
// f: login-success flag, i: password character index, j: unused
int choice,ch1,ch2,f=1,i=0,j=0;
char ch3[4];  // y/n answer at the login retry prompt
system("cls");
//mains screen
cout<<setw(100)<<"DEVELOPED BY \n "<<setw(97)<<"<NAME>"<<endl;
cout<<"\n\n\n\n\n\n\n\n\n\n\n\n\n"<<endl;
cout<<setw(105)<<"********************************************"<<endl;
cout<<setw(92)<<"WELCOME TO THE "<<endl;
cout<<setw(97)<< "NAV ELECTRONICS MEGASTORE"<<endl;
cout<<setw(105)<<"********************************************"<<endl;
cout<<"\n\n\n\n\n\n\n\n\nPRESS ANY KEY TO CONTINUE ";
getch(); //1st login screen
do{
system("cls");
char username[20],password[20];
cout<<"\n\n\n\n\n\t\t LOGIN ";
cout<<"\n\t\t\t USERNAME:- ";
cin>>username;
cout<<"\n\t\t\t PASSWORD:-";
// Reads exactly 6 masked characters (indices 0..5) via getch().
// NOTE(review): i is never reset on a failed attempt, so a second login
// iteration skips this loop and re-checks the stale password buffer.
while(i<=5)
{
password[i]=getch();
cout<<"*";
++i;
}
password[i]='\0';
getch();
if(strcmp(username,"Admin")==0 && strcmp(password,"<PASSWORD>")==0)
{
f=0;
goto I;  // jump straight into the main-menu loop below
}
else
{
cout<<setw(60)<<"\nINVALID USERNAME OR PASSWORD!!";
cout<<setw(60)<<"\n\n\n\n\n\nDO YOU WANT TO EXIT(y/n):- ";
cin>>ch3;
if(strcmp(ch3,"y")==0)
{
exit(0);
}
}
}while(f!=0);
getch();
system("cls");
do
{
I:  // label targeted by the login goto and the submenus' "exit" cases
system("cls");
cout<<"\n\t\t\t\t\t 1.Manager"<<endl;
cout<<"\n\t\t\t\t\t 2.EXIT"<<endl;
cout<<"\nEnter your choice: "<<endl;
cin>>choice;
switch(choice)
{
A:  // NOTE(review): unused label
case 1:
do
{
system("cls");
cout<<"\n 1.Product"<<endl;
cout<<"\n 2.Employee"<<endl;
cout<<"\n 3.Profit"<<endl;
cout<<"\n 4.Exit"<<endl;
cout<<"\nEnter your choice: "<<endl;
cin>>ch1;
switch(ch1)
{
case 1:
do
{
// 2nd menu
system("cls");
cout<<"\n\n\n\n\t\t================================";
cout<<"\n\n\n\n\t\t 1.ADD PRODUCTS";
cout<<"\n\t\t 2.DETAILS OF PRODUCTS";
cout<<"\n\t\t 3.EDIT PRODUCTS DETAILS";
cout<<"\n\t\t 4.DELETE PRODUCTS";
cout<<"\n\t\t 5.SEARCH PRODUCTS";
cout<<"\n\t\t 6.EXIT";
cout<<"\n\n\n\t\t=====================================";
cout<<"\n\t\tENTER YOUR CHOICE:-";
cin>>ch2;
system("cls");
switch(ch2)
{
case 1:
system("cls");
c1.writefile();
getch();
break;
case 2:
system("cls");
c1.readfile();
getch();
break;
case 3:
system("cls");
c1.modifile();
getch();
break;
case 4:
system("cls");
c1.deletefile();
getch();
break;
case 5:
system("cls");
c1.searchfile();
getch();
break;
case 6:
goto I;  // exit via goto; the loop condition below never sees 6
}
}while(ch2!=6);
// NOTE(review): missing break -- if the product loop ever exited
// normally, control would fall through into the employee menu.
case 2:
do
{
//3rd menu
system("cls");
cout<<"\n\n\n\t\t===============EMPLOYEE===============";
cout<<"\n\n\t\t1.ADD EMPLOYEE";
cout<<"\n\t\t2.DETAILS OF EMPLOYEE ";
cout<<"\n\t\t3.EDIT EMPLOYEE";
cout<<"\n\t\t4.DELETE EMPLOYEE";
cout<<"\n\t\t5.SEARCH EMPLOYEE";
cout<<"\n\t\t6.BACK";
cout<<"\n\n\n\t\t======================================";
cout<<"\n\t\tENTER YOUR CHOICE:-";
cin>>ch2;
system("cls");
switch(ch2)
{
case 1:
system("cls");
C1.writefile();
getch();
break;
case 2:
system("cls");
C1.readfile();
getch();
break;
case 3:
system("cls");
C1.modifile();
getch();
break;
case 4:
system("cls");
C1.deletefile();
getch();
break;
case 5:
system("cls");
C1.searchfile();
getch();
break;
case 6:
goto I;  // exit via goto, as in the product menu
}
}while(ch2!=6);
// NOTE(review): missing break here as well (falls through to case 3).
case 3:
system("cls");
e1.dis();
getch();
break;
case 4:
system("cls");
}
}while(ch1!=4);
// NOTE(review): missing break -- manager menu falls through to EXIT.
case 2:
system("cls");
cout<<setw(60)<<"END OF PROGRAM"<<endl;
cout<<setw(62)<<"BY <NAME>"<<endl;
cout<<setw(60)<<"UID: 20BCS4148"<<endl;
cout<<setw(62)<<"THANKS FOR USING"<<endl;
}
}while(choice!=2);
}
|
<filename>idseq_pipeline/commands/host_indexing_functions.py
import os
import multiprocessing
from .common import *
# FASTA files larger than this (bytes) are split into parts before STAR indexing.
MAX_STAR_PART_SIZE = 3252010122
# data directories
# from common import ROOT_DIR
DEST_DIR = ROOT_DIR + '/idseq/indexes' # generated indexes go here
# arguments from environment variables
INPUT_FASTA_S3 = os.environ.get('INPUT_FASTA_S3')
INPUT_GTF_S3 = os.environ.get('INPUT_GTF_S3')
# NOTE(review): .get() returns None when OUTPUT_PATH_S3 is unset, so .rstrip
# raises AttributeError at import time -- confirm the variable is always set.
OUTPUT_PATH_S3 = os.environ.get('OUTPUT_PATH_S3').rstrip('/')
HOST_NAME = os.environ.get('HOST_NAME')
# executables
STAR = "STAR"
BOWTIE2_BUILD = "bowtie2-build"
# output names
STAR_INDEX_OUT = 'STAR_genome.tar'
BOWTIE2_INDEX_OUT = 'bowtie2_genome.tar'
### Functions
def split_fasta(fasta_file, max_fasta_part_size):
fasta_file_list = []
part_idx = 0
current_size = 0
current_output_file_name = "%s.%d" % (fasta_file, part_idx)
current_output_file = open(current_output_file_name, 'wb')
fasta_file_list.append(current_output_file_name)
with open(fasta_file, 'rb') as input_f:
current_read = input_f.readline()
for line in input_f:
# Check if we have to switch different output fasta file
if current_size > max_fasta_part_size:
current_output_file.close()
part_idx += 1
current_size = 0
current_output_file_name = "%s.%d" % (fasta_file, part_idx)
current_output_file = open(current_output_file_name, 'wb')
fasta_file_list.append(current_output_file_name)
if line[0] == '>': # got a new read
current_output_file.write(current_read)
current_size += len(current_read)
current_read = line
else:
current_read += line
current_output_file.write(current_read)
current_output_file.close()
return fasta_file_list
def upload_star_index(result_dir, scratch_dir, star_genome_dir_name):
    """Archive the generated STAR genome dir, upload it to S3 and wipe scratch.

    Runs on a background MyThread (started by make_star_index) so the bowtie2
    index can build concurrently.
    """
    # archive
    execute_command("tar cvf %s/%s -C %s %s" %
                    (result_dir, STAR_INDEX_OUT, scratch_dir,
                     star_genome_dir_name))
    # copy to S3
    execute_command("aws s3 cp --quiet %s/%s %s/;" %
                    (result_dir, STAR_INDEX_OUT, OUTPUT_PATH_S3))
    # cleanup
    execute_command("cd %s; rm -rf *" % scratch_dir)
def make_star_index(fasta_file, gtf_file, result_dir, scratch_dir, lazy_run):
    """Build the STAR genome index (split into parts for oversized FASTAs) and
    upload the archive to S3 on a background thread.

    Returns the upload MyThread so the caller can join() it, or 1 when
    lazy_run found an existing archive.
    NOTE(review): returning the int 1 means a lazy_run caller that calls
    .join() on the result (as make_indexes does) would fail -- confirm.
    """
    if lazy_run:
        output = os.path.join(result_dir, STAR_INDEX_OUT)
        if os.path.isfile(output):
            return 1
    star_genome_dir_name = STAR_INDEX_OUT.split('.')[0]
    # star genome organization
    # STAR_genome/part-${i}, parts.txt
    fasta_file_list = []
    if os.path.getsize(fasta_file) > MAX_STAR_PART_SIZE:
        fasta_file_list = split_fasta(fasta_file, MAX_STAR_PART_SIZE)
    else:
        fasta_file_list.append(fasta_file)
    for i in range(len(fasta_file_list)):
        write_to_log("start making STAR index part %d" % i)
        gtf_command_part = ''
        # Only the first part receives the GTF annotation.
        if i == 0 and gtf_file:
            gtf_command_part = '--sjdbGTFfile %s' % gtf_file
        star_genome_part_dir = "%s/part-%d" % (star_genome_dir_name, i)
        star_command_params = [
            'cd', scratch_dir, ';', 'mkdir -p ', star_genome_part_dir, ';',
            STAR, '--runThreadN',
            str(multiprocessing.cpu_count()), '--runMode', 'genomeGenerate',
            gtf_command_part, '--genomeDir', star_genome_part_dir,
            '--genomeFastaFiles', fasta_file_list[i]
        ]
        execute_command(" ".join(star_command_params))
        write_to_log("finished making STAR index part %d " % i)
    # record # parts into parts.txt
    execute_command(" echo %d > %s/%s/parts.txt" %
                    (len(fasta_file_list), scratch_dir, star_genome_dir_name))
    # Upload asynchronously so the bowtie2 index can be built in parallel.
    t = MyThread(
        target=upload_star_index,
        args=[result_dir, scratch_dir, star_genome_dir_name])
    t.start()
    return t
def make_bowtie2_index(host_name, fasta_file, result_dir, scratch_dir,
                       lazy_run):
    """Build the bowtie2 index for fasta_file, archive it, upload to S3 and
    wipe the scratch dir. Returns 1 early when lazy_run finds the archive.
    """
    if lazy_run:
        output = os.path.join(result_dir, BOWTIE2_INDEX_OUT)
        if os.path.isfile(output):
            return 1
    bowtie2_genome_dir_name = BOWTIE2_INDEX_OUT.split('.')[0]
    # Bug fix: a missing comma after ';' made Python concatenate ';' and
    # 'mkdir' into the single token ';mkdir'. The shell happened to accept
    # "cd dir ;mkdir ...", but the list now matches the STAR variant.
    bowtie2_command_params = [
        'cd', scratch_dir, ';',
        'mkdir', bowtie2_genome_dir_name, ';', 'cd', bowtie2_genome_dir_name,
        ';', BOWTIE2_BUILD, fasta_file, host_name
    ]
    execute_command(" ".join(bowtie2_command_params))
    write_to_log("finished making bowtie2 index")
    # archive
    execute_command("tar cvf %s/%s -C %s %s" %
                    (result_dir, BOWTIE2_INDEX_OUT, scratch_dir,
                     bowtie2_genome_dir_name))
    # copy to S3
    execute_command("aws s3 cp --quiet %s/%s %s/;" %
                    (result_dir, BOWTIE2_INDEX_OUT, OUTPUT_PATH_S3))
    # cleanup
    execute_command("cd %s; rm -rf *" % scratch_dir)
def make_indexes(version, lazy_run=False):
    """Top-level driver: download the host FASTA (and optional GTF) from S3,
    build STAR and bowtie2 indexes, upload both archives and the version
    tracker file.
    """
    # Set up
    input_fasta_name = os.path.basename(INPUT_FASTA_S3)
    host_name = os.path.splitext(input_fasta_name)[0]
    host_dir = os.path.join(DEST_DIR, host_name)
    fasta_dir = os.path.join(host_dir, 'fastas')
    result_dir = os.path.join(host_dir, 'results')
    scratch_dir_star = os.path.join(host_dir, 'scratch_star')
    scratch_dir_bowtie2 = os.path.join(host_dir, 'scratch_bowtie2')
    execute_command("mkdir -p %s %s %s %s" % (host_dir, fasta_dir, result_dir,
                                              scratch_dir_star))
    execute_command("mkdir -p %s %s %s %s" % (host_dir, fasta_dir, result_dir,
                                              scratch_dir_bowtie2))
    input_gtf_local = None
    write_to_log(INPUT_GTF_S3)
    if INPUT_GTF_S3:
        input_gtf_local, _version_number = download_ref_local_with_version_any_type(
            INPUT_GTF_S3, fasta_dir, scratch_dir_star, auto_unzip=True)
    input_fasta_local, version_number = download_ref_local_with_version_any_type(
        INPUT_FASTA_S3, fasta_dir, scratch_dir_star, auto_unzip=True)
    # unzip if necessary --- this is only necessary when the data did not come from S3, and should really
    # not happen like this here --- should be a streaming unzip instead, like in the fetch function in common.py
    if os.path.splitext(input_fasta_local)[1] == ".gz":
        execute_command("gunzip -f %s" % input_fasta_local)
        input_fasta_local = os.path.splitext(input_fasta_local)[0]
    # handle lazy_run
    if lazy_run:
        # Download existing files and see what has been done
        command = "aws s3 cp --quiet %s %s --recursive" % (OUTPUT_PATH_S3,
                                                           result_dir)
        execute_command(command)
    # make STAR index
    star_upload_thread = make_star_index(input_fasta_local, input_gtf_local,
                                         result_dir, scratch_dir_star,
                                         lazy_run)
    # make bowtie2 index (runs while the STAR archive uploads on its thread)
    make_bowtie2_index(host_name, input_fasta_local, result_dir,
                       scratch_dir_bowtie2, lazy_run)
    # NOTE(review): when lazy_run short-circuits make_star_index it returns
    # the int 1, and .join() here would raise -- confirm that path is unused.
    star_upload_thread.join()
    assert not star_upload_thread.exception
    # upload version tracker file
    if not lazy_run:
        upload_version_tracker(INPUT_FASTA_S3, HOST_NAME, version_number,
                               OUTPUT_PATH_S3, version)
|
<gh_stars>0
package com.qht.biz;
import com.qht.dto.MyCollectlistDto;
import org.apache.ibatis.annotations.Param;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.github.wxiaoqi.security.common.biz.BaseBiz;
import com.qht.entity.Collect;
import com.qht.mapper.CollectMapper;
import com.qht.model.AppInsertCollectParam;
import com.qht.model.MyIndexCourseCelcollectParam;
import com.qht.model.UidAndTenantIDParam;
import java.util.List;
/**
 * Business layer for the favorites ("collect") table.
 *
 * @author yangtonggan
 * @email <EMAIL>
 * @date 2018-11-05 18:55:41
 */
@Service
public class CollectBiz extends BaseBiz<CollectMapper,Collect> {
@Autowired
private CollectMapper collectMapper;
/**
 * List the user's favorites.
 * @param uid user id
 * @param tid tenant id
 * @return the user's favorite entries
 */
public List<MyCollectlistDto> myCollectlist(String uid, String tid){
return collectMapper.myCollectlist(uid,tid);
}
/**
 * Student app - personal center - home - course detail - add favorite.
 * @param param insert parameters
 * @return number of rows inserted
 */
public Integer insertMyIndexCourseCelcollect(MyIndexCourseCelcollectParam param) {
return collectMapper.insertMyIndexCourseCelcollect(param);
}
/**
 * Add a favorite.
 * @return number of rows inserted
 */
public Integer appInsertCollect(AppInsertCollectParam param) {
return collectMapper.appInsertCollect(param);
}
/**
 * Remove (cancel) a favorite.
 * @return number of rows updated
 */
public Integer appUpdateCOllect(UidAndTenantIDParam param) {
return collectMapper.appUpdateCOllect(param);
}
/**
 * Count favorites by student id and course-package id.
 */
public Integer selectCollectCount(String stuId,String pgkId){
return collectMapper.selectCollectCount(stuId,pgkId);
}
}
package com.example.demo.config;
import lombok.Data;
import org.springframework.stereotype.Component;
@Data
@Component
// Message transfer object carrying request/response timestamps.
// NOTE(review): "massage" appears to be a typo for "message", but renaming
// would change the lombok-generated getters/setters callers rely on.
public class MassageDto {
// unique message id
private String id;
// message payload
private String massage;
// epoch millis when the request was received -- TODO confirm unit
private Long timeRequest;
// epoch millis when the response was sent -- TODO confirm unit
private Long timeResponse;
}
|
<gh_stars>1-10
/*
* Copyright 2022 <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Bug fix: identifiers beginning with a double underscore are reserved for
 * the implementation (C11 7.1.3 / C++ [lex.name]); the include guard was
 * renamed to a non-reserved form. All command/data values are unchanged. */
#ifndef RPIGPIOPP_LIB_TM1637_DEF_H
#define RPIGPIOPP_LIB_TM1637_DEF_H

// TM1637 Comamnd mode
#define TM1637_CMD_DATA 0x40    // 0b0100 ----
#define TM1637_CMD_DISPLAY 0x80 // 0b1000 ----
#define TM1637_CMD_ADDR 0xC0    // 0b1100 ----

// TM1637_CMD_DATA mode
#define TM1637_DATA_WRITE 0x00   // 0b---- --00
#define TM1637_DATA_READ 0x02    // 0b---- --10
#define TM1637_DATA_AUTOINC 0x00 // 0b---- -0--
#define TM1637_DATA_FIXADDR 0x04 // 0b---- -1--
#define TM1637_DATA_NORMAL 0x00  // 0b---- 0---
#define TM1637_DATA_TEST 0x08    // 0b---- 1---

// TM1637_CMD_DISPLAY mode
#define TM1637_DISPLAY_OFF 0x00 // 0b---- 0---
#define TM1637_DISPLAY_ON 0x08  // 0b---- 1---

// TM1637_CMD_ADDR mode (display register addresses C0H..C5H)
#define TM1637_ADDR_C0H 0x00 // 0b---- 0000
#define TM1637_ADDR_C1H 0x01 // 0b---- 0001
#define TM1637_ADDR_C2H 0x02 // 0b---- 0010
#define TM1637_ADDR_C3H 0x03 // 0b---- 0011
#define TM1637_ADDR_C4H 0x04 // 0b---- 0100
#define TM1637_ADDR_C5H 0x05 // 0b---- 0101

#define TM1637_DBIT_COLON 0x80 // colon led on if given

#endif // RPIGPIOPP_LIB_TM1637_DEF_H
|
#!/usr/bin/env bash
##
## Copyright (c) 2017-19, Lawrence Livermore National Security, LLC.
##
## Produced at the Lawrence Livermore National Laboratory.
##
## LLNL-CODE-738930
##
## All rights reserved.
##
## This file is part of the RAJA Performance Suite.
##
## For details about use and distribution, please read RAJAPerf/LICENSE.
##
# Configuration: build-directory suffix, overridable build type, and the
# ALCF SYCL host-config cache file fed to CMake via -C.
BUILD_SUFFIX=sycl
: ${BUILD_TYPE:=RelWithDebInfo}
RAJA_HOSTCONFIG=../host-configs/alcf-builds/sycl.cmake
# Start from a clean per-user build tree.
rm -rf build_${BUILD_SUFFIX}_${USER} >/dev/null
mkdir build_${BUILD_SUFFIX}_${USER} && cd build_${BUILD_SUFFIX}_${USER}
## NOTE: RAJA tests are turned off due to compilation issues.
# Configure: SYCL on, every other backend and the test/example targets off;
# extra arguments passed to this script are forwarded to cmake via "$@".
cmake \
  -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
  -C ${RAJA_HOSTCONFIG} \
  -DENABLE_OPENMP=Off \
  -DENABLE_CUDA=Off \
  -DENABLE_TARGET_OPENMP=Off \
  -DENABLE_ALL_WARNINGS=Off \
  -DENABLE_NO_LIBS=On \
  -DENABLE_SYCL=On \
  -DCMAKE_LINKER=clang++ \
  -DENABLE_TESTS=Off \
  -DENABLE_EXAMPLES=Off \
  "$@" \
  ..
# Build with two parallel jobs.
make -j 2
|
/**
 * Test whether n is a prime number.
 *
 * Trial division only needs divisors up to sqrt(n): any composite n has a
 * factor no larger than its square root, so the loop condition i*i <= n
 * turns the original O(n) scan into O(sqrt(n)).
 *
 * @param {number} n - integer to test
 * @returns {boolean} true when n is prime
 */
function isPrime(n) {
    if (n < 2) return false;
    for (let i = 2; i * i <= n; i++) {
        if (n % i === 0) {
            return false;
        }
    }
    return true;
}
// Driver: print the primality of a sample value.
const n = 7;
console.log(isPrime(n));
def animate(self, canvas: Canvas, y: int, x: int, frame: Frame, negative: bool) -> None:
    """Draw one animation frame at (y, x), inverted when negative is set."""
    render = frame.display_negative if negative else frame.display
    render(canvas, y, x)
<reponame>anticipasean/girakkafunc
package cyclops.container.immutable.impl;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import cyclops.function.companion.Comparators;
import cyclops.container.immutable.impl.base.RedBlackTree;
import cyclops.container.immutable.impl.base.redblacktree.Tree;
import org.junit.Test;
// Unit tests for the persistent red-black tree: structural invariants are
// checked against the tree() string rendering after each insert/delete.
public class RBTTest {
// Fresh empty tree using natural ordering on keys == values.
<T> Tree<T, T> empty() {
return RedBlackTree.empty(Comparators.naturalOrderIdentityComparator());
}
// Build a tree by inserting the values in order, re-blackening the root
// after every insertion (a red-black invariant).
<T> Tree<T, T> of(T... values) {
Tree<T, T> x = empty();
for (T next : values) {
x = RedBlackTree.rootIsBlack(x.plus(next,
next));
}
return x;
}
// An empty tree is a black leaf of size 0 and returns the fallback value.
@Test
public void emptyProperties() {
assertTrue(empty().isBlack());
assertTrue(empty().isEmpty());
assertTrue(empty().size() == 0);
assertThat(empty().getOrElse(1,
-1),
equalTo(-1));
}
// A single-element tree has a black root of size 1 and finds its element.
@Test
public void oneProperties() {
assertTrue(of(1).isBlack());
assertFalse(of(1).isEmpty());
assertTrue(of(1).size() == 1);
assertThat(of(1).getOrElse(1,
-1),
equalTo(1));
}
// Step through a sequence of inserts (including duplicates, which must be
// no-ops) and deletes, checking the exact rebalanced shape after each.
@Test
public void balanceCheckNew() {
Tree<Integer, Integer> tree = empty();
assertThat(tree.tree(),
equalTo("{LEAF}"));
assertThat(tree.size(),
equalTo(0));
tree = RedBlackTree.rootIsBlack(tree.plus(100,
100));
assertThat(tree.tree(),
equalTo("{BLACK:100}"));
assertThat(tree.size(),
equalTo(1));
tree = RedBlackTree.rootIsBlack(tree.plus(5,
5));
assertThat(tree.tree(),
equalTo("{BLACK:100 {RED:5}}"));
assertThat(tree.size(),
equalTo(2));
tree = RedBlackTree.rootIsBlack(tree.plus(-5,
-5));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5} {BLACK:100}}"));
assertThat(tree.size(),
equalTo(3));
tree = RedBlackTree.rootIsBlack(tree.plus(7,
7));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5} {BLACK:100 {RED:7}}}"));
assertThat(tree.size(),
equalTo(4));
tree = RedBlackTree.rootIsBlack(tree.plus(-7,
-7));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5 {RED:-7}} {BLACK:100 {RED:7}}}"));
assertThat(tree.size(),
equalTo(5));
// Duplicate insert: shape and size must not change.
tree = RedBlackTree.rootIsBlack(tree.plus(100,
100));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5 {RED:-7}} {BLACK:100 {RED:7}}}"));
assertThat(tree.size(),
equalTo(5));
tree = RedBlackTree.rootIsBlack(tree.plus(101,
101));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5 {RED:-7}} {BLACK:100 {RED:7} {RED:101}}}"));
assertThat(tree.size(),
equalTo(6));
tree = RedBlackTree.rootIsBlack(tree.plus(102,
102));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5 {RED:-7}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(7));
tree = RedBlackTree.rootIsBlack(tree.plus(-1,
-1));
assertThat(tree.tree(),
equalTo("{BLACK:5 {BLACK:-5 {RED:-7} {RED:-1}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(8));
tree = RedBlackTree.rootIsBlack(tree.plus(-2,
-2));
assertThat(tree.tree(),
equalTo("{BLACK:5 {RED:-2 {BLACK:-5 {RED:-7}} {BLACK:-1}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(9));
tree = RedBlackTree.rootIsBlack(tree.plus(-3,
-3));
assertThat(tree.tree(),
equalTo("{BLACK:5 {RED:-2 {BLACK:-5 {RED:-7} {RED:-3}} {BLACK:-1}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(10));
tree = RedBlackTree.rootIsBlack(tree.minus(-3));
assertThat(tree.tree(),
equalTo("{BLACK:5 {RED:-2 {BLACK:-5 {RED:-7}} {BLACK:-1}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(9));
// Deleting an absent key: shape and size must not change.
tree = RedBlackTree.rootIsBlack(tree.minus(-3));
assertThat(tree.tree(),
equalTo("{BLACK:5 {RED:-2 {BLACK:-5 {RED:-7}} {BLACK:-1}} {RED:101 {BLACK:100 {RED:7}} {BLACK:102}}}"));
assertThat(tree.size(),
equalTo(9));
tree = RedBlackTree.rootIsBlack(tree.minus(102));
assertThat(tree.tree(),
equalTo("{BLACK:5 {RED:-2 {BLACK:-5 {RED:-7}} {BLACK:-1}} {RED:101 {BLACK:100 {RED:7}}}}"));
assertThat(tree.size(),
equalTo(8));
}
// Monotonically increasing inserts 0..6: checks each rotation/recolor step.
@Test
public void balanceCheckTo6() {
Tree<Integer, Integer> tree = empty();
assertThat(tree.tree(),
equalTo("{LEAF}"));
assertThat(tree.size(),
equalTo(0));
tree = RedBlackTree.rootIsBlack(tree.plus(0,
0));
assertThat(tree.tree(),
equalTo("{BLACK:0}"));
assertThat(tree.size(),
equalTo(1));
tree = RedBlackTree.rootIsBlack(tree.plus(1,
1));
assertThat(tree.tree(),
equalTo("{BLACK:0 {RED:1}}"));
assertThat(tree.size(),
equalTo(2));
tree = RedBlackTree.rootIsBlack(tree.plus(2,
2));
assertThat(tree.tree(),
equalTo("{BLACK:1 {BLACK:0} {BLACK:2}}"));
assertThat(tree.size(),
equalTo(3));
tree = RedBlackTree.rootIsBlack(tree.plus(3,
3));
assertThat(tree.tree(),
equalTo("{BLACK:1 {BLACK:0} {BLACK:2 {RED:3}}}"));
assertThat(tree.size(),
equalTo(4));
tree = RedBlackTree.rootIsBlack(tree.plus(4,
4));
assertThat(tree.tree(),
equalTo("{BLACK:1 {BLACK:0} {RED:3 {BLACK:2} {BLACK:4}}}"));
assertThat(tree.size(),
equalTo(5));
tree = RedBlackTree.rootIsBlack(tree.plus(5,
5));
assertThat(tree.tree(),
equalTo("{BLACK:1 {BLACK:0} {RED:3 {BLACK:2} {BLACK:4 {RED:5}}}}"));
assertThat(tree.size(),
equalTo(6));
tree = RedBlackTree.rootIsBlack(tree.plus(6,
6));
assertThat(tree.tree(),
equalTo("{BLACK:3 {BLACK:1 {BLACK:0} {BLACK:2}} {BLACK:5 {BLACK:4} {BLACK:6}}}"));
assertThat(tree.size(),
equalTo(7));
}
}
|
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
set -euo pipefail

CUSTOM_REGISTRY_URL="http://localhost:4873"
NEW_VERSION="$(node -p "require('./packages/docusaurus/package.json').version").NEW"
CONTAINER_NAME="verdaccio"
EXTRA_OPTS=""

# -n: have the init script use npm instead of yarn
if getopts ":n" arg; then
  EXTRA_OPTS="--use-npm"
fi

# Run Docker container with private npm registry Verdaccio
docker run -d --rm --name "$CONTAINER_NAME" -p 4873:4873 -v "$PWD/admin/verdaccio.yaml":/verdaccio/conf/config.yaml verdaccio/verdaccio:latest

# Build packages
yarn build:packages

# Publish the monorepo
npx --no-install lerna publish --exact --yes --no-verify-access --no-git-reset --no-git-tag-version --no-push --registry "$CUSTOM_REGISTRY_URL" "$NEW_VERSION"

# Revert version changes
git diff --name-only -- '*.json' | sed 's, ,\\&,g' | xargs git checkout --

# Build skeleton website with new version
# ($EXTRA_OPTS is intentionally unquoted: empty expands to no argument)
npm_config_registry="$CUSTOM_REGISTRY_URL" npx @docusaurus/init@"$NEW_VERSION" init test-website classic $EXTRA_OPTS

# Stop Docker container
# Bug fix: the old check wrapped the inspect call as `( $(...) )`, which runs
# the command's (redirected-away) stdout as a command in a subshell; a plain
# exit-status test is the intended idiom.
if [[ -z "${KEEP_CONTAINER:-}" ]] && docker container inspect "$CONTAINER_NAME" > /dev/null 2>&1; then
  # Remove Docker container
  docker container stop "$CONTAINER_NAME" > /dev/null
fi

echo "The website with to-be published packages was successfully build to the $(tput setaf 2)test-website$(tput sgr 0) directory."
|
package migrate
import (
"context"
"database/sql"
"encoding/base64"
"encoding/json"
"errors"
"reflect"
"github.com/go-gorp/gorp"
"github.com/ovh/cds/engine/api/application"
"github.com/ovh/cds/engine/api/database/gorpmapping"
"github.com/ovh/cds/engine/api/project"
"github.com/ovh/cds/engine/api/secret"
"github.com/ovh/cds/sdk"
"github.com/ovh/cds/sdk/log"
)
// RefactorApplicationCrypto .
// RefactorApplicationCrypto migrates every application whose sig column is
// still NULL to the new encryption/signature scheme. Ids are collected first
// so the result set is closed before per-application transactions start; each
// application is migrated independently and failures are accumulated into a
// MultiError so one bad row does not abort the whole batch.
func RefactorApplicationCrypto(ctx context.Context, db *gorp.DbMap) error {
	query := "SELECT id FROM application WHERE sig IS NULL"
	rows, err := db.Query(query)
	if err == sql.ErrNoRows {
		return nil
	}
	if err != nil {
		return sdk.WithStack(err)
	}
	var ids []int64
	for rows.Next() {
		var id int64
		if err := rows.Scan(&id); err != nil {
			rows.Close() // nolint
			return sdk.WithStack(err)
		}
		ids = append(ids, id)
	}
	if err := rows.Close(); err != nil {
		return sdk.WithStack(err)
	}
	var mError = new(sdk.MultiError)
	for _, id := range ids {
		if err := refactorApplicationCrypto(ctx, db, id); err != nil {
			mError.Append(err)
			log.Error(ctx, "migrate.RefactorApplicationCrypto> unable to migrate application %d: %v", id, err)
		}
	}
	if mError.IsEmpty() {
		return nil
	}
	return mError
}
// refactorApplicationCrypto migrates one application inside its own
// transaction: it decrypts the legacy vcs_strategy password, rewrites the row
// through the DAO (which re-encrypts and signs it), verifies the round trip,
// then does the same for every application_deployment_strategy row.
func refactorApplicationCrypto(ctx context.Context, db *gorp.DbMap, id int64) error {
	log.Info(ctx, "migrate.refactorApplicationCrypto> application %d migration begin", id)
	tx, err := db.Begin()
	if err != nil {
		return sdk.WithStack(err)
	}
	defer tx.Rollback() // nolint
	// First part is application encryption and signature for vcs_strategy
	query := "SELECT project_id, name, vcs_strategy FROM application WHERE id = $1 AND sig IS NULL FOR UPDATE SKIP LOCKED"
	var projectID int64
	var btes []byte
	var name string
	if err := tx.QueryRow(query, id).Scan(&projectID, &name, &btes); err != nil {
		if err == sql.ErrNoRows {
			// Already migrated or locked by a concurrent worker: nothing to do.
			return nil
		}
		return sdk.WrapError(err, "unable to select and lock application %d", id)
	}
	var vcsStrategy sdk.RepositoryStrategy
	var clearPWD []byte
	if len(btes) != 0 {
		if err := json.Unmarshal(btes, &vcsStrategy); err != nil {
			return sdk.WrapError(err, "unable to unmarshal application RepositoryStrategy %d", id)
		}
		encryptedPassword, err := base64.StdEncoding.DecodeString(vcsStrategy.Password)
		if err != nil {
			return sdk.WrapError(err, "unable to decode password for application %d", id)
		}
		clearPWD, err = secret.Decrypt([]byte(encryptedPassword))
		if err != nil {
			return sdk.WrapError(err, "Unable to decrypt password for application %d", id)
		}
		vcsStrategy.Password = string(clearPWD)
	}
	var tmpApp = sdk.Application{
		ID:                 id,
		Name:               name,
		ProjectID:          projectID,
		RepositoryStrategy: vcsStrategy,
	}
	// We are faking the DAO layer with updating only the name to perform updating of the encrypted columns and signature
	var vcsStrategyColFilter = func(col *gorp.ColumnMap) bool {
		return col.ColumnName == "name"
	}
	if err := application.UpdateColumns(tx, &tmpApp, vcsStrategyColFilter); err != nil {
		return sdk.WrapError(err, "Unable to update application %d", id)
	}
	// No it is time to validate by loading from the DAO
	app, err := application.LoadByIDWithClearVCSStrategyPassword(tx, id)
	if err != nil {
		return sdk.WrapError(err, "Unable to reload application %d", id)
	}
	if app.RepositoryStrategy.Password != string(clearPWD) {
		return sdk.WrapError(errors.New("verification error"), "Application %d migration failure", id)
	}
	// Second part is application_deployment_strategy
	deploymentStragegies, err := loadApplicationDeploymentStrategies(tx, id)
	if err != nil {
		return sdk.WrapError(err, "unable to load application_deployment_strategy for application %d", id)
	}
	proj, err := project.LoadByID(tx, projectID, project.LoadOptions.WithIntegrations)
	if err != nil {
		return sdk.WrapError(err, "unable to load project %d", projectID)
	}
	for pfName := range deploymentStragegies {
		var pf *sdk.ProjectIntegration
		for i := range proj.Integrations {
			if proj.Integrations[i].Name == pfName {
				pf = &proj.Integrations[i]
				break
			}
		}
		// Bug fix: pf stays nil when the project no longer has an
		// integration with this name; dereferencing pf.Model.ID below
		// would panic. Fail this application's migration instead.
		if pf == nil {
			return sdk.WrapError(errors.New("integration not found"), "unable to find integration %s on project %d for application %d", pfName, projectID, id)
		}
		if err := application.SetDeploymentStrategy(tx, proj.ID, id, pf.Model.ID, pfName, deploymentStragegies[pfName]); err != nil {
			return sdk.WrapError(err, "unable to set deployment strategy")
		}
	}
	// Reload all the things to check all deployments strategies
	app, err = application.LoadByID(tx, id, application.LoadOptions.WithClearDeploymentStrategies)
	if err != nil {
		return err
	}
	if !reflect.DeepEqual(deploymentStragegies, app.DeploymentStrategies) {
		log.Debug("expected: %+v", deploymentStragegies)
		log.Debug("actual: %+v", app.DeploymentStrategies)
		// Bug fix: err is nil at this point (LoadByID succeeded), so the
		// previous sdk.WrapError(err, ...) returned nil and the mismatch
		// was silently ignored.
		return sdk.WrapError(errors.New("deployment strategies are not equals..."), "application %d migration failure", id)
	}
	log.Info(ctx, "migrate.refactorApplicationCrypto> application %d migration end", id)
	if err := tx.Commit(); err != nil {
		return sdk.WithStack(err)
	}
	return nil
}
// loadApplicationDeploymentStrategies returns, for the given application,
// every deployment strategy keyed by its project integration name.
// Password-typed entries are stored base64-encoded and encrypted, so each
// one is decoded and decrypted before being returned; every other entry is
// copied through unchanged.
func loadApplicationDeploymentStrategies(db gorp.SqlExecutor, appID int64) (map[string]sdk.IntegrationConfig, error) {
	const query = `SELECT project_integration.name, application_deployment_strategy.config
	FROM application_deployment_strategy
	JOIN project_integration ON project_integration.id = application_deployment_strategy.project_integration_id
	JOIN integration_model ON integration_model.id = project_integration.integration_model_id
	WHERE application_deployment_strategy.application_id = $1`

	var rows []struct {
		Name   string         `db:"name"`
		Config sql.NullString `db:"config"`
	}
	if _, err := db.Select(&rows, query, appID); err != nil {
		return nil, sdk.WrapError(err, "unable to load deployment strategies")
	}

	strategies := make(map[string]sdk.IntegrationConfig, len(rows))
	for _, row := range rows {
		raw := sdk.IntegrationConfig{}
		if err := gorpmapping.JSONNullString(row.Config, &raw); err != nil {
			return nil, sdk.WrapError(err, "unable to parse config")
		}
		// Rebuild the config, replacing each encrypted password value with
		// its clear-text equivalent.
		clearCfg := sdk.IntegrationConfig{}
		for key, value := range raw {
			if value.Type != sdk.IntegrationConfigTypePassword {
				clearCfg[key] = value
				continue
			}
			encrypted, err := base64.StdEncoding.DecodeString(value.Value)
			if err != nil {
				return nil, sdk.WrapError(err, "unable to decode encrypted value")
			}
			decrypted, err := secret.Decrypt(encrypted)
			if err != nil {
				return nil, sdk.WrapError(err, "unable to decrypt secret value")
			}
			clearCfg[key] = sdk.IntegrationConfigValue{
				Type:  sdk.IntegrationConfigTypePassword,
				Value: string(decrypted),
			}
		}
		strategies[row.Name] = clearCfg
	}
	return strategies, nil
}
|
#! /bin/bash
# If a Selenium hub already answers on localhost:4444 and reports itself
# ready, just say so; otherwise launch a standalone Chrome container in
# the background.
if [ "$(curl localhost:4444/wd/hub/status | ../../bin/json.js value.ready 2> /dev/null)" == "true" ]; then
  echo "ready"
else
  echo "starting"
  docker run -d -p 4444:4444 --shm-size=2g selenium/standalone-chrome:3.8.1-erbium
fi
|
package io.opensphere.controlpanels.layers.tagmanager;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.util.List;
import javax.imageio.ImageIO;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.DefaultListModel;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.border.TitledBorder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import io.opensphere.controlpanels.layers.tagmanager.TagManagerController.TagManagerControllerListener;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.image.IconUtil;
import io.opensphere.core.util.image.IconUtil.IconType;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
/**
 * Panel that lets the user view, add, and delete the tags attached to a
 * {@link DataGroupInfo} or a {@link DataTypeInfo}. All tag reads/writes go
 * through a {@link TagManagerController}; this panel re-renders whenever the
 * controller reports a change.
 */
public class TagManagerPanel extends JPanel implements TagManagerControllerListener
{
    /** serialVersionUID. */
    private static final long serialVersionUID = 1L;
    /** Button that opens the tag-creation dialog. */
    private JButton myAddButton;
    /** The controller that owns the tag model for the group/type. */
    private final transient TagManagerController myController;
    /** Button that deletes the currently selected tags. */
    private JButton myDeleteButton;
    /** The data group being tagged; null when tagging a data type. */
    private final transient DataGroupInfo myDGI;
    /** The data type being tagged; null when tagging a data group. */
    private final transient DataTypeInfo myDTI;
    /** The list showing the current tags. */
    private JList<String> myTagList;
    /** Scroll pane wrapping {@link #myTagList}. */
    private JScrollPane myTagScrollPane;
    /** Container box for the tag list and its add/delete buttons. */
    private Box myTagsPanel;
    /** The toolbox. */
    private final transient Toolbox myToolbox;
    /**
     * Instantiates a new tag manager for a {@link DataGroupInfo}.
     *
     * @param tb the {@link Toolbox}
     * @param dgi the {@link DataGroupInfo}
     */
    @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
    public TagManagerPanel(Toolbox tb, DataGroupInfo dgi)
    {
        this(tb, dgi, null);
    }
    /**
     * Instantiates a new tag manager panel. Exactly one of dgi/dti is
     * expected to be non-null; it determines which controller flavor is
     * created and whose tags are shown.
     *
     * @param tb the {@link Toolbox}
     * @param dgi the dgi
     * @param dti the dti
     */
    @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
    public TagManagerPanel(Toolbox tb, DataGroupInfo dgi, DataTypeInfo dti)
    {
        super();
        myToolbox = tb;
        myDTI = dti;
        myDGI = dgi;
        // The group takes precedence when both are supplied.
        myController = dgi != null ? new TagManagerController(myToolbox, dgi) : new TagManagerController(tb, dti);
        setBorder(BorderFactory.createEmptyBorder(10, 5, 5, 5));
        myController.addListener(this);
        setLayout(new BorderLayout());
        // Header: the display name of the item being tagged, centered.
        Box labelBox = Box.createVerticalBox();
        JLabel name = new JLabel(getTagItemName());
        name.setFont(name.getFont().deriveFont(Font.BOLD, name.getFont().getSize() + 2));
        Box nameBox = Box.createHorizontalBox();
        nameBox.add(Box.createHorizontalGlue());
        nameBox.add(name);
        nameBox.add(Box.createHorizontalGlue());
        labelBox.add(nameBox);
        labelBox.add(Box.createVerticalStrut(5));
        Box topBox = Box.createHorizontalBox();
        topBox.add(getTagsPanel());
        add(labelBox, BorderLayout.NORTH);
        add(topBox, BorderLayout.CENTER);
        // Populate the list immediately; delete stays disabled until a
        // selection exists.
        tagsChanged();
        getDeleteTagButton().setEnabled(false);
        setPreferredSize(new Dimension(350, 300));
        setMinimumSize(new Dimension(350, 300));
    }
    /**
     * Instantiates a new tag manager for a {@link DataTypeInfo}.
     *
     * @param tb the {@link Toolbox}
     * @param dti the {@link DataTypeInfo}
     */
    @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
    public TagManagerPanel(Toolbox tb, DataTypeInfo dti)
    {
        this(tb, null, dti);
    }
    /**
     * Lazily creates and returns the button that opens the tag-creation
     * dialog for the current data group or data type.
     *
     * @return the add-tag button
     */
    public JButton getAddBookmarkButton()
    {
        if (myAddButton == null)
        {
            myAddButton = new JButton();
            IconUtil.setIcons(myAddButton, IconType.PLUS, Color.GREEN);
            myAddButton.setToolTipText("Create a new tag.");
            myAddButton.setMargin(new Insets(3, 3, 3, 3));
            myAddButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    // Open whichever dialog matches the tagged item kind.
                    if (myDGI != null)
                    {
                        TagUtility.showTagDataGroupDialog(TagManagerPanel.this, myDGI, myController);
                    }
                    else
                    {
                        TagUtility.showTagDataTypeDialog(TagManagerPanel.this, myDTI, myController);
                    }
                }
            });
        }
        return myAddButton;
    }
    /**
     * Lazily creates and returns the scroll pane wrapping the tag list.
     *
     * @return the tag scroll pane
     */
    public JScrollPane getBookmarkScrollPane()
    {
        if (myTagScrollPane == null)
        {
            myTagScrollPane = new JScrollPane(getTagList());
        }
        return myTagScrollPane;
    }
    /**
     * Lazily creates and returns the button that deletes the selected tags.
     * Falls back to a plain "Delete" text button when the icon resources
     * cannot be loaded.
     *
     * @return the delete-tag button
     */
    public JButton getDeleteTagButton()
    {
        if (myDeleteButton == null)
        {
            myDeleteButton = new JButton();
            myDeleteButton.setToolTipText("Delete the selected tags(s)");
            try
            {
                ImageIcon normal = new ImageIcon(ImageIO.read(TagManagerPanel.class.getResource("/images/minus_big.png")));
                ImageIcon over = new ImageIcon(ImageIO.read(TagManagerPanel.class.getResource("/images/minus_big_over.png")));
                ImageIcon press = new ImageIcon(ImageIO.read(TagManagerPanel.class.getResource("/images/minus_big_press.png")));
                myDeleteButton.setIcon(normal);
                myDeleteButton.setRolloverIcon(over);
                myDeleteButton.setPressedIcon(press);
                myDeleteButton.setMargin(new Insets(3, 3, 2, 3));
            }
            catch (IOException e)
            {
                // Icons unavailable: degrade to a text-only button.
                myDeleteButton.setText("Delete");
            }
            myDeleteButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    List<String> values = getTagList().getSelectedValuesList();
                    for (String tagName : values)
                    {
                        myController.deleteTag(tagName);
                    }
                }
            });
        }
        return myDeleteButton;
    }
    /**
     * Lazily creates and returns the tag list. Selection changes enable the
     * delete button whenever at least one tag is selected.
     *
     * @return the tag list
     */
    public JList<String> getTagList()
    {
        if (myTagList == null)
        {
            myTagList = new JList<>();
            myTagList.getSelectionModel().addListSelectionListener(new ListSelectionListener()
            {
                @Override
                public void valueChanged(ListSelectionEvent e)
                {
                    if (!e.getValueIsAdjusting())
                    {
                        int[] selectedIndexes = myTagList.getSelectedIndices();
                        int count = selectedIndexes.length;
                        getDeleteTagButton().setEnabled(count >= 1);
                    }
                }
            });
        }
        return myTagList;
    }
    /**
     * Lazily creates and returns the titled "Tags" panel holding the tag
     * list plus the add/delete button row.
     *
     * @return the tags panel
     */
    public Box getTagsPanel()
    {
        if (myTagsPanel == null)
        {
            myTagsPanel = Box.createVerticalBox();
            myTagsPanel.setBorder(new TitledBorder("Tags"));
            myTagsPanel.setMinimumSize(new Dimension(0, 250));
            myTagsPanel.setPreferredSize(new Dimension(0, 250));
            myTagsPanel.add(Box.createVerticalStrut(3));
            myTagsPanel.add(getBookmarkScrollPane());
            myTagsPanel.add(Box.createVerticalStrut(3));
            Box buttonBox = Box.createHorizontalBox();
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getAddBookmarkButton());
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getDeleteTagButton());
            buttonBox.add(Box.createHorizontalGlue());
            myTagsPanel.add(buttonBox);
        }
        return myTagsPanel;
    }
    @Override
    public void tagsChanged()
    {
        // Rebuild the list model from the controller on the EDT, preserving
        // a single selection across the rebuild when possible.
        EventQueueUtilities.runOnEDT(new Runnable()
        {
            @Override
            public void run()
            {
                Object lastSelectedBookmark = null;
                List<String> values = getTagList().getSelectedValuesList();
                if (values.size() == 1)
                {
                    lastSelectedBookmark = values.get(0);
                }
                List<String> setNames = myController.getTags();
                DefaultListModel<String> model = new DefaultListModel<>();
                if (setNames != null && !setNames.isEmpty())
                {
                    model.ensureCapacity(setNames.size());
                    for (String name : setNames)
                    {
                        model.addElement(name);
                    }
                }
                getTagList().setModel(model);
                if (lastSelectedBookmark != null)
                {
                    getTagList().setSelectedValue(lastSelectedBookmark, true);
                }
            }
        });
    }
    /**
     * Gets the display name of the item being tagged (the group name, else
     * the type name, else a placeholder).
     *
     * @return the tag item name
     */
    private String getTagItemName()
    {
        if (myDGI != null)
        {
            return myDGI.getDisplayName();
        }
        else if (myDTI != null)
        {
            return myDTI.getDisplayName();
        }
        else
        {
            return "UNKNOWN";
        }
    }
}
|
<reponame>pinnackl/paper-paypal-component<filename>app/server.js<gh_stars>1-10
// Express server wiring for the paper-paypal demo component.
var paypal = require('../modules/paypal/index');
var express = require('express');
var bodyParser = require('body-parser');
var app = express();
var http = require('http').Server(app);
app.use(bodyParser());
// Static asset mounts.
app.use("/src", express.static(__dirname + '/src'));
app.use("/css", express.static(__dirname + '/css'));
app.use("/js", express.static(__dirname + '/js'));
app.use("/images", express.static(__dirname + '/images'));
app.use("/bower_components", express.static(__dirname + '/bower_components'));
// BUG FIX: paypal.init used to be called inside the GET '/' handler, so the
// paypal module re-initialized against the app on every page load. Initialize
// it exactly once at startup instead.
paypal.init(app, __dirname);
app.get('/', function(req, res) {
    res.sendFile(__dirname + '/index.html');
});
http.listen(8001, function() {
    console.log('listening on *:8001');
});
import numpy as np

# Per-profile sample rows; rows may have differing lengths and only rows
# with exactly `time_steps` samples are kept.
data = [[1, 2, 3], [4, 5, 6, 7], [8, 9, 10], [11, 12, 13]]
time_steps = 3
names = ["Profile1", "Profile2", "Profile3", "Profile4"]

profiles = []   # rows that match the expected length
totals = []     # sum of each kept row
averages = []   # mean of each kept row

for index, profile in enumerate(data):
    if len(profile) == time_steps:
        profiles.append(profile)
        totals.append(np.sum(profile))
        averages.append(np.mean(profile))
        # BUG FIX: the name must come from the row's position in `data`
        # (enumerate index), not from a counter of kept rows — otherwise a
        # skipped row shifts every following name (e.g. Profile3's data was
        # previously labelled Profile2).
        print("Writing: ", names[index] + ".csv")
        # NOTE(review): nothing is actually written to disk here, only the
        # filename is printed — confirm whether a CSV write was intended.

print("Filtered Profiles:", profiles)
print("Totals:", totals)
print("Averages:", averages)
package io.github.rcarlosdasilva.weixin.model.response.menu.bean.complate;
import java.util.List;
import com.google.gson.annotations.SerializedName;
/**
 * Wrapper for the {@code "list"} element of a menu API response, holding the
 * multimedia entries it contains. Populated by Gson via the
 * {@link SerializedName} binding below.
 */
public class MediaCollection {
  /** Deserialized from the JSON field named "list". */
  @SerializedName("list")
  private List<Media> media;
  /**
   * The multimedia list.
   *
   * @return list of {@link Media}
   */
  public List<Media> getMedia() {
    return media;
  }
}
|
"""Train a random-forest classifier predicting customer likelihood of return."""
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

# Read in the raw customer data.
data = pd.read_csv('customer_data.csv')

# Features are every column except the target.
X = data.drop(['likelihood_of_return'], axis=1)
# BUG FIX: select the target with single brackets so y is a 1-D Series, not a
# one-column DataFrame — sklearn estimators expect 1-D y and otherwise emit a
# DataConversionWarning and ravel it internally.
y = data['likelihood_of_return']

# Hold out 20% of rows for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the model.
clf = RandomForestClassifier(n_estimators=100, random_state=42)
clf.fit(X_train, y_train)

# Make predictions on the held-out test set.
y_pred = clf.predict(X_test)
const { get } = require("../request");
const cheerio = require("cheerio");

// Path-matching behavior of the static file server.
test("Html file in root folder", async () => {
  const data = await get("/heading");
  const $ = cheerio.load(data);
  expect($("h1").text().trim()).toBe("title");
});

test("Txt file in root folder", async () => {
  const data = await get("/text.txt");
  expect(data).toBe("text content");
});

test("Hidden files are hidden", async () => {
  const data = await get("/_hidden/config.txt");
  // The response must exist and must not leak the hidden file's contents.
  expect(data && data.indexOf("secret") === -1).toBe(true);
});
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { QualityRoutingModule } from './quality-routing.module';
import { AddEditQualityComponent } from './add-edit-quality/add-edit-quality.component';
import { QualityComponent } from './quality.component';
import { FormsModule } from '@angular/forms';
import { Ng2SmartTableModule } from 'ng2-smart-table';
import { AgGridModule } from 'ag-grid-angular';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { ViewQualityComponent, MyLinkRendererComponent } from './view-quality/view-quality.component';
import { NgxDatatableModule } from '@swimlane/ngx-datatable';
import { NgSelectModule } from '@ng-select/ng-select';
import { ThemeModule } from '../../@theme/theme.module';
/**
 * Feature module for the Quality section: bundles the quality list,
 * add/edit and view screens together with the table/grid libraries
 * (ag-Grid, ng2-smart-table, ngx-datatable) they render with.
 */
@NgModule({
imports: [
CommonModule,
FormsModule,
AgGridModule,
NgxDatatableModule,
NgSelectModule,
ThemeModule,
QualityRoutingModule,
NgbModule,
Ng2SmartTableModule
],
declarations: [AddEditQualityComponent, QualityComponent,MyLinkRendererComponent, ViewQualityComponent],
// MyLinkRendererComponent is instantiated dynamically by ag-Grid, so it
// must be registered as an entry component.
entryComponents: [MyLinkRendererComponent]
})
export class QualityModule { }
|
<reponame>day20180721/Guli
package com.littlejenny.gulimall.product.app;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.littlejenny.common.validgroup.AddGroup;
import com.littlejenny.common.validgroup.UpdateGroup;
import com.littlejenny.common.validgroup.UpdateStatusGroup;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import com.littlejenny.gulimall.product.entity.BrandEntity;
import com.littlejenny.gulimall.product.service.BrandService;
import com.littlejenny.common.utils.PageUtils;
import com.littlejenny.common.utils.R;
import javax.validation.Valid;
/**
 * Brand management endpoints.
 *
 * @author littlejenny
 * @email <EMAIL>
 * @date 2021-07-16 16:20:50
 */
@RestController
@RequestMapping("product/brand")
public class BrandController {
    /*
     Enables the MyBatis pagination plugin.
     Enables fuzzy (LIKE) querying.
    */
    @Autowired
    private BrandService brandService;
    /**
     * Paged brand list; paging/filter parameters arrive as a raw map.
     */
    @RequestMapping("/list")
    public R list(@RequestParam Map<String, Object> params){
        PageUtils page = brandService.queryPage(params);
        return R.ok().put("page", page);
    }
    /** Batch lookup: returns the brand entities for the given ids. */
    @GetMapping("/infos")
    public R infos(@RequestParam List<Long> brandIds){
        List<BrandEntity> brand = brandService.getbyIds(brandIds);
        return R.ok().put("brand", brand);
    }
    /**
     * Single brand info.
     */
    @RequestMapping("/info/{brandId}")
    public R info(@PathVariable("brandId") Long brandId){
        BrandEntity brand = brandService.getById(brandId);
        return R.ok().put("brand", brand);
    }
    /**
     * Save a new brand.
     * Option 1: declare a BindingResult right after the bean being validated
     * to receive the validation outcome; without it Spring replies with a
     * full-page 400 error, with it the errors can be handled manually (see
     * the commented-out code below).
     * Option 2: handle validation failures centrally in a shared exception
     * handler, located at exception.GulimallExceptionControllerAdvice.
     */
    @RequestMapping("/save")
    public R save(@Validated(value={AddGroup.class}) @Valid @RequestBody BrandEntity brand/*, BindingResult result*/){
//        if(result.hasErrors()){
//            Map errorField = new HashMap();
//            result.getFieldErrors().forEach((item)->{
//                // get the error message
//                String message = item.getDefaultMessage();
//                // get the name of the offending field
//                String field = item.getField();
//                errorField.put(field,message);
//            });
//            return R.error(400,"提交的數據不合法").put("data",errorField);
//        }else {
//
//        }
        brandService.save(brand);
        return R.ok();
    }
    /**
     * Update an existing brand.
     */
    @RequestMapping("/update")
    public R update(@Validated(value={UpdateGroup.class}) @Valid @RequestBody BrandEntity brand){
        // If only the ID were passed in, "UPDATE pms_brand WHERE brand_id=?"
        // would be a SQL syntax error because there is nothing to SET.
        // An update must also keep the name in categoryBrandRelation in sync,
        // hence updateDetailByID instead of updateById.
//        brandService.updateById(brand);
        brandService.updateDetailByID(brand);
        return R.ok();
    }
    /** Update only the brand's status field (own validation group). */
    @RequestMapping("/update/status")
    public R updateStatus(@Validated(value={UpdateStatusGroup.class}) @Valid @RequestBody BrandEntity brand){
        brandService.updateById(brand);
        return R.ok();
    }
    /**
     * Delete the given brand ids.
     */
    @RequestMapping("/delete")
    public R delete(@RequestBody Long[] brandIds){
        brandService.removeByIds(Arrays.asList(brandIds));
        return R.ok();
    }
}
|
/*!
 * Clustery.js — adapted from Clusterize.js.
 * Clusterize.js is DOM based: `rows` takes an array of list items, or the
 * library parses the existing DOM structure on its own.
 * Clustery.js is data based: `rows` must be an array; instead of mutating
 * the DOM it reports (via the shouldUpdate callback) the slice of the data
 * the caller should render, plus top/bottom pixel offsets.
 *
 * @author darkzone
 */
/*! Clusterize.js - v0.16.1 - 2016-08-16
 * http://NeXTs.github.com/Clusterize.js/
 * Copyright (c) 2015 <NAME>; Licensed GPLv3 */
;(function(name, definition) {
    // UMD-style export: CommonJS, AMD, or a global.
    if (typeof module != 'undefined') module.exports = definition();
    else if (typeof define == 'function' && typeof define.amd == 'object') define(definition);
    else this[name] = definition();
}('Clustery', function() {
  "use strict"
  var is_mac = navigator.platform.toLowerCase().indexOf('mac') + 1;
  var Clustery = function(data) {
    // Allow calling without `new`.
    if( ! (this instanceof Clustery))
      return new Clustery(data);
    var self = this;
    var defaults = {
      item_height: 0, // height of a single list item, in px
      rows_in_block: 0, // number of rows that make up one block
      block_height: 0, // height of one block: item_height * rows_in_block
      blocks_in_cluster: 4, // number of blocks per cluster
      rows_in_cluster: 0, // rows per cluster: blocks_in_cluster * rows_in_block
      cluster_height: 0, // height of one cluster: block_height * blocks_in_cluster
      scroll_top: 0
    }
    if(! isArray(data.rows))
      throw new Error("Error! rows is not an Array.");
    if(!data.item_height || data.item_height <= 0) {
      throw new Error("Error! item_height is invalid.");
    }
    // Merge caller-supplied options over the defaults.
    self.options = {};
    var options = ['item_height', 'rows_in_block', 'blocks_in_cluster', 'callbacks'];
    for(var i = 0, option; option = options[i]; i++) {
      self.options[option] = typeof data[option] != 'undefined' && data[option] != null
        ? data[option]
        : defaults[option];
    }
    // Resolve the scroll container and content element, by id or direct ref.
    var elems = ['scroll', 'content'];
    for(var i = 0, elem; elem = elems[i]; i++) {
      self[elem + '_elem'] = data[elem + 'Id']
        ? document.getElementById(data[elem + 'Id'])
        : data[elem + 'Elem'];
      if( ! self[elem + '_elem'])
        throw new Error("Error! Could not find " + elem + " element");
    }
    // tabindex forces the browser to keep focus on the scrolling list, fixes #11
    if( ! self.content_elem.hasAttribute('tabindex'))
      self.content_elem.setAttribute('tabindex', 0);
    var rows = data.rows,
        cache = {start: 0, end: 0, bottom: 0},
        scroll_top = self.scroll_elem.scrollTop;
    // get row height
    self.exploreEnvironment(rows);
    // append initial data
    self.notifyData(rows, cache);
    // restore the scroll position
    self.scroll_elem.scrollTop = scroll_top;
    // adding scroll handler
    var last_cluster = false,
        scroll_debounce = 0,
        pointer_events_set = false,
        scrollEv = function() {
          // fixes scrolling issue on Mac #3
          if (is_mac) {
            if( ! pointer_events_set) self.content_elem.style.pointerEvents = 'none';
            pointer_events_set = true;
            clearTimeout(scroll_debounce);
            scroll_debounce = setTimeout(function () {
              self.content_elem.style.pointerEvents = 'auto';
              pointer_events_set = false;
            }, 50);
          }
          // Only notify when the visible cluster actually changed.
          if (last_cluster != (last_cluster = self.getClusterNum()))
            self.notifyData(rows, cache);
        },
        resize_debounce = 0,
        resizeEv = function() {
          clearTimeout(resize_debounce);
          // NOTE(review): this invokes self.refresh with NO argument, yet
          // refresh unconditionally assigns its parameter to
          // options.item_height — on a resize this would set item_height to
          // undefined. Confirm whether refresh should keep the old height
          // when called without an argument.
          resize_debounce = setTimeout(self.refresh, 100);
        }
    on('scroll', self.scroll_elem, scrollEv);
    on('resize', window, resizeEv);
    // public methods
    // Replace the backing data; resets scroll to top when the new data is
    // shorter than the current scroll position (fixes #39).
    self.update = function(new_rows) {
      rows = isArray(new_rows)
        ? new_rows
        : [];
      var scroll_top = self.scroll_elem.scrollTop;
      // fixes #39
      if(rows.length * self.options.item_height < scroll_top) {
        self.scroll_elem.scrollTop = 0;
        last_cluster = 0;
      }
      self.notifyData(rows, cache);
      self.scroll_elem.scrollTop = scroll_top;
    }
    // Detach listeners. When `clean` is falsy, emit one final update that
    // exposes the full row range so the caller can render the whole list.
    self.destroy = function(clean) {
      off('scroll', self.scroll_elem, scrollEv);
      off('resize', window, resizeEv);
      if(!clean) {
        var callbacks = this.options.callbacks;
        callbacks.shouldUpdate({
          top_offset: 0,
          bottom_offset: 0,
          rows_above: 0,
          start: 0,
          end: rows.length
        });
      }
    }
    // Recompute geometry for a new item height and re-emit the current slice.
    self.refresh = function(item_height) {
      self.options.item_height = item_height;
      self.getRowsHeight(rows) && self.update(rows);
    }
  }
  Clustery.prototype = {
    constructor: Clustery,
    // calc cluster height
    exploreEnvironment: function(rows) {
      this.getRowsHeight(rows);
    },
    // Derive block/cluster geometry from item_height; returns true when a
    // cluster height was computed (falsy when rows is empty).
    getRowsHeight: function(rows) {
      var opts = this.options;
      opts.cluster_height = 0;
      if( ! rows.length) return;
      if(!opts.rows_in_block) {
        // Fit at least one viewport worth of rows into a block.
        opts.rows_in_block = Math.ceil(this.scroll_elem.offsetHeight / opts.item_height);
      }
      if(!opts.rows_in_block) {
        // Fallback when the scroll element has no measurable height yet.
        opts.rows_in_block = 20;
      }
      opts.block_height = opts.item_height * opts.rows_in_block;
      opts.rows_in_cluster = opts.blocks_in_cluster * opts.rows_in_block;
      opts.cluster_height = opts.blocks_in_cluster * opts.block_height;
      return true
    },
    // get current cluster number
    getClusterNum: function () {
      this.options.scroll_top = this.scroll_elem.scrollTop;
      return Math.floor(this.options.scroll_top / (this.options.cluster_height - this.options.block_height)) || 0;
    },
    // generate cluster for current scroll position
    generate: function (rows, cluster_num) {
      var opts = this.options,
          rows_len = rows.length;
      // Short lists fit in a single block: expose everything, no offsets.
      if (rows_len < opts.rows_in_block) {
        return {
          top_offset: 0,
          bottom_offset: 0,
          start: 0,
          end: rows_len
        }
      }
      if( ! opts.cluster_height) {
        this.exploreEnvironment(rows);
      }
      // Clusters overlap by one block so scrolling between them is seamless.
      var items_start = Math.max((opts.rows_in_cluster - opts.rows_in_block) * cluster_num, 0),
          items_end = items_start + opts.rows_in_cluster,
          top_offset = Math.max(items_start * opts.item_height, 0),
          bottom_offset = Math.max((rows_len - items_end) * opts.item_height, 0);
      return {
        top_offset: top_offset,
        bottom_offset: bottom_offset,
        start: items_start,
        end: items_end
      }
    },
    // if necessary verify data changed and notify to user
    notifyData: function(rows, cache) {
      // NOTE(review): both start and end are cached under the same 'data'
      // key, so the start comparison is effectively made against the
      // previously cached end value — confirm whether separate cache keys
      // were intended here.
      var data = this.generate(rows, this.getClusterNum()),
          this_cluster_start_changed = this.checkChanges('data', data.start, cache),
          this_cluster_end_changed = this.checkChanges('data', data.end, cache),
          only_bottom_offset_changed = this.checkChanges('bottom', data.bottom_offset, cache),
          callbacks = this.options.callbacks;
      if(this_cluster_start_changed || this_cluster_end_changed) {
        callbacks.shouldUpdate(data);
      } else if(only_bottom_offset_changed) {
        // Only the tail padding moved; pass just the new bottom offset.
        callbacks.shouldUpdate(data.bottom_offset)
      }
    },
    // Store `value` under cache[type]; report whether it differed.
    checkChanges: function(type, value, cache) {
      var changed = value != cache[type];
      cache[type] = value;
      return changed;
    }
  }
  // support functions
  function on(evt, element, fnc) {
    return element.addEventListener ? element.addEventListener(evt, fnc, false) : element.attachEvent("on" + evt, fnc);
  }
  function off(evt, element, fnc) {
    return element.removeEventListener ? element.removeEventListener(evt, fnc, false) : element.detachEvent("on" + evt, fnc);
  }
  function isArray(arr) {
    return Object.prototype.toString.call(arr) === '[object Array]';
  }
  function getStyle(prop, elem) {
    return window.getComputedStyle ? window.getComputedStyle(elem)[prop] : elem.currentStyle[prop];
  }
  return Clustery
}))
'use strict';
describe('commons-filters-spec:', function () {
    // Register the modules under test before each spec.
    beforeEach(angular.mock.module('users.commons.filters'));
    beforeEach(angular.mock.module('commons.labels.filters'));
    beforeEach(angular.mock.module('pascalprecht.translate'));

    // Shared fixture builders so each spec stays declarative.
    function sampleUser() {
        return {
            id: 'user-123',
            first_name: 'Zbigniew',
            last_name: 'Religa',
            email: '<EMAIL>',
            role: 'doctor',
            degree: 'prof'
        };
    }

    function sampleDate() {
        return Date.today().set({
            year: 2015,
            month: 0,
            day: 5,
            hour: 8,
            minute: 30,
            second: 0
        });
    }

    describe('userInfo-spec:', function () {
        it('should get generic info about user', inject(function ($filter) {
            var info = $filter('userInfo')(sampleUser());
            expect(info).toEqual('Religa, Zbigniew');
        }));
        it('should get generic info about user with e-mail', inject(function ($filter) {
            var info = $filter('userInfo')(sampleUser(), undefined, true);
            expect(info).toEqual('<NAME> (<EMAIL>)');
        }));
        it('should get detail info about user', inject(function ($filter) {
            var info = $filter('userInfo')(sampleUser(), true);
            expect(info).toEqual('prof Religa, Zbigniew');
        }));
    });

    describe('addressInfo-spec:', function () {
        it('should get location info', inject(function ($filter) {
            var address = {
                street: 'ul. Grabiszynska 8',
                city: 'Wroclaw',
                country: 'Polska'
            };
            expect($filter('addressInfo')(address)).toEqual('ul. Grabiszynska 8, Wroclaw, Polska');
        }));
    });

    describe('dateFormat-spec:', function () {
        it('should get default date', inject(function ($filter) {
            expect($filter('dateFormat')(sampleDate())).toEqual('05-01-2015');
        }));
        it('should get date and time', inject(function ($filter) {
            expect($filter('dateFormat')(sampleDate(), 'DD-MM-YYYY HH:mm')).toEqual('05-01-2015 08:30');
        }));
        it('should get name of the week day', inject(function ($filter) {
            // Week-day names are locale dependent, so only assert non-empty.
            expect($filter('dateFormat')(sampleDate(), 'dddd')).not.toBe('');
        }));
    });

    describe('normalizeText-spec:', function () {
        it('should normalize string', inject(function ($filter) {
            expect($filter('normalizeText')('master of the puppets')).toBe('Master of the puppets');
        }));
    });

    describe('toLocalHours-spec:', function () {
        it('should normalize working hours', inject(function ($filter) {
            expect($filter('toLocalHours')('09:00')).toBe('09:00');
        }));
    });
});
package com.jinke.calligraphy.database;
import hallelujah.cal.CalligraphyVectorUtil;
import hallelujah.cal.SingleWord;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.net.Uri;
import android.util.Log;
import com.jinke.calligraphy.app.branch.EditableCalligraphyItem;
import com.jinke.calligraphy.app.branch.EditableCalligraphyItem.Types;
import com.jinke.calligraphy.app.branch.ImageLimit;
import com.jinke.calligraphy.app.branch.Start;
import com.jinke.calligraphy.app.branch.VEditableCalligraphyItem;
import com.jinke.mindmap.MindMapItem;
import com.jinke.single.BitmapCount;
import com.jinke.single.LogUtil;
public class CalligraphyDB {
public static int INIT_WORD_COUNT = 270;
public static final int OP_ADD_WORD = 1;
public static final int OP_ADD_MIND_WORD = 2;
public static final int OP_DELETE_WORD = -1;
private static final int DATEBASE_VERSION = 2;
private static final String DATABASE_NAME = "calligraphy.db";
private static final String WORD_TABLE = "word";
private static final String PAGE_TABLE = "page";
private static final String CREATE_TABLE_WORD = "create table if not exists word " +
"(_id integer primary key autoincrement, "
+ "template_id integer, "
+ "pagenum integer, "
+ "available_id integer, "
+ "itemid integer, "
+ "op_type integer, "
+ "op_pos integer, "
+ "charType text, "
+ "charBitmap blob, "
+ "matrix text, "
+ "uri text, "
+ "uploaded integer, "
+ "created text);";
private static final String CREATE_TABLE_PAGE = "create table if not exists page " +
"(id integer primary key autoincrement, "
+ "pageid text,"
+ "version integer,"
+ "dirty boolean,"
+ "direct integer, "
+ "pagenum integer, "
+ "path text,"
+ "created text);";
//add option type 这种方式处理数据库更新时的操作:添加字段;
private static final String ADD_FLIPDST = "alter table word add flipdstx integer;";
//添加思维导图,增加导图id字段和父节点id字段
private static final String ADD_MINDID = "alter table word add mindid integer;";
private static final String ADD_MINDPARENTID = "alter table word add mindparentid integer;";
private SQLiteDatabase db;
private static CalligraphyDB calligraphyDB;
/**
 * Opens (or creates) the calligraphy database and ensures the word and
 * page tables exist.
 *
 * @param ctx context used to open the app-private database file
 */
private CalligraphyDB(Context ctx){
    db = ctx.openOrCreateDatabase(DATABASE_NAME, 0, null);
    db.execSQL(CREATE_TABLE_WORD);
    db.execSQL(CREATE_TABLE_PAGE);
    // Schema migration handled below, on database version change.
    Log.e("databases", "version:" + db.getVersion());
    try {
        // NOTE(review): both branches execute the same ALTER statements and
        // the version is never bumped via setVersion(), so these ALTERs are
        // attempted on every launch and rely on the catch below to swallow
        // "duplicate column" errors — confirm this is intentional.
        if(db.getVersion() < 1){
            db.execSQL(ADD_FLIPDST);
            db.execSQL(ADD_MINDID);
            db.execSQL(ADD_MINDPARENTID);
        }else if (db.getVersion() == 1){
            db.execSQL(ADD_FLIPDST);
            db.execSQL(ADD_MINDID);
            db.execSQL(ADD_MINDPARENTID);
        }
    } catch (Exception e) {
        // TODO: handle exception
    }
}
/**
 * Closes the database and drops the cached singleton so the next
 * {@code getInstance} call reopens it.
 */
public void resetDB(){
    db.close();
    calligraphyDB = null;
}
/**
 * Returns the process-wide CalligraphyDB, creating it on first use.
 * NOTE(review): lazy initialization here is not synchronized — confirm all
 * callers run on the same (UI) thread.
 *
 * @param ctx context used to open the database on first call
 * @return the shared instance
 */
public static CalligraphyDB getInstance(Context ctx){
    if(calligraphyDB == null)
        calligraphyDB = new CalligraphyDB(ctx);
    return calligraphyDB;
}
/**
 * Sets INIT_WORD_COUNT from the current zoom scale of the page: the more
 * the page is zoomed in, the fewer words are loaded initially. The scale
 * is read from the active view's matrix, or from the startup matrix when
 * no view is attached yet.
 */
private void initInitWordCount(){
    Matrix matrix;
    if(Start.c == null)
        matrix = Start.m;
    else
        matrix = Start.c.view.getMMMatrix();
    float[] values = new float[9];
    matrix.getValues(values);
    // values[0] is MSCALE_X, the horizontal zoom factor.
    float scale = values[0];
    // Ordered bands now cover every scale value exactly once.
    if(scale <= 1)
        INIT_WORD_COUNT = 275;
    else if(scale < 1.3)
        INIT_WORD_COUNT = 200;
    else if(scale <= 1.6)
        INIT_WORD_COUNT = 120;
    else if(scale < 2.0)
        INIT_WORD_COUNT = 60;
    // BUG FIX: the original condition was "scale > 2.5 && scale >= 2.0",
    // which can never match the 2.0-2.5 band (leaving the count unchanged
    // there) and made the final 25-word branch unreachable, so anything
    // above 2.5 incorrectly received 40 words.
    else if(scale <= 2.5)
        INIT_WORD_COUNT = 40;
    else
        INIT_WORD_COUNT = 25;
}
/**
 * Persists a single edit operation (add word / add mind-map word / delete)
 * for an item on a page into the word table. For add operations, the
 * item's vector stroke data is additionally written to its own file.
 *
 * @param OP_type      one of OP_ADD_WORD, OP_ADD_MIND_WORD, OP_DELETE_WORD
 * @param OP_Pos       position associated with the operation
 * @param template_id  template the page is based on
 * @param pagenum      page number the item belongs to
 * @param available_id sub-area id within the page
 * @param eItem        the editable item being recorded
 * @return true when the row was inserted successfully
 */
public boolean saveOperating(int OP_type,int OP_Pos,int template_id,int pagenum,int available_id,EditableCalligraphyItem eItem){
    int itemid = eItem.getItemID();
    // Original note: once one entry fails to save, everything after it gets stored.
    SingleWord word = eItem.getWord();
    Log.v("vectorword", "saveOperating word:" + (word == null));
    switch (OP_type) {
    case OP_ADD_MIND_WORD:
    case OP_ADD_WORD:
        // Vector stroke data lives in its own file, keyed by page/area/item.
        if(word != null){
            CalligraphyVectorUtil.saveWordToFile(word, pagenum, available_id, itemid);
            word.recycle();
        }else
            Log.e("vector", "null");
        break;
    case OP_DELETE_WORD:
        break;
    default:
        break;
    }
    byte[] bitmapChars = null;
    Bitmap charBitmap = eItem.getCharBitmap();
    float flipdstx = 0;
    String uri = "";
    int charType = eItem.getCharType();
    if(charType == 7){
        // Original note: the uri is not used until the video/audio transfer completes.
        uri = eItem.getImageUri().toString();
        flipdstx = eItem.getFlipDstX();
    }else{
        uri = "";
    }
    String matrix = eItem.getMatrix().toString();
    ContentValues initalValues = new ContentValues();
    initalValues.put("itemid", itemid);
    initalValues.put("op_type", OP_type);
    initalValues.put("op_pos", OP_Pos);
    initalValues.put("template_id", template_id);
    initalValues.put("pagenum", pagenum);
    initalValues.put("available_id", available_id);
    initalValues.put("charType", charType);
    initalValues.put("matrix", matrix);
    initalValues.put("uri", uri);
    initalValues.put("flipdstx", flipdstx);
    if(OP_type == OP_ADD_MIND_WORD){
        // Mind-map items also record their node id and parent node id.
        initalValues.put("mindid", eItem.getMindMapItem().getMindID());
        LogUtil.getInstance().e("mindmap", "save mindID:" + eItem.getMindMapItem().getMindID());
        initalValues.put("mindparentid", eItem.getMindMapItem().getParentID());
    }
    try {
        // Stroke-based chars keep their pixels in the vector file (saved
        // above), so no bitmap blob is stored for them.
        if(EditableCalligraphyItem.getType(eItem.getCharType()) == Types.CharsWithStroke
//				|| eItem.type == Types.AUDIO
//				|| eItem.type == Types.VEDIO
                ){
        }else{
            bitmapChars = BitmapHelper.bitmapDecode(charBitmap);
            initalValues.put("charBitmap",
                    bitmapChars);
        }
    } catch (OutOfMemoryError ex) {
        // TODO Auto-generated catch block
        ex.printStackTrace();
    } catch (IOException e) {
    }
    bitmapChars = null;
    long result = db.insert(WORD_TABLE, null, initalValues);
    return !(result == -1);
}
// Replays every stored operation row for the current page + available_id and
// rebuilds the ordered item list; mind-map words are additionally linked into
// a MindMapItem tree whose roots are appended to mindList.
// zoomable is forwarded to vector (stroke) items. Bitmaps beyond
// INIT_WORD_COUNT rows are recycled immediately to bound memory use.
public LinkedList<EditableCalligraphyItem> getCharList(int available_id,boolean zoomable,List mindList){
initInitWordCount();
LinkedList<EditableCalligraphyItem> charList = new LinkedList<EditableCalligraphyItem>();
EditableCalligraphyItem item = null;
Cursor cursor = db.query(
WORD_TABLE
,new String[] { "template_id","itemid","op_type","op_pos","charType"
,"matrix","uri","charBitmap","flipdstx","mindid","mindparentid"}
, "pagenum = ? and available_id = ?"
, new String[]{Start.getPageNum()+"",available_id + ""}, null, null, null);
int template_id = 0;
int itemid = 0;
int op_type = 0;
int op_pos = 0;
int charType = 0;
float flipdstx = 0;
String matrix = "";
String uri = "";
byte[] attr = null;
int pagenum = Start.getPageNum();
Log.v("startinit", "------available_id:" + available_id + " size:" + cursor.getCount()
+ " pagenum:" + pagenum);
int count = 0;
// NOTE(review): imageCount is never incremented anywhere in this method, so
// the "imageCount < 2" guard below is always true — confirm intent.
int imageCount = 0;
MindMapItem mapItem = null;
// NOTE(review): preMindItemid is never read or written after initialization.
int preMindItemid = -1;
int mindid = 0;
int mindparentid = 0;
HashMap<Integer, MindMapItem> mindMapHash = new HashMap<Integer, MindMapItem>();
for(cursor.moveToFirst();!cursor.isAfterLast();cursor.moveToNext()){
template_id = cursor.getInt(cursor.getColumnIndex("template_id"));
itemid = cursor.getInt(cursor.getColumnIndex("itemid"));
op_type = cursor.getInt(cursor.getColumnIndex("op_type"));
op_pos = cursor.getInt(cursor.getColumnIndex("op_pos"));
charType = cursor.getInt(cursor.getColumnIndex("charType"));
matrix = cursor.getString(cursor.getColumnIndex("matrix"));
uri = cursor.getString(cursor.getColumnIndex("uri"));
flipdstx = cursor.getFloat(cursor.getColumnIndex("flipdstx"));
attr = cursor.getBlob(cursor.getColumnIndex("charBitmap"));
Log.v("getCharList", "template_id:" + template_id
+"itemid:" + itemid
+"op_type:" + op_type
+"op_pos:" + op_pos
+"charType:" + charType
+"matrix:" + matrix
+"uri:" + uri
+"attr:" + (attr == null));
Types type = EditableCalligraphyItem.getType(charType);
// Instantiate a different EditableCalligraphyItem flavor per char type.
// NOTE(review): if charType maps to none of the branches below, item keeps
// its value from the previous iteration (null on the first row), and
// item.setOpPos() further down would NPE — confirm all types are covered.
if(type == Types.CharsWithStroke){
// Stroke characters are rebuilt from their on-disk vector file.
item = CalligraphyVectorUtil.instance().getEditableCalligraphyItem(
pagenum, available_id, itemid, matrix, zoomable);
}else if(type == Types.CharsWithoutStroke){
attr = cursor.getBlob(cursor.getColumnIndex("charBitmap"));
Bitmap picBitmap = null;
if(attr != null){
try{
picBitmap = BitmapFactory.decodeByteArray(attr, 0,
attr.length);
} catch (OutOfMemoryError e) {
// Fall back to the shared placeholder bitmap on OOM.
picBitmap = Start.OOM_BITMAP;
}
}
item = new EditableCalligraphyItem(picBitmap);
item.setMatrix(MatrixHelper.getMatrix(matrix));
item.setItemId(itemid);
}else if(type == Types.EndofLine || type == Types.EnSpace || type == Types.Space){
// Whitespace/line-break placeholders carry no bitmap at all.
item = new VEditableCalligraphyItem(type);
item.setItemId(itemid);
}else if(type == Types.ImageItem){
Bitmap picBitmap = null;
if(imageCount < 2){
if(attr != null){
try{
picBitmap = BitmapFactory.decodeByteArray(attr, 0,
attr.length);
} catch (OutOfMemoryError e) {
picBitmap = Start.OOM_BITMAP;
Log.e("time", "calligraphyDB image oom a" + available_id + " i" + itemid );
}
}
}else
picBitmap = null;
// NOTE(review): this inner "type == Types.ImageItem" check is always true
// inside this branch; kept as written.
if(type == Types.ImageItem){
// Keep the global image counter in sync with adds/deletes being replayed.
if(op_type == OP_ADD_WORD)
ImageLimit.instance().addImageCount();
else
ImageLimit.instance().deleteImageCount();
}
// item.setRecycleStatus("from databases failed");
item = new EditableCalligraphyItem(picBitmap);
item.setType(type);
item.setFlipDstX(flipdstx);
item.setItemId(itemid);
item.setImageUri(Uri.parse(uri));
item.setMatrix(CDBPersistent.getMatrix(matrix));
}else if(type == Types.VEDIO || type == Types.AUDIO){
Bitmap picBitmap = null;
attr = cursor.getBlob(cursor.getColumnIndex("charBitmap"));
if(attr != null){
try{
picBitmap = BitmapFactory.decodeByteArray(attr, 0,
attr.length);
} catch (OutOfMemoryError e) {
// picBitmap = Start.OOM_BITMAP;
picBitmap = null;
// NOTE(review): at this point "item" still refers to the PREVIOUS
// row's item (the new one is created two lines below), so this marks
// the wrong object — and NPEs if the first row hits this path.
item.setRecycleStatus("from databases failed");
}
}
item = new EditableCalligraphyItem(picBitmap);
item.setType(type);
item.setItemId(itemid);
item.setImageUri(Uri.parse(uri));
item.setMatrix(CDBPersistent.getMatrix(matrix));
if(type == Types.AUDIO)
item.setStopBitmap();
}
item.setOpPos(op_pos);
attr = null;
// Past the initial window, drop decoded bitmaps again to cap memory;
// CharsWithoutStroke items keep theirs.
if(count > INIT_WORD_COUNT){
Log.v("recyclebitmap", "init count:" + count + "bitmap:" + (item.getCharBitmap() == null));
if(item.getCharBitmap() != null && type != Types.CharsWithoutStroke){
item.recycleBitmap();
BitmapCount.getInstance().recycleBitmap("EditableCalligraphy recycleCharListBitmap");
Log.v("recyclebitmap", "init count:" + count + " recycle!!!!");
}
}
if(op_type == OP_ADD_MIND_WORD){
mindid = cursor.getInt(cursor.getColumnIndex("mindid"));
LogUtil.getInstance().e("addNewWord", "item mindid:" + mindid);
mindparentid = cursor.getInt(cursor.getColumnIndex("mindparentid"));
if(!mindMapHash.containsKey(mindid)){
LogUtil.getInstance().e("addNewWord", "item mindid:" + mindid + " not exit");
// First time we see this node id: create it, checking whether it is a
// root (-1 parent) or a child of an already-known node.
if(mindparentid == -1){
mapItem = new MindMapItem();
mindList.add(mapItem);
LogUtil.getInstance().e("mindmap", "init root item mindid:" + mapItem.getMindID());
}else{
if(mindMapHash.containsKey(mindparentid)){
// NOTE(review): this log reads mapItem BEFORE it is reassigned on the
// next line, so it prints the previous node's id (or NPEs if null).
LogUtil.getInstance().e("mindmap", "init item mindid:" + mapItem.getMindID() + " contains mindparentid:" + mindparentid);
mapItem = mindMapHash.get(mindparentid).createNewChild();
}else{
// NOTE(review): when the parent has not been seen yet, mapItem keeps
// its stale value and is still put into the hash below — the word ends
// up attached to the wrong node. Confirm rows always arrive in
// parent-before-child order.
LogUtil.getInstance().e("mindmap", "init error !!!!!!!!!!!!!mindid:" + mindid + " parentid:" + mindparentid);
}
}
mindMapHash.put(mindid, mapItem);
LogUtil.getInstance().e("addNewWord", "hash map put" + mindid);
}else{
LogUtil.getInstance().e("addNewWord", "item mindid:" + mindid + " exit");
mapItem = mindMapHash.get(mindid);
}
mapItem.setFlipDstX((int)flipdstx);
mapItem.addNewWord(item);
LogUtil.getInstance().e("addNewWord", "mapItem " + mapItem.getMindID() + " insert");
}else{
// mindMapHash.clear();
}
// Apply the operation to charList by type and position.
switch (op_type) {
case OP_ADD_MIND_WORD:
case 1:
// Insert at the recorded position; if the position is out of range
// (e.g. inconsistent history), append at the end instead.
// Log.v("getCharList", "file: a" +available_id + "i" + itemid
// + " add to " + op_pos + " size:" + charList.size());
try {
charList.add(op_pos,item);
} catch (IndexOutOfBoundsException e) {
Log.v("wordCount", "add Index OutOfBounds Exception file: a" +available_id + "i" + itemid
+ " add to " + op_pos + " size:" + charList.size());
charList.add(item);
}
break;
case -1:
// Delete: remove at the recorded position, falling back to the last
// element when the position is out of range.
// Log.v("getCharList", "file: a" +available_id + "i" + itemid
// + " delete " + op_pos + " size:" + charList.size());
EditableCalligraphyItem removeItem = null;
try {
removeItem = charList.remove(op_pos);
} catch (IndexOutOfBoundsException e) {
removeItem = charList.removeLast();
Log.v("wordCount", "delete Index OutOfBounds Exception file: a" +available_id + "i" + itemid
+ " add to " + op_pos + " size:" + charList.size());
}
LogUtil.getInstance().e("delete", "delete special word");
if(removeItem.isSpecial()){
LogUtil.getInstance().e("delete", "delete special word");
removeItem.getMindMapItem().deleteWord(removeItem);
}
if(removeItem.getCharBitmap() != null){
removeItem.recycleBitmap();
removeItem = null;
}
// NOTE(review): no break here, so case -1 falls through into default;
// harmless today because default only breaks, but fragile.
default:
break;
}
count ++;
}
cursor.close();
return charList;
}
// Points an existing camera-picture row at its final URI and refreshes both
// the stored thumbnail bitmap and the "created" timestamp.
// Returns true when at least one row was updated.
public boolean updateCameraPicUri(int page, int aid, int itemid, Uri newUri) {
    Log.e("update", "page:" + page + " aid:" + aid + " itemid:" + itemid + "newUri:" + newUri);
    ContentValues values = new ContentValues();
    values.put("uri", newUri.toString());
    values.put("created", BitmapHelper.getCurrent());
    try {
        Bitmap picture = BitmapHelper.getBitmapFromUri(newUri, page);
        values.put("charBitmap", BitmapHelper.bitmapDecode(picture));
    } catch (IOException e) {
        // Best effort: commit the uri/timestamp even if re-encoding fails.
        e.printStackTrace();
    }
    String where = "pagenum = ? and available_id = ? and itemid = ?";
    String[] args = { String.valueOf(page), String.valueOf(aid), String.valueOf(itemid) };
    boolean updated = this.db.update(WORD_TABLE, values, where, args) > 0;
    Log.e("update", "result:" + updated);
    return updated;
}
// Stores the final URI, a fresh timestamp, and the supplied preview bitmap
// for an already-saved audio item row. Returns true when a row was updated.
public boolean updateAudioUri(int page, int aid, int itemid, Uri newUri, Bitmap bitmap) {
    ContentValues values = new ContentValues();
    values.put("uri", newUri.toString());
    values.put("created", BitmapHelper.getCurrent());
    try {
        values.put("charBitmap", BitmapHelper.bitmapDecode(bitmap));
    } catch (IOException e) {
        // Best effort: the uri/timestamp update is still worth committing.
        e.printStackTrace();
    }
    String[] args = { String.valueOf(page), String.valueOf(aid), String.valueOf(itemid) };
    boolean updated = this.db.update(WORD_TABLE, values,
            "pagenum = ? and available_id = ? and itemid = ?", args) > 0;
    Log.e("update", "result:" + updated);
    return updated;
}
// Promotes an existing word row to a mind-map word by writing its node id,
// parent node id, and the OP_ADD_MIND_WORD operation type. The charType is
// part of the WHERE clause to avoid touching a different item flavor.
public boolean updateMindmapItem(int page, int aid, int itemid, EditableCalligraphyItem item) {
    ContentValues values = new ContentValues();
    values.put("op_type", OP_ADD_MIND_WORD);
    values.put("mindid", item.getMindMapItem().getMindID());
    values.put("mindparentid", item.getMindMapItem().getParentID());
    String[] args = {
        String.valueOf(page),
        String.valueOf(aid),
        String.valueOf(itemid),
        String.valueOf(item.getCharType())
    };
    boolean updated = this.db.update(WORD_TABLE, values,
            "pagenum = ? and available_id = ? and itemid = ? and charType = ?", args) > 0;
    Log.e("update", "pagenum = " + page
            + "available_id" + aid + " itemid" + itemid + "result:" + updated);
    return updated;
}
// Saves the picture item's current transform matrix back to its row.
// Returns true when at least one row was updated.
public boolean updatePictrueItem(int page, int aid, int itemid, EditableCalligraphyItem item) {
    ContentValues values = new ContentValues();
    values.put("matrix", item.getMatrix().toString());
    String[] args = { String.valueOf(page), String.valueOf(aid), String.valueOf(itemid) };
    boolean updated = this.db.update(WORD_TABLE, values,
            "pagenum = ? and available_id = ? and itemid = ?", args) > 0;
    Log.e("update", "pagenum = " + page
            + "available_id" + aid + " itemid" + itemid + "result:" + updated);
    return updated;
}
// Scans the "page" table and returns the highest page number found (0 when
// the table is empty). Original author's note: reading from the page table
// can be stale after a restore; that known limitation is kept as-is.
public int getTotalPageNum() {
    Cursor cursor = db.query("page", new String[] { "pagenum" },
            null, null, null, null, "pagenum asc");
    int maxPage = 0;
    while (cursor.moveToNext()) {
        int page = cursor.getInt(0);
        if (page > maxPage) {
            maxPage = page;
        }
        Log.e("databases", "pagenum:" + maxPage);
    }
    cursor.close();
    Log.e("databases", "return total page num:" + maxPage);
    return maxPage;
}
// Returns the largest itemid stored for the given page/available_id pair
// (SQLite max() yields 0 via getInt when no rows match).
public int getCurrentWordCount(int pagenum, int available_id) {
    String[] args = { String.valueOf(pagenum), String.valueOf(available_id) };
    Cursor cursor = db.query(WORD_TABLE, new String[] { "max(itemid)" },
            "pagenum = ? and available_id = ?", args, null, null, null);
    cursor.moveToFirst();
    int maxItemId = cursor.getInt(cursor.getColumnIndex("max(itemid)"));
    cursor.close();
    return maxItemId;
}
// Writes a new horizontal flip offset (and a refreshed timestamp) for a
// single item row. Returns true when at least one row was updated.
public boolean updatePicdstx(int page, int aid, int itemid, float newDst) {
    ContentValues values = new ContentValues();
    values.put("flipdstx", newDst);
    values.put("created", BitmapHelper.getCurrent());
    String[] args = { String.valueOf(page), String.valueOf(aid), String.valueOf(itemid) };
    boolean updated = this.db.update(WORD_TABLE, values,
            "pagenum = ? and available_id = ? and itemid = ?", args) > 0;
    Log.e("update", "result:" + updated);
    return updated;
}
// Writes a new horizontal flip offset (and a refreshed timestamp) for every
// row belonging to one mind-map node. Returns true when any row was updated.
public boolean updateMindDstx(int page, int aid, int mindid, float newDst) {
    ContentValues values = new ContentValues();
    values.put("flipdstx", newDst);
    values.put("created", BitmapHelper.getCurrent());
    String[] args = { String.valueOf(page), String.valueOf(aid), String.valueOf(mindid) };
    boolean updated = this.db.update(WORD_TABLE, values,
            "pagenum = ? and available_id = ? and mindid = ?", args) > 0;
    Log.e("update", "result:" + updated);
    return updated;
}
// Debug/repair helper: floods one fixed page slot with synthetic word rows
// (identity matrix, charType 6) so list-restore code paths can be exercised
// without real user data. The insert result was previously captured in an
// unused local; it is now deliberately ignored.
// NOTE(review): the row count / page / available_id constants are hard-coded
// test values — confirm this method is not reachable from production flows.
public void backupData() {
    final int rowCount = 315;     // number of synthetic rows to insert
    final int pageNum = 17;       // target page
    final int availableId = 3;    // target available_id slot
    final int charType = 6;       // item flavor used for every synthetic row
    String matrix = (new Matrix()).toString();
    for (int i = 0; i < rowCount; i++) {
        ContentValues values = new ContentValues();
        values.put("itemid", i);
        values.put("op_type", 1);
        values.put("op_pos", i);
        values.put("pagenum", pageNum);
        values.put("available_id", availableId);
        values.put("charType", charType);
        values.put("matrix", matrix);
        db.insert(WORD_TABLE, null, values);
    }
}
}
|
const path = require('path')
const actionStatus = require('action-status')
const execa = require('execa')
const mockedEnv = require('mocked-env')
const publish = require('../publish')
const readJSON = require('../read-json')
const {mockFiles} = require('./__utils')
// All external effects are mocked: no real processes, statuses, or file reads.
jest.mock('action-status')
jest.mock('execa')
jest.mock('../read-json')
// publish() derives the npm version and dist-tag from the branch name in
// GITHUB_REF; each test pins the expected execa invocations for one branch
// shape (feature branch → canary, release-X → next, master → latest).
describe('publish()', () => {
// Restores process.env overrides installed by mockEnv(); replaced per test.
let restoreEnv = () => {}
const execOpts = {stdio: 'inherit'}
beforeEach(() => {
// Every mocked child process "succeeds" with empty output.
execa.mockImplementation(() => Promise.resolve({stdout: '', stderr: ''}))
actionStatus.mockImplementation(() => Promise.resolve())
})
afterEach(() => {
restoreEnv()
execa.mockClear()
readJSON.mockClear()
})
// Feature branches get a canary release versioned 0.0.0-<sha>.
it('does the right things on a feature branch', () => {
mockEnv({
GITHUB_REF: 'refs/heads/feature-x',
GITHUB_SHA: 'deadfad'
})
mockFiles({
'package.json': {name: 'pkg', version: '1.0.0'}
})
const version = '0.0.0-deadfad'
return publish().then(() => {
expect(execa).toHaveBeenCalledTimes(2)
expect(execa).toHaveBeenNthCalledWith(
1,
'npm',
['version', version],
Object.assign({}, execOpts, {cwd: path.join(process.cwd(), '.')})
)
expect(execa).toHaveBeenNthCalledWith(
2,
'npm',
['publish', '.', '--tag', 'canary', '--access', 'public'],
execOpts
)
})
})
// release-X.Y.Z branches publish a release candidate X.Y.Z-rc.<sha> to "next".
it('does the right things on a release branch', () => {
mockEnv({
GITHUB_REF: 'refs/heads/release-2.0.0',
GITHUB_SHA: 'deadfad'
})
mockFiles({
'package.json': {name: 'pkg', version: '1.0.0'}
})
const version = '2.0.0-rc.deadfad'
return publish().then(() => {
expect(execa).toHaveBeenCalledTimes(2)
expect(execa).toHaveBeenNthCalledWith(
1,
'npm',
['version', version],
Object.assign({}, execOpts, {cwd: path.join(process.cwd(), '.')})
)
expect(execa).toHaveBeenNthCalledWith(2, 'npm', ['publish', '.', '--tag', 'next', '--access', 'public'], execOpts)
})
})
// master publishes package.json's version as "latest", first checking via
// `npm view` that the version is not already on the registry.
it('does the right things on master', () => {
const version = '1.1.0'
mockEnv({
GITHUB_REF: 'refs/heads/master',
GITHUB_SHA: 'deadfad'
})
mockFiles({
'package.json': {name: 'pkg', version}
})
return publish().then(() => {
expect(execa).toHaveBeenCalledTimes(2)
expect(execa).toHaveBeenNthCalledWith(1, 'npm', ['view', `pkg@${version}`, 'version'], {stderr: 'inherit'})
expect(execa).toHaveBeenNthCalledWith(
2,
'npm',
['publish', '.', '--tag', 'latest', '--access', 'public'],
execOpts
)
// expect(execa).toHaveBeenNthCalledWith(3, 'git', ['tag', `v${version}`], execOpts)
// expect(execa).toHaveBeenNthCalledWith(4, 'git', ['push', '--tags', 'origin'], execOpts)
})
})
// dryRun must suppress every command execution entirely.
it('respects the "dryRun" option', () => {
mockEnv({
GITHUB_REF: 'refs/heads/run-dry',
GITHUB_SHA: 'bedface'
})
mockFiles({
'package.json': {name: 'pkg', version: '1.0.0'}
})
return publish({dryRun: true, dir: '.'}).then(() => {
expect(execa).toHaveBeenCalledTimes(0)
})
})
// The "dir" option relocates both the package.json lookup and the publish path.
it('respects "dir" option on master', () => {
const version = '1.1.0'
mockEnv({
GITHUB_REF: 'refs/heads/master',
GITHUB_SHA: 'deadfad'
})
mockFiles({
'foo/bar/package.json': {name: 'pkg', version}
})
return publish({dir: 'foo/bar'}).then(() => {
expect(execa).toHaveBeenCalledTimes(2)
expect(execa).toHaveBeenNthCalledWith(1, 'npm', ['view', `pkg@${version}`, 'version'], {stderr: 'inherit'})
expect(execa).toHaveBeenNthCalledWith(
2,
'npm',
['publish', 'foo/bar', '--tag', 'latest', '--access', 'public'],
execOpts
)
})
})
it('respects "dir" option on a release branch', () => {
mockEnv({
GITHUB_REF: 'refs/heads/release-2.0.0',
GITHUB_SHA: 'deadfad'
})
mockFiles({
'foo/bar/package.json': {name: 'pkg', version: '1.0.0'}
})
const version = '2.0.0-rc.deadfad'
return publish({dir: 'foo/bar'}).then(() => {
expect(execa).toHaveBeenCalledTimes(2)
expect(execa).toHaveBeenNthCalledWith(
1,
'npm',
['version', version],
Object.assign({}, execOpts, {cwd: path.join(process.cwd(), 'foo/bar')})
)
expect(execa).toHaveBeenNthCalledWith(
2,
'npm',
['publish', 'foo/bar', '--tag', 'next', '--access', 'public'],
execOpts
)
})
})
// Installs env overrides and remembers the restore hook for afterEach.
function mockEnv(env) {
restoreEnv = mockedEnv(env)
}
})
|
import React from 'react';
import { GeneralStepper } from 'v2/components';
import { useStateReducer, isWeb3Wallet } from 'v2/utils';
import { ITxReceipt, ISignedTx, IFormikFields, ITxConfig } from 'v2/types';
import { translateRaw } from 'v2/translations';
import { ROUTE_PATHS } from 'v2/config';
import { ConfirmTransaction, TransactionReceipt } from 'v2/components/TransactionFlow';
import { IStepperPath } from 'v2/components/GeneralStepper/types';
import { SendAssetsForm, SignTransaction } from './components';
import { txConfigInitialState, TxConfigFactory } from './stateFactory';
function SendAssets() {
  // Multi-step "send assets" flow. The concrete step sequence depends on the
  // sender's wallet: web3 wallets sign and broadcast in one combined panel.
  const {
    handleFormSubmit,
    handleConfirmAndSign,
    handleConfirmAndSend,
    handleSignedTx,
    handleSignedWeb3Tx,
    handleResubmitTx,
    txFactoryState
  } = useStateReducer(TxConfigFactory, { txConfig: txConfigInitialState, txReceipt: undefined });

  const { txConfig, txReceipt, signedTx } = txFactoryState;

  // Because MetaMask deprecated the eth_sign method, web3 wallets use a
  // different step order where sign and send share a single panel.
  const web3Steps: IStepperPath[] = [
    {
      label: 'Send Assets',
      component: SendAssetsForm,
      props: { txConfig },
      actions: (payload: IFormikFields, cb: any) => handleFormSubmit(payload, cb)
    },
    {
      label: translateRaw('CONFIRM_TX_MODAL_TITLE'),
      component: ConfirmTransaction,
      props: { txConfig },
      actions: (payload: ITxConfig, cb: any) => handleConfirmAndSign(payload, cb)
    },
    {
      label: '',
      component: SignTransaction,
      props: { txConfig },
      actions: (payload: ITxReceipt | ISignedTx, cb: any) => handleSignedWeb3Tx(payload, cb)
    },
    {
      label: translateRaw('TRANSACTION_BROADCASTED'),
      component: TransactionReceipt,
      props: { txConfig, txReceipt }
    }
  ];

  // Non-web3 wallets: sign first, confirm next, then broadcast.
  const defaultSteps: IStepperPath[] = [
    {
      label: 'Send Assets',
      component: SendAssetsForm,
      props: { txConfig },
      actions: (payload: IFormikFields, cb: any) => handleFormSubmit(payload, cb)
    },
    {
      label: '',
      component: SignTransaction,
      props: { txConfig },
      actions: (payload: ITxConfig | ISignedTx, cb: any) => handleSignedTx(payload, cb)
    },
    {
      label: translateRaw('CONFIRM_TX_MODAL_TITLE'),
      component: ConfirmTransaction,
      props: { txConfig, signedTx },
      actions: (payload: ITxConfig | ISignedTx, cb: any) => handleConfirmAndSend(payload, cb)
    },
    {
      label: translateRaw('TRANSACTION_BROADCASTED'),
      component: TransactionReceipt,
      props: {
        txConfig,
        txReceipt,
        // Lets the user re-broadcast a pending transaction from the receipt.
        pendingButton: {
          text: translateRaw('TRANSACTION_BROADCASTED_RESUBMIT'),
          action: (cb: any) => handleResubmitTx(cb)
        }
      }
    }
  ];

  // Choose the step sequence from the sender's wallet type.
  const selectSteps = () => {
    const { senderAccount } = txConfig;
    return senderAccount && isWeb3Wallet(senderAccount.wallet) ? web3Steps : defaultSteps;
  };

  return (
    <GeneralStepper
      steps={selectSteps()}
      defaultBackPath={ROUTE_PATHS.DASHBOARD.path}
      defaultBackPathLabel={translateRaw('DASHBOARD')}
      completeBtnText={translateRaw('SEND_ASSETS_SEND_ANOTHER')}
    />
  );
}

export default SendAssets;
|
<gh_stars>0
-- Backfill USD trade amounts for the window from 3 days ago up to 20 minutes
-- ago (the recent gap is presumably left for still-settling data — confirm),
-- then refresh the token-price materialized view; CONCURRENTLY avoids
-- blocking readers while it rebuilds.
SELECT dex.backfill_usd_amount(now() - interval '3 days', now() - interval '20 minutes') ;
REFRESH MATERIALIZED VIEW CONCURRENTLY dex.view_token_prices ;
|
def max_subarray_sum(arr):
    """Return the maximum sum over all non-empty contiguous subarrays (Kadane).

    Args:
        arr: A non-empty sequence of numbers.

    Returns:
        The largest sum attainable by any contiguous, non-empty slice of
        ``arr`` (for an all-negative input this is the single largest element).

    Raises:
        ValueError: If ``arr`` is empty (previously an unhelpful IndexError).
    """
    if not arr:
        raise ValueError("max_subarray_sum() requires a non-empty sequence")
    curr_max = global_max = arr[0]
    for value in arr[1:]:
        # Either extend the best subarray ending at the previous element,
        # or start a fresh subarray at the current one.
        curr_max = max(value, curr_max + value)
        global_max = max(global_max, curr_max)
    return global_max
using System;
using System.Collections.Generic;

namespace IronPython.Runtime {
    /// <summary>
    /// Minimal Python-style dictionary mapping string keys to int values,
    /// exposing dunder-named accessors for interop with Python callers.
    /// </summary>
    public class CustomPythonDictionary {
        // Backing store; never null after construction.
        private Dictionary<string, int> storage;

        public CustomPythonDictionary() {
            storage = new Dictionary<string, int>();
        }

        /// <summary>Sets <paramref name="key"/> to <paramref name="value"/>, inserting or overwriting.</summary>
        public void __setitem__(string key, int value) {
            // The indexer performs add-or-update in a single lookup,
            // replacing the former ContainsKey + Add/assign double lookup.
            storage[key] = value;
        }

        /// <summary>Returns the value stored for <paramref name="key"/>.</summary>
        /// <exception cref="KeyNotFoundException">Thrown when the key is absent.</exception>
        public int __getitem__(string key) {
            // Single hash lookup instead of ContainsKey followed by the indexer.
            if (storage.TryGetValue(key, out int value)) {
                return value;
            }
            throw new KeyNotFoundException("Key not found");
        }

        /// <summary>
        /// Returns the key with the largest value, or null when the dictionary
        /// is empty. Ties keep the first key encountered during enumeration.
        /// </summary>
        public string max_key() {
            int maxValue = int.MinValue;
            string maxKey = null;
            foreach (var pair in storage) {
                if (pair.Value > maxValue) {
                    maxValue = pair.Value;
                    maxKey = pair.Key;
                }
            }
            return maxKey;
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.