text stringlengths 1 1.05M |
|---|
# --- Grid Engine (SGE) directives ------------------------------------------
# Array job (tasks 1-6): one task per sample listed in names.txt.
#$ -S /bin/bash
#$ -N plasmidspades
#$ -V
#$ -o /workdir/users/acv46/mgmAssembly/log/pspades_$JOB_ID.out
#$ -e /workdir/users/acv46/mgmAssembly/log/pspades_$JOB_ID.err
#$ -wd /workdir/users/acv46/mgmAssembly
#$ -l h_vmem=100G
#$ -q long.q@cbsubrito
#$ -t 1-6
# last edited 13 Mar 2021, Albert Vill
# WRK: pipeline working directory; FQ: location of raw gzipped fastqs
WRK=/workdir/users/acv46/mgmAssembly
FQ=/home/britolab/data/PROseq/Stool
# remember the original PATH (several tools below prepend to PATH)
startpath=$PATH
#################################
## Switches and Global Options #############################################
################################# #
#
# increase thread restriction of cluster #
## default = 1 #
## this value is used by metaSPAdes, BWA, CONCOCT, metaBAT, #
### MaxBin, DASTool, checkM, and GTDB-Tk #
export OMP_NUM_THREADS=8 #
#
# to receive emails when the script fails or finishes, #
# add email address here #
# otherwise, leave as email=NULL #
## NOTE -- sends one email per task #
email=acv46@cornell.edu #
#
# location of jobtracker.sh script #
jobtracker=/workdir/users/acv46/mgmAssembly/scripts/jobtracker.sh #
#
############################################################################
# --- Per-task sample resolution --------------------------------------------
# names.txt lists one sample name per line; the SGE array task id selects
# the row this task is responsible for.
# Fixes: deprecated backticks replaced with $(...); all expansions quoted so
# names containing spaces or glob characters cannot break the pipeline.
DESIGN_FILE="$WRK/names.txt"
DESIGN=$(sed -n "${SGE_TASK_ID}p" "$DESIGN_FILE")
NAME=$(basename "${DESIGN}")
echo -e "${NAME}: STARTING PIPELINE \n--> run < watch tail pspades_$JOB_ID.out > to view progress"
# make a directory to store all sample-specific files
PSP="$WRK/${NAME}_CAB"
mkdir -p "$PSP"
# check if cleaned reads already exist
## if the pipeline failed post-cleaning, this saves time
## by starting with existing clean reads
## just run script again
####################
## Cleaning Reads ##
####################
echo "${NAME}: checking for existing clean reads at $FQ/${NAME}"
OUT="$PSP/fastq"
mkdir -p "$OUT"
# $CLEAN1/$CLEAN2: archival copies of the cleaned fastqs (HARD-CODED DIR)
# $BMTAG1/$BMTAG2: cleaned fastqs consumed by the rest of this pipeline
CLEAN1="$FQ/${NAME}/mgm/fastq/${NAME}.clean_1.fastq"
CLEAN2="$FQ/${NAME}/mgm/fastq/${NAME}.clean_2.fastq"
BMTAG1="$OUT/${NAME}.clean_1.fastq"
BMTAG2="$OUT/${NAME}.clean_2.fastq"
# This entire block runs only when cleaned fastqs are absent; otherwise the
# pipeline resumes at the assembly step using the existing clean reads.
if [ ! -f $BMTAG1 ] || [ ! -f $BMTAG2 ]; then
echo "${NAME}: no existing clean reads found"
echo "${NAME}: reading in raw fastqs"
# unzip raw fastqs
# note _R1/_R2 file structure
READ1=$FQ/${NAME}_R1.fastq
READ2=$FQ/${NAME}_R2.fastq
gunzip ${READ1}.gz
gunzip ${READ2}.gz
####################
## 1. dereplicate ##
####################
DEREP1=$OUT/${NAME}.derep_1.fastq
DEREP2=$OUT/${NAME}.derep_2.fastq
echo "${NAME}: checking for existing dereplicated reads"
if [ ! -f $DEREP1 ] || [ ! -f $DEREP2 ]; then
cd $OUT
echo "${NAME}: dereplication start"
PRINSEQ=/programs/prinseq-lite-0.20.4
# -derep 12345 enables all five prinseq duplicate categories
perl $PRINSEQ/prinseq-lite.pl \
-fastq $READ1 \
-fastq2 $READ2 \
-derep 12345 \
-out_format 3 \
-out_good $OUT/${NAME}.derep \
-out_bad $OUT/${NAME}.derep_bad
echo "${NAME}: dereplication complete"
else
echo "${NAME}: dereplicated reads found, checking for trimmed reads"
fi
# failure path: re-compress raw reads, notify via a jobtracker job, abort task
if [ ! -f $DEREP1 ] || [ ! -f $DEREP2 ]; then
echo "${NAME}: ERROR - dereplication failed, Aborting"
gzip ${READ1}
gzip ${READ2}
qsub -N derep_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
#############
## 2. trim ##
#############
TRIM1=$OUT/${NAME}.adapter_1.fastq
TRIM2=$OUT/${NAME}.adapter_2.fastq
if [ ! -f $TRIM1 ] || [ ! -f $TRIM2 ]; then
echo "${NAME}: trimming start"
TRIMMO=/programs/trimmomatic/trimmomatic-0.36.jar
ADAPTER=$WRK/index/adapters.fasta
# paired-end Trimmomatic: outputs paired + orphan (.solo) reads per mate
java -Xmx8g -jar $TRIMMO PE \
$DEREP1 \
$DEREP2 \
$OUT/${NAME}.adapter_1.fastq \
$OUT/${NAME}.adapter_1.solo.fastq \
$OUT/${NAME}.adapter_2.fastq \
$OUT/${NAME}.adapter_2.solo.fastq \
ILLUMINACLIP:${ADAPTER}:2:30:10:8:true \
SLIDINGWINDOW:4:15 \
LEADING:3 \
TRAILING:3 \
MINLEN:50
echo "${NAME}: trimming complete"
else
echo "${NAME}: trimmed reads found, checking for human-depleted reads"
fi
if [ ! -f $TRIM1 ] || [ ! -f $TRIM2 ]; then
echo "${NAME}: ERROR - trimming failed, Aborting"
gzip ${READ1}
gzip ${READ2}
qsub -N trim_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
###########################
## 3. remove human reads ##
###########################
if [ ! -f $BMTAG1 ] || [ ! -f $BMTAG2 ]; then
echo "${NAME}: bmtagger start"
BMTAGGER=/programs/bmtools/bmtagger
REFGENOME=/home/britolab/refdbs/HUMAN/Homo_sapiens_assembly19.fasta
CONFIG=$WRK/index/bmtagger.conf
# bmtagger shells out to blastn, so BLAST must be on PATH
export PATH=/programs/ncbi-blast-2.9.0+/bin:$PATH
${BMTAGGER}/bmtagger.sh \
-C $CONFIG \
-b ${REFGENOME}.bitmask \
-x ${REFGENOME}.srprism \
-T $OUT -q1 \
-1 $TRIM1 \
-2 $TRIM2 \
-o $OUT/${NAME}.clean \
-X
echo "${NAME}: bmtagger complete"
echo "${NAME}: read-cleaning done"
else
echo "${NAME}: human-depleted reads found"
echo "${NAME}: read-cleaning done"
fi
if [ ! -f $BMTAG1 ] || [ ! -f $BMTAG2 ]; then
echo "${NAME}: ERROR - bmtagger failed, Aborting"
gzip ${READ1}
gzip ${READ2}
qsub -N bmtagger_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
####################
## 4. count reads ##
####################
cd $OUT
# fastq records are 4 lines each, so read count = line count / 4
raw1=$(echo $(wc -l ${READ1} | awk '{print $1}') / 4 | bc)
raw2=$(echo $(wc -l ${READ2} | awk '{print $1}') / 4 | bc)
der1=$(echo $(wc -l ${DEREP1} | awk '{print $1}') / 4 | bc)
der2=$(echo $(wc -l ${DEREP2} | awk '{print $1}') / 4 | bc)
trm1=$(echo $(wc -l ${TRIM1} | awk '{print $1}') / 4 | bc)
trm2=$(echo $(wc -l ${TRIM2} | awk '{print $1}') / 4 | bc)
bmt1=$(echo $(wc -l ${BMTAG1} | awk '{print $1}') / 4 | bc)
bmt2=$(echo $(wc -l ${BMTAG2} | awk '{print $1}') / 4 | bc)
# append counts from all steps
echo -e "${NAME}\t${raw1}\t${raw2}\t${der1}\t${der2}\t${trm1}\t${trm2}\t${bmt1}\t${bmt2}" | \
sed '1i name\traw1\traw2\tderep1\tderep2\ttrim1\ttrim2\tbmtag1\tbmtag2' \
> $OUT/readcounts_${NAME}_${JOB_ID}.txt
echo -e "${NAME}: generated read counts file \n--> readcounts_${NAME}_${JOB_ID}.txt"
##################################
## 5. remove intermediate files ##
##################################
# re-compress the raw reads in the background while deleting intermediates
gzip ${READ1} &
gzip ${READ2} &
rm ${DEREP1}
rm ${DEREP2}
rm ${TRIM1}
rm ${TRIM2}
rm $OUT/${NAME}.derep_bad_1.fastq
rm $OUT/${NAME}.derep_bad_2.fastq
rm $OUT/${NAME}.adapter_1.solo.fastq
rm $OUT/${NAME}.adapter_2.solo.fastq
rm $OUT/${NAME}.derep_1_singletons.fastq
rm $OUT/${NAME}.derep_2_singletons.fastq
# HARD-CODED DIR
cp ${BMTAG1} $FQ/../clean
cp ${BMTAG2} $FQ/../clean
cp $OUT/readcounts_${NAME}_${JOB_ID}.txt $FQ/../clean
# NOTE(review): $CLEAN1/$CLEAN2 point under $FQ/${NAME}/mgm/fastq, but the
# clean reads were copied to $FQ/../clean above -- confirm these paths exist
# before gzip, otherwise these two commands only print an error.
gzip $CLEAN1
gzip $CLEAN2
echo "${NAME}: checking for an existing plasmidSPAdes assembly"
else
echo "${NAME}: clean reads found, checking for an existing plasmidSPAdes assembly"
fi
#########################
## metaSPAdes assembly ##
#########################
# Run SPAdes in --plasmid mode on the cleaned read pair; skip when a finished
# assembly (contigs.fasta) already exists from a previous run.
# Fixes: "rm -r" printed an error on the first run (directory absent) -- use
# -f to stay quiet; all path expansions quoted.
MSPA="$PSP/plasmidSPAdes"
if [ ! -f "$MSPA/contigs.fasta" ]; then
echo "${NAME}: no existing metaSPAdes assembly found"
# if metaSPAdes previously failed, create a clean directory
rm -rf "$MSPA"
mkdir -p "$MSPA"
cd "$MSPA"
echo "${NAME}: metaSPAdes assembly start"
SPADES=/programs/SPAdes-3.14.0-Linux/bin
export OMP_NUM_THREADS=8
"$SPADES"/spades.py --plasmid \
-1 "$BMTAG1" \
-2 "$BMTAG2" \
-o "$MSPA" \
-t "$OMP_NUM_THREADS"
# success criterion is the presence of contigs.fasta, not the exit status
if [ ! -f "$MSPA/contigs.fasta" ]; then
echo "${NAME}: ERROR - metaSPAdes assembly failed, Aborting"
qsub -N metaSPAdes_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
echo "${NAME}: metaSPAdes assembly complete, checking for existing BWA alignment"
else
echo "${NAME}: existing metaSPAdes assembly found, checking for existing BWA alignment"
fi
###################
## BWA alignment ##
###################
# Align the cleaned reads back to the assembly and compute per-contig depth.
BWA=$MSPA/alignment
BAM=$BWA/${NAME}.bam
if [ ! -f $BAM ] || [ ! -f $BWA/${NAME}_depth.txt ]; then
echo "${NAME}: no existing BWA alignment found"
# if BWA previously failed, create a clean directory
rm -r $BWA
mkdir -p $BWA
cd $BWA
# create and check bwa index
echo "${NAME}: creating BWA index of metaSPAdes assembly"
export PATH=/programs/bwa-0.7.17:$PATH
export PATH=/programs/samtools-1.11/bin:$PATH
bwa index -a bwtsw -p ${NAME} $MSPA/contigs.fasta
# all five index files must exist for a usable index
if [ ! -f $BWA/${NAME}.amb ] || \
[ ! -f $BWA/${NAME}.ann ] || \
[ ! -f $BWA/${NAME}.pac ] || \
[ ! -f $BWA/${NAME}.bwt ] || \
[ ! -f $BWA/${NAME}.sa ]; then
echo "${NAME}: ERROR - BWA indexing failed, Aborting"
qsub -N BWAindex_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
# create and check bam and bam index
echo "${NAME}: aligning reads to metaSPAdes assembly"
# NOTE(review): bwa mem writes SAM text; the ".unsort.bam" name is
# misleading, although samtools view reads it fine.
bwa mem -a -v 2 -t $OMP_NUM_THREADS $BWA/${NAME} $BMTAG1 $BMTAG2 \
> $BWA/${NAME}.unsort.bam
samtools view -u $BWA/${NAME}.unsort.bam | \
samtools sort -@ 4 -o $BAM
rm $BWA/${NAME}.unsort.bam
samtools index -@ 4 -b $BAM
if [ ! -f $BAM ] || [ ! -f ${BAM}.bai ]; then
echo "${NAME}: ERROR - BWA alignment failed, Aborting"
qsub -N BWAmem_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
echo "${NAME}: BWA alignment complete"
# create and check depth file
DEPTH=/programs/metabat/jgi_summarize_bam_contig_depths
echo "${NAME}: creating depth file for BWA alignment"
$DEPTH --outputDepth $BWA/${NAME}_depth.txt $BAM
if [ ! -f $BWA/${NAME}_depth.txt ]; then
echo "${NAME}: ERROR - depth calculation failed for BWA alignment, Aborting"
qsub -N depth_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
echo "${NAME}: depth calculation complete, cleaning up"
else
echo "${NAME}: existing BWA alignment found, cleaning up"
fi
##############
## PlasFlow ##
##############
# Classify assembled contigs (>= 1 kb) as plasmid/chromosomal with PlasFlow.
if [ ! -f $MSPA/plasflow_contigs.fasta ] || [ ! -f $MSPA/plasflow_out.tsv ]; then
echo "${NAME}: running PlasFlow"
export PATH=/programs/Anaconda2/bin:$PATH
export LD_LIBRARY_PATH=/programs/Anaconda2/lib:$LD_LIBRARY_PATH
source activate plasflow
# drop contigs shorter than 1000 bp before classification
/programs/PlasFlow/scripts/filter_sequences_by_length.pl \
-input $MSPA/contigs.fasta \
-output $MSPA/plasflow_contigs.fasta \
-thresh 1000
PlasFlow.py \
--input $MSPA/plasflow_contigs.fasta \
--output $MSPA/plasflow_out.tsv \
--threshold 0.7
source deactivate
# NOTE(review): only the .tsv is verified here; a missing
# plasflow_contigs.fasta would silently re-trigger this block on rerun.
if [ ! -f $MSPA/plasflow_out.tsv ]; then
echo "${NAME}: ERROR - PlasFlow failed"
qsub -N plasflow_FAIL_${NAME} -M ${email} ${jobtracker}
exit 1
fi
fi
##############
## Clean Up ##
##############
echo "${NAME} -- PIPELINE FINISHED"
# NOTE(review): the message below claims intermediate files are removed, but
# no rm commands follow in this script -- confirm cleanup happens elsewhere.
echo "${NAME} -- removing intermediate bins and assembly files to save space"
qsub -N plasmids_DONE_${NAME} -M ${email} ${jobtracker}
|
<reponame>dorawyy/321-MyBubble<gh_stars>1-10
/*! firebase-admin v9.2.0 */
/*!
* Copyright 2020 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Bucket } from '@google-cloud/storage';
import * as _admin from './index.d';
export namespace admin.storage {
/**
 * The `Storage` service of a firebase-admin app, providing access to
 * Google Cloud Storage buckets.
 */
export interface Storage {
/**
 * The app associated with this `Storage` service instance.
 */
app: _admin.app.App;
/**
 * @returns A [Bucket](https://cloud.google.com/nodejs/docs/reference/storage/latest/Bucket)
 * instance as defined in the `@google-cloud/storage` package.
 */
bucket(name?: string): Bucket;
}
}
|
import random
def shuffle_list(list1):
    """Shuffle ``list1`` in place and return it.

    Note: the input list itself is mutated (``random.shuffle`` works in
    place); the same object is returned for call-chaining convenience.
    The previous inline comment suggested a fixed output, which was
    misleading -- the order depends on the random state.

    Args:
        list1: the list to shuffle.

    Returns:
        The same list object with its elements in random order.
    """
    random.shuffle(list1)
    return list1
#!/bin/bash
# SLURM batch script: runs one fixed hyper-parameter configuration of the
# "cosper" activation-function experiment via meta.py.
#SBATCH -J Act_cosper_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1 # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00 # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# positional args are consumed by meta.py (activation name, seed, optimizer,
# and further numeric hyper-parameters) -- see that script for their meaning
python3 /home/se55gyhe/Act_func/progs/meta.py cosper 1 Adadelta 4 0.2545093194647412 76 0.9484982179819261 orth PE-infersent 0.01
|
#!/usr/bin/env bash
# Configure and build the project (Release) into ./buildbin.
# Fix: without error handling the build step ran even when the configure
# step failed; abort immediately on any failure instead.
set -euo pipefail
cmake -DCMAKE_BUILD_TYPE=Release -Bbuildbin -H.
cmake --build buildbin -- -j 4
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DLA-96-1
#
# Security announcement date: 2014-11-28 00:00:00 UTC
# Script generation date: 2017-01-01 21:08:50 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - openjdk-6:6b33-1.13.5-2~deb6u1
#
# Last versions recommended by security team:
# - openjdk-6:6b38-1.13.10-1~deb6u1
#
# CVE List:
# - CVE-2014-2490
# - CVE-2014-4209
# - CVE-2014-4216
# - CVE-2014-4218
# - CVE-2014-4219
# - CVE-2014-4244
# - CVE-2014-4252
# - CVE-2014-4262
# - CVE-2014-4263
# - CVE-2014-4266
# - CVE-2014-4268
# - CVE-2014-6457
# - CVE-2014-6502
# - CVE-2014-6504
# - CVE-2014-6506
# - CVE-2014-6511
# - CVE-2014-6512
# - CVE-2014-6517
# - CVE-2014-6519
# - CVE-2014-6531
# - CVE-2014-6558
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade openjdk-6 to the fixed version (requires sudo privileges).
sudo apt-get install --only-upgrade openjdk-6=6b38-1.13.10-1~deb6u1 -y
|
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.io.File;
import java.util.StringTokenizer;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.List;
import java.util.ArrayList;
import java.util.Collection;
import java.util.stream.Collectors;
import java.util.Optional;
import java.util.Map;
import java.util.HashMap;
import java.net.UnknownHostException;
/**
 * Parses a plain-text network description file into TFNetworkElement objects.
 *
 * The file is made of three sections, each introduced by a header line:
 *   #NODE        node_name,MAC,IP/prefix,gateway
 *   #ROUTER      router_name,num_ports,MAC0,IP0/prefix,MAC1,IP1/prefix,...
 *   #ROUTERTABLE router_name,net_dest/prefix,nexthop,port
 *
 * While parsing, a virtual switch is created lazily per network so that
 * nodes and router ports on the same network share the same switch.
 *
 * Fixes over the previous revision: the file reader is now closed via
 * try-with-resources (it leaked when parseLine threw), and two unused
 * locals in parseFile were removed.
 */
public class Parser {

    /** All parsed elements (nodes and routers), in file order. */
    private List<TFNetworkElement> netElements;
    /** Switches keyed by network address; created lazily while parsing. */
    private Map<String, TFSwitch> switches;

    public Parser() {
        netElements = new ArrayList<TFNetworkElement>();
        switches = new HashMap<String, TFSwitch>();
    }

    /** Parses {@code fileName} resolved against the default "examples" directory. */
    public void parseFile(String fileName) throws FileNotFoundException, IOException, Exception {
        parseFile(fileName, false);
    }

    /**
     * Parses the given file and rebuilds the network element list.
     *
     * @param fileName file to parse
     * @param fullPath when false, the name is resolved under "examples"
     * @throws Exception when a line cannot be parsed
     */
    public void parseFile(String fileName, boolean fullPath) throws FileNotFoundException, IOException, Exception {
        if (!fullPath)
            fileName = buildDefaultPath(fileName);
        this.netElements = new ArrayList<TFNetworkElement>();
        // try-with-resources guarantees the reader is closed even when
        // parseLine throws (the original leaked the reader on exceptions)
        try (BufferedReader br = new BufferedReader(new FileReader(new File(fileName)))) {
            EntryType type = EntryType.Node;
            String line = br.readLine();
            while (line != null) {
                // section headers and data are matched case-insensitively
                line = line.toLowerCase();
                if (line.equals("#node")) {
                    type = EntryType.Node;
                } else if (line.equals("#router")) {
                    type = EntryType.Router;
                } else if (line.equals("#routertable")) {
                    type = EntryType.RouterTable;
                } else {
                    TFNetworkElement element = parseLine(type, line);
                    if (element != null)
                        this.netElements.add(element);
                }
                line = br.readLine();
            }
        }
        tuneNetwork();
    }

    /** Resolves a bare file name inside the default "examples" directory. */
    private String buildDefaultPath(String fileName) {
        File file1 = new File("examples");
        File file2 = new File(file1, fileName);
        return file2.getPath();
    }

    /**
     * Parses one CSV data line according to the current section type.
     *
     * @return the created element, or null for #ROUTERTABLE lines (those
     *         only mutate an already-parsed router)
     */
    private TFNetworkElement parseLine(EntryType type, String line) throws Exception {
        StringTokenizer st = new StringTokenizer(line, ",");
        TFNetworkElement element = null;
        TFSwitch s = null;
        switch (type) {
        case Node:
            element = new TFNode(st.nextToken().trim(),
                    st.nextToken().trim(),
                    st.nextToken().trim(),
                    st.nextToken().trim());
            TFNode n = (TFNode) element;
            // attach the node to its network's switch, creating it lazily
            s = switches.get(n.getNetwork());
            if (s == null) {
                s = new TFSwitch(n.getNetwork(), n.getNetCIDR());
                switches.put(s.getNetwork(), s);
            }
            s.addHost(n);
            n.setLAN(s);
            break;
        case Router:
            element = new TFRouter(st.nextToken().trim(), Integer.parseInt(st.nextToken().trim()));
            int pNumber = 0;
            // remaining tokens come in (MAC, IP/prefix) pairs, one per port
            while (st.hasMoreElements()) {
                TFPort port = new TFPort((TFRouter) element);
                port.number = pNumber;
                port.setMAC(st.nextToken().trim());
                port.setIPrefix(st.nextToken().trim());
                ((TFRouter) element).addPort(port);
                pNumber++;
                s = switches.get(port.getNetwork());
                if (s == null) {
                    s = new TFSwitch(port.getNetwork(), port.getNetCIDR());
                    switches.put(s.getNetwork(), s);
                }
                s.addHost(port);
            }
            break;
        case RouterTable:
            String routerName = st.nextToken().trim();
            String netDest = st.nextToken().trim();
            String nextHop = st.nextToken().trim();
            int portNumber = Integer.parseInt(st.nextToken().trim());
            TFRouter router = getRouterByName(routerName);
            if (router != null) {
                TFPort port = router.getPortByNumber(portNumber);
                if (port != null) {
                    TFRouterTableEntry entry = new TFRouterTableEntry(netDest, nextHop, portNumber);
                    entry.port = port;
                    router.addTableEntry(entry);
                }
            }
            break;
        default:
            throw new Exception("Error reading file.");
        }
        return element;
    }

    /** @return parsed elements; never null. */
    public List<TFNetworkElement> getNetworkElements() {
        if (netElements == null)
            netElements = new ArrayList<TFNetworkElement>();
        return netElements;
    }

    /** @return switches keyed by network address; never null. */
    public Map<String, TFSwitch> getSwitches() {
        if (switches == null)
            switches = new HashMap<String, TFSwitch>();
        return switches;
    }

    /**
     * Post-parse fix-up: resolves node gateways to router objects and lets
     * each router resolve its routing-table references.
     */
    public void tuneNetwork() throws UnknownHostException {
        for (TFNetworkElement ele : netElements) {
            if (ele instanceof TFNode) {
                ((TFNode) ele).gateway = getGatewayByIP(((TFNode) ele).gatewayIP);
            } else if (ele instanceof TFRouter) {
                ((TFRouter) ele).tuneRouterTable(getSwitches(), getRouters());
            }
        }
    }

    /** Finds the router owning the given IP, or null. */
    private TFRouter getGatewayByIP(String ip) {
        List<TFRouter> routers = getRouters();
        for (TFRouter r : routers)
            if (r.hasIP(ip))
                return r;
        return null;
    }

    /** @return all parsed routers, in file order. */
    private List<TFRouter> getRouters() {
        return netElements
                .stream()
                .filter(e -> e instanceof TFRouter)
                .map(e -> (TFRouter) e)
                .collect(Collectors.toList());
    }

    /** Finds a router by (lower-cased) name, or null when absent. */
    private TFRouter getRouterByName(String name) {
        Optional<TFNetworkElement> opt = netElements
                .stream()
                .filter(e -> e instanceof TFRouter && ((TFRouter) e).name.equals(name)).findFirst();
        if (opt.isPresent())
            return (TFRouter) opt.get();
        return null;
    }
}
|
#!/bin/bash
# Launch the stock web application (main.py), forcing a UTF-8 environment
# and logging everything to /data/logs/web.log.
export PYTHONIOENCODING=utf-8
export LANG=zh_CN.UTF-8
export PYTHONPATH=/data/stock
export LC_CTYPE=zh_CN.UTF-8
# truncate the log, then append both stdout and stderr of the server
echo "start main.py" > /data/logs/web.log
/usr/local/bin/python3 /data/stock/web/main.py -log_file_prefix=/data/logs/web.log >> /data/logs/web.log 2>&1
|
// indexController.js
(function () {
    "use strict";

    // Controller backing the demo index page: exposes the context-menu
    // entries and a click handler that reports the chosen item.
    var indexController = function () {
        // small factory so each entry is declared on one line
        var entry = function (caption, divider, glyphicon) {
            return { caption: caption, divider: divider, glyphicon: glyphicon };
        };

        this.menuItems = [
            entry('Select', false, 'glyphicon-check'),
            entry('', true, ''),
            entry('Cut', false, 'glyphicon-scissors'),
            entry('Copy', false, 'glyphicon-copy'),
            entry('Paste', false, 'glyphicon-paste'),
            entry('Delete', false, 'glyphicon-remove'),
            entry('', true, ''),
            entry('Edit...', false, 'glyphicon-edit')
        ];

        this.onMenuClick = function (menuItem) {
            alert(menuItem.caption + " clicked");
        };
    };

    indexController.$inject = [];
    angular.module("demoApp").controller("indexController", indexController);
})();
|
<reponame>bno1/adventofcode2019ts
import {runIntcode} from "../src/intcode";
import "jasmine";
describe("test programs d02", function() {
// Day 2 examples: each program halts on opcode 99 and the whole memory
// image must equal the expected final state.
const cases = [
[[1,0,0,0,99], [2,0,0,0,99]],
[[2,3,0,3,99], [2,3,0,6,99]],
[[2,4,4,5,99,0], [2,4,4,5,99,9801]],
[[1,1,1,4,99,5,6,0,99], [30,1,1,4,2,5,6,0,99]]
];
for (let [initial, final] of cases) {
it(`program ${initial} evaluates to ${final}`, function() {
// copy so the shared fixture is not mutated by runIntcode
let state = [...initial];
runIntcode(state);
expect(state).toEqual(final);
});
}
});
describe("test programs d05", function() {
// Day 5 part 1: parameter modes; final memory state is asserted.
const cases = [
[[1002,4,3,4,33], [1002,4,3,4,99]],
[[1101,100,-1,4,0], [1101,100,-1,4,99]],
];
for (let [initial, final] of cases) {
it(`program ${initial} evaluates to ${final}`, function() {
let state = [...initial];
runIntcode(state);
expect(state).toEqual(final);
});
}
// Day 5 part 2: comparison/jump opcodes; each triple is
// [program, input values, expected output values].
const io_cases = [
[[3,9,8,9,10,9,4,9,99,-1,8], [8], [1]],
[[3,9,8,9,10,9,4,9,99,-1,8], [0], [0]],
[[3,9,7,9,10,9,4,9,99,-1,8], [8], [0]],
[[3,9,7,9,10,9,4,9,99,-1,8], [0], [1]],
[[3,3,1108,-1,8,3,4,3,99], [8], [1]],
[[3,3,1108,-1,8,3,4,3,99], [0], [0]],
[[3,3,1107,-1,8,3,4,3,99], [8], [0]],
[[3,3,1107,-1,8,3,4,3,99], [0], [1]],
[[3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9], [0], [0]],
[[3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9], [1], [1]],
[[3,3,1105,-1,9,1101,0,0,12,4,12,99,1], [0], [0]],
[[3,3,1105,-1,9,1101,0,0,12,4,12,99,1], [1], [1]],
[[3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31, 1106,0,36,98,0,0,
1002,21,125,20,4,20,1105,1,46,104, 999,1105,1,46,1101,1000,1,20,4,20,
1105,1,46,98,99], [7], [999]],
[[3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31, 1106,0,36,98,0,0,
1002,21,125,20,4,20,1105,1,46,104, 999,1105,1,46,1101,1000,1,20,4,20,
1105,1,46,98,99], [8], [1000]],
[[3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31, 1106,0,36,98,0,0,
1002,21,125,20,4,20,1105,1,46,104, 999,1105,1,46,1101,1000,1,20,4,20,
1105,1,46,98,99], [9], [1001]],
];
for (let [program, input, output] of io_cases) {
it(`program ${program} with input ${input} output ${output}`, function() {
let state = [...program];
expect(runIntcode(state, input)).toEqual(output);
});
}
});
describe("test programs d09", function() {
// Day 9 examples: a quine program, a large (>32-bit) literal output, and a
// 16-digit multiplication result.
const cases = [
[[109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99],
[109,1,204,-1,1001,100,1,100,1008,100,16,101,1006,101,0,99]],
[[104,1125899906842624,99], [1125899906842624]],
];
for (let [initial, output] of cases) {
it(`program ${initial} outputs ${output}`, function() {
let state = [...initial];
expect(runIntcode(state)).toEqual(output);
});
}
it (`program outputs 16-digit integer`, function() {
let output = runIntcode([1102,34915192,34915192,7,4,7,99,0]);
expect(output.length).toEqual(1);
expect(output[0].toString().length).toEqual(16);
});
});
|
"""
Charset-Normalizer
~~~~~~~~~~~~~~
The Real First Universal Charset Detector.
A library that helps you read text from an unknown charset encoding.
Motivated by chardet, this package tries to resolve the issue by taking a new approach.
All IANA character set names for which the Python core library provides codecs are supported.
Basic usage:
   >>> from charset_normalizer import from_bytes
   >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието трябва да бъде безплатно, поне що се отнася до началното и основното образование.'.encode('utf_8'))
   >>> "utf_8" in results
   True
   >>> best_result = results.best()
   >>> str(best_result)
   'Bсеки човек има право на образование. Oбразованието трябва да бъде безплатно, поне що се отнася до началното и основното образование.'
Other methods and usages are available - see the full documentation
at <https://github.com/Ousret/charset_normalizer>.
:copyright: (c) 2021 by <NAME>
:license: MIT, see LICENSE for more details.
"""
from charset_normalizer.api import from_fp, from_path, from_bytes, normalize
from charset_normalizer.legacy import detect
from charset_normalizer.version import __version__, VERSION
from charset_normalizer.models import CharsetMatch, CharsetMatches
# Backward-compatible v1 imports
from charset_normalizer.models import CharsetNormalizerMatch
# NOTE(review): this binds the *api module* (not a class) to both aliases
# below; v1 callers expecting a CharsetNormalizerMatches class may break --
# confirm this is intentional.
import charset_normalizer.api as CharsetDetector
CharsetNormalizerMatches = CharsetDetector
|
<reponame>cgojin/owt-server
// Copyright (C) <2019> Intel Corporation
//
// SPDX-License-Identifier: Apache-2.0
#include "AcmmFrameMixer.h"
#include "AudioUtilities.h"
namespace mcu {
// Map a sample rate in Hz to the mixer's Frequency enum; rates without an
// explicit mapping fall back to kLowestPossible.
static inline AudioConferenceMixer::Frequency convert2Frequency(int32_t freq)
{
    if (freq == 8000)
        return AudioConferenceMixer::kNbInHz;
    if (freq == 16000)
        return AudioConferenceMixer::kWbInHz;
    if (freq == 32000)
        return AudioConferenceMixer::kSwbInHz;
    if (freq == 48000)
        return AudioConferenceMixer::kFbInHz;
    return AudioConferenceMixer::kLowestPossible;
}
DEFINE_LOGGER(AcmmFrameMixer, "mcu.media.AcmmFrameMixer");
// Constructor: creates the underlying AudioConferenceMixer, reserves group
// id 0 for the broadcast group, and starts the mixing thread.
AcmmFrameMixer::AcmmFrameMixer()
    : m_asyncHandle(NULL)
    , m_vadEnabled(false)
    , m_frequency(0)
{
    m_mixerModule.reset(AudioConferenceMixer::Create(0));
    m_mixerModule->RegisterMixedStreamCallback(this);
    m_mixerModule->SetMultipleInputs(true);
    // ids 1..MAX_GROUPS are free (true); id 0 is reserved (false)
    m_groupIds.resize(MAX_GROUPS + 1);
    for (size_t i = 1; i < MAX_GROUPS + 1; ++i)
        m_groupIds[i] = true;
    //reserved for broadcast group
    m_groupIds[0] = false;
    m_broadcastGroup.reset(new AcmmBroadcastGroup());
    m_running = true;
    m_thread = boost::thread(&AcmmFrameMixer::performMix, this);
}
// Destructor: stops and joins the mixing thread before unregistering the
// mixer callbacks under the writer lock.
AcmmFrameMixer::~AcmmFrameMixer()
{
    m_running = false;
    m_thread.join();
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    m_mixerModule->UnRegisterMixedStreamCallback();
    if (m_vadEnabled) {
        m_mixerModule->UnRegisterMixerVadCallback();
        m_vadEnabled = false;
    }
}
// Claim the first free group id, marking it used. Returns false (with a
// warning) when every id is already taken.
bool AcmmFrameMixer::getFreeGroupId(uint16_t *id)
{
    const size_t count = m_groupIds.size();
    for (size_t idx = 0; idx < count; ++idx) {
        if (!m_groupIds[idx])
            continue;
        m_groupIds[idx] = false;
        *id = idx;
        return true;
    }
    ELOG_WARN("No free Id, max groups reached(%d)!", MAX_GROUPS);
    return false;
}
// Look up a group by name; returns an empty pointer when either the name
// or its id is unknown.
boost::shared_ptr<AcmmGroup> AcmmFrameMixer::getGroup(const std::string& group)
{
    auto idIt = m_groupIdMap.find(group);
    if (idIt == m_groupIdMap.end())
        return NULL;
    auto groupIt = m_groups.find(idIt->second);
    if (groupIt == m_groups.end())
        return NULL;
    return groupIt->second;
}
// Create and register a new group, or return an empty pointer when no free
// group id remains.
boost::shared_ptr<AcmmGroup> AcmmFrameMixer::addGroup(const std::string& group)
{
    boost::shared_ptr<AcmmGroup> created;
    uint16_t freeId = 0;
    if (getFreeGroupId(&freeId)) {
        m_groupIdMap[group] = freeId;
        created.reset(new AcmmGroup(freeId));
        m_groups[freeId] = created;
    }
    return created;
}
// Unregister a group and release its id for reuse.
// Fix: the previous version indexed m_groupIdMap with operator[], so an
// unknown name inserted a spurious entry with id 0 and then marked the
// reserved broadcast id as free. Guard with find() instead.
void AcmmFrameMixer::removeGroup(const std::string& group)
{
    auto it = m_groupIdMap.find(group);
    if (it == m_groupIdMap.end())
        return;
    m_groups.erase(it->second);
    m_groupIds[it->second] = true;
    m_groupIdMap.erase(it);
}
// Store the handle used to post asynchronous events; writer-locked.
void AcmmFrameMixer::setEventRegistry(EventRegistry* handle)
{
    boost::unique_lock<boost::shared_mutex> guard(m_mutex);
    ELOG_TRACE("setEventRegistry(%p)", handle);
    m_asyncHandle = handle;
}
// Enable voice-activity detection. period is in milliseconds; the mixer
// callback interval is expressed in 10 ms ticks, hence period / 10.
void AcmmFrameMixer::enableVAD(uint32_t period)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    ELOG_DEBUG("enableVAD, period(%u)", period);
    m_vadEnabled = true;
    m_mostActiveInput.reset();
    m_mixerModule->RegisterMixerVadCallback(this, period / 10);
}
// Disable voice-activity detection and forget the most-active input.
void AcmmFrameMixer::disableVAD()
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    ELOG_DEBUG("disableVAD");
    m_vadEnabled = false;
    m_mostActiveInput.reset();
    m_mixerModule->UnRegisterMixerVadCallback();
}
// Forget the currently most-active input so the next VAD callback re-elects it.
void AcmmFrameMixer::resetVAD()
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    ELOG_DEBUG("resetVAD");
    m_mostActiveInput.reset();
}
// Attach an input stream (source) to the mixer under the given group,
// creating the group on demand. Re-adding an existing input stream just
// swaps its source. Returns false on any failure.
bool AcmmFrameMixer::addInput(const std::string& group, const std::string& inStream, const owt_base::FrameFormat format, owt_base::FrameSource* source)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    boost::shared_ptr<AcmmGroup> acmmGroup;
    boost::shared_ptr<AcmmInput> acmmInput;
    int ret;
    ELOG_DEBUG("addInput: group(%s), inStream(%s), format(%s), source(%p)", group.c_str(), inStream.c_str(), getFormatStr(format), source);
    acmmGroup = getGroup(group);
    if (!acmmGroup) {
        acmmGroup = addGroup(group);
        if (!acmmGroup) {
            ELOG_ERROR("Can not add input group");
            return false;
        }
    }
    acmmInput = acmmGroup->getInput(inStream);
    if (acmmInput) {
        ELOG_DEBUG("Update previous input");
        acmmInput->unsetSource();
        if(!acmmInput->setSource(format, source)) {
            ELOG_ERROR("Fail to update source");
            return false;
        }
    } else {
        acmmInput = acmmGroup->addInput(inStream);
        if(!acmmInput) {
            ELOG_ERROR("Fail to add input");
            return false;
        }
        if (!acmmInput->setSource(format, source)) {
            ELOG_ERROR("Fail to set source");
            return false;
        }
        ret = m_mixerModule->SetMixabilityStatus(acmmInput.get(), true);
        if (ret != 0) {
            ELOG_ERROR("Fail to SetMixabilityStatus");
            return false;
        }
        // a group with no outputs of its own contributes anonymously
        if (!acmmGroup->numOfOutputs()) {
            ret = m_mixerModule->SetAnonymousMixabilityStatus(acmmInput.get(), true);
            if (ret != 0) {
                ELOG_ERROR("Fail to SetAnonymousMixabilityStatus");
                return false;
            }
        }
        // the group just gained an audible input: move its outputs off the
        // broadcast group back onto the group's own mix
        if (!acmmGroup->anyOutputsConnected()) {
            std::vector<boost::shared_ptr<AcmmOutput>> outputs;
            acmmGroup->getOutputs(outputs);
            for(auto& o : outputs) {
                m_broadcastGroup->removeDest(m_outputInfoMap[o.get()].dest);
                if (!o->addDest(m_outputInfoMap[o.get()].format, m_outputInfoMap[o.get()].dest)) {
                    ELOG_ERROR("Fail to reconnect dest");
                    return false;
                }
            }
        }
    }
    statistics();
    return true;
}
// Detach an input stream from the mixer; failure paths log and return.
// Fix: corrected the misspelled diagnostic ("gropu" -> "group") so log
// searches for "Invalid group" actually match.
void AcmmFrameMixer::removeInput(const std::string& group, const std::string& inStream)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    boost::shared_ptr<AcmmGroup> acmmGroup;
    boost::shared_ptr<AcmmInput> acmmInput;
    int ret;
    ELOG_DEBUG("removeInput: group(%s), inStream(%s)", group.c_str(), inStream.c_str());
    acmmGroup = getGroup(group);
    if (!acmmGroup) {
        ELOG_ERROR("Invalid group(%s)", group.c_str());
        return;
    }
    acmmInput = acmmGroup->getInput(inStream);
    if (!acmmInput) {
        ELOG_ERROR("Invalid input(%s)", inStream.c_str());
        return;
    }
    ret = m_mixerModule->SetMixabilityStatus(acmmInput.get(), false);
    if (ret != 0) {
        ELOG_ERROR("Fail to unSetMixabilityStatus");
        return;
    }
    acmmGroup->removeInput(inStream);
    // if the group lost its last audible input, feed its connected outputs
    // from the broadcast group instead
    if (acmmGroup->allInputsMuted() && acmmGroup->anyOutputsConnected()) {
        std::vector<boost::shared_ptr<AcmmOutput>> outputs;
        acmmGroup->getOutputs(outputs);
        for(auto& o : outputs) {
            o->removeDest(m_outputInfoMap[o.get()].dest);
            if (!m_broadcastGroup->addDest(m_outputInfoMap[o.get()].format, m_outputInfoMap[o.get()].dest)) {
                ELOG_ERROR("Fail to reconnect broadcast dest");
                return;
            }
        }
    }
    // drop the group entirely once it has neither inputs nor outputs
    if (!acmmGroup->numOfInputs() && !acmmGroup->numOfOutputs()) {
        removeGroup(group);
    }
    if (m_mostActiveInput == acmmInput)
        m_mostActiveInput.reset();
    statistics();
    return;
}
// Mark an input stream active/inactive and rewire the group's outputs:
// when every input in a group is muted its connected outputs are fed from
// the broadcast group, and vice versa when an input unmutes.
// Fix: corrected the misspelled diagnostic ("gropu" -> "group").
void AcmmFrameMixer::setInputActive(const std::string& group, const std::string& inStream, bool active)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    boost::shared_ptr<AcmmGroup> acmmGroup;
    boost::shared_ptr<AcmmInput> acmmInput;
    ELOG_DEBUG("+++setInputActive: group(%s), inStream(%s), active(%d)", group.c_str(), inStream.c_str(), active);
    acmmGroup = getGroup(group);
    if (!acmmGroup) {
        ELOG_ERROR("Invalid group(%s)", group.c_str());
        return;
    }
    acmmInput = acmmGroup->getInput(inStream);
    if (!acmmInput) {
        ELOG_ERROR("Invalid input(%s)", inStream.c_str());
        return;
    }
    if (acmmInput->isActive() == active)
        return;
    acmmInput->setActive(active);
    // nothing to rewire when the group has no outputs
    if (!acmmGroup->numOfOutputs())
        return;
    if (acmmGroup->allInputsMuted() && acmmGroup->anyOutputsConnected()) {
        std::vector<boost::shared_ptr<AcmmOutput>> outputs;
        acmmGroup->getOutputs(outputs);
        for(auto& o : outputs) {
            o->removeDest(m_outputInfoMap[o.get()].dest);
            if (!m_broadcastGroup->addDest(m_outputInfoMap[o.get()].format, m_outputInfoMap[o.get()].dest)) {
                ELOG_ERROR("Fail to reconnect broadcast dest");
                return;
            }
        }
    } else if (!acmmGroup->allInputsMuted() && !acmmGroup->anyOutputsConnected()) {
        std::vector<boost::shared_ptr<AcmmOutput>> outputs;
        acmmGroup->getOutputs(outputs);
        for(auto& o : outputs) {
            m_broadcastGroup->removeDest(m_outputInfoMap[o.get()].dest);
            if (!o->addDest(m_outputInfoMap[o.get()].format, m_outputInfoMap[o.get()].dest)) {
                ELOG_ERROR("Fail to reconnect dest");
                return;
            }
        }
    }
    statistics();
    ELOG_DEBUG("---setInputActive: group(%s), inStream(%s), active(%d)", group.c_str(), inStream.c_str(), active);
}
// Attach an output destination under the given group, creating the group on
// demand. A group whose inputs are all muted is fed from the broadcast
// group; otherwise from its own mix. Returns false on any failure.
bool AcmmFrameMixer::addOutput(const std::string& group, const std::string& outStream, const owt_base::FrameFormat format, owt_base::FrameDestination* destination)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    boost::shared_ptr<AcmmGroup> acmmGroup;
    boost::shared_ptr<AcmmOutput> acmmOutput;
    boost::shared_ptr<AcmmOutput> acmmBroadcastOutput;
    int ret;
    ELOG_DEBUG("addOutput: group(%s), outStream(%s), format(%s), dest(%p)", group.c_str(), outStream.c_str(), getFormatStr(format), destination);
    acmmGroup = getGroup(group);
    if (!acmmGroup) {
        acmmGroup = addGroup(group);
        if (!acmmGroup) {
            ELOG_ERROR("Can not add output group");
            return false;
        }
    }
    acmmOutput = acmmGroup->getOutput(outStream);
    if (acmmOutput) {
        ELOG_DEBUG("Update previous output");
        // detach the old destination from whichever group currently feeds it,
        // then attach the new one to the matching feed
        if (acmmGroup->allInputsMuted()) {
            m_broadcastGroup->removeDest(m_outputInfoMap[acmmOutput.get()].dest);
            m_outputInfoMap.erase(acmmOutput.get());
            if (!m_broadcastGroup->addDest(format, destination)) {
                ELOG_ERROR("Fail to update broadcast dest");
                return false;
            }
        } else {
            acmmOutput->removeDest(m_outputInfoMap[acmmOutput.get()].dest);
            m_outputInfoMap.erase(acmmOutput.get());
            if (!acmmOutput->addDest(format, destination)) {
                ELOG_ERROR("Fail to update dest");
                return false;
            }
        }
        OutputInfo outputInfo;
        outputInfo.format = format;
        outputInfo.dest = destination;
        m_outputInfoMap[acmmOutput.get()] = outputInfo;
    } else {
        acmmOutput = acmmGroup->addOutput(outStream);
        if(!acmmOutput) {
            ELOG_ERROR("Fail to add output");
            return false;
        }
        // first output for a group with inputs: its inputs are no longer
        // anonymous contributors
        if ((acmmGroup->numOfOutputs() == 1) && acmmGroup->numOfInputs()) {
            std::vector<boost::shared_ptr<AcmmInput>> inputs;
            acmmGroup->getInputs(inputs);
            for(auto& i : inputs) {
                ret = m_mixerModule->SetAnonymousMixabilityStatus(i.get(), false);
                if (ret != 0) {
                    ELOG_WARN("Fail to unSetAnonymousMixabilityStatus");
                }
            }
        }
        if (acmmGroup->allInputsMuted()) {
            if (!m_broadcastGroup->addDest(format, destination)) {
                ELOG_ERROR("Fail to add broadcast dest");
                return false;
            }
        } else {
            if (!acmmOutput->addDest(format, destination)) {
                ELOG_ERROR("Fail to add dest");
                return false;
            }
        }
        OutputInfo outputInfo;
        outputInfo.format = format;
        outputInfo.dest = destination;
        m_outputInfoMap[acmmOutput.get()] = outputInfo;
    }
    updateFrequency();
    statistics();
    return true;
}
// Removes the destination registered for (group, outStream) and tears the
// output down.  If this was the group's last output while the group still has
// inputs, those inputs are returned to anonymous mixability (no dedicated
// per-group mix is needed any more).  Groups left with neither inputs nor
// outputs are deleted.
void AcmmFrameMixer::removeOutput(const std::string& group, const std::string& outStream)
{
    boost::unique_lock<boost::shared_mutex> lock(m_mutex);
    boost::shared_ptr<AcmmGroup> acmmGroup;
    boost::shared_ptr<AcmmOutput> acmmOutput;
    int ret;

    ELOG_DEBUG("removeOutput: group(%s), outStream(%s)", group.c_str(), outStream.c_str());

    acmmGroup = getGroup(group);
    if (!acmmGroup) {
        // Fix: message previously read "Invalid gropu(%s)".
        ELOG_ERROR("Invalid group(%s)", group.c_str());
        return;
    }

    acmmOutput = acmmGroup->getOutput(outStream);
    if (!acmmOutput) {
        ELOG_ERROR("Invalid output(%s)", outStream.c_str());
        return;
    }

    // Last output of a group that still has inputs: mark those inputs
    // anonymous again (status = true).
    if ((acmmGroup->numOfOutputs() == 1) && acmmGroup->numOfInputs()) {
        std::vector<boost::shared_ptr<AcmmInput>> inputs;
        acmmGroup->getInputs(inputs);
        for (auto& i : inputs) {
            ret = m_mixerModule->SetAnonymousMixabilityStatus(i.get(), true);
            if (ret != 0) {
                // Fix: message previously said "unSet..." although this call
                // sets the anonymous-mixability status.
                ELOG_WARN("Fail to SetAnonymousMixabilityStatus");
            }
        }
    }

    // The destination lives on the broadcast group while all of this group's
    // inputs are muted, otherwise on the output itself (mirrors addOutput).
    if (acmmGroup->allInputsMuted()) {
        m_broadcastGroup->removeDest(m_outputInfoMap[acmmOutput.get()].dest);
    } else {
        acmmOutput->removeDest(m_outputInfoMap[acmmOutput.get()].dest);
    }
    m_outputInfoMap.erase(acmmOutput.get());

    acmmGroup->removeOutput(outStream);
    if (!acmmGroup->numOfInputs() && !acmmGroup->numOfOutputs()) {
        removeGroup(group);
    }

    updateFrequency();
    statistics();
    return;
}
void AcmmFrameMixer::updateFrequency()
{
int32_t maxFreq = m_broadcastGroup->NeededFrequency();
int32_t freq;
int ret;
for (auto& g : m_groups) {
freq = g.second->NeededFrequency();
if (freq > maxFreq)
maxFreq= freq;
}
if (m_frequency != maxFreq) {
ret = m_mixerModule->SetMinimumMixingFrequency(convert2Frequency(maxFreq));
if (ret != 0) {
ELOG_WARN("Fail to SetMinimumMixingFrequency, %d", maxFreq);
return;
}
ELOG_DEBUG("Max mixing frequency %d -> %d", m_frequency, maxFreq);
m_frequency = maxFreq;
}
return;
}
// Mixing loop run on the mixer thread: ticks the audio mixer module once
// every MIXER_INTERVAL_MS until m_running is cleared.
void AcmmFrameMixer::performMix()
{
    int64_t currTime;
    // Absolute deadline of the next tick; advanced by a fixed step each
    // iteration so per-tick timing errors do not accumulate.
    int64_t nextTime = currentTimeMs() + MIXER_INTERVAL_MS;

    while (true) {
        currTime = currentTimeMs();
        if (nextTime > currTime) {
            ELOG_TRACE("performMix, waiting for the next time %ld, sleep %ld ms", nextTime, nextTime-currTime);
            usleep((nextTime - currTime) * 1000);
        } else {
            // Deadline already passed: Process() took longer than one interval.
            ELOG_WARN("performMix, processing too slow, current time %ld, next time %ld", currTime, nextTime);
        }

        {
            // NOTE(review): upgrade_lock acquires shared (read) ownership
            // unless upgraded; if Process() mutates mixer state this presumably
            // relies on a single mixing thread -- confirm.
            boost::upgrade_lock<boost::shared_mutex> lock(m_mutex);
            m_mixerModule->Process();
        }

        // Checked after Process() so one final tick completes on shutdown.
        if (!m_running) {
            break;
        }
        nextTime += MIXER_INTERVAL_MS;
    }
}
// Mixer callback delivering freshly mixed audio.
// generalAudioFrame is the overall mix; uniqueAudioFrames holds `size`
// per-participant frames whose id encodes the owning group in the high
// 16 bits.  Groups with a unique frame receive it; every other group with
// outputs -- and the broadcast group -- receives the general mix.
void AcmmFrameMixer::NewMixedAudio(int32_t id,
        const AudioFrame& generalAudioFrame,
        const AudioFrame** uniqueAudioFrames,
        uint32_t size)
{
    // Groups that already got their unique frame this round.
    std::map<uint16_t, bool> groupMap;

    for (uint32_t i = 0; i < size; i++) {
        // High 16 bits of the frame id identify the group.
        uint16_t groupId = (uniqueAudioFrames[i]->id_ >> 16) & 0xffff;
        ELOG_TRACE("NewMixedAudio, frame id(0x%x), groupId(%u)"
            , uniqueAudioFrames[i]->id_
            , groupId);
        if (m_groups.find(groupId) != m_groups.end()) {
            boost::shared_ptr<AcmmGroup> acmmGroup = m_groups[groupId];
            if (acmmGroup->numOfInputs()) {
                // Only forward when someone is listening on this group.
                if (acmmGroup->numOfOutputs()) {
                    acmmGroup->NewMixedAudio(uniqueAudioFrames[i]);
                }
                groupMap[groupId] = true;
            }
        }
    }

    // Remaining groups (no unique frame) get the general mix.
    for (auto& p : m_groups) {
        boost::shared_ptr<AcmmGroup> acmmGroup = p.second;
        if (groupMap.find(acmmGroup->id()) == groupMap.end()) {
            if (acmmGroup->numOfOutputs()) {
                acmmGroup->NewMixedAudio(&generalAudioFrame);
            }
        }
    }

    m_broadcastGroup->NewMixedAudio(&generalAudioFrame);
}
// Resolves a composite stream id to its input.
// Id layout: high 16 bits = group id, low 16 bits = stream id.
boost::shared_ptr<AcmmInput> AcmmFrameMixer::getInputById(int32_t id)
{
    const uint16_t groupId = (id >> 16) & 0xffff;
    const uint16_t streamId = id & 0xffff;

    // Single lookup via find() -- also avoids operator[] inserting a
    // default entry for an unknown group.
    auto it = m_groups.find(groupId);
    if (it == m_groups.end())
        return NULL;
    return it->second->getInput(streamId);
}
// VAD callback: finds the participant with the highest reported energy and,
// when the most-active speaker changed, notifies the layer above with a
// "vad" event carrying that input's name.
void AcmmFrameMixer::VadParticipants(const ParticipantVadStatistics *statistics, const uint32_t size)
{
    if (!m_asyncHandle || !m_vadEnabled || size < 1) {
        ELOG_TRACE("VAD skipped, asyncHandle(%p), enabled(%d), size(%d)", m_asyncHandle, m_vadEnabled, size);
        return;
    }

    const ParticipantVadStatistics* active = NULL;
    const ParticipantVadStatistics* p = statistics;
    boost::shared_ptr<AcmmInput> activeAcmmInput;
    boost::shared_ptr<AcmmInput> acmmInput;

    // Scan for the highest-energy entry that maps to a known input.
    for (uint32_t i = 0; i < size; i++, p++) {
        ELOG_TRACE("%d, vad streamId(0x%x), energy(%u)", i, p->id, p->energy);
        if (p->energy == 0)
            continue;

        if (!active || p->energy > active->energy) {
            acmmInput = getInputById(p->id);
            if (!acmmInput) {
                // Entry references a stream we no longer (or never) track.
                ELOG_TRACE("Not valid vad streamId(0x%x)", p->id);
                continue;
            }
            active = p;
            activeAcmmInput = acmmInput;
        }
    }

    // Only raise the event when the active speaker actually changed.
    if (activeAcmmInput && m_mostActiveInput != activeAcmmInput) {
        ELOG_TRACE("Active vad %s -> %s"
            , m_mostActiveInput ? m_mostActiveInput->name().c_str() : "NULL"
            , activeAcmmInput->name().c_str());

        m_mostActiveInput = activeAcmmInput;
        m_asyncHandle->notifyAsyncEvent("vad", m_mostActiveInput->name().c_str());
    }
}
void AcmmFrameMixer::statistics()
{
uint32_t activeCount = 0;
uint32_t mutedCount = 0;
uint32_t receivedOnlyCount = 0;
uint32_t streamInCount = 0;
uint32_t unknownCount = 0;
for (auto& p : m_groups) {
boost::shared_ptr<AcmmGroup> acmmGroup = p.second;
if(!acmmGroup->allInputsMuted() && acmmGroup->anyOutputsConnected())
activeCount++;
else if(acmmGroup->numOfInputs() && acmmGroup->allInputsMuted() && acmmGroup->numOfOutputs())
mutedCount++;
else if(acmmGroup->numOfInputs() && acmmGroup->numOfOutputs() == 0)
streamInCount++;
else if(acmmGroup->numOfInputs() == 0 && acmmGroup->numOfOutputs() != 0)
receivedOnlyCount++;
else
unknownCount++;
}
ELOG_DEBUG("All(%ld), Active(%d), Muted(%d), ReceivedOnly(%d), StreamIn(%d), Unknown(%d)"
, m_groups.size()
, activeCount
, mutedCount
, receivedOnlyCount
, streamInCount
, unknownCount
);
}
} /* namespace mcu */
|
import http from "./http";
/**
 * Thin wrapper around the shared HTTP client for API calls.
 * @returns {Promise} resolves with the response of GET "test"
 */
export const testService = () => {
    return http.get("test");
};
|
<reponame>bilaleren/mui-tabs
export { default } from './Tabs'
export type { TabsProps, TabsActionRefAttributes } from './Tabs'
|
<gh_stars>0
/**
 * Created by ligh on 2016/11/5.
 */

/**
 * Emitter payload: the envelope passed through the event emitter.
 *
 * @param {string} eventType event type
 * @param {*} targetID       id of the target the event is addressed to
 * @param {*} data           data being sent
 * @param {number} [result]  status code; defaults to 200 when falsy
 */
var EmitterData = exports = module.exports = function (eventType,targetID,data,result)
{
    // Target id.
    this.targetID = targetID;
    // Payload being sent.
    this.data = data;
    // Event type.
    this.eventType = eventType;
    // Status code.  NOTE(review): `result || 200` also turns an explicit 0
    // into 200 -- presumably intended as "default when absent"; confirm.
    this.code = result || 200;
};
#!/usr/bin/env bash
# Agent job: enforces the program block/allow lists from
# /etc/hamonize/progrm/progrm.hm and reports the result to the center server.

# Center server endpoint, read from the agent's properties file.
centerUrl=$(grep CENTERURL /etc/hamonize/propertiesJob/propertiesInfo.hm | awk -F'=' '{print $2}')
CENTERURL="http://${centerUrl}/act/progrmAct"

DATETIME=$(date +'%Y-%m-%d %H:%M:%S')
HOSTNAME=$(hostname)
UUID=$(cat /etc/hamonize/uuid)

LOGFILE="/var/log/hamonize/agentjob/progrmjob.log"
if [[ ! -f "$LOGFILE" ]]; then
    touch "$LOGFILE"
fi
echo "################### Progrm block ###########################"

# Comma-separated list of programs to block (jq -r replaces the old
# jq | sed quote-stripping; prints the literal string "null" when absent).
UPDT_INS=$(jq -r '.INS' /etc/hamonize/progrm/progrm.hm)
echo "block program name =========> $UPDT_INS" >>"$LOGFILE"

INSRET=""
if [ "$UPDT_INS" != null ]
then
    echo "program access unpossible ==" >>"$LOGFILE"
    OLD_IFS=$IFS
    IFS=,
    for I in $UPDT_INS
    do
        echo "program name is ===> $I" >>"$LOGFILE"
        # Fix: the old `ps aux | grep $I | awk | xargs kill -9` also matched
        # (and tried to kill) the grep process itself; pkill -f matches the
        # full command line without that problem.
        sudo pkill -9 -f "$I" >>"$LOGFILE" 2>&1
        sleep 1
        DO_WHEREISPROGRM=$(whereis "$I")
        echo "whereis progrm === $DO_WHEREISPROGRM" >>"$LOGFILE"
        DO_FILE_PATH=$(echo "$DO_WHEREISPROGRM" | awk '{print $2}')
        echo "do program is ===>$DO_FILE_PATH" >>"$LOGFILE"
        # Revoke the execute bit so the program cannot be started again.
        sudo chmod 644 "$DO_FILE_PATH"
        PROGRM_CHMOD=$(ls -al "$DO_FILE_PATH" | awk '{print $1}')
        echo "PROGRM_CHMOD===> $PROGRM_CHMOD" >>"$LOGFILE"
        # Fix: JSON elements are now comma-separated correctly.  The old code
        # compared a loop counter against $# (the script's argument count),
        # which produced misplaced/trailing commas and invalid JSON.
        if [ -n "$INSRET" ]; then
            INSRET="${INSRET},"
        fi
        INSRET=${INSRET}"{\"uuid\":\"${UUID}\",\"hostname\":\"${HOSTNAME}\",\"status\":\"ins\",\"status_yn\":\"Y\",\"progrmname\":\"${I}\",\"datetime\":\"${DATETIME}\"}"
    done
    # Fix: restore IFS -- the old code saved OLD_IFS but never restored it,
    # leaking IFS=, into the rest of the script.
    IFS=$OLD_IFS
fi
echo "================================================================="
echo "################### progrm allow ###########################"

# Comma-separated list of programs to re-allow.
UPDT_INS=$(jq -r '.DEL' /etc/hamonize/progrm/progrm.hm)
echo "install file total data=========$UPDT_INS" >>"$LOGFILE"

DELRET=""
if [ "$UPDT_INS" != null ]
then
    echo "program access possible ==" >>"$LOGFILE"
    OLD_IFS=$IFS
    IFS=,
    for I in $UPDT_INS
    do
        echo "program name is ===> $I" >>"$LOGFILE"
        DO_WHEREISPROGRM=$(whereis "$I")
        echo "whereis progrm === $DO_WHEREISPROGRM" >>"$LOGFILE"
        DO_FILE_PATH=$(echo "$DO_WHEREISPROGRM" | awk '{print $2}')
        echo "do program is ===>$DO_FILE_PATH" >>"$LOGFILE"
        # Restore the execute bit so the program can run again.
        sudo chmod 755 "$DO_FILE_PATH"
        PROGRM_CHMOD=$(ls -al "$DO_FILE_PATH" | awk '{print $1}')
        echo "PROGRM_CHMOD===$PROGRM_CHMOD"
        # Fix: proper comma separation between JSON elements (the old code
        # compared a loop counter against $#, yielding invalid JSON).
        if [ -n "$DELRET" ]; then
            DELRET="${DELRET},"
        fi
        DELRET=${DELRET}"{\"uuid\":\"${UUID}\",\"hostname\":\"${HOSTNAME}\",\"status\":\"del\",\"status_yn\":\"N\",\"progrmname\":\"${I}\",\"datetime\":\"${DATETIME}\"}"
    done
    # Fix: restore IFS (previously leaked IFS=, to the rest of the script).
    IFS=$OLD_IFS
fi
echo "################## updt json data ############################"

# Fix: the payload previously used $PCUUID, which is never defined anywhere
# in this script; UUID (read from /etc/hamonize/uuid) is the intended value.
PROGRM_JSON="{\
\"insresert\":[$INSRET],\
\"delresert\": [$DELRET],\
\"uuid\": \"$UUID\"\
}"
echo "$PROGRM_JSON" >>"$LOGFILE"

# Report the result to the center server.
RETUPDT=$(curl -X POST -H 'User-Agent: HamoniKR OS' -H 'Content-Type: application/json' -f -s -d "$PROGRM_JSON" "$CENTERURL")
echo "$RETUPDT" >>"${LOGFILE}"
|
"""
Create an algorithm to detect fake reviews for a product based on its sentiment score.
"""
from textblob import TextBlob
def detect_fake_reviews(reviews):
    """Return the reviews whose sentiment polarity is extreme.

    A review is flagged when its TextBlob polarity falls below 0.2 or
    above 0.8, on the assumption that suspiciously negative or glowing
    text is more likely to be fake.

    :param reviews: iterable of review strings
    :return: list of flagged review strings, in input order
    """
    return [
        review
        for review in reviews
        if not 0.2 <= TextBlob(review).sentiment.polarity <= 0.8
    ]
if __name__ == '__main__':
    reviews = ["This product is terrible!", "This is the best product ever!"]
    # NOTE(review): both sample reviews have extreme polarity, so this call
    # presumably prints BOTH of them -- the original trailing comment claimed
    # only ['This product is terrible!']; verify by running.
    print(detect_fake_reviews(reviews))
// Shared colour palette used across the UI.
module.exports = {
    first: "#5A6360",          // Black Coral
    firstLight: "#7F8A87",     // Mummy's Tomb -- what a lovely name for a colour
    firstSuperLight: "#EFF4F3",
    firstDark: "#5A6360",      // NOTE(review): identical to `first` -- confirm intended
    accent: "#FF6633",
    bright: "#ffffff",
    light: "#f3f3f3",
    middle: "#666666",
    dark: "#333333",
    superDark: "#111111",
    veryDark: "#000000"
};
// Great source of colours at https://coolors.co/331832-d81e5b-f0544f-7f8a87-fdf0d5
|
package com.tdsata.ourapp.entity;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.widget.ImageView;
import com.tdsata.ourapp.R;
import com.tdsata.ourapp.util.FixedValue;
import com.tdsata.ourapp.util.Tools;
import java.io.File;
import java.io.Serializable;
import java.util.Arrays;
/**
 * Data model for a department member.
 */
public class Member implements Serializable {
    private static final long serialVersionUID = 202102100951L;

    // Data fields
    private String name;     // name
    private String number;   // student number
    private String subject;  // major
    private String phone;    // contact phone number
    private String teacher;  // counselor / advisor
    private String qq;       // QQ account
    private String sex;      // gender
    private int count;       // department points
    private String flag;     // role flags, comma-separated (see getFlagArray)
    private String photoName;// avatar file name; used for local caching and update checks

    public String getName() {
        return name;
    }

    public String getNumber() {
        return number;
    }

    public String getSubject() {
        return subject;
    }

    public String getPhone() {
        return phone;
    }

    public String getTeacher() {
        return teacher;
    }

    public String getQQ() {
        return qq;
    }

    public String getSex() {
        return sex;
    }

    public int getCount() {
        return count;
    }

    public String getFlag() {
        return flag;
    }

    /**
     * Splits the raw flag string on commas and trims each entry.
     *
     * @return the individual role flags
     */
    public String[] getFlagArray() {
        String[] result = flag.split(",");
        for (int i = 0; i < result.length; i++) {
            result[i] = result[i].trim();
        }
        return result;
    }

    /**
     * Builds a human-readable identity description.
     * Multiple roles are joined with "/".
     *
     * @return the identity description
     */
    public String getIdentity() {
        String[] flags = getFlagArray();
        StringBuilder identity = new StringBuilder();
        // Flag meanings: 0 = member, 1 = group leader, 2 = vice minister, 3 = minister.
        // NOTE(review): unknown flag values are silently skipped here.
        for (int i = 0; i < flags.length; i++) {
            switch (flags[i]) {
                case "0":
                    identity.append("部员");
                    break;
                case "1":
                    identity.append("组长");
                    break;
                case "2":
                    identity.append("副部长");
                    break;
                case "3":
                    identity.append("部长");
                    break;
            }
            if (i < flags.length - 1) {
                identity.append("/");
            }
        }
        return identity.toString();
    }

    public String getPhotoName() {
        return photoName;
    }

    public void setPhotoName(String photoName) {
        this.photoName = photoName;
    }

    /**
     * Checks whether this member uses the default avatar.
     *
     * @return true when the default avatar is used, false otherwise
     */
    public boolean useDefaultPhoto() {
        return "default_photo".equals(photoName);
    }

    /**
     * Sets this member's avatar on an ImageView (or subclass).
     *
     * The default avatar is shown immediately; when the member has a custom
     * photo it is decoded from the local cache on a worker thread and posted
     * back onto the view. Falls back to the default avatar if decoding fails.
     *
     * @param context   context
     * @param imageView target ImageView
     */
    public void settingHeadPhoto(final Context context, final ImageView imageView) {
        imageView.setImageResource(R.drawable.pic_default_head_photo);
        if (!useDefaultPhoto()) {
            Tools.threadPool.execute(new Runnable() {
                @Override
                public void run() {
                    File photo = Tools.generateFileAtCache(context, FixedValue.photoDirectory, photoName, true);
                    final Bitmap pic = BitmapFactory.decodeFile(photo.getAbsolutePath());
                    imageView.post(new Runnable() {
                        @Override
                        public void run() {
                            if (pic != null) {
                                imageView.setImageBitmap(pic);
                            } else {
                                // Decoding failed; keep the default avatar.
                                imageView.setImageResource(R.drawable.pic_default_head_photo);
                            }
                        }
                    });
                }
            });
        }
    }

    /**
     * Checks whether this member is the department minister or vice minister.
     *
     * @return true when flag contains "2" or "3", false otherwise
     */
    public boolean isAdministrator() {
        return flag.contains("2") || flag.contains("3");
    }

    @Override
    public String toString() {
        return "Member {" +
                "学号:" + number +
                ", 姓名:" + name +
                ", 性别:" + sex +
                ", 专业:" + subject +
                ", 辅导员:" + teacher +
                ", QQ:" + qq +
                ", 电话:" + phone +
                ", 部门积分:" + count +
                ", 身份标识:" + Arrays.toString(getFlagArray()) +
                ", 头像文件名:" + photoName +
                '}';
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dtstack.flinkx.connector.oraclelogminer.table;
import com.dtstack.flinkx.connector.oraclelogminer.conf.LogMinerConf;
import com.dtstack.flinkx.connector.oraclelogminer.options.LogminerOptions;
import com.dtstack.flinkx.connector.oraclelogminer.source.OraclelogminerDynamicTableSource;
import org.apache.flink.configuration.ConfigOption;
import org.apache.flink.configuration.ReadableConfig;
import org.apache.flink.formats.json.JsonOptions;
import org.apache.flink.table.api.TableSchema;
import org.apache.flink.table.connector.source.DynamicTableSource;
import org.apache.flink.table.factories.DynamicTableSourceFactory;
import org.apache.flink.table.factories.FactoryUtil;
import org.apache.flink.table.utils.TableSchemaUtils;
import java.util.HashSet;
import java.util.Set;
/**
 * Table source factory for the "oraclelogminer-x" connector.
 *
 * Date: 2021/04/27 Company: www.dtstack.com
 *
 * @author tudou
 */
public class OraclelogminerDynamicTableFactory implements DynamicTableSourceFactory {

    /** Connector identifier used in DDL (`'connector' = 'oraclelogminer-x'`). */
    public static final String IDENTIFIER = "oraclelogminer-x";

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(LogminerOptions.JDBC_URL);
        options.add(LogminerOptions.USERNAME);
        options.add(LogminerOptions.PASSWORD);
        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        Set<ConfigOption<?>> options = new HashSet<>();
        options.add(LogminerOptions.FETCHSIZE);
        options.add(LogminerOptions.CAT);
        options.add(LogminerOptions.POSITION);
        options.add(LogminerOptions.START_TIME);
        options.add(LogminerOptions.START_SCN);
        options.add(LogminerOptions.TABLE);
        options.add(LogminerOptions.QUERY_TIMEOUT);
        options.add(LogminerOptions.SUPPORT_AUTO_LOG);
        options.add(LogminerOptions.IO_THREADS);
        options.add(LogminerOptions.MAX_LOAD_FILE_SIZE);
        options.add(LogminerOptions.TRANSACTION_CACHE_NUM_SIZE);
        options.add(LogminerOptions.TRANSACTION_EXPIRE_TIME);
        options.add(JsonOptions.TIMESTAMP_FORMAT);
        return options;
    }

    @Override
    public DynamicTableSource createDynamicTableSource(Context context) {
        final FactoryUtil.TableFactoryHelper helper =
                FactoryUtil.createTableFactoryHelper(this, context);
        // 1. Collect all required and optional options.
        final ReadableConfig config = helper.getOptions();
        // 2. Validate the options.
        helper.validate();
        // 3. Wrap the options into the source configuration.
        TableSchema physicalSchema =
                TableSchemaUtils.getPhysicalSchema(context.getCatalogTable().getSchema());
        LogMinerConf logMinerConf = getLogMinerConf(config);
        return new OraclelogminerDynamicTableSource(
                physicalSchema, logMinerConf, JsonOptions.getTimestampFormat(config));
    }

    /**
     * Builds a LogMinerConf from the table options.
     *
     * @param config resolved table options
     * @return the populated LogMinerConf (paving-data and split-update are always enabled)
     */
    private LogMinerConf getLogMinerConf(ReadableConfig config) {
        LogMinerConf logMinerConf = new LogMinerConf();
        logMinerConf.setUsername(config.get(LogminerOptions.USERNAME));
        logMinerConf.setPassword(config.get(LogminerOptions.PASSWORD));
        logMinerConf.setJdbcUrl(config.get(LogminerOptions.JDBC_URL));
        logMinerConf.setReadPosition(config.get(LogminerOptions.POSITION));
        logMinerConf.setStartTime(config.get(LogminerOptions.START_TIME));
        logMinerConf.setStartScn(config.get(LogminerOptions.START_SCN));
        logMinerConf.setListenerTables(config.get(LogminerOptions.TABLE));
        logMinerConf.setCat(config.get(LogminerOptions.CAT));
        logMinerConf.setFetchSize(config.get(LogminerOptions.FETCHSIZE));
        logMinerConf.setQueryTimeout(config.get(LogminerOptions.QUERY_TIMEOUT));
        logMinerConf.setSupportAutoAddLog(config.get(LogminerOptions.SUPPORT_AUTO_LOG));
        logMinerConf.setMaxLogFileSize(config.get(LogminerOptions.MAX_LOAD_FILE_SIZE));
        logMinerConf.setIoThreads(config.get(LogminerOptions.IO_THREADS));
        logMinerConf.setTransactionCacheNumSize(
                config.get(LogminerOptions.TRANSACTION_CACHE_NUM_SIZE));
        logMinerConf.setTransactionExpireTime(config.get(LogminerOptions.TRANSACTION_EXPIRE_TIME));
        logMinerConf.setPavingData(true);
        logMinerConf.setSplitUpdate(true);
        return logMinerConf;
    }
}
|
def extractTagName(htmlElement: str) -> str:
    """Extract the tag name from an HTML element string.

    Handles opening tags with or without attributes, self-closing tags
    and closing tags::

        extractTagName('<div class="x">')  -> 'div'
        extractTagName('<p>')              -> 'p'
        extractTagName('<br/>')            -> 'br'
        extractTagName('</div>')           -> 'div'

    Fixes the original implementation, which only stopped at a space or
    '>', returning 'br/' for '<br/>' and '/div' for '</div>'.
    """
    start = htmlElement.find('<') + 1
    # Skip the '/' of a closing tag so '</div>' yields 'div'.
    if start < len(htmlElement) and htmlElement[start] == '/':
        start += 1
    # The tag name ends at whitespace, '/', or '>'.
    end = start
    while end < len(htmlElement) and htmlElement[end] not in ' \t\n\r/>':
        end += 1
    return htmlElement[start:end]
const { session, dialog, app, BrowserWindow, ipcMain, shell, Menu } = require("electron");
const fs = require("fs");
const fsPromises = fs.promises;
const path = require("path");
process.env["user_agent"] = `${app.name}/${app.getVersion()}`;
const { Config } = require("../js/config");
const { Library } = require("../js/library");
const { History } = require("../js/history");
const { importNNDDDB } = require("../js/import-nndd-db");
const { getNicoDataFilePaths } = require("../js/nico-data-file");
const { logger } = require("../js/logger");
const { Store } = require("../js/store");
const { selectFileDialog, selectFolderDialog, showMessageBox } = require("../js/dialog");
const { setupContextmenu } = require("../js/contextmenu");
const {
JsonStore, UserCSS, UserIconCache,
getWindowState,
setLogLevel,
popupInputContextMenu,
selectFolder } = require("./util");
// Allow media autoplay without a user gesture (needed for the player window).
app.commandLine.appendSwitch("autoplay-policy", "no-user-gesture-required");

const config = new Config();
const library = new Library();
const history = new History();
const store = new Store();
const user_css = new UserCSS();
// JSON-backed store rooted at the configured data directory.
const json_store = new JsonStore(async ()=>{
    return await config.get("data_dir", "");
});
const user_icon_cache = new UserIconCache();
const app_dir = path.join(__dirname, "/..");

// Keep global references to window objects.
// Otherwise they would be closed automatically by the garbage collector.
let main_win = null;
let main_win_menu = null;
let player_win = null;
let do_app_quit = false;

let main_html_path = path.join(app_dir, "html", "index.html");
const player_html_path = path.join(app_dir, "html", "player.html");
const preload_main_path = path.join(app_dir, "main", "preload_main.js");
const preload_player_path = path.join(app_dir, "main", "preload_player.js");

// NOTE(review): "fiiename" is a typo for "filename", but TestParams exposes
// the same spelling (test_params.config_fiiename below), so renaming must be
// coordinated with that module.
let config_fiiename = "config.json";

// Test mode (--test): swap in test fixtures and optionally a mock Nico server.
if(app.commandLine.getSwitchValue("test")){
    const { TestParams } = require("../../test/test-params");
    const test_params = new TestParams(app_dir);
    test_params.load();
    main_html_path = test_params.main_html_path;
    config_fiiename = test_params.config_fiiename;
    if(test_params.use_mock_server){
        const { setupMockServer } = require("../../test/mock_server/nico-mock-server");
        setupMockServer(test_params.mock_server_port, test_params.mock_server_wait_msec);
    }
}
// Last-resort handler: log, show a fatal-error dialog, and quit.
process.on("uncaughtException", (error) => {
    logger.error("uncaught exception:", error);
    dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
        type: "error",
        buttons: ["OK"],
        message: `致命的エラー: ${error.message}\n終了します`
    });
    do_app_quit = true;
    app.quit();
});

// Unhandled promise rejection: log and surface to the user, but keep running.
process.on('unhandledRejection', (reason, p) => {
    logger.error("unhandled rejection:", p, "reason:", reason);
    dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
        type: "error",
        buttons: ["OK"],
        message: `エラー: ${reason.message}`
    });
});
// Loads config.json and ensures the data/download folders are configured,
// prompting the user where needed.
// Returns false when startup should be aborted.
const setupConfig = async () => {
    try {
        const config_path = path.join(app.getPath("userData"), config_fiiename);
        config.setup(config_path);
        await config.load();
    } catch (error) {
        // Loading failed: offer to continue with default settings.
        const ret = dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
            type: "error",
            buttons: ["OK", "Cancel"],
            message: `設定読み込み失敗、初期設定で続けますか?: ${error.message}`
        });
        if(ret===0){
            // OK: continue with cleared (default) settings.
            config.clear();
        }else{
            // Cancel: abort startup.
            return false;
        }
    }
    try {
        // Both storage folders must be set; prompt for any that are missing.
        [
            { key:"data_dir", title:"DB,ブックマーク,履歴等" },
            { key:"download.dir", title:"動画" }
        ].forEach(param => {
            const { key, title } = param;
            const cfg_dir = config.get(key, undefined);
            const select_dir = selectFolder(cfg_dir, `${title}を保存するフォルダの選択`);
            if (!select_dir) {
                throw new Error(`${title}を保存するフォルダが選択されていない`);
            }
            config.set(key, select_dir);
        });
    } catch (error) {
        dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
            type: "error",
            buttons: ["OK"],
            message: `設定失敗、終了します: ${error.message}`
        });
        return false;
    }
    return true;
};
// Confirms quitting with the user, then persists window state, config and
// the library database.  Returns false when the quit is cancelled (including
// when the user chooses not to proceed after a save failure).
const quit = async () => {
    const close_ret = dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
        type: "info",
        buttons: ["OK", "Cancel"],
        message:"終了しますか?"
    });
    if(close_ret!=0){
        // Cancel: do not quit.
        return false;
    }
    try {
        config.set("main.window.state", getWindowState(main_win));
        await config.save();
    } catch (error) {
        // Saving config failed: let the user decide whether to quit anyway.
        const ret = dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
            type: "error",
            buttons: ["OK", "Cancel"],
            message: `設定の保存に失敗: ${error.message}\nこのまま終了しますか?`
        });
        if(ret!=0){
            // Cancel: do not quit.
            return false;
        }
    }
    try {
        await library.save(false);
    } catch (error) {
        // Saving the library DB failed: again let the user decide.
        const ret = dialog.showMessageBoxSync(BrowserWindow.getFocusedWindow(), {
            type: "error",
            buttons: ["OK", "Cancel"],
            message: `データベースの保存に失敗: ${error.message}\nこのまま終了しますか?`
        });
        if(ret!=0){
            // Cancel: do not quit.
            return false;
        }
    }
    return true;
};
// Persists the selected log level, applies it to the logger, and notifies
// every open renderer window.
const changeLogLevel = (args) => {
    const level = args.level;
    config.set("log.level", level);
    setLogLevel(level);
    main_win.webContents.send("setting:on-change-log-level", args);
    if (player_win === null) {
        return;
    }
    player_win.webContents.send("setting:on-change-log-level", args);
};
// Creates the main browser window, its menu, and wires up close handling.
function createWindow() {
    // Create the browser window, restoring the persisted size/position.
    const state = config.get("main.window.state", { width: 1000, height: 600 });
    state.title = `${app.name} ${app.getVersion()}`;
    state.webPreferences = {
        nodeIntegration: false,
        contextIsolation: false,
        preload: preload_main_path,
        spellcheck: false
    };
    main_win = new BrowserWindow(state);
    main_win._tag = "main";
    if (state.maximized) {
        main_win.maximize();
    }

    // Builds the window menu (log section + help section).
    const main_menu = () => {
        const menu_templete = [
            { label: "ログ",
                submenu: [
                    { label: "ログファイルを開く", click() {
                        shell.openExternal(logger.getPath());
                    }},
                    { label: "ログの場所を開く", click() {
                        shell.showItemInFolder(logger.getPath());
                    }},
                    { type: "separator" },
                    { id: "log-level", label: "debugレベルで取得", type: "checkbox", checked: false, click(e) {
                        const level = e.checked?"debug":"info";
                        changeLogLevel({level});
                    }}
                ]
            },
            { label: "ヘルプ",
                submenu: [
                    { role: "reload" },
                    { role: "forcereload" },
                    { role: "toggledevtools" },
                    { type: "separator" },
                    { id: "open-devtools", label: "起動時にdevtoolsを開く", type: "checkbox", checked: false, click(e) {
                        config.set("open_devtools", e.checked);
                    }},
                    { label: "設定ファイルの場所を開く", async click() {
                        await shell.showItemInFolder(config.config_path);
                    }}
                ]
            },
        ];
        return Menu.buildFromTemplate(menu_templete);
    };
    main_win_menu = main_menu();
    main_win.setMenu(main_win_menu);

    // Restore checkbox states from the persisted config.
    [
        ["log-level", config.get("log.level", "info")=="debug"],
        ["open-devtools", config.get("open_devtools", false)]
    ].forEach(item => {
        const menu_id = item[0];
        const checked = item[1];
        const menu_item = main_win_menu.getMenuItemById(menu_id);
        menu_item.checked = checked;
    });

    main_win.webContents.on("did-finish-load", async () => {
        user_css.apply(main_win);
        main_win.webContents.send("app:on-load-content");
        main_win.webContents.on("context-menu", (e, props) => {
            popupInputContextMenu(main_win, props);
        });
    });

    // Load the application's index.html.
    main_win.loadURL(main_html_path);

    if(config.get("open_devtools", false)){
        // Open DevTools on startup when configured to.
        main_win.webContents.openDevTools();
    }

    main_win.on("close", async (e) => {
        if(do_app_quit){
            if(main_win){
                main_win.webContents.closeDevTools();
            }
            return;
        }
        // Intercept close: confirm/persist via quit() before really quitting.
        e.preventDefault();
        if(await quit()){
            // Close DevTools, then quit.
            if(main_win){
                main_win.webContents.closeDevTools();
            }
            do_app_quit = true;
            app.quit();
        }
    });

    // Emitted when the window is closed.
    main_win.on("closed", async () => {
        // Drop the window reference.  With multiple windows this would be an
        // array, removing the corresponding entry here.
        if (player_win !== null) {
            player_win.close();
        }
        main_win = null;
    });
}
// Called once Electron has finished initialization and browser windows can
// be created.  Some APIs are only usable after this event.  Registers every
// IPC handler, wires up library events, and finally creates the main window.
app.on("ready", async ()=>{
    if(!await setupConfig()){
        do_app_quit = true;
        app.quit();
        return;
    }

    const log_level = config.get("log.level", "info");
    setLogLevel(log_level);

    const user_css_path = app.isPackaged?
        path.join(process.resourcesPath, "user.css") // release build: user.css from the resources folder
        :path.join(app_dir, "css", "user.css"); // development/debug: css/user.css
    await user_css.load(user_css_path);

    const user_agent = process.env["user_agent"];
    session.defaultSession.setUserAgent(user_agent);

    // dialog
    ipcMain.handle("app:show-message-box", async (event, args) => {
        const { type, title, message, okcancel} = args;
        const bw = BrowserWindow.fromId(event.sender.id);
        return showMessageBox(bw, type, title, message, okcancel);
    });
    ipcMain.handle("app:show-select-folder-dialog", async (event, args) => { // eslint-disable-line no-unused-vars
        const bw = BrowserWindow.fromId(event.sender.id);
        return selectFolderDialog(bw);
    });
    ipcMain.handle("app:show-select-file-dialog", async (event, args) => {
        const { name, exts, multi_select } = args;
        const bw = BrowserWindow.fromId(event.sender.id);
        return selectFileDialog(bw, name, exts, multi_select);
    });

    // player window
    ipcMain.on("app:show-player", async (event, args) => { // eslint-disable-line no-unused-vars
        if(player_win){
            player_win.show();
        }
    });
    ipcMain.on("app:play-video", async (event, args) => {
        // Lazily create the player window, then hand it the video to play.
        await createPlayerWindow();
        player_win.show();
        const {video_id, online, time} = args;
        const video_item = library.getItem(video_id);
        player_win.webContents.send("app:play-video", {
            video_id,
            online,
            time,
            video_item,
        });
    });

    // Messages simply forwarded to the main window's renderer; `focus`
    // decides whether the window is brought to the front first.
    const app_msg_list = [
        { name:"search-tag", focus:true },
        { name:"load-mylist", focus:true },
        { name:"add-bookmarks", focus:false },
        { name:"add-download-item", focus:false },
        { name:"add-stack-items", focus:false },
    ];
    app_msg_list.forEach(msg=>{
        const { name, focus } = msg;
        ipcMain.on(`app:${name}`, (event, args) => {
            if(focus){
                main_win.focus();
            }
            main_win.webContents.send(`app:${name}`, args);
        });
    });

    // Generic getItems/updateItems handlers backed by the JSON store, one
    // pair per named collection.
    [
        "bookmark",
        "library-search",
        "mylist",
        "nico-search",
        "download",
        "nglist",
        "stack",
    ].forEach(name=>{
        ipcMain.handle(`${name}:getItems`, async (event, args) => { // eslint-disable-line no-unused-vars
            if(!store.has(name)){
                // Lazily load the collection from disk on first access.
                store.setItems(name, await json_store.load(name, []));
            }
            return store.getItems(name);
        });
        ipcMain.handle(`${name}:updateItems`, async (event, args) => {
            const { items } = args;
            store.setItems(name, items);
            await json_store.save(name, items);
            if(name=="download"){
                main_win.webContents.send("download:on-update-item");
            }
        });
    });

    // download
    ipcMain.handle("download:getIncompleteIDs", async (event, args) => { // eslint-disable-line no-unused-vars
        const name = "download";
        if(!store.has(name)){
            store.setItems(name, await json_store.load(name, []));
        }
        const items = store.getItems(name);
        if(!items){
            return [];
        }
        // state == 2 marks a completed download -- TODO confirm.
        const ids = [];
        items.forEach(item => {
            if(item.state != 2){
                ids.push(item.id);
            }
        });
        return ids;
    });

    // history
    const history_max = 50;
    const items = await json_store.load("history", []);
    history.setup(history_max);
    history.setData(items);
    ipcMain.handle("history:getItems", (event, args) => { // eslint-disable-line no-unused-vars
        return history.getData("history");
    });
    ipcMain.handle("history:updateItems", async (event, args) => {
        const { items } = args;
        history.setData(items);
        await json_store.save("history", items);
    });
    ipcMain.on("history:addItem", async (event, args) => {
        const { item } = args;
        history.add(item);
        const items = history.getData();
        await json_store.save("history", items);
        main_win.webContents.send("history:on-update-item", args);

        // Also bump play statistics on the library entry, if any.
        const video_id = item.id;
        const video_item = library.getItem(video_id);
        if(video_item===null){
            return;
        }
        const props = {
            last_play_date : new Date().getTime(),
            play_count : video_item.play_count + 1
        };
        library.update(video_id, props);
    });

    // library
    const data_dir = config.get("data_dir", "");
    library.setup(data_dir);
    ipcMain.handle("library:addItem", async (event, args) => {
        const { item } = args;
        await library.addItem(item);
    });
    ipcMain.handle("library:addDownloadItem", async (event, args) => {
        const { download_item } = args;
        await library.addDownloadedItem(download_item);
    });
    ipcMain.handle("library:load", async (event, args) => { // eslint-disable-line no-unused-vars
        await library.load();
    });
    ipcMain.handle("library:has", (event, args) => {
        const { video_id } = args;
        return library.has(video_id);
    });
    ipcMain.handle("library:updateItemProps", async (event, args) => {
        const { video_id, props } = args;
        await library.update(video_id, props);
    });
    ipcMain.handle("library:getItem", (event, args) => {
        const { video_id } = args;
        return library.getItem(video_id);
    });
    ipcMain.handle("library:deleteItem", async (event, args) => {
        // Deletes the library entry, then moves the associated data files to
        // the trash.  Returns { success, error }.
        const { video_id } = args;
        const video_item = library.getItem(video_id);
        if(video_item===null){
            return {
                success:false,
                error:new Error(`${video_id}が存在しません`)
            };
        }
        try {
            await library.delete(video_id);
        } catch (error) {
            return {
                success:false,
                error:error
            };
        }
        /**
         * @type {Array}
         */
        const paths = getNicoDataFilePaths(video_item);
        // Keep only the paths that actually exist on disk.
        const exist_paths = [];
        for (let index = 0; index < paths.length; index++) {
            const file_path = paths[index];
            try {
                await fsPromises.stat(file_path);
                exist_paths.push(file_path);
            } catch (error) {
                // pass
            }
        }
        for (let index = 0; index < exist_paths.length; index++) {
            const file_path = exist_paths[index];
            const result = shell.moveItemToTrash(file_path);
            if(!result){
                return {
                    success:false,
                    error:new Error(`${file_path}のゴミ箱への移動に失敗`)
                };
            }
        }
        return {
            success:true,
            error:null
        };
    });
    ipcMain.handle("library:import-nndd-db", async (event, args) => {
        // Imports an NNDD sqlite DB into the library.  Returns { result, error }.
        const { db_file_path } = args;
        const data_dir = library.dataDir;
        if(data_dir == ""){
            return {
                result : false,
                error : new Error("データを保存するフォルダが設定されていない")
            };
        }
        try {
            const { path_data_list, video_data_list } = await importNNDDDB(db_file_path);
            library.setData(
                path_data_list,
                video_data_list
            );
            await library.save();
        } catch (error) {
            return {
                result : false,
                error : error
            };
        }
        return {
            result : true,
            error : null
        };
    });

    // Forward library change events to the renderer.
    library.on("init", ()=>{
        main_win.webContents.send("library:on-init", {
            items:library.getItems()
        });
    });
    library.on("update-item", (args)=>{
        main_win.webContents.send("library:on-update-item", args);
    });
    library.on("add-item", (args)=>{
        main_win.webContents.send("library:on-add-item", args);
    });
    library.on("delete-item", (args)=>{
        main_win.webContents.send("library:on-delete-item", args);
    });

    // config
    ipcMain.handle("config:get", (event, args) => {
        const { key, value } = args;
        return config.get(key, value);
    });
    ipcMain.handle("config:set", (event, args) => {
        const { key, value } = args;
        config.set(key, value);
    });

    user_icon_cache.setup(
        path.join(config.get("data_dir", ""), "user_icon"),
        config.get("user_icon_cache", false));

    createWindow();
    setupContextmenu(main_win, player_win, config, history, store);
});
// Quit the application when all windows have been closed.
app.on("window-all-closed", () => {
// On macOS, keep the app alive even when every window is closed,
// until the user explicitly quits with Cmd+Q.
if (process.platform !== "darwin") {
app.quit();
}
});
app.on("activate", () => {
// On macOS, re-create the main window when the dock icon is clicked
// and no window currently exists.
if (main_win === null) {
createWindow();
}
});
// Lazily creates the singleton player window and resolves once the renderer
// signals "app:player-ready". Resolves immediately if the window already exists.
const createPlayerWindow = () => {
return new Promise((resolve, reject) => { // eslint-disable-line no-unused-vars
if(player_win !== null){
resolve();
return;
}
// Restore last saved geometry (default 800x600) and attach web preferences.
const state = config.get("player.window.state", { width: 800, height: 600 });
state.webPreferences = {
nodeIntegration: false,
contextIsolation: false,
preload: preload_player_path,
spellcheck: false
};
player_win = new BrowserWindow(state);
player_win.removeMenu();
player_win.webContents.on("did-finish-load", async () => {
// Apply user CSS and notify the renderer that content can load.
user_css.apply(player_win);
player_win.webContents.send("app:on-load-content");
player_win.webContents.on("context-menu", (e, props) => {
popupInputContextMenu(player_win, props);
});
});
if (state.maximized) {
player_win.maximize();
}
// NOTE(review): ipcMain.once fires only for the FIRST "app:player-ready".
// If the window is destroyed and re-created later, the close handler below
// (which saves window state and nulls player_win) is not re-registered —
// confirm whether that is intended.
ipcMain.once("app:player-ready", (event, args) => { // eslint-disable-line no-unused-vars
if(config.get("open_devtools", false)){
player_win.webContents.openDevTools();
}
player_win.on("close", (e) => { // eslint-disable-line no-unused-vars
config.set("player.window.state", getWindowState(player_win));
player_win = null;
});
resolve();
});
player_win.loadURL(player_html_path);
// Separate close handler: shut devtools before the window goes away.
player_win.on("close", e => { // eslint-disable-line no-unused-vars
if(player_win){
player_win.webContents.closeDevTools();
}
});
});
};
def filter_societies(societies_list, language, priority_keyword):
    """Return the names, in the given language, of societies whose key
    priorities contain ``priority_keyword``.

    :param societies_list: iterable of dicts with ``society_name_<lang>``
        keys and a ``key_priorities`` collection
    :param language: language suffix used to pick the name key
    :param priority_keyword: keyword tested for membership in
        ``key_priorities``
    :return: list of matching society names, in input order
    """
    return [
        society[f'society_name_{language}']
        for society in societies_list
        if priority_keyword in society['key_priorities']
    ]
// Source: RenukaGurumurthy/Gooru-Core-API — gooru-core/src/main/java/org/ednovo/goorucore/application/serializer/ExcludeNullTransformer.java
package org.ednovo.goorucore.application.serializer;
import flexjson.transformer.AbstractTransformer;
/**
 * Flexjson transformer that suppresses {@code null} values from serialized
 * output: {@link #transform(Object)} deliberately writes nothing.
 */
public class ExcludeNullTransformer extends AbstractTransformer {

	@Override
	public Boolean isInline() {
		// NOTE(review): inline transformers are responsible for writing the
		// field name as well as the value; since transform() emits nothing,
		// the whole key/value pair should be omitted — confirm against the
		// flexjson transformer documentation.
		return true;
	}

	@Override
	public void transform(Object object) {
		// Intentionally emits no output. The null check is vacuous today
		// (neither branch writes anything) but is preserved as-is.
		if (object == null) {
			return;
		}
	}
}
/*
* Copyright 2017 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.privacy_avare.service;
import java.io.InputStream;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.Properties;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import de.privacy_avare.config.DefaultProperties;
import de.privacy_avare.domain.Profile;
import de.privacy_avare.exeption.ClientPreferencesOutdatedException;
import de.privacy_avare.exeption.MalformedProfileIdException;
import de.privacy_avare.exeption.NoProfilesInDatabaseException;
import de.privacy_avare.exeption.ProfileAlreadyExistsException;
import de.privacy_avare.exeption.ProfileSetOnDeletionException;
import de.privacy_avare.exeption.ServerPreferencesOutdatedException;
import de.privacy_avare.exeption.ProfileNotFoundException;
import de.privacy_avare.repository.ProfileRepository;
/**
 * Provides services for interacting with profiles in the database. Every
 * database access also sets the profile's lastProfileContact timestamp to
 * the time of the request.
 *
 * @author <NAME>
 * @version 1.0
 */
@Service
public class ProfileService {

	@Autowired
	private ProfileRepository profileRepository;

	@Autowired
	private IdService idService;

	/**
	 * Minimum difference in minutes between server- and client-side
	 * lastProfileChange timestamps for one side to count as strictly newer.
	 */
	private static int minTimeDifference;

	/**
	 * Static initializer reading the setting server.minTimeDifference from
	 * application.properties; it is used when comparing the freshness of
	 * client and server profiles. Falls back to 5 minutes when the file or
	 * the property cannot be read. The stream is closed via
	 * try-with-resources (it was previously leaked).
	 */
	static {
		try (InputStream inputStream = ProfileService.class.getResourceAsStream("/application.properties")) {
			// Properties backed by DefaultProperties so missing keys fall back
			// to the project defaults.
			Properties properties = new Properties(new DefaultProperties());
			properties.load(inputStream);
			minTimeDifference = Integer.valueOf(properties.getProperty("server.minTimeDifference"));
		} catch (Exception e) {
			e.printStackTrace();
			minTimeDifference = 5;
		} finally {
			System.out.println("************************************************");
			System.out.println(
					"Folgender minimaler Zeitunterschied zwischen Server-Profil und Client-Profil beim TimeStamp-Vergleich wurde festgelegt:");
			System.out.println("\t Minimaler Zeitunterschied in Minuten: " + minTimeDifference);
			System.out.println("************************************************");
		}
	}

	/**
	 * Default constructor without additional functionality.
	 */
	public ProfileService() {
	}

	/**
	 * Creates a new profile with a freshly generated ProfileId. On success a
	 * corresponding DB profile is written; its lastProfileChange is the
	 * Profile constructor's initial value and it contains no preferences yet.
	 *
	 * @return Newly created profile.
	 * @throws ProfileAlreadyExistsException
	 *             ProfileId already in use.
	 */
	public Profile createNewProfile() throws ProfileAlreadyExistsException {
		String id = idService.generateId().toLowerCase();
		if (idService.isIdAlreadyTaken(id)) {
			throw new ProfileAlreadyExistsException("UserID wird bereits in einem bestehenden Profil verwendet.");
		}
		Profile profile = new Profile(id);
		updateProfile(profile);
		return profile;
	}

	/**
	 * Creates a new profile for an existing ProfileId. On success a
	 * corresponding DB profile is written; it contains no preferences yet.
	 *
	 * @param id
	 *            Existing ProfileId.
	 * @return Newly created profile.
	 * @throws ProfileAlreadyExistsException
	 *             ProfileId already in use.
	 * @throws MalformedProfileIdException
	 *             Invalid ProfileId format.
	 */
	public Profile createNewProfile(String id) throws ProfileAlreadyExistsException, MalformedProfileIdException {
		id = id.toLowerCase();
		if (!idService.validateId(id)) {
			throw new MalformedProfileIdException(
					"Ungültiges ProfileID-Format - Entspricht nicht dem Aufbau einer üblichen Id.");
		}
		if (idService.isIdAlreadyTaken(id)) {
			throw new ProfileAlreadyExistsException("UserID wird bereits in einem bestehenden Profil verwendet.");
		}
		Profile profile = new Profile(id);
		updateProfile(profile);
		return profile;
	}

	/**
	 * Looks up a profile by ProfileId. The lastProfileContact property is
	 * updated in the database; the returned profile, however, still carries
	 * the timestamp it had before this call.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @return Existing profile from the database.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 */
	public Profile getProfileById(String id) throws ProfileNotFoundException {
		id = id.toLowerCase();
		Profile dbProfile = profileRepository.findOne(id);
		if (dbProfile == null) {
			throw new ProfileNotFoundException("Kein Profil mit entsprechender ID gefunden.");
		}
		updateProfile(dbProfile);
		return dbProfile;
	}

	/**
	 * Looks up a profile by ProfileId and compares its lastProfileChange with
	 * the given client-side timestamp. The database profile is returned only
	 * if it is at least 'minTimeDifference' minutes newer than the client's
	 * timestamp.
	 *
	 * lastProfileContact is updated in the database in every case.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @param clientLastProfileChange
	 *            Freshness of the profile on the client device.
	 * @return The newer database profile.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 * @throws ProfileSetOnDeletionException
	 *             Profile marked for deletion (unSync).
	 * @throws ServerPreferencesOutdatedException
	 *             DB profile's lastProfileChange is older than the parameter.
	 */
	public Profile getProfileByIdComparingLastChange(String id, Date clientLastProfileChange)
			throws ProfileNotFoundException, ProfileSetOnDeletionException, ServerPreferencesOutdatedException {
		id = id.toLowerCase();
		Profile dbProfile = getProfileById(id);
		// Shift the DB timestamp back by the tolerance window, so the DB
		// profile must be strictly more than minTimeDifference minutes newer.
		GregorianCalendar dbLastProfileChange = new GregorianCalendar();
		dbLastProfileChange.setTime(dbProfile.getLastProfileChange());
		dbLastProfileChange.set(Calendar.MINUTE,
				dbLastProfileChange.get(Calendar.MINUTE) - ProfileService.minTimeDifference);
		if (dbLastProfileChange.getTime().after(clientLastProfileChange)) {
			return dbProfile;
		} else {
			throw new ServerPreferencesOutdatedException("Profil in DB älter als Clientprofil");
		}
	}

	/**
	 * Returns all profiles in the database, ordered by ProfileId (the
	 * repository method sorts ascending), regardless of their properties.
	 * lastProfileContact is updated for every profile found.
	 *
	 * @return All profiles.
	 * @throws NoProfilesInDatabaseException
	 *             Database is empty.
	 */
	public Iterable<Profile> getAllProfiles() throws NoProfilesInDatabaseException {
		Iterable<Profile> list = profileRepository.findAllByOrderByIdAsc();
		if (!list.iterator().hasNext()) {
			throw new NoProfilesInDatabaseException("Keine Profile in der DB vorhanden.");
		}
		updateProfiles(list);
		return list;
	}

	/**
	 * Returns the lastProfileChange timestamp of the given profile. The
	 * lastProfileContact property is NOT modified by this access.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @return lastProfileChange of the profile.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 */
	public Date getLastProfileChange(String id) throws ProfileNotFoundException {
		id = id.toLowerCase();
		Profile dbProfile = profileRepository.findOne(id);
		if (dbProfile == null) {
			throw new ProfileNotFoundException("Kein Profil mit entsprechender ID gefunden.");
		}
		return dbProfile.getLastProfileChange();
	}

	/**
	 * Returns the lastProfileContact timestamp of the given profile. The
	 * lastProfileContact property is NOT modified by this access.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @return lastProfileContact of the profile.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 */
	public Date getLastProfileContact(String id) throws ProfileNotFoundException {
		id = id.toLowerCase();
		Profile dbProfile = profileRepository.findOne(id);
		if (dbProfile == null) {
			throw new ProfileNotFoundException("Kein Profil mit entsprechender ID gefunden.");
		}
		return dbProfile.getLastProfileContact();
	}

	/**
	 * Returns the preferences of the given profile. The lastProfileContact
	 * property is NOT modified by this access.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @return Preferences of the profile.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 */
	public String getPreferences(String id) throws ProfileNotFoundException, ProfileSetOnDeletionException {
		id = id.toLowerCase();
		Profile dbProfile = profileRepository.findOne(id);
		if (dbProfile == null) {
			throw new ProfileNotFoundException("Kein Profil mit entsprechender ID gefunden.");
		}
		return dbProfile.getPreferences();
	}

	/**
	 * Pushes an updated profile. If the pushed profile's lastProfileChange is
	 * more than minTimeDifference minutes newer than the database profile's,
	 * the database profile is overwritten. Otherwise the existing profile is
	 * kept (overwrite = false) or overwritten anyway (overwrite = true).
	 *
	 * lastProfileContact is updated in every case.
	 *
	 * @param id
	 *            ProfileId to search for.
	 * @param clientLastProfileChange
	 *            Last change timestamp of the client-side profile.
	 * @param clientPreferences
	 *            The preferences to push.
	 * @param overwrite
	 *            Overwrite an existing, newer profile?
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 * @throws ClientPreferencesOutdatedException
	 *             DB profile is newer than the client profile.
	 */
	public void pushProfile(String id, Date clientLastProfileChange, String clientPreferences, boolean overwrite)
			throws ProfileNotFoundException, ProfileSetOnDeletionException, ClientPreferencesOutdatedException {
		// May throw ProfileNotFoundException.
		id = id.toLowerCase();
		Profile dbProfile = getProfileById(id);
		if (overwrite) {
			dbProfile.setPreferences(clientPreferences);
			dbProfile.setLastProfileChange(clientLastProfileChange);
			updateProfile(dbProfile);
		} else {
			// Shift the DB timestamp forward by the tolerance window: the
			// client profile must be more than minTimeDifference minutes
			// newer to win. (A dead "|| overwrite == true" clause was removed
			// here — overwrite is always false in this branch.)
			GregorianCalendar dbProfileLastProfileChange = new GregorianCalendar();
			dbProfileLastProfileChange.setTime(dbProfile.getLastProfileChange());
			dbProfileLastProfileChange.set(Calendar.MINUTE,
					dbProfileLastProfileChange.get(Calendar.MINUTE) + ProfileService.minTimeDifference);
			if (dbProfileLastProfileChange.getTime().before(clientLastProfileChange)) {
				dbProfile.setPreferences(clientPreferences);
				dbProfile.setLastProfileChange(clientLastProfileChange);
				updateProfile(dbProfile);
			} else {
				throw new ClientPreferencesOutdatedException("Profil in DB aktueller als Clientprofile.");
			}
		}
	}

	/**
	 * Writes a profile to the database, overwriting any existing profile with
	 * the same ProfileId. lastProfileContact is updated in every case.
	 *
	 * Mainly used by the other service methods to guarantee that
	 * lastProfileContact is refreshed on every write.
	 *
	 * @param profile
	 *            The profile to write to the database.
	 */
	public void updateProfile(Profile profile) {
		profile.set_id(profile.get_id().toLowerCase());
		profile.setLastProfileContact(GregorianCalendar.getInstance(Locale.GERMANY).getTime());
		profileRepository.save(profile);
	}

	/**
	 * Writes a set of profiles to the database, overwriting any existing
	 * profiles with identical ProfileIds. lastProfileContact is refreshed in
	 * each profile on every call.
	 *
	 * Mainly used by the other service methods to guarantee that
	 * lastProfileContact is refreshed on every write.
	 *
	 * @param profileList
	 *            The profiles to write to the database.
	 */
	public void updateProfiles(Iterable<Profile> profileList) {
		for (Profile profile : profileList) {
			updateProfile(profile);
		}
	}

	/**
	 * Marks the profile identified by the given ProfileId for deletion: its
	 * lastProfileChange is pushed 100 years into the future and its
	 * preferences are replaced by the client's unSyncProfile.
	 *
	 * lastProfileContact is updated in the database in every case.
	 *
	 * @param id
	 *            ProfileId of the profile to delete.
	 * @param unSyncProfile
	 *            The client's unSyncProfile.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 */
	public void setProfileOnDeletion(String id, String unSyncProfile) throws ProfileNotFoundException {
		id = id.toLowerCase();
		Profile dbProfile = getProfileById(id);
		// Current time plus 100 years makes this profile "newest" forever,
		// so no regular push can ever overwrite the deletion marker.
		Calendar lastProfileChange = GregorianCalendar.getInstance(Locale.GERMANY);
		lastProfileChange.set(Calendar.YEAR, lastProfileChange.get(Calendar.YEAR) + 100);
		dbProfile.setLastProfileChange(lastProfileChange.getTime());
		dbProfile.setPreferences(unSyncProfile);
		updateProfile(dbProfile);
	}

	/**
	 * Marks the given profile instance for deletion; delegates to
	 * {@link #setProfileOnDeletion(String, String)} using the profile's id
	 * and preferences.
	 *
	 * @param profile
	 *            Instance of the profile to delete.
	 * @throws ProfileNotFoundException
	 *             No profile found for the given id.
	 * @throws ProfileSetOnDeletionException
	 *             Profile already released for deletion.
	 */
	public void setProfileOnDeletion(Profile profile) throws ProfileNotFoundException, ProfileSetOnDeletionException {
		setProfileOnDeletion(profile.get_id(), profile.getPreferences());
	}
}
|
/**
* Water Solution Inventory Spring Boot App
* Spring Boot REST API
*
* OpenAPI spec version: 1.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
/* tslint:disable:no-unused-variable member-ordering */
import { Inject, Injectable, Optional } from '@angular/core';
import { HttpClient, HttpHeaders, HttpParams,
HttpResponse, HttpEvent } from '@angular/common/http';
import { CustomHttpUrlEncodingCodec } from '../encoder';
import { Observable } from 'rxjs/Observable';
import { CustomerUser } from '../model/customerUser';
import { EmployeeUser } from '../model/employeeUser';
import { SupplierUser } from '../model/supplierUser';
import { TransactionRequest } from '../model/transactionRequest';
import { User } from '../model/user';
import { UserList } from '../model/userList';
import { UserWithUserRoles } from '../model/userWithUserRoles';
import { BASE_PATH, COLLECTION_FORMATS } from '../variables';
import { Configuration } from '../configuration';
@Injectable()
export class UserControllerService {
protected basePath = 'https://localhost:8080';
public defaultHeaders = new HttpHeaders();
public configuration = new Configuration();
// Resolution order for the service base URL: an explicitly injected
// BASE_PATH wins; otherwise the configuration's basePath; otherwise the
// hard-coded default ('https://localhost:8080').
constructor(protected httpClient: HttpClient, @Optional()@Inject(BASE_PATH) basePath: string, @Optional() configuration: Configuration) {
if (basePath) {
this.basePath = basePath;
}
if (configuration) {
this.configuration = configuration;
this.basePath = basePath || configuration.basePath || this.basePath;
}
}
/**
 * @param consumes string[] mime-types
 * @return true: consumes contains 'multipart/form-data', false: otherwise
 */
private canConsumeForm(consumes: string[]): boolean {
    return consumes.indexOf('multipart/form-data') !== -1;
}
/**
 * Activate user
 *
 * Issues PUT {basePath}/user/activate with the given transaction request.
 *
 * @param transactionRequest transactionRequest
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public activateUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'body', reportProgress?: boolean): Observable<User>;
public activateUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<User>>;
public activateUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<User>>;
public activateUserUsingPUT(transactionRequest: TransactionRequest, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (transactionRequest === null || transactionRequest === undefined) {
        throw new Error('Required parameter transactionRequest was null or undefined when calling activateUserUsingPUT.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const acceptTypes: string[] = [
        'application/json'
    ];
    const accept: string | undefined = this.configuration.selectHeaderAccept(acceptTypes);
    if (accept != undefined) {
        headers = headers.set('Accept', accept);
    }

    // Negotiate the Content-Type header for the request body.
    const contentTypes: string[] = [
        'application/json'
    ];
    const contentType: string | undefined = this.configuration.selectHeaderContentType(contentTypes);
    if (contentType != undefined) {
        headers = headers.set('Content-Type', contentType);
    }

    return this.httpClient.put<User>(`${this.basePath}/user/activate`,
        transactionRequest,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get all active users
 *
 * Issues GET {basePath}/user/active.
 *
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getAllActiveUsersUsingGET(observe?: 'body', reportProgress?: boolean): Observable<UserList>;
public getAllActiveUsersUsingGET(observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<UserList>>;
public getAllActiveUsersUsingGET(observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<UserList>>;
public getAllActiveUsersUsingGET(observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<UserList>(`${this.basePath}/user/active`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get all users
 *
 * Issues GET {basePath}/user.
 *
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getAllUsersUsingGET(observe?: 'body', reportProgress?: boolean): Observable<UserList>;
public getAllUsersUsingGET(observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<UserList>>;
public getAllUsersUsingGET(observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<UserList>>;
public getAllUsersUsingGET(observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<UserList>(`${this.basePath}/user`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get customer by id
 *
 * Issues GET {basePath}/user/customer/{id}.
 *
 * @param id id
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getCustomerByIdUsingGET(id: string, observe?: 'body', reportProgress?: boolean): Observable<CustomerUser>;
public getCustomerByIdUsingGET(id: string, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<CustomerUser>>;
public getCustomerByIdUsingGET(id: string, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<CustomerUser>>;
public getCustomerByIdUsingGET(id: string, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (id === null || id === undefined) {
        throw new Error('Required parameter id was null or undefined when calling getCustomerByIdUsingGET.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<CustomerUser>(`${this.basePath}/user/customer/${encodeURIComponent(String(id))}`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get employee by id
 *
 * Issues GET {basePath}/user/employee/{id}.
 *
 * @param id id
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getEmployeeByIdUsingGET(id: string, observe?: 'body', reportProgress?: boolean): Observable<EmployeeUser>;
public getEmployeeByIdUsingGET(id: string, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<EmployeeUser>>;
public getEmployeeByIdUsingGET(id: string, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<EmployeeUser>>;
public getEmployeeByIdUsingGET(id: string, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (id === null || id === undefined) {
        throw new Error('Required parameter id was null or undefined when calling getEmployeeByIdUsingGET.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<EmployeeUser>(`${this.basePath}/user/employee/${encodeURIComponent(String(id))}`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get supplier by id
 *
 * Issues GET {basePath}/user/supplier/{id}.
 *
 * @param id id
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getSupplierByIdUsingGET(id: string, observe?: 'body', reportProgress?: boolean): Observable<SupplierUser>;
public getSupplierByIdUsingGET(id: string, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<SupplierUser>>;
public getSupplierByIdUsingGET(id: string, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<SupplierUser>>;
public getSupplierByIdUsingGET(id: string, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (id === null || id === undefined) {
        throw new Error('Required parameter id was null or undefined when calling getSupplierByIdUsingGET.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<SupplierUser>(`${this.basePath}/user/supplier/${encodeURIComponent(String(id))}`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Get user by user name
 *
 * Issues GET {basePath}/user/{userName}.
 *
 * @param userName userName
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public getUserByUserNameUsingGET(userName: string, observe?: 'body', reportProgress?: boolean): Observable<UserWithUserRoles>;
public getUserByUserNameUsingGET(userName: string, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<UserWithUserRoles>>;
public getUserByUserNameUsingGET(userName: string, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<UserWithUserRoles>>;
public getUserByUserNameUsingGET(userName: string, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (userName === null || userName === undefined) {
        throw new Error('Required parameter userName was null or undefined when calling getUserByUserNameUsingGET.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const httpHeaderAccepts: string[] = [
        'application/json'
    ];
    const httpHeaderAcceptSelected: string | undefined = this.configuration.selectHeaderAccept(httpHeaderAccepts);
    if (httpHeaderAcceptSelected != undefined) {
        headers = headers.set('Accept', httpHeaderAcceptSelected);
    }

    // GET requests carry no body, so the generated (and unused)
    // Content-Type negotiation list was removed.
    return this.httpClient.get<UserWithUserRoles>(`${this.basePath}/user/${encodeURIComponent(String(userName))}`,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Save customer
 *
 * Issues POST {basePath}/user/customer with the given customer payload.
 *
 * @param customerUser customerUser
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public saveCustomerUsingPOST(customerUser: CustomerUser, observe?: 'body', reportProgress?: boolean): Observable<CustomerUser>;
public saveCustomerUsingPOST(customerUser: CustomerUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<CustomerUser>>;
public saveCustomerUsingPOST(customerUser: CustomerUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<CustomerUser>>;
public saveCustomerUsingPOST(customerUser: CustomerUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    if (customerUser === null || customerUser === undefined) {
        throw new Error('Required parameter customerUser was null or undefined when calling saveCustomerUsingPOST.');
    }

    let headers = this.defaultHeaders;

    // Negotiate the Accept header.
    const acceptTypes: string[] = [
        'application/json'
    ];
    const accept: string | undefined = this.configuration.selectHeaderAccept(acceptTypes);
    if (accept != undefined) {
        headers = headers.set('Accept', accept);
    }

    // Negotiate the Content-Type header for the request body.
    const contentTypes: string[] = [
        'application/json'
    ];
    const contentType: string | undefined = this.configuration.selectHeaderContentType(contentTypes);
    if (contentType != undefined) {
        headers = headers.set('Content-Type', contentType);
    }

    return this.httpClient.post<CustomerUser>(`${this.basePath}/user/customer`,
        customerUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: headers,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Save employee
 *
 * Issues a POST to `/user/employee`, sending and receiving JSON.
 *
 * @param employeeUser employeeUser payload to persist (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public saveEmployeeUsingPOST(employeeUser: EmployeeUser, observe?: 'body', reportProgress?: boolean): Observable<EmployeeUser>;
public saveEmployeeUsingPOST(employeeUser: EmployeeUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<EmployeeUser>>;
public saveEmployeeUsingPOST(employeeUser: EmployeeUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<EmployeeUser>>;
public saveEmployeeUsingPOST(employeeUser: EmployeeUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (employeeUser === null || employeeUser === undefined) {
        throw new Error('Required parameter employeeUser was null or undefined when calling saveEmployeeUsingPOST.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.post<EmployeeUser>(`${this.basePath}/user/employee`,
        employeeUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Save supplier
 *
 * Issues a POST to `/user/supplier`, sending and receiving JSON.
 *
 * @param supplierUser supplierUser payload to persist (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public saveSupplierUsingPOST(supplierUser: SupplierUser, observe?: 'body', reportProgress?: boolean): Observable<SupplierUser>;
public saveSupplierUsingPOST(supplierUser: SupplierUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<SupplierUser>>;
public saveSupplierUsingPOST(supplierUser: SupplierUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<SupplierUser>>;
public saveSupplierUsingPOST(supplierUser: SupplierUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (supplierUser === null || supplierUser === undefined) {
        throw new Error('Required parameter supplierUser was null or undefined when calling saveSupplierUsingPOST.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.post<SupplierUser>(`${this.basePath}/user/supplier`,
        supplierUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Suspend user
 *
 * Issues a PUT to `/user/suspend`, sending and receiving JSON.
 *
 * @param transactionRequest transactionRequest describing the suspension (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public suspendUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'body', reportProgress?: boolean): Observable<User>;
public suspendUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<User>>;
public suspendUserUsingPUT(transactionRequest: TransactionRequest, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<User>>;
public suspendUserUsingPUT(transactionRequest: TransactionRequest, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (transactionRequest === null || transactionRequest === undefined) {
        throw new Error('Required parameter transactionRequest was null or undefined when calling suspendUserUsingPUT.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.put<User>(`${this.basePath}/user/suspend`,
        transactionRequest,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Update customer
 *
 * Issues a PUT to `/user/customer`, sending and receiving JSON.
 *
 * @param customerUser customerUser payload with the updated fields (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public updateCustomerUsingPUT(customerUser: CustomerUser, observe?: 'body', reportProgress?: boolean): Observable<CustomerUser>;
public updateCustomerUsingPUT(customerUser: CustomerUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<CustomerUser>>;
public updateCustomerUsingPUT(customerUser: CustomerUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<CustomerUser>>;
public updateCustomerUsingPUT(customerUser: CustomerUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (customerUser === null || customerUser === undefined) {
        throw new Error('Required parameter customerUser was null or undefined when calling updateCustomerUsingPUT.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.put<CustomerUser>(`${this.basePath}/user/customer`,
        customerUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Update employee
 *
 * Issues a PUT to `/user/employee`, sending and receiving JSON.
 *
 * @param employeeUser employeeUser payload with the updated fields (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public updateEmployeeUsingPUT(employeeUser: EmployeeUser, observe?: 'body', reportProgress?: boolean): Observable<EmployeeUser>;
public updateEmployeeUsingPUT(employeeUser: EmployeeUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<EmployeeUser>>;
public updateEmployeeUsingPUT(employeeUser: EmployeeUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<EmployeeUser>>;
public updateEmployeeUsingPUT(employeeUser: EmployeeUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (employeeUser === null || employeeUser === undefined) {
        throw new Error('Required parameter employeeUser was null or undefined when calling updateEmployeeUsingPUT.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.put<EmployeeUser>(`${this.basePath}/user/employee`,
        employeeUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
/**
 * Update supplier
 *
 * Issues a PUT to `/user/supplier`, sending and receiving JSON.
 *
 * @param supplierUser supplierUser payload with the updated fields (required)
 * @param observe set whether or not to return the data Observable as the body, response or events. defaults to returning the body.
 * @param reportProgress flag to report request and response progress.
 */
public updateSupplierUsingPUT(supplierUser: SupplierUser, observe?: 'body', reportProgress?: boolean): Observable<SupplierUser>;
public updateSupplierUsingPUT(supplierUser: SupplierUser, observe?: 'response', reportProgress?: boolean): Observable<HttpResponse<SupplierUser>>;
public updateSupplierUsingPUT(supplierUser: SupplierUser, observe?: 'events', reportProgress?: boolean): Observable<HttpEvent<SupplierUser>>;
public updateSupplierUsingPUT(supplierUser: SupplierUser, observe: any = 'body', reportProgress: boolean = false ): Observable<any> {
    // The payload is mandatory — fail fast before touching the network.
    if (supplierUser === null || supplierUser === undefined) {
        throw new Error('Required parameter supplierUser was null or undefined when calling updateSupplierUsingPUT.');
    }

    // Content negotiation: both produced and consumed media type is JSON.
    let requestHeaders = this.defaultHeaders;
    const acceptChoice = this.configuration.selectHeaderAccept(['application/json']);
    if (acceptChoice != undefined) {
        requestHeaders = requestHeaders.set('Accept', acceptChoice);
    }
    const contentTypeChoice = this.configuration.selectHeaderContentType(['application/json']);
    if (contentTypeChoice != undefined) {
        requestHeaders = requestHeaders.set('Content-Type', contentTypeChoice);
    }

    return this.httpClient.put<SupplierUser>(`${this.basePath}/user/supplier`,
        supplierUser,
        {
            withCredentials: this.configuration.withCredentials,
            headers: requestHeaders,
            observe: observe,
            reportProgress: reportProgress
        }
    );
}
}
|
// Sort `arr` ascending, in place, and return the same array.
// Selection sort: each pass moves the smallest remaining element
// to the front of the unsorted region.
function sort(arr) {
    const size = arr.length;
    for (let front = 0; front < size - 1; front++) {
        // Find the index of the smallest element in arr[front..].
        let smallest = front;
        for (let scan = front + 1; scan < size; scan++) {
            if (arr[scan] < arr[smallest]) {
                smallest = scan;
            }
        }
        // Swap it into position `front`.
        [arr[front], arr[smallest]] = [arr[smallest], arr[front]];
    }
    return arr;
}
# Shell aliases. The `cat`/`ls` replacements are only installed when the
# modern tool actually exists, so a missing binary never breaks core commands.
alias reload!='. ~/.zshrc'   # re-source the shell config in the current shell
alias cls='clear' # Good 'ol Clear Screen command

command -v bat >/dev/null 2>&1 && alias cat='bat'
if command -v lsd >/dev/null 2>&1; then
  alias ls='lsd'
  # Trailing space lets the word following `ll` undergo alias expansion too.
  alias ll='ls -l --group-dirs first -- '
else
  # Fallback: plain long listing when lsd (and its --group-dirs flag) is absent.
  alias ll='ls -l'
fi
<filename>VolodymyrPeretiatko/src/test/java/hw1/model/ProductTest.java
package hw1.model;
import org.junit.*;
public class ProductTest {

    // Fresh instance under test, rebuilt before every test by init().
    private Product product;
    // Shared fixtures: testData1 is constructed with the same values as the
    // instance under test; testData2 differs (id 2, other name and price).
    private static Product testData1;
    private static Product testData2;

    @BeforeClass
    public static void initTestData(){
        testData1 = new Product(1, "Laptop HP 750B", 500.0);
        testData2 = new Product(2, "Laptop HP 1020Z", 650.0);
    }

    @AfterClass
    public static void clearTestData(){
        // Drop fixture references once the whole class has run.
        testData1 = null;
        testData2 = null;
    }

    @Before
    public void init(){
        product = new Product(1, "Laptop HP 750B", 500.00);
    }

    // NOTE(review): "Equels" is a typo for "Equals"; left as-is because
    // renaming would change the test name shown in reports.
    @Test
    public void testProductEquels(){
        Assert.assertTrue(product.equals(testData1));
        Assert.assertFalse(product.equals(testData2));
    }

    @Test
    public void testProductHash(){
        // hashCode must agree with equals: equal products share a hash,
        // and the unequal fixture is expected to hash differently here.
        Assert.assertEquals(testData1.hashCode(), product.hashCode());
        Assert.assertNotEquals(testData2.hashCode(), product.hashCode());
    }

    @Test
    public void testGetPrice(){
        // Delta of 0.1 tolerates double-precision rounding.
        Assert.assertEquals(500.0, product.getPrice(), 0.1);
    }
}
|
#!/usr/bin/env bash
# Fetch two stored Elasticsearch scripts and pretty-print each response.
#
# The original stored each command line in a string and executed it unquoted
# (fragile word-splitting); commands are now held in arrays instead.
set -euo pipefail

ES_URL="http://localhost:9200"

# fetch_script NAME — echo the curl command, run it, pretty-print the JSON.
fetch_script() {
  local name=$1
  local -a cmd=(curl --silent --show-error "${ES_URL}/_scripts/${name}")
  printf '%s\n' "${cmd[*]}"
  "${cmd[@]}" | python -m json.tool
}

fetch_script asset_update
fetch_script asset_merge
|
// Manual smoke test for the hand-rolled Promise implementation in ./promise.
const Promise = require('./promise')

// p1 resolves with 'p1' after ~2s.
const p1 = new Promise((resolve,reject)=>{
    setTimeout(()=>{
        resolve('p1')
    },2000)
})

// Chaining: returning a promise from then() makes the next then() wait for it.
p1.then((res)=>{
    console.log(res)    // expected: 'p1' at t ≈ 2s
    return new Promise((resolve,reject)=>{
        setTimeout(()=>{
            resolve('p2Resolve')
        },1000)
    })
}).then((res)=>{
    console.log(res)    // expected: 'p2Resolve' at t ≈ 3s
})

// p2 resolves with 'p2' after ~3s.
const p2 = new Promise((resolve,reject)=>{
    setTimeout(()=>{
        resolve('p2')
    },3000)
})

// all(): resolves once both settle -> ['p1', 'p2'].
Promise.all([p1,p2]).then((val)=>{
    console.log(val)
})

// race(): resolves with the first settled promise -> 'p1'.
Promise.race([p1,p2]).then((val)=>{
    console.log(val)
})
<gh_stars>1-10
require 'spec_helper'
# Specs for Node's max_demand / max_demand_recursive attributes, evaluated
# through GQL queries against the fixture ETSource dataset.
module Qernel
  describe Node do
    before :all do
      # Reset memoized caches and point the loader at the test fixtures once
      # for the whole example group.
      NastyCache.instance.expire!
      Etsource::Base.loader('spec/fixtures/etsource')
    end

    describe "max_demand_recursive" do
      before do
        @gql = Scenario.default.gql
      end

      it "equals max_demand if max_demand is available" do
        expect(@gql.query_future("V(max_demand_node_b, max_demand)")).to eq(6000.0)
        expect(@gql.query_future("V(max_demand_node_b, max_demand_recursive)")).to eq(6000.0)
        expect(@gql.query_future("V(max_demand_node_c, max_demand)")).to eq(500.0)
        expect(@gql.query_future("V(max_demand_node_c, max_demand_recursive)")).to eq(500.0)
      end

      it "uses max_demand / share of the node that has minimum share * max_demand" do
        expect(@gql.query_future("V(max_demand_node_a, max_demand)")).to eq(5000.0)
        expect(@gql.query_future("V(max_demand_node_a, max_demand_recursive)")).to eq(5000.0)
      end

      it "works with one edge" do
        expect(@gql.query_future("V(max_demand_node_d, max_demand)")).to eq(5000.0)
      end

      it "the max_demand actually works" do
        expect(@gql.query_future("V(max_demand_node_with_high_demand_remainder, demand)")).to eq(5000.0)
      end
    end

    describe "max_demand_recursive with different numbers" do
      before do
        # Build the graph without the default preparation so the edge shares
        # can be overridden (99%/1%) before demands are calculated.
        @gql = Scenario.default.gql(prepare: false)
        @gql.init_datasets
        @gql.future_graph.node(:max_demand_node_b).output_edges.first.share = 0.99
        @gql.future_graph.node(:max_demand_node_c).output_edges.first.share = 0.01
        @gql.update_graphs
        @gql.calculate_graphs
      end

      it "equals max_demand if max_demand is available" do
        expect(@gql.query_future("V(max_demand_node_b, max_demand)")).to eq(6000.0)
        expect(@gql.query_future("V(max_demand_node_b, max_demand_recursive)")).to eq(6000.0)
        expect(@gql.query_future("V(max_demand_node_c, max_demand)")).to eq(500.0)
        expect(@gql.query_future("V(max_demand_node_c, max_demand_recursive)")).to eq(500.0)
      end

      it "uses max_demand / share of the node that has minimum share * max_demand" do
        # floor/ceil below avoid brittle comparisons on floating-point results.
        expect(@gql.query_future("V(max_demand_node_a, max_demand)").floor).to eq(6060)
        expect(@gql.query_future("V(max_demand_node_a, max_demand_recursive)").floor).to eq(6060)
      end

      it "works with one edge" do
        expect(@gql.query_future("V(max_demand_node_d, max_demand)").floor).to eq(6060)
      end

      it "the max_demand actually works" do
        expect(@gql.query_future("V(max_demand_node_with_high_demand_remainder, demand)").ceil).to eq(3940)
      end
    end
  end
end
|
#!/bin/bash
# Run the scheduler container once (removed on exit), mounting ./src into the
# container, joining the shared data network, and shipping container logs to
# the local fluentd collector. Any extra arguments are passed to the scheduler.
set -euo pipefail

docker run -it --rm \
    -v "$PWD/src":/usr/src \
    --network digilog-data-network \
    --log-driver fluentd \
    --log-opt fluentd-address="localhost:24224" \
    --log-opt tag="scheduler" \
    scheduler "$@"
<form action="/contact-form-submission" method="POST">
    <!-- label[for] must match an input id; name alone does not associate a
         label with its control, so ids were added for accessibility. -->
    <label for="name">Name</label><br />
    <input type="text" id="name" name="name"><br />
    <label for="email">Email address</label><br />
    <input type="email" id="email" name="email"><br />
    <label for="telephone">Phone Number</label><br />
    <input type="text" id="telephone" name="telephone"><br />
    <label for="feedback">Comments</label><br />
    <textarea id="feedback" name="feedback"></textarea>
    <br /><br />
    <input type="submit" value="Submit">
</form>
<filename>src/com/freighttrust/schema/universal/ConversationMessage.java<gh_stars>0
package com.freighttrust.schema.universal;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for ConversationMessage complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ConversationMessage">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <all>
* <element name="ParticipantName">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <maxLength value="256"/>
* </restriction>
* </simpleType>
* </element>
* <element name="Text">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <maxLength value="2147483647"/>
* </restriction>
* </simpleType>
* </element>
* <element name="CreatedTime" type="{http://www.freighttrust.com/schema/universal/2020/06}emptiable_dateTime" minOccurs="0"/>
* <element name="IsInternal" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="IsSystem" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* </all>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ConversationMessage", namespace = "http://www.freighttrust.com/schema/universal/2020/06", propOrder = {

})
public class ConversationMessage {

    // Required. Display name of the message author (schema caps it at 256 chars).
    @XmlElement(name = "ParticipantName", namespace = "http://www.freighttrust.com/schema/universal/2020/06", required = true)
    protected String participantName;
    // Required. Message body text.
    @XmlElement(name = "Text", namespace = "http://www.freighttrust.com/schema/universal/2020/06", required = true)
    protected String text;
    // Optional. Schema type is emptiable_dateTime, carried here as a raw string.
    @XmlElement(name = "CreatedTime", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected String createdTime;
    // Optional boolean flag from the schema.
    @XmlElement(name = "IsInternal", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected Boolean isInternal;
    // Optional boolean flag from the schema.
    @XmlElement(name = "IsSystem", namespace = "http://www.freighttrust.com/schema/universal/2020/06")
    protected Boolean isSystem;

    /**
     * Gets the value of the participantName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getParticipantName() {
        return participantName;
    }

    /**
     * Sets the value of the participantName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setParticipantName(String value) {
        this.participantName = value;
    }

    /**
     * Gets the value of the text property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getText() {
        return text;
    }

    /**
     * Sets the value of the text property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setText(String value) {
        this.text = value;
    }

    /**
     * Gets the value of the createdTime property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getCreatedTime() {
        return createdTime;
    }

    /**
     * Sets the value of the createdTime property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setCreatedTime(String value) {
        this.createdTime = value;
    }

    /**
     * Gets the value of the isInternal property.
     *
     * @return
     *     possible object is
     *     {@link Boolean }
     *
     */
    public Boolean isIsInternal() {
        return isInternal;
    }

    /**
     * Sets the value of the isInternal property.
     *
     * @param value
     *     allowed object is
     *     {@link Boolean }
     *
     */
    public void setIsInternal(Boolean value) {
        this.isInternal = value;
    }

    /**
     * Gets the value of the isSystem property.
     *
     * @return
     *     possible object is
     *     {@link Boolean }
     *
     */
    public Boolean isIsSystem() {
        return isSystem;
    }

    /**
     * Sets the value of the isSystem property.
     *
     * @param value
     *     allowed object is
     *     {@link Boolean }
     *
     */
    public void setIsSystem(Boolean value) {
        this.isSystem = value;
    }

}
|
import os
import glob
from bs4 import BeautifulSoup
def extract_candidate_info(html_dir: str) -> dict:
    """Parse every candidate HTML page under ``html_dir``.

    Assumes each page under ``candidates/`` has an ``<h1>Label: NAME</h1>``
    heading and ``<p>Field: value</p>`` entries — TODO confirm against the
    pages this scraper targets.

    Args:
        html_dir: Directory containing a ``candidates/`` subdirectory of
            ``*.html`` files.

    Returns:
        Mapping of candidate name -> ``{field: value}`` parsed from each page.
    """
    html_paths = glob.glob(os.path.join(html_dir, "candidates/*.html"))
    candidate_info: dict = {}
    for html_path in html_paths:
        # Explicit encoding so parsing doesn't depend on the platform default.
        with open(html_path, 'r', encoding='utf-8') as f:
            soup = BeautifulSoup(f.read(), 'html.parser')
        # Split on the first ': ' only, so names/values that themselves
        # contain ': ' are no longer truncated (the old code kept just the
        # second segment).
        candidate_name = soup.find('h1').text.split(': ', 1)[1]
        candidate_data = {}
        for tag in soup.find_all('p'):
            # partition() splits each <p> once and tolerates entries with no
            # ': ' separator (value becomes '') instead of raising IndexError.
            label, _, value = tag.text.partition(': ')
            candidate_data[label] = value
        candidate_info[candidate_name] = candidate_data
    return candidate_info
package io.opensphere.core.pipeline;
/**
 * A scale detector that doesn't scale anything.
 */
public class NoScale extends ScaleDetector
{
    /**
     * Constructor.
     */
    public NoScale()
    {
        // super(null): presumably no source component is needed for a
        // constant scale — confirm against the ScaleDetector contract.
        super(null);
    }

    /** Always reports an identity (1:1) scale. */
    @Override
    public float getScale()
    {
        return 1.0f;
    }
}
|
#!/bin/bash
# Import a JSON file into a MongoDB collection on localhost.
#
# Usage: $0 <database> <collection> <json_file>
set -euo pipefail

# Validate input arguments
if [ "$#" -ne 3 ]; then
    echo "Usage: $0 <database> <collection> <json_file>" >&2
    exit 1
fi

# Assign input arguments to variables
database=$1
collection=$2
json_file=$3

# Check if the JSON file exists
if [ ! -f "$json_file" ]; then
    echo "Error: JSON file not found" >&2
    exit 1
fi

# Import JSON data into MongoDB. Expansions are quoted so names containing
# spaces work, and the command is tested directly instead of inspecting $?.
# The script now also exits non-zero on failure (it previously exited 0).
if mongoimport --host localhost --db "$database" --collection "$collection" < "$json_file"; then
    echo "Import successful"
else
    echo "Import failed" >&2
    exit 1
fi
#!/bin/sh
# Copyright (c) 2013-2019 The Btcavenue Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

# Bootstrap the autotools build system: run autoreconf in the source root.
export LC_ALL=C
set -e
srcdir="$(dirname "$0")"
cd "$srcdir"

# GNU libtoolize may be installed as "glibtoolize" (e.g. via Homebrew);
# point LIBTOOLIZE at it unless the caller already set one.
if [ -z "${LIBTOOLIZE}" ] && GLIBTOOLIZE="$(command -v glibtoolize)"; then
  LIBTOOLIZE="${GLIBTOOLIZE}"
  export LIBTOOLIZE
fi

# Fail with a clear message. The previous `(echo ... && exit 1)` only exited
# the subshell (relying on set -e to stop the script) and sent the error to
# stdout; use a brace group and stderr instead.
command -v autoreconf >/dev/null || {
  echo "configuration failed, please install autoconf first" >&2
  exit 1
}
autoreconf --install --force --warnings=all
|
<gh_stars>1-10
import {
Adapter,
BaseMessageSignerWalletAdapter,
WalletAdapterNetwork,
} from '@solana/wallet-adapter-base';
import {
BitKeepWalletAdapter,
BloctoWalletAdapter,
CloverWalletAdapter,
GlowWalletAdapter,
LedgerWalletAdapter,
PhantomWalletAdapter,
SlopeWalletAdapter,
SolflareWalletAdapter,
SolletExtensionWalletAdapter,
SolletWalletAdapter,
TorusWalletAdapter,
} from '@solana/wallet-adapter-wallets';
import type { Cluster } from '@solana/web3.js';
import type { SolanaWallet } from '../types';
// Buckets of wallets grouped by detection state.
// NOTE(review): the exact semantics of installed/undetected/loadable are not
// visible in this file — confirm at the producer that fills this structure.
export interface WalletTypes {
  wallets: SolanaWallet[];
  installed: SolanaWallet[];
  undetected: SolanaWallet[];
  loadable: SolanaWallet[];
}
// Look up a wallet by its display name. Throws when absent, so callers
// never need to null-check the result.
export const findWallet = (wallets: SolanaWallet[], name: string) => {
  const match = wallets.find((candidate) => candidate.name === name);
  if (!match) {
    throw new Error(`${name} wallet not found.`);
  }
  return match;
};
// Instantiate one adapter per supported wallet. Adapters constructed with
// `{ network }` are pinned to the given cluster; the rest take no options.
export const getWalletAdapters = ({
  network,
}: { network?: WalletAdapterNetwork } = {}): Adapter[] => [
  new PhantomWalletAdapter(),
  new GlowWalletAdapter(),
  new SlopeWalletAdapter(),
  new SolflareWalletAdapter({ network }),
  new SolletExtensionWalletAdapter({ network }),
  new BitKeepWalletAdapter(),
  new CloverWalletAdapter(),
  new TorusWalletAdapter(),
  new LedgerWalletAdapter(),
  new SolletWalletAdapter({ network }),
  new BloctoWalletAdapter({ network }),
];
// Build the adapter list for the given cluster and keep only the adapters
// that expose a signMessage implementation.
export const getWallets = (solanaNetwork: Cluster) => {
  const adapters = getWalletAdapters({
    network: solanaNetwork as WalletAdapterNetwork,
  });
  return adapters.filter(
    (adapter) => 'signMessage' in adapter,
  ) as BaseMessageSignerWalletAdapter[];
};
|
<filename>core/store/primitives/src/main/java/org/onosproject/store/primitives/impl/DefaultConsistentMapBuilder.java
/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.primitives.impl;
import org.onosproject.store.primitives.DistributedPrimitiveCreator;
import org.onosproject.store.serializers.KryoNamespaces;
import org.onosproject.store.service.AsyncConsistentMap;
import org.onosproject.store.service.ConsistentMap;
import org.onosproject.store.service.ConsistentMapBuilder;
import org.onosproject.store.service.Serializer;
/**
* Default {@link AsyncConsistentMap} builder.
*
* @param <K> type for map key
* @param <V> type for map value
*/
public class DefaultConsistentMapBuilder<K, V> extends ConsistentMapBuilder<K, V> {

    // Factory used to create the underlying distributed map primitive.
    private final DistributedPrimitiveCreator primitiveCreator;

    public DefaultConsistentMapBuilder(DistributedPrimitiveCreator primitiveCreator) {
        this.primitiveCreator = primitiveCreator;
    }

    @Override
    public ConsistentMap<K, V> build() {
        // Synchronous facade over the async map assembled below.
        return buildAsyncMap().asConsistentMap();
    }

    @Override
    public AsyncConsistentMap<K, V> buildAsyncMap() {
        AsyncConsistentMap<K, V> map;

        // If a compatibility function is defined, we don't assume CompatibleValue and Version is registered in
        // the user-provided serializer since it's an implementation detail. Instead, we use the user-provided
        // serializer to convert the CompatibleValue value to a raw byte[] and use a separate serializer to encode
        // the CompatibleValue to binary.
        if (compatibilityFunction != null) {
            Serializer serializer = serializer();

            // Convert the byte[] value to CompatibleValue<byte[]>
            AsyncConsistentMap<K, CompatibleValue<byte[]>> rawMap = primitiveCreator.newAsyncConsistentMap(
                withSerializer(Serializer.using(KryoNamespaces.API, CompatibleValue.class)));

            // Convert the CompatibleValue<byte[]> value to CompatibleValue<V> using the user-provided serializer.
            AsyncConsistentMap<K, CompatibleValue<V>> compatibleMap =
                DistributedPrimitives.newTranscodingMap(
                    rawMap,
                    key -> key,
                    key -> key,
                    value -> value == null ? null :
                        new CompatibleValue<byte[]>(serializer.encode(value.value()), value.version()),
                    value -> value == null ? null :
                        new CompatibleValue<V>(serializer.decode(value.value()), value.version()));
            map = DistributedPrimitives.newCompatibleMap(compatibleMap, compatibilityFunction, version());
        } else {
            map = primitiveCreator.newAsyncConsistentMap(name(), serializer());
        }

        // Optional decorators, applied in this fixed nesting order:
        // not-null filtering innermost, then caching, then read-only,
        // with metering applied outermost.
        map = nullValues() ? map : DistributedPrimitives.newNotNullMap(map);
        map = relaxedReadConsistency() ? DistributedPrimitives.newCachingMap(map) : map;
        map = readOnly() ? DistributedPrimitives.newUnmodifiableMap(map) : map;
        return meteringEnabled() ? DistributedPrimitives.newMeteredMap(map) : map;
    }
}
def running_total(nums):
    """Yield the cumulative sum of ``nums``, one partial total per element.

    For ``[1, 2, 3]`` this yields ``1, 3, 6``; an empty iterable yields
    nothing. Lazily consumes ``nums``, so any iterable works, and unlike the
    previous ``total = 0`` loop it also supports non-numeric types that
    define ``+`` (e.g. strings), since no initial 0 is added.
    """
    # Stdlib accumulate implements exactly this; local import keeps the
    # module's top-level imports unchanged.
    from itertools import accumulate
    yield from accumulate(nums)
#!/usr/bin/env bats
#
# Copyright 2021 HAProxy Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
load '../../libs/dataplaneapi'
load "../../libs/get_json_path"
load '../../libs/resource_client'
load '../../libs/version'
load '../../libs/haproxy_config_setup'
load '../../libs/haproxy_version'
load 'utils/_helpers'
@test "storage_ssl_certificates: Add a ssl certificate file" {
refute dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
pre_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
run dpa_curl_file_upload POST "$_STORAGE_SSL_CERTS_BASE_PATH" "@${BATS_TEST_DIRNAME}/data/1.pem;filename=1.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal $SC 201
assert_equal $(get_json_path "$BODY" '.storage_name') "1.pem"
assert dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
# confirm haproxy wasn't reloaded or restarted
post_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
new_logs_count=$(( $pre_logs_count - $post_logs_count ))
assert [ $new_logs_count = 0 ]
}
@test "storage_ssl_certificates: Get a list of managed ssl certificate files" {
resource_get "$_STORAGE_SSL_CERTS_BASE_PATH"
assert_equal "$SC" 200
assert_equal "$(get_json_path "$BODY" '.|length')" 1
assert_equal "$(get_json_path "$BODY" '.[0].storage_name')" "1.pem"
}
@test "storage_ssl_certificates: Get a ssl certificate file contents" {
run dpa_curl_download GET "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal $SC 200
assert_output --partial "1.pem"
# we opted to not return the certificate contents (i.e. secret keys, just the identifier)
#assert dpa_diff_var_file '$BODY' "1.pem"
}
@test "storage_ssl_certificates: Replace a ssl certificate file contents" {
run dpa_curl_text_plain PUT "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem" "@${BATS_TEST_DIRNAME}/data/2.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal "$SC" 202
assert dpa_diff_docker_file '/etc/haproxy/ssl/1.pem' "data/2.pem"
}
@test "storage_ssl_certificates: Replace a ssl certificate file contents with skip reload" {
pre_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
run dpa_curl_text_plain PUT "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem?skip_reload=true" "@${BATS_TEST_DIRNAME}/data/2.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal "$SC" 200
# confirm haproxy wasn't reloaded or restarted
post_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
new_logs_count=$(( $pre_logs_count - $post_logs_count ))
assert [ $new_logs_count = 0 ]
}
@test "storage_ssl_certificates: Delete a ssl certificate file" {
resource_delete "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem"
assert_equal "$SC" 202
refute dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
}
@test "storage_ssl_certificates: Delete a ssl certificate file with force reload" {
#reupload cert file
run dpa_curl_file_upload POST "$_STORAGE_SSL_CERTS_BASE_PATH" "@${BATS_TEST_DIRNAME}/data/1.pem;filename=1.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal "$SC" 201
resource_delete "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem" "force_reload=true"
assert_equal "$SC" 204
refute dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
}
@test "storage_ssl_certificates: Delete a ssl certificate file with skip reload" {
#reupload cert file
run dpa_curl_file_upload POST "$_STORAGE_SSL_CERTS_BASE_PATH" "@${BATS_TEST_DIRNAME}/data/1.pem;filename=1.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal "$SC" 201
resource_delete "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem" "skip_reload=true"
assert_equal "$SC" 204
refute dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
}
@test "storage_ssl_certificates: Add a ssl certificate file with force reload" {
run docker cp "${BATS_TEST_DIRNAME}/data/3.pem" "${DOCKER_CONTAINER_NAME}:/etc/haproxy/ssl/"
assert_success
refute dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem?force_reload=true'
pre_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
run dpa_curl_file_upload POST "$_STORAGE_SSL_CERTS_BASE_PATH?force_reload=true" "@${BATS_TEST_DIRNAME}/data/1.pem;filename=1.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal $SC 201
assert_equal $(get_json_path "$BODY" '.storage_name') "1.pem"
assert dpa_docker_exec 'ls /etc/haproxy/ssl/1.pem'
post_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
new_logs_count=$(( $pre_logs_count - $post_logs_count ))
new_logs=$(docker logs dataplaneapi-e2e 2>&1 | tail -n $new_logs_count)
echo -e "$new_logs" # this will help debugging if the test fails
if haproxy_version_ge "2.5"
then
assert echo -e "$new_logs" | grep -q "Loading success"
else
assert echo -e "$new_logs" | head -n 1 | grep -q "Reexecuting Master process"
fi
}
@test "storage_ssl_certificates: Replace a ssl certificate file contents with force reload" {
run docker cp "${BATS_TEST_DIRNAME}/data/3.pem" "${DOCKER_CONTAINER_NAME}:/etc/haproxy/ssl/"
assert_success
pre_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
run dpa_curl_text_plain PUT "$_STORAGE_SSL_CERTS_BASE_PATH/1.pem?force_reload=true" "@${BATS_TEST_DIRNAME}/data/2.pem"
assert_success
dpa_curl_status_body '$output'
assert_equal $SC 200
post_logs_count=$(docker logs dataplaneapi-e2e 2>&1 | wc -l)
new_logs_count=$(( $pre_logs_count - $post_logs_count ))
new_logs=$(docker logs dataplaneapi-e2e 2>&1 | tail -n $new_logs_count)
echo -e "$new_logs" # this will help debugging if the test fails
if haproxy_version_ge "2.5"
then
assert echo -e "$new_logs" | grep -q "Loading success"
else
assert echo -e "$new_logs" | head -n 1 | grep -q "Reexecuting Master process"
fi
# clean up after the test
dpa_docker_exec 'rm /etc/haproxy/ssl/1.pem'
}
|
def optimise_list(arr):
    """Sort ``arr`` ascending in place and return a NEW list with the same
    elements in descending order.

    The in-place ascending sort of ``arr`` is preserved as a side effect for
    callers that relied on it.
    """
    arr.sort()
    # Reversing the ascending sort yields descending order directly,
    # replacing the original manual index-walk loop.
    return arr[::-1]
#!/bin/bash
# Sync every top-level directory whose name starts with a digit into Reference/,
# keeping only selected file types and pruning out_data directories.
# NOTE(review): rsync evaluates --include/--exclude rules in the order given and
# the first matching rule wins, so the leading --exclude rules take precedence
# over the later --include rules for the same names, and the trailing
# --include=*/*/ before --exclude=*/* is what keeps second-level directories
# traversable — confirm this rule order still selects the intended payload.
rsync -avh [0-9]* Reference/ --exclude=README --exclude=Reference --exclude=Data --exclude=Pseudos --exclude=*.py --exclude=gsphere_bgw.out --include=*.sh --include=*.in --include=*.files --include=*.out --include=*.cif --include=*.json --exclude=*.log --exclude=*/out_data/* --exclude=*/*/out_data/* --exclude=*/*/*/out_data/* --include=*/*/ --exclude=*/* --exclude=*/*/*
|
#!/bin/bash
# The following line causes bash to exit at any point if there is any error
# and to output each line as it is executed -- useful for debugging
set -e -x -o pipefail
# Applet entry point: downloads all inputs with dx-download-agent and reports
# the elapsed wall-clock time as the job output "runtime".
main() {
    dx-download-all-inputs

    # Get all the DX environment variables, so that dxda can use them
    echo "loading the dx environment"

    # we want to avoid outputing the token
    source environment >& /dev/null

    # build manifest
    echo "downloading with dx-download-agent"
    # quote the expansion so a HOME containing spaces cannot word-split
    mv "${HOME}"/in/manifest/*.json.bz2 manifest.json.bz2

    flags=""
    if $verbose; then
        flags="$flags -verbose"
    fi
    if $gc_info; then
        flags="$flags -gc_info"
    fi

    # time the download and report it as an integer job output
    start=$(date +%s)
    # $flags stays unquoted on purpose: it carries zero or more separate options
    dx-download-agent download ${flags} manifest.json.bz2
    end=$(date +%s)
    runtime=$((end - start))
    dx-jobutil-add-output --class=int runtime "$runtime"
}
|
"use strict";
exports.__esModule = true;
var discord_js_1 = require("discord.js");
exports["default"] = {
name: "echo",
description: "Echo the message back to the channel",
aliases: ["say"],
category: "misc",
run: function (_a) {
// Welcome back to a simple echo command
var message = _a.message, args = _a.args;
// If you want error
// if (!args[0]) {
// return message.channel.send('You did not specify a message to echo.');
// }
var desc = args[0] ? args.join(' ') : "".concat(message.author, " did not specify any content to echo.");
var embed = new discord_js_1.MessageEmbed()
.setTitle("".concat(message.author.username, " (").concat(message.author.id, ") Echo"))
.setDescription(desc)
.setColor("#0099ff")
.setFooter("Echo command requested by ".concat(message.author.username));
// Send embed
message.channel.send({
embeds: [embed]
});
}
};
|
<reponame>carlosvicient/polymer-project-series<gh_stars>0
import {LitElement, html, css} from 'lit-element';
import store from '../libraries/redux/store';
/**
* `click-visualiser` This component will show the number of clicks in the application (only for those 4 buttons)
*
* @customElement
* @polymer
* @demo
*
*/
class ClickVisualiser extends LitElement {
  static get properties() {
    return {
      // Running total of clicks, mirrored from the redux store on every update.
      totalClicks: { type: Number}
    }
  }
  /**
   * Instance of the element is created/upgraded. Use: initializing state,
   * set up event listeners, create shadow dom.
   * @constructor
   */
  constructor() {
    super();
    this.totalClicks = 0;
    // Re-render whenever the shared redux store changes.
    // NOTE(review): the subscription is never disposed; if instances can be
    // removed from the DOM, consider unsubscribing in disconnectedCallback.
    store.subscribe(() => {
      this.handleUpdatedState(store.getState());
    });
  }
  static get styles() {
    return [
      css`
        p {
          font-weight: bold;
        }
      `,
    ];
  }
  /**
   * Implement to describe the element's DOM using lit-html.
   * Use the element current props to return a lit-html template result
   * to render into the element.
   */
  render() {
    return html`
      <h2>Click counter component</h2>
      <p>Number of clicks: ${this.totalClicks}</p>
    `;
  }
  // Copies the click counter out of the redux state into the local property.
  handleUpdatedState(state) {
    this.totalClicks = state.clickCounter;
  }
}
customElements.define('click-visualiser', ClickVisualiser);
package expect_test
import (
"errors"
"strings"
"testing"
"github.com/a8m/expect"
)
// TODO(Ariel): Create mock that implement TB interface
// and stub `Error` and `Fatal`
// Verifies the positive and negated StartWith matcher.
func TestStartWith(t *testing.T) {
	exp := expect.New(t)
	exp("foo").To.StartWith("f")
	exp("foo").Not.To.StartWith("bar")
}
// Verifies the positive and negated EndWith matcher.
func TestEndWith(t *testing.T) {
	exp := expect.New(t)
	exp("bar").To.EndWith("ar")
	exp("bar").Not.To.EndWith("az")
}
// Verifies the positive and negated Contains matcher.
func TestContains(t *testing.T) {
	exp := expect.New(t)
	exp("foobar").To.Contains("ba")
	exp("foobar").Not.To.Contains("ga")
}
// Verifies the regexp Match matcher (case-insensitive pattern).
func TestMatch(t *testing.T) {
	exp := expect.New(t)
	exp("Foo").To.Match("(?i)foo")
}
// Exercises Equal across strings, ints, empty maps, and value-vs-pointer structs.
func TestEqual(t *testing.T) {
	exp := expect.New(t)
	exp("a").To.Equal("a")
	exp(1).To.Equal(1)
	exp(false).Not.To.Equal("true")
	exp(map[int]int{}).To.Equal(map[int]int{})
	exp(struct{ X, Y int }{1, 2}).Not.To.Equal(&struct{ X, Y int }{1, 2})
}
// Exercises the Panic matcher, including matching a specific panic value.
func TestPanic(t *testing.T) {
	exp := expect.New(t)
	exp(func() {}).Not.To.Panic()
	exp(func() {
		panic("foo")
	}).To.Panic()
	exp(func() {
		panic("bar")
	}).To.Panic("bar")
}
// Verifies that matchers can be chained with And on both To and Not.To.
func TestToChaining(t *testing.T) {
	exp := expect.New(t)
	exp("foobarbaz").To.StartWith("foo").And.EndWith("baz").And.Contains("bar")
	exp("foo").Not.To.StartWith("bar").And.EndWith("baz").And.Contains("bob")
	exp("foo").To.Match("f").And.Match("(?i)F")
}
// TestToFailNow verifies Else.FailNow() calls FailNow on the mock T only when
// the preceding positive assertion fails.
func TestToFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	expect("foo").To.Equal("foo").Else.FailNow()
	// passing assertion: FailNow must not have been signalled
	select {
	case <-mockT.FailNowCalled:
		t.Errorf("Expected FailNow() on passing test not to be called")
	default:
	}
	expect("foo").To.Equal("bar").Else.FailNow()
	// failing assertion: FailNow must have been signalled
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
// TestNotToFailNow mirrors TestToFailNow for the negated (Not.To) form.
func TestNotToFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	expect("foo").Not.To.Equal("bar").Else.FailNow()
	// passing negated assertion: FailNow must not have been signalled
	select {
	case <-mockT.FailNowCalled:
		t.Errorf("Expected FailNow() on passing test not to be called")
	default:
	}
	expect("foo").Not.To.Equal("foo").Else.FailNow()
	// failing negated assertion: FailNow must have been signalled
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
// TestToAndHaveFailNow: a failing link anywhere in a To...And.Have chain must
// still trigger Else.FailNow().
func TestToAndHaveFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	expect("foo").To.Equal("bar").And.Have.Len(3).Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
// TestToAndBeFailNow: a failing link in a To...And.Be chain must still trigger
// Else.FailNow().
func TestToAndBeFailNow(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	expect("foo").To.Equal("bar").And.Be.String().Else.FailNow()
	select {
	case <-mockT.FailNowCalled:
	default:
		t.Errorf("Expected FailNow() on failing test to be called")
	}
}
// TestPassCustomMatcher verifies To.Pass: the custom matcher receives the
// actual value, a nil match result reports no error, and a non-nil result
// surfaces the matcher's error text via Errorf.
func TestPassCustomMatcher(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	mockMatcher := newMockMatcher()
	// queue a successful match result
	mockMatcher.MatchOutput.Ret0 <- nil
	expect("foo").To.Pass(mockMatcher)
	// the matcher must have been invoked with the actual value
	select {
	case actual := <-mockMatcher.MatchInput.Actual:
		if actual != "foo" {
			t.Errorf(`Expected matcher to be called with "foo"`)
		}
	default:
		t.Errorf("Expected matcher to be called")
	}
	// a passing match must not produce an Errorf call
	select {
	case <-mockT.ErrorfInput.Args:
		t.Errorf("Expected Errorf() on passing test not to be called")
	default:
	}
	// queue a failing match result and check its message is propagated
	uniqueError := "I am a unique error"
	mockMatcher.MatchOutput.Ret0 <- errors.New(uniqueError)
	expect("foo").To.Pass(mockMatcher)
	select {
	case args := <-mockT.ErrorfInput.Args:
		if len(args) != 2 {
			t.Fatalf("Expected %#v to have length 2", args)
		}
		s, ok := args[1].(string)
		if !ok {
			t.Errorf("Expected arg %#v to be a string", args[1])
		}
		if !strings.Contains(s, uniqueError) {
			t.Errorf(`Expected message "%s" to contain "%s"`, s, uniqueError)
		}
	default:
		t.Errorf("Expected Errorf() on failing test to be called")
	}
}
// TestNotPassCustomMatcher verifies Not.To.Pass: a matcher error counts as a
// pass, while a nil match result fails and names the matcher in the message.
func TestNotPassCustomMatcher(t *testing.T) {
	mockT := newMockT()
	expect := expect.New(mockT)
	mockMatcher := newMockMatcher()
	// matcher reports an error -> negated expectation passes, no Errorf
	mockMatcher.MatchOutput.Ret0 <- errors.New("foo")
	expect("foo").Not.To.Pass(mockMatcher)
	select {
	case <-mockT.ErrorfInput.Args:
		t.Errorf("Expected Errorf() on passing test not to be called")
	default:
	}
	// matcher reports success -> negated expectation fails with Errorf
	mockMatcher.MatchOutput.Ret0 <- nil
	expect("foo").Not.To.Pass(mockMatcher)
	select {
	case args := <-mockT.ErrorfInput.Args:
		if len(args) != 2 {
			t.Fatalf("Expected %#v to have length 2", args)
		}
		s, ok := args[1].(string)
		if !ok {
			t.Errorf("Expected arg %#v to be a string", args[1])
		}
		if !strings.Contains(s, "match &expect_test.mockMatcher{") {
			t.Errorf(`Expected message "%s" to contain "match mockMatcher{"`, s)
		}
	default:
		t.Errorf("Expected Errorf() on failing test to be called")
	}
}
|
<gh_stars>1-10
/*
Copyright 2019 Adevinta
*/
package main
import (
"net"
"net/http"
"net/http/httptest"
"net/url"
"testing"
"time"
check "github.com/adevinta/vulcan-check-sdk"
"github.com/adevinta/vulcan-check-sdk/agent"
"github.com/adevinta/vulcan-check-sdk/config"
"github.com/adevinta/vulcan-check-sdk/tools"
report "github.com/adevinta/vulcan-report"
)
// buildTestServer constructs the httptest server a test case runs against.
type buildTestServer func() *httptest.Server

// isAmtServerExposedTestArgs holds the host/port handed to isAmtServerExposed.
type isAmtServerExposedTestArgs struct {
	target string
	port   string
}

// isAmtServerExposedTest describes one table-driven case for
// TestIsAmtServerExposed.
type isAmtServerExposedTest struct {
	name              string
	args              isAmtServerExposedTestArgs
	testServerBuilder buildTestServer
	want              bool
	wantErr           bool
}
// buildExposedAMTTestServer returns a server that mimics an exposed AMT
// endpoint: requests to the AMT path get the AMT "server" header and a 403.
func buildExposedAMTTestServer() *httptest.Server {
	r := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path == "/"+amtServerPath {
			w.Header().Set("server", amtServerHeaderToken)
			w.WriteHeader(http.StatusForbidden)
		}
	}))
	return r
}
// buildNotExposedAMTTestServer returns a server that answers every request
// with a plain 200 and no AMT header, i.e. a host without AMT exposed.
func buildNotExposedAMTTestServer() *httptest.Server {
	r := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	return r
}
// Table of cases for TestIsAmtServerExposed: exposed, not exposed, a malformed
// host/port, and an unreachable address.
var isAmtExposedTestCases = []isAmtServerExposedTest{
	{
		name:              "HappyPathExposed",
		testServerBuilder: buildExposedAMTTestServer,
		// If a server is specified, there is no need to define args because are inferred from the server url.
		want: true,
	},
	{
		name:              "HappyPathNotExposed",
		testServerBuilder: buildNotExposedAMTTestServer,
		// If a server is specified, there is no need to define args because are inferred from the server url.
		want: false,
	},
	{
		name: "ErrorHostFormat",
		args: isAmtServerExposedTestArgs{
			port:   "error",
			target: "ds",
		},
		wantErr: true,
		// If a server is specified, there is no need to define args because are inferred from the server url.
		want: false,
	},
	{
		name: "ErrorNotReachable",
		args: isAmtServerExposedTestArgs{
			port:   "3005",
			target: "127.0.0.1",
		},
		wantErr: false,
		// If a server is specified, there is no need to define args because are inferred from the server url.
		want: false,
	},
}
// TestIsAmtServerExposed runs the table above against isAmtServerExposed,
// spinning up a local test server when the case provides a builder.
func TestIsAmtServerExposed(t *testing.T) {
	for _, tt := range isAmtExposedTestCases {
		t.Run(tt.name, func(t *testing.T) {
			// if a test server builder function is defined call it and set test params according.
			if tt.testServerBuilder != nil {
				srv := tt.testServerBuilder()
				defer srv.Close()
				srvURL, err := url.Parse(srv.URL)
				if err != nil {
					t.Fatal(err)
				}
				host, port, err := net.SplitHostPort(srvURL.Host)
				if err != nil {
					t.Fatal(err)
				}
				tt.args.port = port
				tt.args.target = host
			}
			// client that never follows redirects, matching the check's behaviour
			client := http.Client{
				Timeout: timeout * time.Second,
				CheckRedirect: func(req *http.Request, via []*http.Request) error {
					return http.ErrUseLastResponse
				},
			}
			got, err := isAmtServerExposed(client, tt.args.target, tt.args.port)
			// NOTE(review): when wantErr is true and err is nil the test still
			// only compares got/want — confirm an expected-but-missing error
			// should not fail the case.
			if err != nil && !tt.wantErr {
				t.Fatal(err)
			} else {
				if got != tt.want {
					t.Errorf("got %v, want %v", got, tt.want)
				}
			}
		})
	}
}
// runTest describes one end-to-end case for TestRun: the fake server to stand
// up, the expected aggregate score, and whether the check should fail.
type runTest struct {
	name              string
	testServerBuilder buildTestServer
	target            string
	wantErr           bool
	score             float32
}
// Table for TestRun; 9.8 is the score expected when AMT is exposed.
var runTestCases = []runTest{
	{
		name:              "HappyPath",
		testServerBuilder: buildExposedAMTTestServer,
		score:             9.8,
	},
}
// TestRun drives the whole check through the SDK's push agent against a fake
// AMT server and verifies the report's aggregate score.
func TestRun(t *testing.T) {
	for _, tt := range runTestCases {
		t.Run(tt.name, func(t *testing.T) {
			// if a test server builder function is defined call it and set test params according.
			if tt.testServerBuilder != nil {
				srv := tt.testServerBuilder()
				defer srv.Close()
				srvURL, err := url.Parse(srv.URL)
				if err != nil {
					t.Fatal(err)
				}
				host, port, err := net.SplitHostPort(srvURL.Host)
				if err != nil {
					t.Fatal(err)
				}
				// WARNING: Never run this tests in parallel.
				// (mutates the package-level amtTCPPorts)
				amtTCPPorts = []string{port}
				tt.target = host
			}
			// reporter stands in for the agent the check pushes results to
			a := tools.NewReporter("ID")
			b := true
			conf := &config.Config{
				Check: config.CheckConfig{
					CheckID:       "ID",
					Target:        tt.target,
					CheckTypeName: "vulcan-exposed-amt",
				},
				Log: config.LogConfig{
					LogFmt:   "text",
					LogLevel: "debug",
				},
				CommMode:        "push",
				AllowPrivateIPs: &b,
			}
			conf.Push.AgentAddr = a.URL
			conf.Push.BufferLen = 10
			c := check.NewCheckFromHandlerWithConfig("vulcan-exposed-amt", conf, run)
			// WARNING: Always stop the reporter after check finishes, if not, very nasty things can happen.
			c.RunAndServe()
			a.Stop()
			// drain messages until the check reports a terminal status
			var msg agent.State
			for msg = range a.Msgs {
				if msg.Status == agent.StatusFailed || msg.Status == agent.StatusFinished {
					break
				}
			}
			if msg.Status == agent.StatusFailed && !tt.wantErr {
				t.Fatalf("check failed unexpectedly")
			}
			if report.AggregateScore(msg.Report.Vulnerabilities) != tt.score {
				t.Fatalf("check returned wrong score: got %v, wanted %v", report.AggregateScore(msg.Report.Vulnerabilities), tt.score)
			}
		})
	}
}
|
<gh_stars>1-10
import request from '@/utils/request';
// Category API helpers: thin wrappers around the shared `request` HTTP client.
// Each function returns the promise produced by request().

export function lookupCategories() {
  return request({ url: '/categories/lookup', method: 'get' });
}

export function getCategoryTree() {
  return request({ url: '/categories/tree', method: 'get' });
}

export function getCategory(id) {
  return request({ url: `/categories/${id}`, method: 'get' });
}

export function getCategoryManageData(id) {
  return request({ url: `/categories/manage/${id}`, method: 'get' });
}

export function saveCategory(category) {
  return request({ url: '/categories', method: 'post', data: category });
}

export function deleteCategory(id) {
  return request({ url: `/categories/${id}`, method: 'delete' });
}

// Pure helper: builds the upload URL for a category image (no request made).
export function getImageUploadPath(id) {
  return process.env.BASE_API + '/categories/' + id + '/uploadImage';
}

export function deleteImage(id) {
  return request({ url: `/categories/${id}/removeImage`, method: 'delete' });
}

export function getAttributes(categoryId) {
  return request({ url: `/categories/${categoryId}/attributes`, method: 'get' });
}

export function saveAllAttributes(categoryId, attributes) {
  return request({ url: `/categories/${categoryId}/attributes`, method: 'post', data: attributes });
}

export function lookupAttributes(categoryId) {
  return request({ url: `/categories/${categoryId}/attributes/lookup`, method: 'get' });
}

export function lookupAttributeValues(categoryId) {
  return request({ url: `/categories/${categoryId}/attributevalues/lookup`, method: 'get' });
}
|
def mask_parts_of_string(data_string, mask_char):
    """Return ``data_string`` with every non-space character replaced by
    ``mask_char``; spaces are preserved so word boundaries stay visible.
    """
    # str.join over a generator is the idiomatic O(n) alternative to
    # repeated string concatenation in a loop.
    return "".join(mask_char if char != ' ' else char for char in data_string)
data_string = "This is a demo string"
mask_char = '#'
result = mask_parts_of_string(data_string, mask_char)
print(result) # Outputs |
#!/bin/bash
# Build the quobyte/quobyte-mesos image and push it to Docker Hub.
# Abort on any failure so a failed build is never pushed.
set -euo pipefail

echo "Build docker image"
sudo docker build -t quobyte/quobyte-mesos:latest .
sudo docker push quobyte/quobyte-mesos:latest
|
<gh_stars>1000+
package com.nepxion.discovery.plugin.framework.listener.loadbalance;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author <NAME>
* @version 1.0
*/
import java.util.List;
import com.nepxion.discovery.plugin.framework.listener.Listener;
import com.netflix.loadbalancer.Server;
public interface LoadBalanceListener extends Listener {
    /**
     * Invoked with the candidate server list resolved for a service, allowing
     * implementations to inspect or adjust it during load balancing.
     *
     * @param serviceId id of the service whose servers were resolved
     * @param servers   candidate servers for the service
     */
    void onGetServers(String serviceId, List<? extends Server> servers);
}
class SortTest:
    def test_sort(self, input_list):
        """Check that sorted() returns a correctly ordered permutation of
        ``input_list``.

        Fix: the original asserted ``sorted_list == input_list``, i.e. it
        compared the sorted output to the raw input, so it failed for any
        input that was not already sorted.
        """
        from collections import Counter
        sorted_list = sorted(input_list)
        # Every adjacent pair must be non-decreasing...
        assert all(a <= b for a, b in zip(sorted_list, sorted_list[1:])), \
            "Sorting function failed to produce the expected result"
        # ...and the output must contain exactly the input's elements.
        assert Counter(sorted_list) == Counter(input_list), \
            "Sorting function failed to produce the expected result"
using System;
/// <summary>
/// Represents a routed event. Placeholder: event-specific properties and
/// methods would live here.
/// </summary>
public class RoutedEvent
{
    // Implementation of RoutedEvent class
    // This class represents a routed event
    // It contains properties and methods specific to a routed event
}
/// <summary>
/// Immutable pairing of a handler's declaring class type with the routed
/// event it handles.
/// </summary>
public class RoutedEventHandlerInfo
{
    /// <summary>Type of the class that declares the handler.</summary>
    public Type ClassType { get; }

    /// <summary>The routed event the handler is registered for.</summary>
    public RoutedEvent RoutedEvent { get; }

    public RoutedEventHandlerInfo(Type classType, RoutedEvent routedEvent)
    {
        ClassType = classType;
        RoutedEvent = routedEvent;
    }
}
/// <summary>
/// Resolves class-level handlers for routed events.
/// </summary>
public class EventHandlerManager
{
    /// <summary>
    /// Returns the class handler info for the given class type and routed
    /// event. NOTE(review): this placeholder simply wraps the arguments in a
    /// new <see cref="RoutedEventHandlerInfo"/>; a real implementation would
    /// look the handler up in a registry.
    /// </summary>
    public RoutedEventHandlerInfo GetClassHandler(Type classType, RoutedEvent routedEvent)
    {
        // Implement logic to retrieve the class handler for the given class type and routed event
        // This logic may involve searching in a data structure or database for the class handler information
        // For the purpose of this example, let's assume a simple implementation
        return new RoutedEventHandlerInfo(classType, routedEvent);
    }
}
#!/bin/bash
# Copyright © 2018-2022, Qrypt, Inc. All rights reserved.
# Display help menu
# Prints the help text for this build script and terminates (exit status 0).
usage() {
    echo ""
    echo "build.sh"
    echo "========"
    echo ""
    echo "General build script used to build the project."
    echo ""
    echo "Options:"
    echo "--help Displays help menu"
    echo ""
    echo "--build_type=<option> Defaults to Release."
    echo " Release - Build targeted for releasing."
    echo " Debug - Build targeted for debugging."
    echo ""
    echo ""
    exit
}
# Parse input arguments
# fix: dropped the no-op `shift`s — shifting inside `for i in "$@"` does not
# affect the iteration, since the list is expanded once up front.
for i in "$@"
do
    case "$i" in
        --help)
            usage
            ;;
        --build_type=*)
            # strip everything up to and including the first '='
            BUILD_TYPE="${i#*=}"
            ;;
        *)
            echo "Unknown option: $i"
            usage
            ;;
    esac
done

# Validate input arguments and set defaults
if [[ "$BUILD_TYPE" == "" ]]; then
    BUILD_TYPE="Release"
fi
if [[ "$BUILD_TYPE" != "Release" && "$BUILD_TYPE" != "Debug" ]]; then
    echo "Invalid --build_type: $BUILD_TYPE"
    usage
fi
echo "***************************************"
echo "* CLEAN"
echo "***************************************"
rm -rvf build
mkdir build
cd build
echo "***************************************"
echo "* BUILD"
echo "***************************************"
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
cmake --build .
cd ..
|
// Sort an array of strings in locale-aware ascending order (in place)
// and print the sorted array.
function sortStrings(arr) {
  arr.sort((a, b) => a.localeCompare(b));
  console.log(arr);
}

sortStrings(['foo', 'bar', 'hello', 'world']);
<reponame>zarina494/fisrt_git_lesson<gh_stars>0
# Demo of Python's dynamic typing: float, int, str, and a bool comparison.
number1=5.2222
number2=5
number3='5'
# True, because 5.2222 > 5
number4=number1>number2
print(type(number1),type(number2),type(number3),number4)
import requests

# Fetch the GitHub timeline feed and print the decoded JSON body.
base_url = "https://github.com/timeline.json"

req = requests.get(base_url)

if(req.status_code == 200) :
    # fix: .json is a bound method; it must be called to decode the body.
    # The original printed the method object itself.
    print(req.json())
else :
    # fix: spelling of "occurred" in the user-facing error message
    print("Error occurred: "+str(req.status_code))
|
import UIKit

/// A UIView whose border width and color can be configured as simple stored
/// properties; each change is forwarded to the backing CALayer.
class CustomBorderView: UIView {
    /// Border thickness in points, mirrored onto `layer.borderWidth`.
    var borderWidth: CGFloat = 0 {
        didSet {
            layer.borderWidth = borderWidth
        }
    }
    /// Border color; `nil` clears the layer's border color.
    var borderColor: UIColor? {
        didSet {
            layer.borderColor = borderColor?.cgColor
        }
    }
}
import re

# Extract every decimal (or bare integer) token from the sentence.
# The first alternative matches numbers with a fractional part; the
# second catches plain integers.
number_pattern = re.compile(r"[-+]?\d*\.\d+|\d+")
string = "There are 45.8 apples and 12.3 oranges."
numbers = number_pattern.findall(string)
print(numbers)
# Output: ['45.8', '12.3']
package com.ceiba.tipocancha.servicio.testdatabuilder;
import com.ceiba.tipocancha.modelo.entidad.TipoCancha;
import java.util.UUID;
/**
 * Test data builder for {@code TipoCancha}: starts from sensible defaults
 * (random id, "futbol 5", price 80000) which individual tests can override.
 */
public class TipoCanchaTestDataBuilder {
    private static final String TIPO_FUTBOL_5 = "futbol 5";
    private static final double VALOR_CANCHA = 80000;

    // Builder state, pre-populated in the constructor.
    private Long id;
    private String tipo;
    private double valorCancha;

    /** Initializes the builder with a random id and the default type/price. */
    public TipoCanchaTestDataBuilder() {
        id = UUID.randomUUID().getLeastSignificantBits();
        this.tipo = TIPO_FUTBOL_5;
        valorCancha = VALOR_CANCHA;
    }

    /** Overrides the generated id. */
    public TipoCanchaTestDataBuilder conId(Long id) {
        this.id = id;
        return this;
    }

    /** Overrides the default court price. */
    public TipoCanchaTestDataBuilder conValor(double valorCancha) {
        this.valorCancha = valorCancha;
        return this;
    }

    /** Builds the {@code TipoCancha} from the configured values. */
    public TipoCancha build() {
        return new TipoCancha(this.id, tipo, valorCancha);
    }
}
|
# Editing
# Choose editors: on a local (non-SSH) macOS session use TextMate; otherwise
# fall back to emacs, with rmate for remote Julia editing.
# NOTE(review): `is_osx` is defined elsewhere in these dotfiles — confirm it
# is sourced before this snippet runs.
if [[ ! "$SSH_TTY" ]] && is_osx; then
  export LESSEDIT='mate %f:%lm' #Edit in TextMate from LESS
  export EDITOR='mate -w' #Use TextMate to edit stuff
  export JULIA_EDITOR='mate' #Use TextMate to edit stuff
else
  export EDITOR='emacs'
  export JULIA_EDITOR='rmate'
fi
|
# Initialize zoxide (smarter cd) for zsh, but only when the binary is installed.
if command -v zoxide &>/dev/null; then
  eval "$(zoxide init zsh)"
fi
|
#!/bin/bash -ex
# error log to screen and file: duplicate stderr into a temp logfile via tee
log_temp=`mktemp -d`
error_log=$log_temp/savu_error_log.txt
exec 2> >(tee -ia $error_log)

# use a minimal prompt during the install and restore the original prompt
# (and re-raise the signal) on INT/TERM/EXIT
oldprompt=$PS1
newprompt=">>> "
export PS1=$newprompt

for sig in INT TERM EXIT; do
  trap "export PS1=$oldprompt; [[ $sig == EXIT ]] || kill -$sig $$" $sig
done

# default install prefix and the directory this script lives in
PREFIX=$HOME
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
savu_version=`cat $DIR/version.txt`
# function for parsing optional arguments
# arg_parse <flag> <varname> "$@" — scans the argument list for <flag> and
# stores the word that follows it in the variable named <varname>.
function arg_parse ()
{
  local flag=$1
  local return=$2
  while [[ $# -gt 2 ]] ; do
    # quote both sides so empty or whitespace-containing args can't break the test
    if [[ "$3" == "$flag" ]] ; then
      # fix: printf -v assigns safely; the original `eval "$return"=$4` re-parsed
      # the value and broke (or executed words) when $4 contained spaces
      printf -v "$return" '%s' "$4"
    fi
    shift
  done
}
# function for parsing flags without arguments
# flag_parse <flag> <varname> "$@" — sets the variable named <varname> to
# "true" if <flag> appears anywhere in the argument list.
function flag_parse ()
{
  local flag=$1
  local return=$2
  while [[ $# -gt 2 ]] ; do
    # quote both sides so empty or whitespace-containing args can't break the test
    if [[ "$3" == "$flag" ]] ; then
      # printf -v assigns without the re-parsing hazards of eval
      printf -v "$return" '%s' true
    fi
    shift
  done
}
# Set the install PREFIX
# fix: --PREFIX carries a path value, so it must be parsed with arg_parse and
# the supplied path used; the original used flag_parse and then assigned the
# literal string "true" to PREFIX, clobbering the install location.
arg_parse "--PREFIX" prefix "$@"
if [ "$prefix" ] ; then
  PREFIX=$prefix
fi
# Set the test flag to true if test only
flag_parse "--tests_only" test_flag "$@"
if [ $test_flag ] ; then
test_flag=true
fi
# Set the prompts flag to false if no prompts are required
flag_parse "--no_prompts" prompts "$@"
if [ $prompts ] ; then
prompts=false
else
prompts=true
fi
# set the intermediate folder
arg_parse "-f" facility "$@"
if [ ! $facility ] ; then
facility=dls # change this default?
fi
export FACILITY=$facility
# set the intermediate folder
arg_parse "-c" conda_folder "$@"
if [ ! $conda_folder ] ; then
conda_folder=Savu_$savu_version
fi
# set the intermediate folder
arg_parse "-s" savu_recipe "$@"
if [ ! $savu_recipe ] ; then
savu_recipe=savu
elif [ $savu_recipe = 'master' ] ; then
savu_recipe=savu_master
else
echo "Unknown Savu installation version."
fi
#=========================library checking==============================
if [ $test_flag ] ; then
echo -e "\n============================================================="
echo -e " ......Thank you for running the Savu tests......\n"
echo -e "Performing a library check..."
else
echo -e "\n============================================================="
echo -e " ......Thank you for running the Savu installer......\n"
echo -e "Performing a library check..."
echo -e "\nNB: An MPI implementation is required to build Savu."
echo -e "fftw is required to build Savu."
echo -e "Cuda is desirable for a full range of plugins."
echo -e "\n============================================================="
fi
# set compiler wrapper
MPICC=$(command -v mpicc)
if ! [ "$MPICC" ]; then
echo "ERROR: I require mpicc but I can't find it. Check /path/to/mpi_implementation/bin is in your PATH"
exit 1
else
echo "Using mpicc: " $MPICC
fi
# check for fftw
CFLAGS=""
LDFLAGS=""
IFS=:
file_base=libfftw?.so
for p in ${LD_LIBRARY_PATH}; do
file_path=${p}/$file_base
if [ "x$p" != "x" -a -e $file_path ]; then
FFTWHOME=${file_path%/lib/libfftw?.so}
CFLAGS="$FFTWHOME/include"
LDFLAGS="$FFTWHOME/lib"
break
fi
done
if [ "$CFLAGS" ]; then
echo "Using fftw: " $FFTWHOME
else
echo "fftw has not been found."
fi
# check for cuda
nvcc=`command -v nvcc`
CUDAHOME=${nvcc%/bin/nvcc}
if [ "$CUDAHOME" ]; then
echo "Using cuda: " $CUDAHOME
else
echo "cuda has not been found."
fi
if [ $test_flag ] && [ $prompts = true ] ; then
PYTHONHOME=`command -v conda`
PYTHONHOME=${PYTHONHOME%conda}
if [ ! $PYTHONHOME ] ; then
echo -e "No conda environment found in PATH. Try:"
echo -e " >>> source <path_to_savu_installation>/savu_setup.sh"
echo -e "Aborting the tests."
exit 1
fi
echo -e "=============================================================\n"
while true ; do
read -n 1 -p "Are you happy to proceed with the tests? (y/n): " input
if [ "$input" = "y" ]; then
echo -e "\nProceeding with the tests."
break
elif [ "$input" = "n" ]; then
echo -e "\nAborting the tests."
exit 0
else
echo -e "\nYour input was unknown.\n"
fi
done
elif [ $prompts = true ] ; then
echo -e "=============================================================\n"
while true ; do
read -n 1 -p "Are you happy to proceed with the installation? (y/n): " input
if [ "$input" = "y" ]; then
echo -e "\nYour input was yes"
break
elif [ "$input" = "n" ]; then
echo -e "\nInstallation process terminated."
exit 0
else
echo -e "\nYour input was unknown.\n"
fi
done
#=====================installing other packages==========================
echo -e "\nInstalling Savu in" $PREFIX
read -p ">>> Press ENTER to continue or input a different path: " input
if [ "$input" != "" ]; then
PREFIX=$input
fi
while true; do
if [ -d "$PREFIX" ]; then
PREFIX=$PREFIX/$conda_folder/
break
fi
echo "The path" $PREFIX "is not recognised"
read -p ">>> Please input a different installation path: " input
PREFIX=$input
done
if [ -d "$PREFIX" ]; then
echo
while true ; do
read -n 1 -p "The folder $PREFIX already exists. Continue? [y/n]" input
if [ "$input" = "y" ]; then
echo -e "\nStarting the installation........"
break
elif [ "$input" = "n" ]; then
echo -e "\nInstallation process terminated."
exit 0
else
echo -e "\nYour input was unknown.\n\n"
fi
done
else
# create the folder
mkdir -p $PREFIX
fi
else
if [ ! -d "$PREFIX" ] ; then
mkdir -p $PREFIX
fi
fi
echo -e "\nThank you! Installing Savu into" $PREFIX"\n"
wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O $PREFIX/miniconda.sh;
bash $PREFIX/miniconda.sh -b -p $PREFIX/miniconda
PYTHONHOME=$PREFIX/miniconda/bin
export PATH="$PYTHONHOME:$PATH"
conda install -y -q conda-build
echo
conda info | grep 'root environment'
echo
conda env update -n root -f $DIR/environment.yml
echo "Building Savu..."
conda build $DIR/$savu_recipe
savubuild=`conda build $DIR/$savu_recipe --output`
echo "Installing Savu..."
conda install -y -q --use-local $savubuild
path=$(python -c "import savu; import os; print os.path.abspath(savu.__file__)")
savu_path=${path%/savu/__init__.pyc}
# get the savu version
install_path=$(python -c "import savu; import savu.version as sv; print sv.__install__")
recipes=$savu_path/$install_path/conda-recipes
launcher_path=`command -v savu_launcher.sh`
launcher_path=${launcher_path%/savu_launcher.sh}
if [ "$facility" ]; then
cp $savu_path/mpi/$facility/savu_launcher.sh $launcher_path
cp $savu_path/mpi/$facility/savu_mpijob.sh $launcher_path
fi
#-----------------------------------------------------------------
echo "Installing pyfai..."
pip install pyfai
#-----------------------------------------------------------------
#-----------------------------------------------------------------
echo "Installing mpi4py..."
mpi4py_version=`cat $recipes/mpi4py/version.txt`
env MPICC=$MPICC pip install mpi4py==$mpi4py_version
#-----------------------------------------------------------------
#-----------------------------------------------------------------
# Build the hdf5 conda recipe locally, then install the resulting
# package (without dependency resolution) in place of any existing one.
echo "Building hdf5..."
conda uninstall -y -q hdf5
conda build $recipes/hdf5
# --output prints the path of the package the build just produced
hdf5build=`conda build $recipes/hdf5 --output`
echo "Installing hdf5..."
conda install -y -q --use-local $hdf5build --no-deps
#-----------------------------------------------------------------
#-----------------------------------------------------------------
# Same build-then-install dance for h5py (tests skipped: they need MPI)
echo "Building h5py..."
conda uninstall -y -q h5py
conda build $recipes/h5py --no-test
h5pybuild=`conda build $recipes/h5py --output`
echo "Installing h5py..."
conda install -y -q --use-local $h5pybuild --no-deps
#-----------------------------------------------------------------
#-----------------------------------------------------------------
echo "Building astra toolbox..."
conda build $recipes/astra
astrabuild=`conda build $recipes/astra --output`
echo "Installing astra toolbox..."
conda install -y -q --use-local $astrabuild --no-deps
# register astra's .pth file so python can import it from site-packages
# NOTE(review): "print site..." is Python-2-only syntax; this fails under
# a Python 3 interpreter — confirm the target environment
site_path=$(python -c "import site; print site.getsitepackages()[0]")
cp $recipes/astra/astra.pth $site_path
# remembered for the generated savu_setup.sh LD_LIBRARY_PATH line below
astra_lib_path=$site_path/astra/lib
#-----------------------------------------------------------------
#-----------------------------------------------------------------
echo "Building xraylib..."
conda build $recipes/xraylib
xraylibbuild=`conda build $recipes/xraylib --output`
echo "Installing xraylib..."
conda install -y -q --use-local $xraylibbuild --no-deps
#-----------------------------------------------------------------
#-----------------------------------------------------------------
echo "Installing tomopy..."
# these packages were missing in copied environment
conda install -y -q -c dgursoy tomopy --no-deps
conda install -y -q -c dgursoy dxchange --no-deps
#-----------------------------------------------------------------
#-----------------------------------------------------------------
#echo "Building xdesign"
#conda build $recipes/xdesign
#xdesignbuild=`conda build $recipes/xdesign --output`
#echo "Installing xdesign"
#conda install -y -q --use-local $xdesignbuild --no-deps
#-----------------------------------------------------------------
echo -e "\n\t***************************************************"
echo -e "\t Package installation complete"
echo -e "\t Check $error_log for errors"
echo -e "\t***************************************************\n"
# No --tests_only flag: optionally offer to run the tests now, then write
# a savu_setup.sh environment script next to the install so users can
# "source" it in new shells.
if [ ! $test_flag ] ; then
  if [ $prompts = true ] ; then
    while true; do
      read -n 1 -p "Would you like to run the tests? (y/n): " input
      if [ "$input" = "y" ]; then
        echo -e "\nYour input was yes"
        test_flag=true
        break
      elif [ "$input" = "n" ]; then
        echo -e "Aborting test run..."
        echo -e "To run the tests later type: "
        echo -e "    >>> bash savu_v2.1/savu_installer.sh --tests_only"
        exit 0
      else
        echo -e "\nYour input was unknown.\n"
      fi
    done
  fi
  setup_script=$PREFIX/'savu_setup.sh'
  echo -e "\nCreating a Savu setup script" $setup_script
  ( [ -e "$setup_script" ] || touch "$setup_script" ) && [ ! -w "$setup_script" ] && echo cannot write to $setup_script && exit 1
  MPIHOME="$(dirname "$(dirname $MPICC)")"
  # fix: the original wrote '#!bin/bash' (missing leading slash)
  echo '#!/bin/bash' > $setup_script
  # fix: the original's ""..."" double-double quoting only worked by
  # accident; \$VAR keeps the variable reference literal in the output
  echo "export PATH=$MPIHOME/bin:\$PATH" >> $setup_script
  echo "export LD_LIBRARY_PATH=$MPIHOME/lib:\$LD_LIBRARY_PATH" >> $setup_script
  # fix: the original emitted 'export PYTHONUSERSITE True' (missing '='),
  # which exports two unrelated names instead of assigning the variable
  echo "export PYTHONUSERSITE=True" >> $setup_script
  echo "export PATH=$PYTHONHOME:\$PATH" >> $setup_script
  echo "export LD_LIBRARY_PATH=$PYTHONHOME/lib:\$LD_LIBRARY_PATH" >> $setup_script
  echo "export LD_LIBRARY_PATH=$astra_lib_path:\$LD_LIBRARY_PATH" >> $setup_script
  if [ "$CUDAHOME" ]; then
    echo "export PATH=$CUDAHOME/bin:\$PATH" >> $setup_script
    echo "export LD_LIBRARY_PATH=$CUDAHOME/lib64:\$LD_LIBRARY_PATH" >> $setup_script
  fi
  if [ "$FFTWHOME" ]; then
    echo "export FFTWDIR=$FFTWHOME" >> $setup_script
    echo "export LD_LIBRARY_PATH=$FFTWHOME/lib:\$LD_LIBRARY_PATH" >> $setup_script
  fi
  source $setup_script
fi
# Run the quick, full, and MPI-local test suites when requested.
if [ $test_flag ] ; then
  # fix: 'print count' is Python-2-only syntax; print(count) parses on 2 and 3
  nGPUs=$(python -c "import savu.core.utils as cu; p, count = cu.get_available_gpus(); print(count)")
  echo -e "\n***** Testing Savu setup *****\n"
  savu_quick_tests
  echo -e "\n*****Running Savu single-threaded local tests *****\n"
  savu_full_tests
  echo -e "\n************** Single-threaded local tests complete ******************\n"
  test_dir=`mktemp -d`
  tmp_dir=`mktemp -d`
  tmpfile=$tmp_dir/temp_output.txt
  touch $tmpfile
  echo "tmp file is" $tmpfile
  echo -e "\n***** Running Savu MPI local CPU tests *****\n"
  local_mpi_cpu_test.sh $test_dir -r $tmpfile
  result=$(grep -i "Processing Complete" $tmpfile)
  # fix: '[ ! $result ]' errors out ("too many arguments") when grep
  # returns a whole line; -z with quoting is the correct emptiness test
  if [ -z "$result" ] ; then
    echo -e "\n****The tests have errored: See $tmpfile for more details****\n"
  else
    echo -e "\n***Test successfully completed!***\n"
  fi
  if [ "$nGPUs" -gt 0 ]; then
    echo -e "\n***** Running Savu MPI local GPU tests *****\n"
    local_mpi_gpu_test.sh $test_dir
  else
    echo -e "\n***** Skipping Savu MPI local GPU tests (no GPUs found) *****\n"
  fi
  rm -r $test_dir
  echo -e "\n************** MPI local tests complete ******************\n"
  # Cluster installs need the launcher scripts edited by hand; hold the
  # user here until they acknowledge the notice.
  while true ; do
    read -n 1 -p "Are you installing Savu for cluster use? (y/n): " input
    if [ "$input" = "y" ]; then
      launcher_path=`command -v savu_launcher.sh`
      mpijob_path=`command -v savu_mpijob.sh`
      echo -e "\n\n===============================IMPORTANT NOTICE================================="
      echo -e "To run Savu across a cluster you will need to update the savu laucher scripts:"
      echo -e "\n$launcher_path"
      echo -e "$mpijob_path\n"
      echo -e "Once these are update, run the cluster MPI tests:\n\t >>> mpi_cpu_test.sh <output_dir> "
      echo -e "\t >>> mpi_gpu_test.sh <output_dir>."
      echo -e "================================================================================\n"
      while true ; do
        read -n 1 -p "Continue? (y): " input
        if [ "$input" = "y" ]; then
          break
        else
          echo
        fi
      done
      echo
      break
    elif [ "$input" = "n" ]; then
      break
    else
      echo -e "\nYour input was unknown.\n"
    fi
  done
fi
# Final banner: installation summary (no tests) or test summary.
if [ ! $test_flag ] ; then
  # fix: the original referenced undefined $savu_setup; the generated
  # environment script path is stored in $setup_script
  echo -e "\n\nTo run Savu type 'source $setup_script' to set relevant paths every time you open a new terminal."
  echo -e "Alternatively, if you are using the Modules system, see $DIR/module_template for an example module file."
  echo -e "*************** SAVU INSTALLATION COMPLETE! ******************\n"
  echo -e "    ......Thank you for running the Savu installer......\n"
  echo -e "=============================================================\n"
else
  echo -e "\n\n*************** SAVU TESTS COMPLETE! ******************\n"
  echo -e "    ......Thank you for running the Savu tests......\n"
  echo -e "    Please check $tmpfile for errors\n"
  echo -e "=======================================================\n"
fi
exit 0
|
import React, {
useState,
useLayoutEffect,
FC,
useContext,
useRef,
Fragment,
PropsWithChildren
} from 'react'
import mapValues from 'lodash/mapValues'
import i18n, { TFunction } from 'i18next'
import { getConfig, addConfigListener } from '@/_helpers/config-manager'
import zip from 'lodash/zip'
/** Language codes supported by the extension UI. */
export type LangCode = 'zh-CN' | 'zh-TW' | 'en'

/** i18next namespaces; 'dicts' is assembled from per-dictionary locale files. */
export type Namespace =
  | 'common'
  | 'content'
  | 'langcode'
  | 'menus'
  | 'options'
  | 'popup'
  | 'wordpage'
  | 'dicts'

/** One message translated into every supported language. */
export interface RawLocale {
  'zh-CN': string
  'zh-TW': string
  en: string
}

/** Message key -> all-language translations. */
export interface RawLocales {
  [message: string]: RawLocale
}

/** Shape of a dictionary's _locales module before language selection. */
export interface RawDictLocales {
  name: RawLocale
  options?: RawLocales
  helps?: RawLocales
}

/** A dictionary's strings after a single language has been picked. */
export interface DictLocales {
  name: string
  options?: {
    [message: string]: any
  }
  helps?: {
    [message: string]: any
  }
}
/**
 * Initialize the shared i18next instance.
 *
 * - Reads the saved language from the extension config.
 * - Registers an inline lazy "backend": the special 'dicts' namespace is
 *   assembled from every dictionary's _locales file; all other namespaces
 *   load from `@/_locales/<lang>/<ns>.ts`.
 * - Subscribes to config changes so later language switches propagate
 *   into i18next.
 *
 * Returns the configured i18next instance.
 */
export async function i18nLoader() {
  const { langCode } = await getConfig()
  await i18n
    .use({
      type: 'backend',
      init: () => {},
      create: () => {},
      read: async (lang: LangCode, ns: Namespace, cb: Function) => {
        try {
          if (ns === 'dicts') {
            // dictionary strings live next to each dictionary module,
            // not under @/_locales
            const dictLocals = extractDictLocales(lang)
            cb(null, dictLocals)
            return dictLocals
          }
          const { locale } = await import(
            /* webpackInclude: /_locales\/[^/]+\/[^/]+\.ts$/ */
            /* webpackMode: "lazy" */
            `@/_locales/${lang}/${ns}.ts`
          )
          cb(null, locale)
          return locale
        } catch (err) {
          cb(err)
        }
      }
    })
    .init({
      lng: langCode,
      fallbackLng: false,
      whitelist: ['en', 'zh-CN', 'zh-TW'],
      debug: process.env.NODE_ENV === 'development',
      saveMissing: false,
      load: 'currentOnly',
      ns: 'common',
      defaultNS: 'common',
      interpolation: {
        escapeValue: false // not needed for react as it escapes by default
      }
    })
  // keep i18next in sync with language changes made elsewhere
  addConfigListener(({ newConfig }) => {
    if (i18n.language !== newConfig.langCode) {
      i18n.changeLanguage(newConfig.langCode)
    }
  })
  return i18n
}
// Stable no-op translator used before i18next has finished loading.
const defaultT: i18n.TFunction = () => ''

// Current language code, or undefined until i18next is initialized.
export const I18nContext = React.createContext<string | undefined>(undefined)
if (process.env.DEBUG) {
  I18nContext.displayName = 'I18nContext'
}
/**
 * Provides the current i18next language to descendants via I18nContext.
 *
 * Fix: the original only attached the 'languageChanged' listener (and only
 * set `lang`) when `i18n.language` was still empty. If i18next had already
 * been initialized before this provider mounted (e.g. a remount), `lang`
 * stayed undefined forever and language changes were never observed.
 */
export const I18nContextProvider: FC = ({ children }) => {
  const [lang, setLang] = useState<string | undefined>(undefined)
  useLayoutEffect(() => {
    const setLangCallback = () => {
      setLang(i18n.language)
    }
    if (i18n.language) {
      // already initialized: sync immediately and track future changes
      setLangCallback()
      i18n.on('languageChanged', setLangCallback)
    } else {
      i18nLoader().then(() => {
        setLang(i18n.language)
        i18n.on('languageChanged', setLangCallback)
      })
    }
    return () => {
      // safe even if the listener was never attached
      i18n.off('languageChanged', setLangCallback)
    }
  }, [])
  return React.createElement(I18nContext.Provider, { value: lang }, children)
}
/** Return value of useTranslate. */
export interface UseTranslateResult {
  /**
   * fixedT with the first namespace as default.
   * It is a wrapper of the original fixedT, which
   * keeps the same reference even after namespaces are loaded
   */
  t: i18n.TFunction
  /** The shared i18next instance. */
  i18n: i18n.i18n
  /**
   * Are namespaces loaded?
   * false not ready
   * otherwise it is a non-zero positive number
   * that changes everytime when new namespaces are loaded.
   */
  ready: false | number
}
/**
 * Tailored for this project.
 * The official `useTranslation` is too heavy.
 * @param namespaces will not monitor namespace changes.
 */
export function useTranslate(
  namespaces?: Namespace | Namespace[]
): UseTranslateResult {
  // bumped every time namespaces become ready; doubles as the `ready` token
  const ticketRef = useRef(0)
  // the actual translator; swapped out when namespaces load
  const innerTRef = useRef<TFunction>(defaultT)
  // keep the exposed t function always the same
  const tRef = useRef<TFunction>((...args: Parameters<TFunction>) =>
    innerTRef.current(...args)
  )
  const lang = useContext(I18nContext)
  // Build a result object; t === null means "keep the current translator".
  const genResult = (t: TFunction | null, ready: boolean) => {
    if (t) {
      innerTRef.current = t
    }
    if (ready) {
      // wrap around to avoid unbounded growth; stays non-zero positive
      ticketRef.current = (ticketRef.current + 1) % 100000
    }
    const result: UseTranslateResult = {
      t: tRef.current,
      i18n,
      ready: ready ? ticketRef.current : false
    }
    return result
  }
  // Initial state: ready immediately when the language is known and the
  // requested namespaces are already in the resource store.
  const [result, setResult] = useState<UseTranslateResult>(() => {
    if (!lang) {
      return genResult(defaultT, false)
    }
    if (!namespaces) {
      return genResult(i18n.t, true)
    }
    if (
      Array.isArray(namespaces)
        ? namespaces.every(ns => i18n.hasResourceBundle(lang, ns))
        : i18n.hasResourceBundle(lang, namespaces)
    ) {
      return genResult(i18n.getFixedT(lang, namespaces), true)
    }
    return genResult(defaultT, false)
  })
  // Re-resolve whenever the language changes; lazily load namespaces that
  // are not in the store yet.
  useLayoutEffect(() => {
    let isEffectRunning = true
    if (lang) {
      if (namespaces) {
        if (
          Array.isArray(namespaces)
            ? namespaces.every(ns => i18n.hasResourceBundle(lang, ns))
            : i18n.hasResourceBundle(lang, namespaces)
        ) {
          setResult(genResult(i18n.getFixedT(lang, namespaces), true))
        } else {
          // keep the old t while marking not ready
          setResult(genResult(null, false))
          i18n.loadNamespaces(namespaces).then(() => {
            // guard against updating state after unmount / lang change
            if (isEffectRunning) {
              setResult(genResult(i18n.getFixedT(lang, namespaces), true))
            }
          })
        }
      } else {
        setResult(genResult(i18n.t, true))
      }
    }
    return () => {
      isEffectRunning = false
    }
  }, [lang])
  return result
}
/**
 * <Trans message="a{b}c{d}e">
 *   <h1>b</h1>
 *   <p>d</p>
 * </Trans>
 * ↓
 * [
 *   "a",
 *   <h1>b</h1>,
 *   "c",
 *   <p>d</p>,
 *   "e"
 * ]
 */
export const Trans = React.memo<PropsWithChildren<{ message?: string }>>(
  ({ message, children }) => {
    if (!message) return null
    // zip interleaves the text segments with the child elements; React
    // flattens the resulting array of pairs and skips undefined padding
    return React.createElement(
      Fragment,
      null,
      zip(
        message.split(/{[^}]*?}/),
        Array.isArray(children) ? children : [children]
      )
    )
  }
)
/**
 * Collect every dictionary's _locales module (via webpack's
 * require.context) and flatten it to the strings of a single language.
 * Keys of the returned object are dictionary ids (directory names).
 */
function extractDictLocales(lang: LangCode) {
  const req = require.context(
    '@/components/dictionaries',
    true,
    /_locales\.(json|ts)$/
  )
  return req.keys().reduce<{ [id: string]: DictLocales }>((o, filename) => {
    const localeModule = req(filename)
    // ts modules export { locales }; json files are the object itself
    const json: RawDictLocales = localeModule.locales || localeModule
    const dictId = /([^/]+)\/_locales\.(json|ts)$/.exec(filename)![1]
    o[dictId] = {
      name: json.name[lang]
    }
    if (json.options) {
      o[dictId].options = mapValues(json.options, rawLocale => rawLocale[lang])
    }
    if (json.helps) {
      o[dictId].helps = mapValues(json.helps, rawLocale => rawLocale[lang])
    }
    return o
  }, {})
}
|
import string
from collections import namedtuple, UserDict
from os.path import join, dirname
# Directory holding the test fixture files (../tests relative to this module).
HERE = join(dirname(__file__), '..', 'tests')
# (line, column) pair returned by get_pos; line numbers are 1-based.
Pos = namedtuple('Pos', 'line,column')


def get_pos(pos, instring):
    """Translate character offset ``pos`` within ``instring`` to a Pos.

    Lines are 1-based.  Columns are 0-based on every line after the
    first; on the first line (no preceding newline) the value is
    ``pos + 1``.  NOTE(review): that first-line asymmetry looks
    unintentional but is preserved here — confirm before changing.
    """
    before = instring[:pos]
    line_no = before.count('\n') + 1
    last_nl = before.rfind('\n')
    col = pos + 1 if last_nl == -1 else pos - last_nl - 1
    return Pos(line_no, col)
def as_bin(i, bits=8):
    """Render ``i`` (an int, or a string holding one) as a tuple of
    0/1 ints, zero-padded on the left to ``bits`` digits."""
    value = int(i) if isinstance(i, str) else i
    padded = bin(value)[2:].zfill(bits)
    return tuple(int(ch) for ch in padded)
def from_bin(i):
    """Inverse of ``as_bin``: decode a sequence of binary digits to an int.

    A 9-element input is treated as sign-magnitude: the first element is
    the sign bit, the remaining 8 the magnitude.

    Fixes: the original left ``sign`` unbound (NameError) for any input
    that was not 9 elements long, and treated a ``'0'`` sign *character*
    as truthy — wrongly negating 9-character string inputs.
    """
    sign = 0
    if len(i) == 9:
        sign, *i = i
    magnitude = int(''.join(map(str, i)), 2)
    return -magnitude if int(sign) else magnitude
class TypeGetter:
    """Resolves custom component types from a loaded circuit graph."""

    def __init__(self, graph):
        # graph: object exposing get_custom(ttype) -> component (or falsy)
        self.graph = graph

    def get(self, ttype):
        """Return a factory ``name -> CustomComponentImplementation`` for
        the custom component registered under ``ttype``.

        Raises:
            KeyError: if the graph has no such custom component.
        """
        # imported lazily to avoid a circular import at module load time
        # NOTE(review): presumed reason — confirm against circuitry layout
        from circuitry.connectable_impls import CustomComponentImplementation
        component = self.graph.get_custom(ttype)
        if not component:
            raise KeyError('No such component as "{}"'.format(ttype))
        return lambda name: CustomComponentImplementation(name, component, [])
def get_test_graph(filename):
    """Load ``filename`` from the shared tests directory (HERE)."""
    return get_graph(join(HERE, filename))
def get_graph(filename):
    """Load ``filename`` as a circuit graph wrapped in a TypeGetter."""
    from circuitry.graph import load_graph
    return TypeGetter(load_graph(filename=filename))
def get_custom_component(filename, ttype):
    """Shortcut: load the graph in ``filename`` and resolve ``ttype``."""
    return get_graph(filename).get(ttype)
class StateDict(UserDict):
    """Mapping of pin name -> logic level that only accepts 0 or 1.

    Fix: ``UserDict`` was never imported in this module, so merely
    defining this class raised NameError; it now comes from
    ``collections``.
    """

    def __setitem__(self, name, val):
        # reject anything that is not a plain binary logic level
        assert val in {0, 1}
        super().__setitem__(name, val)
def build_pins(num, binary):
    """Map pin names ``'a<num>'``..``'z<num>'`` onto the bits of ``binary``.

    Only as many pins as there are bits are produced (``zip`` stops at
    the shorter sequence).  Idiom fix: uses ``string.ascii_lowercase``
    directly instead of ``ascii_letters[:26]`` (the same 26 characters).
    """
    pins = ['{}{}'.format(letter, num) for letter in string.ascii_lowercase]
    return dict(zip(pins, binary))
|
#!/bin/sh
#
# STIG URL: http://www.stigviewer.com/stig/red_hat_enterprise_linux_6/2014-06-11/finding/V-38457
# Finding ID: V-38457
# Version: RHEL-06-000041
# Finding Level: Medium
#
# The /etc/passwd file must have mode 0644 or less permissive. If the
# "/etc/passwd" file is writable by a group-owner or the world the risk
# of its compromise is increased. The file contains the list of
# accounts on the system and associated information, and ...
#
# CCI: CCI-000366
# NIST SP 800-53 :: CM-6 b
# NIST SP 800-53A :: CM-6.1 (iv)
# NIST SP 800-53 Revision 4 :: CM-6 b
#
############################################################
# Standard outputter function: prints its single argument on its own
# line. printf keeps behaviour predictable for arbitrary text.
diag_out() {
   printf '%s\n' "${1}"
}
# Banner identifying which STIG finding this checker covers.
diag_out "----------------------------------"
diag_out "STIG Finding ID: V-38457"
diag_out " Ensure passwd file is set to"
diag_out " mode 0644 (or better)"
diag_out "----------------------------------"
|
#!/bin/bash
#
# Test the build system for basic consistency (Cmake/idf.py version)
#
# A bash script that tests some likely build failure scenarios in a row
#
# Assumes PWD is an out-of-tree build directory, and will create a
# subdirectory inside it to run build tests in.
#
# Environment variables:
# IDF_PATH - must be set
# ESP_IDF_TEMPLATE_GIT - Can override git clone source for template app. Otherwise github.
# NOCLEANUP - Set to '1' if you want the script to leave its temporary directory when done, for post-mortem.
#
#
# Internals:
# * The tests run in sequence & the system keeps track of all failures to print at the end.
# * BUILD directory is set to default BUILD_DIR_BASE
# * The "print_status" function both prints a status line to the log and keeps track of which test is running.
# * Calling the "failure" function prints a failure message to the log and also adds to the list of failures to print at the end.
# * The function "assert_built" tests for a file relative to the BUILD directory.
# * The function "take_build_snapshot" can be paired with the functions "assert_rebuilt" and "assert_not_rebuilt" to compare file timestamps and verify if they were rebuilt or not since the snapshot was taken.
#
# To add a new test case, add it to the end of the run_tests function. Note that not all test cases do comprehensive cleanup
# (although very invasive ones like appending CRLFs to all files take a copy of the esp-idf tree), however the clean_build_dir
# function can be used to force-delete all files from the build output directory.
# Set up some variables
#
# override ESP_IDF_TEMPLATE_GIT to point to a local dir if you're testing and want fast iterations
# (fix: the variable is now quoted so the -z test cannot break on odd values)
[ -z "${ESP_IDF_TEMPLATE_GIT}" ] && ESP_IDF_TEMPLATE_GIT=https://github.com/espressif/esp-idf-template.git

# uncomment next line to produce a lot more debug output
#export V=1

export PATH="$IDF_PATH/tools:$PATH"  # for idf.py
# Run all build-system test cases in sequence, recording failures in
# ${FAILURES} (via failure()) and exiting non-zero if any occurred.
function run_tests()
{
    FAILURES=
    STATUS="Starting"
    print_status "Checking prerequisites"
    [ -z "${IDF_PATH}" ] && echo "IDF_PATH is not set. Need path to esp-idf installation." && exit 2

    print_status "Cloning template from ${ESP_IDF_TEMPLATE_GIT}..."
    git clone ${ESP_IDF_TEMPLATE_GIT} template
    cd template
    if [ -z "$CHECKOUT_REF_SCRIPT" ]; then
        git checkout ${CI_BUILD_REF_NAME} || echo "Using esp-idf-template default branch..."
    else
        $CHECKOUT_REF_SCRIPT esp-idf-template
    fi

    print_status "Try to clean fresh directory..."
    idf.py fullclean || exit $?

    # all relative to the build directory
    BOOTLOADER_BINS="bootloader/bootloader.elf bootloader/bootloader.bin"
    APP_BINS="app-template.elf app-template.bin"
    PARTITION_BIN="partition_table/partition-table.bin"

    print_status "Initial clean build"
    # if build fails here, everything fails
    idf.py build || exit $?
    # check all the expected build artifacts from the clean build
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Updating component source file rebuilds component"
    # touch a file & do a build
    take_build_snapshot
    touch ${IDF_PATH}/components/esp32/cpu_start.c
    idf.py build || failure "Failed to partial build"
    assert_rebuilt ${APP_BINS} esp32/libesp32.a esp32/CMakeFiles/esp32.dir/cpu_start.c.obj
    assert_not_rebuilt lwip/liblwip.a freertos/libfreertos.a ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Bootloader source file rebuilds bootloader"
    take_build_snapshot
    touch ${IDF_PATH}/components/bootloader/subproject/main/bootloader_start.c
    idf.py build || failure "Failed to partial build bootloader"
    assert_rebuilt ${BOOTLOADER_BINS} bootloader/main/CMakeFiles/main.dir/bootloader_start.c.obj
    assert_not_rebuilt ${APP_BINS} ${PARTITION_BIN}

    print_status "Partition CSV file rebuilds partitions"
    take_build_snapshot
    touch ${IDF_PATH}/components/partition_table/partitions_singleapp.csv
    idf.py build || failure "Failed to build partition table"
    assert_rebuilt ${PARTITION_BIN}
    assert_not_rebuilt app-template.bin app-template.elf ${BOOTLOADER_BINS}

    print_status "Partial build doesn't compile anything by default"
    take_build_snapshot
    # verify no build files are refreshed by a partial make
    # fix: the original grep -v '^.' matched every non-empty line, leaving
    # ALL_BUILD_FILES empty and making this check vacuous; '^\.' excludes
    # only dotfiles (ninja internals etc.) as intended
    ALL_BUILD_FILES=$(find ${BUILD} -type f | sed "s@${BUILD}/@@" | grep -v '^\.')
    idf.py build || failure "Partial build failed"
    assert_not_rebuilt ${ALL_BUILD_FILES}

    print_status "Moving BUILD_DIR_BASE out of tree"
    clean_build_dir
    OUTOFTREE_BUILD=${TESTDIR}/alt_build
    idf.py -B "${OUTOFTREE_BUILD}" build || failure "Failed to build with out-of-tree build dir"
    # fix: original referenced misspelled ${OUTOFREE_BUILD} (always empty,
    # so find searched the CWD instead of the new build dir)
    NEW_BUILD_FILES=$(find ${OUTOFTREE_BUILD} -type f)
    if [ -z "${NEW_BUILD_FILES}" ]; then
        failure "No files found in new build directory!"
    fi
    DEFAULT_BUILD_FILES=$(find ${BUILD} -mindepth 1)
    if [ -n "${DEFAULT_BUILD_FILES}" ]; then
        failure "Some files were incorrectly put into the default build directory: ${DEFAULT_BUILD_FILES}"
    fi

    print_status "BUILD_DIR_BASE inside default build directory"
    clean_build_dir
    idf.py -B "build/subdirectory" build || failure "Failed to build with build dir as subdir"
    NEW_BUILD_FILES=$(find ${BUILD}/subdirectory -type f)
    if [ -z "${NEW_BUILD_FILES}" ]; then
        failure "No files found in new build directory!"
    fi

    print_status "Can still clean build if all text files are CRLFs"
    clean_build_dir
    find . -path .git -prune -exec unix2dos {} \; # CRLFify template dir
    # make a copy of esp-idf and CRLFify it
    CRLF_ESPIDF=${TESTDIR}/esp-idf-crlf
    mkdir -p ${CRLF_ESPIDF}
    cp -r ${IDF_PATH}/* ${CRLF_ESPIDF}
    # don't CRLFify executable files, as Linux will fail to execute them
    # fix: the original chained -prune tests without -o, an expression that
    # can never match, so no files were ever converted and the test was
    # vacuous; this prunes .git and build dirs and converts the rest
    find ${CRLF_ESPIDF} \( -name .git -o -name build \) -prune -o -type f ! -perm 755 -exec unix2dos {} \;
    IDF_PATH=${CRLF_ESPIDF} idf.py build || failure "Failed to build with CRLFs in source"
    # do the same checks we do for the clean build
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Updating rom ld file should re-link app and bootloader"
    clean_build_dir
    idf.py build
    take_build_snapshot
    sleep 1  # ninja may ignore if the timestamp delta is too low
    cp ${IDF_PATH}/components/esp32/ld/esp32.rom.ld .
    echo "/* (Build test comment) */" >> ${IDF_PATH}/components/esp32/ld/esp32.rom.ld
    tail ${IDF_PATH}/components/esp32/ld/esp32.rom.ld
    idf.py build || failure "Failed to rebuild with modified linker script"
    assert_rebuilt ${APP_BINS} ${BOOTLOADER_BINS}
    mv esp32.rom.ld ${IDF_PATH}/components/esp32/ld/

    print_status "Updating app-only ld file should only re-link app"
    take_build_snapshot
    cp ${IDF_PATH}/components/esp32/ld/esp32.common.ld .
    sleep 1  # ninja may ignore if the timestamp delta is too low
    echo "/* (Build test comment) */" >> ${IDF_PATH}/components/esp32/ld/esp32.common.ld
    idf.py build || failure "Failed to rebuild with modified linker script"
    assert_rebuilt ${APP_BINS}
    assert_not_rebuilt ${BOOTLOADER_BINS}
    mv esp32.common.ld ${IDF_PATH}/components/esp32/ld/

    print_status "sdkconfig update triggers full recompile"
    clean_build_dir
    idf.py build
    take_build_snapshot
    # need to actually change config, or cmake is too smart to rebuild
    sed -i s/^\#\ CONFIG_FREERTOS_UNICORE\ is\ not\ set/CONFIG_FREERTOS_UNICORE=y/ sdkconfig
    idf.py build
    # check the sdkconfig.h file was rebuilt
    assert_rebuilt config/sdkconfig.h
    # pick one each of .c, .cpp, .S that #includes sdkconfig.h
    # and therefore should rebuild
    assert_rebuilt newlib/CMakeFiles/newlib.dir/syscall_table.c.obj
    assert_rebuilt nvs_flash/CMakeFiles/nvs_flash.dir/src/nvs_api.cpp.obj
    assert_rebuilt freertos/CMakeFiles/freertos.dir/xtensa_vectors.S.obj

    print_status "Updating project CMakeLists.txt triggers full recompile"
    clean_build_dir
    idf.py build
    take_build_snapshot
    # Need to actually change the build config, or CMake won't do anything
    cp CMakeLists.txt CMakeLists.bak
    sed -i 's/^project(/add_compile_options("-DUSELESS_MACRO_DOES_NOTHING=1")\nproject\(/' CMakeLists.txt
    idf.py build || failure "Build failed"
    mv CMakeLists.bak CMakeLists.txt
    # similar to previous test
    assert_rebuilt newlib/CMakeFiles/newlib.dir/syscall_table.c.obj
    assert_rebuilt nvs_flash/CMakeFiles/nvs_flash.dir/src/nvs_api.cpp.obj
    assert_rebuilt freertos/CMakeFiles/freertos.dir/xtensa_vectors.S.obj

    print_status "Can build with Ninja (no idf.py)"
    clean_build_dir
    (cd build && cmake -G Ninja .. && ninja) || failure "Ninja build failed"
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Can build with GNU Make (no idf.py)"
    clean_build_dir
    mkdir build
    (cd build && cmake -G "Unix Makefiles" .. && make) || failure "Make build failed"
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Can build with IDF_PATH set via cmake cache not environment"
    clean_build_dir
    cp CMakeLists.txt CMakeLists.bak
    sed -i 's/ENV{IDF_PATH}/{IDF_PATH}/' CMakeLists.txt
    export IDF_PATH_BACKUP="$IDF_PATH"
    (unset IDF_PATH &&
         cd build &&
         cmake -G Ninja .. -DIDF_PATH=${IDF_PATH_BACKUP} &&
         ninja) || failure "Ninja build failed"
    mv CMakeLists.bak CMakeLists.txt
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "Can build with IDF_PATH unset and inferred by build system"
    clean_build_dir
    cp CMakeLists.txt CMakeLists.bak
    sed -i "s%\$ENV{IDF_PATH}%${IDF_PATH}%" CMakeLists.txt  # expand to a hardcoded path
    (unset IDF_PATH && cd build &&
         cmake -G Ninja .. && ninja) || failure "Ninja build failed"
    mv CMakeLists.bak CMakeLists.txt
    assert_built ${APP_BINS} ${BOOTLOADER_BINS} ${PARTITION_BIN}

    print_status "sdkconfig should have contents both files: sdkconfig and sdkconfig.defaults"
    idf.py clean > /dev/null;
    idf.py fullclean > /dev/null;
    rm -f sdkconfig.defaults;
    rm -f sdkconfig;
    echo "CONFIG_PARTITION_TABLE_OFFSET=0x10000" >> sdkconfig.defaults;
    echo "CONFIG_PARTITION_TABLE_TWO_OTA=y" >> sdkconfig;
    idf.py reconfigure > /dev/null;
    grep "CONFIG_PARTITION_TABLE_OFFSET=0x10000" sdkconfig || failure "The define from sdkconfig.defaults should be into sdkconfig"
    grep "CONFIG_PARTITION_TABLE_TWO_OTA=y" sdkconfig || failure "The define from sdkconfig should be into sdkconfig"

    print_status "Building a project with CMake and PSRAM workaround, all files compile with workaround"
    cp sdkconfig sdkconfig.psram
    rm -rf build
    echo "CONFIG_SPIRAM_SUPPORT=y" >> sdkconfig.psram
    echo "CONFIG_SPIRAM_CACHE_WORKAROUND=y" >> sdkconfig.psram
    # note: we do 'reconfigure' here, as we just need to run cmake
    idf.py reconfigure -D SDKCONFIG="`pwd`/sdkconfig.psram"
    grep -q '"command"' build/compile_commands.json || failure "compile_commands.json missing or has no no 'commands' in it"
    (grep '"command"' build/compile_commands.json | grep -v mfix-esp32-psram-cache-issue) && failure "All commands in compile_commands.json should use PSRAM cache workaround"
    rm sdkconfig.psram

    print_status "All tests completed"
    if [ -n "${FAILURES}" ]; then
        echo "Some failures were detected:"
        echo -e "${FAILURES}"
        exit 1
    else
        echo "Build tests passed."
    fi
}
# Announce the test case that is starting and remember its name in
# STATUS so failure() can attribute subsequent errors to it.
function print_status()
{
    STATUS="$1"
    printf '******** %s\n' "$1"
}
# Record a test failure: print an eye-catching banner and append
# "<current test> :: <message>\n" to the FAILURES list that run_tests
# prints at the end (the \n stays literal; it is expanded by echo -e).
function failure()
{
    printf '!!!!!!!!!!!!!!!!!!!\nFAILURE: %s\n!!!!!!!!!!!!!!!!!!!\n' "$1"
    FAILURES="${FAILURES}${STATUS} :: $1\n"
}
# Per-run scratch directory ($$ = this shell's PID keeps runs distinct)
TESTDIR=${PWD}/build_system_tests_$$
mkdir -p "${TESTDIR}"
# set NOCLEANUP=1 if you want to keep the test directory around
# for post-mortem debugging
# fix: SIGKILL cannot be trapped, so the original "EXIT KILL" spec was
# partly ineffective; EXIT alone covers every catchable exit path
[ -z "${NOCLEANUP}" ] && trap "rm -rf ${TESTDIR}" EXIT
SNAPSHOT=${TESTDIR}/snapshot
BUILD=${TESTDIR}/template/build
# copy all the build output to a snapshot directory
# (replacing any previous snapshot; -a preserves timestamps so the
# assert_* helpers can compare them later)
function take_build_snapshot()
{
    rm -rf "${SNAPSHOT}"
    cp -ap "${TESTDIR}/template/build" "${SNAPSHOT}"
}
# verify that all the arguments are present in the build output directory
function assert_built()
{
    while [ -n "$1" ]; do
        if [ ! -f "${BUILD}/$1" ]; then
            failure "File $1 should be in the build output directory"
        fi
        shift
    done
}
# Test if a file has been rebuilt.
# can't use [ a -ot b ] here as -ot only gives second resolution
# but stat -c %y seems to be microsecond at least for tmpfs, ext4..
function file_was_rebuilt()
{
    local before after
    before=$(stat -c %y "${SNAPSHOT}/$1")
    after=$(stat -c %y "${BUILD}/$1")
    [ "${before}" != "${after}" ]
}
# verify all the arguments passed in were rebuilt relative to the snapshot
function assert_rebuilt()
{
    while [ -n "$1" ]; do
        assert_built "$1"
        if [ ! -f "${SNAPSHOT}/$1" ]; then
            failure "File $1 should be in original build snapshot"
        fi
        if ! file_was_rebuilt "$1"; then
            failure "File $1 should have been rebuilt"
        fi
        shift
    done
}
# verify all the arguments are in the build directory & snapshot,
# but were not rebuilt
function assert_not_rebuilt()
{
    while [ -n "$1" ]; do
        assert_built "$1"
        if [ ! -f "${SNAPSHOT}/$1" ]; then
            failure "File $1 should be in snapshot build directory"
        fi
        if file_was_rebuilt "$1"; then
            failure "File $1 should not have been rebuilt"
        fi
        shift
    done
}
# do a "clean" that doesn't depend on idf.py
function clean_build_dir()
{
rm -rf --preserve-root ${BUILD}/* ${BUILD}/.*
}
# Enter the scratch directory and execute the whole suite.
cd ${TESTDIR}
run_tests
|
<filename>src/client/client.js
/* @flow */
// Browser entry point: hydrate the app with the server-provided state.
import React from 'react';
import ReactDOM from 'react-dom';
import { BrowserRouter } from 'react-router-dom';
import App from '../shared/containers/App';
import { Provider } from 'mobx-react';
import Stores from '../shared/stores/stores';

// window.__STATE__ is the state serialized by the server render
// NOTE(review): assumed to be injected by the SSR HTML — confirm
const stores = new Stores(window.__STATE__);

ReactDOM.render(
  <Provider stores={stores}>
    <BrowserRouter>
      <App />
    </BrowserRouter>
  </Provider>,
  document.getElementById('app'),
);
|
<reponame>hedzr/voxr-lite<gh_stars>0
/*
* Copyright © 2019 <NAME>.
*/
package rand
import (
"errors"
balance2 "github.com/hedzr/voxr-lite/internal/scheduler/balance"
"math/rand"
)
type (
	// RandomBalancer picks a service peer uniformly at random.
	// It is stateless, so Clone and UpdateScales are trivial.
	RandomBalancer struct {
		// lastPicked int
	}
)
// New returns a fresh, stateless random balancer.
func New() balance2.Balancer {
	return new(RandomBalancer)
}
// Name reports the strategy identifier registered for random balancing.
func (s *RandomBalancer) Name() (name string) {
	name = balance2.RANDOM
	return
}
// Clone returns an independent balancer; RandomBalancer keeps no
// state, so a brand-new value suffices.
func (s *RandomBalancer) Clone() balance2.Balancer {
	return new(RandomBalancer)
}
// UpdateScales ignores peer weights (random picking is unweighted)
// and returns the receiver unchanged for call chaining.
func (s *RandomBalancer) UpdateScales(scales map[string]int) balance2.Balancer {
	return s
}
// Pick returns one of peers chosen uniformly at random, or an error
// when the peer list is empty.
func (s *RandomBalancer) Pick(peers []*balance2.ServicePeer) (picked *balance2.ServicePeer, err error) {
	if len(peers) == 0 {
		return nil, errors.New("no instance")
	}
	picked = peers[rand.Intn(len(peers))]
	return picked, nil
}
|
package com.abubusoft.kripton.examples.rssreader;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import com.abubusoft.kripton.android.KriptonLibrary;
import com.abubusoft.kripton.android.Logger;
import com.abubusoft.kripton.android.sqlite.SQLiteTestDatabase;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.InputStream;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class DataSourceInstrumentedTest {

    /**
     * Migration scenario: create the database at schema version 1 and
     * upgrade to version 2, verifying each step against the expected
     * schema dumps.
     * NOTE(review): the actual verification body is commented out, so
     * currently this only clears the database; the schema InputStreams
     * are opened but never read or closed — confirm before re-enabling.
     */
    @Test
    public void testMigration() throws Exception {
        // Context of the app under test.
        Context testContext = InstrumentationRegistry.getContext();
        Context context = InstrumentationRegistry.getTargetContext();
        KriptonLibrary.init(context);
        InputStream schema1 = testContext.getAssets().open("xeno_schema_1.sql");
        InputStream schema2 = testContext.getAssets().open("xeno_schema_2.sql");
        InputStream finalSchema1 = testContext.getAssets().open("xeno_schema_1.sql");
        InputStream finalSchema2 = testContext.getAssets().open("xeno_schema_2.sql");
        SQLiteTestDatabase.clearDatabase(context);
        /* SQLiteTestDatabase database = SQLiteTestDatabase.builder(1, schema1)
                .addPopulator(datasource -> XenoApplication.fillCountryCodes(context))
                .addVersionUpdateTask(2, (datasource, previousVersion, currentVersion) -> XenoApplication.migrationVersion2(context, datasource))
                .build();
        database.updateAndVerify(1, finalSchema1);
        database.updateAndVerify(2, finalSchema2);*/
    }

    /**
     * Fresh-install scenario: create the database directly at schema
     * version 2 and verify the resulting schema.
     * NOTE(review): verification body is commented out here as well —
     * this test currently only clears the database.
     */
    @Test
    public void testVersion2() throws Exception {
        // Context of the app under test.
        Context testContext = InstrumentationRegistry.getContext();
        Context context = InstrumentationRegistry.getTargetContext();
        KriptonLibrary.init(context);
        InputStream schema2 = testContext.getAssets().open("xeno_schema_2.sql");
        InputStream finalSchema2 = testContext.getAssets().open("xeno_schema_2.sql");
        SQLiteTestDatabase.clearDatabase(context);/*
        SQLiteTestDatabase database = SQLiteTestDatabase.builder(2, schema2)
                .addPopulator(datasource -> {
                    Logger.info("execute populator");
                    XenoApplication.fillCountryCodes(context);
                })
                .addVersionUpdateTask(2, (datasource, previousVersion, currentVersion) ->
                    XenoApplication.migrationVersion2(context, datasource)
                )
                .build();
        database.updateAndVerify(2, finalSchema2);*/
    }
}
|
#!/bin/sh

# ~/.macos — https://mths.be/macos

# Close any open System Preferences panes, to prevent them from overriding
# settings we’re about to change
osascript -e 'tell application "System Preferences" to quit'

# Ask for the administrator password upfront
sudo -v

# Keep-alive: update existing `sudo` time stamp until `.macos` has finished
# (kill -0 "$$" fails once this script's shell exits, ending the loop)
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

###############################################################################
# General UI/UX                                                               #
###############################################################################

# Disable popup with special characters (accent marks) while holding keys
defaults write -g ApplePressAndHoldEnabled -bool false

# Colored iMac Accent color
# 3 - yellow, 4 - teal, ... 8
# NOTE(review): these are undocumented/private keys — confirm they still
# work on the installed macOS version
defaults write -g NSColorSimulateHardwareAccent -bool YES
defaults write -g NSColorSimulatedHardwareEnclosureNumber -int 4

# Don't close windows when quitting an app (use System Restoration)
# Useful for iTerm to restore previous opened tabs
# defaults write NSGlobalDomain NSQuitAlwaysKeepsWindows 1

# Reduce transparency
defaults write com.apple.universalaccess reduceTransparency -bool true

# Set sidebar icon size to medium
defaults write NSGlobalDomain NSTableViewDefaultSizeMode -int 3

# Increase window resize speed for Cocoa applications
# defaults write NSGlobalDomain NSWindowResizeTime -float 0.001

# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode2 -bool true

# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true

# Save to disk (not to iCloud) by default
defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false

# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true

# Disable the “Are you sure you want to open this application?” dialog
defaults write com.apple.LaunchServices LSQuarantine -bool false

# Remove duplicates in the “Open With” menu (also see `lscleanup` alias)
/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister -kill -r -domain local -domain system -domain user

# Set Help Viewer windows to non-floating mode
# defaults write com.apple.helpviewer DevMode -bool true

# Restart automatically if the computer freezes
sudo systemsetup -setrestartfreeze on

# Set language and text formats
# defaults write NSGlobalDomain AppleLanguages -array "en-RU" "ru-RU"
# defaults write NSGlobalDomain AppleLocale -string "en_RU"
# defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
# defaults write NSGlobalDomain AppleMetricUnits -bool true

# # Disable automatic capitalization as it’s annoying when typing code
# defaults write NSGlobalDomain NSAutomaticCapitalizationEnabled -bool false
# # Disable smart dashes as they’re annoying when typing code
# defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
# # Disable automatic period substitution as it’s annoying when typing code
# defaults write NSGlobalDomain NSAutomaticPeriodSubstitutionEnabled -bool false
# # Disable smart quotes as they’re annoying when typing code
# defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false

###############################################################################
# Trackpad, mouse, keyboard, Bluetooth accessories, and input                 #
###############################################################################

# Trackpad: enable tap to click for this user and for the login screen
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1

# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3
###############################################################################
# Finder                                                                      #
###############################################################################

# Finder: show status bar
defaults write com.apple.finder ShowStatusBar -bool true

# Finder: show path bar
defaults write com.apple.finder ShowPathbar -bool true

# Keep folders on top when sorting by name
defaults write com.apple.finder _FXSortFoldersFirst -bool true

# When performing a search, search the current folder by default
# ("SCcf" = Search Current folder)
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"

# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false

# Avoid creating .DS_Store files on network or USB volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true

# Disable disk image verification
defaults write com.apple.frameworks.diskimages skip-verify -bool true
defaults write com.apple.frameworks.diskimages skip-verify-locked -bool true
defaults write com.apple.frameworks.diskimages skip-verify-remote -bool true

# Automatically open a new Finder window when a volume is mounted
defaults write com.apple.frameworks.diskimages auto-open-ro-root -bool true
defaults write com.apple.frameworks.diskimages auto-open-rw-root -bool true
defaults write com.apple.finder OpenWindowForNewRemovableDisk -bool true

# Use list view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv`, `clmv`, `Flwv`
# NOTE(review): "clmv" is column view, not list view ("Nlsv") — the
# comment and value disagree; confirm which is intended
defaults write com.apple.finder FXPreferredViewStyle -string "clmv"
# Show the ~/Library folder (also clear the stale Finder flag that can
# keep it hidden)
chflags nohidden ~/Library && xattr -d com.apple.FinderInfo ~/Library

# Show the /Volumes folder
# fix: the original repeated the ~/Library command under this comment;
# /Volumes is root-owned, hence sudo
sudo chflags nohidden /Volumes
# Expand the following File Info panes:
# “General”, “Open with”, and “Sharing & Permissions”
defaults write com.apple.finder FXInfoPanesExpanded -dict \
General -bool true \
OpenWith -bool true \
Privileges -bool true
###############################################################################
# Dock, Dashboard, and hot corners                                            #
###############################################################################
# Speed up Mission Control animations
defaults write com.apple.dock expose-animation-duration -float 0.1
# Group windows by application in Mission Control
defaults write com.apple.dock expose-group-apps -bool true
# Don’t automatically rearrange Spaces based on most recent use
defaults write com.apple.dock mru-spaces -bool false
# Automatically hide and show the Dock
defaults write com.apple.dock autohide -bool true
# Don’t show recent applications in Dock
defaults write com.apple.dock show-recents -bool false
# Set the icon size of Dock items to 44 pixels
defaults write com.apple.dock tilesize -int 44
# Add a spacer to the left side of the Dock (where the applications are)
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
# Add a spacer to the right side of the Dock (where the Trash is)
#defaults write com.apple.dock persistent-others -array-add '{tile-data={}; tile-type="spacer-tile";}'
###############################################################################
# Safari & WebKit                                                             #
###############################################################################
# Show the full URL in the address bar (note: this still hides the scheme)
defaults write com.apple.Safari ShowFullURLInSmartSearchField -bool true
# Make Safari’s search banners default to Contains instead of Starts With
defaults write com.apple.Safari FindOnPageMatchesWordStartsOnly -bool false
# Enable the Develop menu and the Web Inspector in Safari
defaults write com.apple.Safari IncludeDevelopMenu -bool true
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true
# Warn about fraudulent websites
defaults write com.apple.Safari WarnAboutFraudulentWebsites -bool true
# Enable “Do Not Track”
defaults write com.apple.Safari SendDoNotTrackHTTPHeader -bool true
# Update extensions automatically
defaults write com.apple.Safari InstallExtensionUpdatesAutomatically -bool true
# Disable notification popups
defaults write com.apple.Safari CanPromptForPushNotifications -bool false
###############################################################################
# Mail                                                                        #
###############################################################################
# Copy email addresses as `foo@example.com` instead of `Foo Bar <foo@example.com>` in Mail.app
defaults write com.apple.mail AddressesIncludeNameOnPasteboard -bool false
###############################################################################
# Time Machine                                                                #
###############################################################################
# Prevent Time Machine from prompting to use new hard drives as backup volume
defaults write com.apple.TimeMachine DoNotOfferNewDisksForBackup -bool true
###############################################################################
# Activity Monitor                                                            #
###############################################################################
# Show the main window when launching Activity Monitor
defaults write com.apple.ActivityMonitor OpenMainWindow -bool true
# Visualize CPU usage in the Activity Monitor Dock icon
# (IconType 5 = CPU usage graph — TODO confirm against Apple's defaults)
defaults write com.apple.ActivityMonitor IconType -int 5
# Show all processes in Activity Monitor
defaults write com.apple.ActivityMonitor ShowCategory -int 0
# Sort Activity Monitor results by CPU usage
# (SortDirection 0 = descending — TODO confirm)
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0
###############################################################################
# Address Book, Dashboard, iCal, TextEdit, and Disk Utility                   #
###############################################################################
# Use plain text mode for new TextEdit documents
defaults write com.apple.TextEdit RichText -int 0
# Open and save files as UTF-8 in TextEdit (encoding 4 = UTF-8)
defaults write com.apple.TextEdit PlainTextEncoding -int 4
defaults write com.apple.TextEdit PlainTextEncodingForWrite -int 4
# Auto-play videos when opened with QuickTime Player
defaults write com.apple.QuickTimePlayerX MGPlayMovieOnOpen -bool true
###############################################################################
# Mac App Store                                                               #
###############################################################################
# Enable Debug Menu in the Mac App Store
# defaults write com.apple.appstore ShowDebugMenu -bool true
# Enable the automatic update check
defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool true
# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1
# Download newly available updates in background
defaults write com.apple.SoftwareUpdate AutomaticDownload -int 1
# Install System data files & security updates
defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 1
# Automatically download apps purchased on other Macs
# defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 1
# Turn on app auto-update
defaults write com.apple.commerce AutoUpdate -bool true
# Allow the App Store to reboot machine on macOS updates
defaults write com.apple.commerce AutoUpdateRestartRequired -bool true
###############################################################################
# Photos                                                                      #
###############################################################################
# Prevent Photos from opening automatically when devices are plugged in
# (-currentHost scopes the setting to this machine only)
defaults -currentHost write com.apple.ImageCapture disableHotPlug -bool true
###############################################################################
# Google Chrome & Google Chrome Canary                                        #
###############################################################################
# Use the system-native print preview dialog
defaults write com.google.Chrome DisablePrintPreview -bool true
defaults write com.google.Chrome.canary DisablePrintPreview -bool true
# Expand the print dialog by default
defaults write com.google.Chrome PMPrintingExpandedStateForPrint2 -bool true
defaults write com.google.Chrome.canary PMPrintingExpandedStateForPrint2 -bool true
###############################################################################
# Magnet                                                                      #
###############################################################################
# Disable ads
defaults write com.crowdcafe.windowmagnet alreadyClickedDynamoItem -bool true
defaults write com.crowdcafe.windowmagnet seenDynamo -bool true
# Disable all shortcuts (an empty -dict clears each key binding)
defaults write com.crowdcafe.windowmagnet centerWindowComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowCenterThirdComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowEastComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowLeftThirdComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowLeftTwoThirdsComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowNorthComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowNorthEastComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowNorthWestComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowRightThirdComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowRightTwoThirdsComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowSouthComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowSouthEastComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowSouthWestComboKey -dict
defaults write com.crowdcafe.windowmagnet expandWindowWestComboKey -dict
defaults write com.crowdcafe.windowmagnet maximizeWindowComboKey -dict
defaults write com.crowdcafe.windowmagnet moveWindowToNextDisplay -dict
defaults write com.crowdcafe.windowmagnet moveWindowToPreviousDisplay -dict
defaults write com.crowdcafe.windowmagnet restoreWindowComboKey -dict
###############################################################################
# Transmission.app                                                            #
###############################################################################
# Use `~/Downloads` to store completed downloads
defaults write org.m0k.transmission DownloadLocationConstant -bool true
# Trash original torrent files
defaults write org.m0k.transmission DeleteOriginalTorrent -bool true
# Hide the donate message
defaults write org.m0k.transmission WarningDonate -bool false
# Hide the legal disclaimer
defaults write org.m0k.transmission WarningLegal -bool false
###############################################################################
# Additional Hacks                                                            #
###############################################################################
# Enable Mac's startup chime
# sudo nvram StartupMute=%00
###############################################################################
# Xcode                                                                       #
###############################################################################
# see https://xcode-tips.github.io
# Faster Xcode Rename Refactoring
# NOTE(review): the key is CodeFoldingAnimationSpeed, which reads as the
# code-folding animation rather than rename refactoring — confirm the intent.
defaults write com.apple.dt.Xcode CodeFoldingAnimationSpeed -int 0
# Show project build times in the activity viewer
defaults write com.apple.dt.Xcode ShowBuildOperationDuration -bool YES
###############################################################################
# Kill affected applications                                                  #
###############################################################################
# Restart every app whose preferences were modified above so the new settings
# take effect. killall failures (e.g. the app is not running) are ignored.
affected_apps=(
  "Activity Monitor"
  "Address Book"
  "Calendar"
  "cfprefsd"
  "Contacts"
  "Dock"
  "Finder"
  "Google Chrome"
  "Mail"
  "Messages"
  "Photos"
  "Safari"
  "SystemUIServer"
  "Transmission"
  "iCal"
)
for app in "${affected_apps[@]}"; do
  killall "${app}" > /dev/null 2>&1
done
echo "Done. Note that some of these changes require a logout/restart to take effect."
|
;; Interleave the elements of LIST1 and LIST2, taking one element from each
;; list in turn but starting with LIST2: (1 2) (a b) => (a 1 b 2).
;; Recursion stops as soon as either list is exhausted, so any surplus
;; elements of the longer list are dropped.
(define (swap-list-elements list1 list2)
  (if (or (null? list1) (null? list2))
      '()
      (cons (car list2)
            (cons (car list1)
                  (swap-list-elements (cdr list1) (cdr list2))))))
#!/bin/sh
# Run the RUNMAO command on every OXE node 10.100.8.10 .. 10.100.8.28.
#
# SECURITY NOTE(review): the ssh password is hard-coded and passed on the
# command line, where it is visible in `ps` output and shell history.
# Prefer SSH keys, or at least `sshpass -f <file>` / the SSHPASS env var.
for i in $(seq 10 28); do
  echo "OXE 10.100.8.${i}:"
  sshpass -p "mtcl" ssh "mtcl@10.100.8.${i}" RUNMAO
done
|
# Deploy via hsc_deploy.py against the sqltestnet API endpoint, forwarding any
# extra arguments. "$@" is quoted so arguments containing spaces survive.
# SECURITY NOTE(review): the exported key and password are hard-coded on the
# command line (visible in `ps` and shell history) — consider reading them
# from the environment or a protected file instead.
python hsc_deploy.py --target sqltestnet-api.aergo.io:7845 --exported-key 47CqNF6VHLjr77YPtvjtxrfekDdyhJrWvy1C6qN49JfnthExWL9hcfrWZ5J3ErgSAwyPBhoUu --password pCjjd98Ha8LiEHBCEiot --waiting-time 3 "$@"
|
#!/bin/bash
# Clean-build the project and start Quarkus in dev (live-reload) mode under
# mvnDebug, which waits for a remote debugger to attach (default port 8000).
mvnDebug clean compile quarkus:dev
|
<reponame>nschonni/docs-yaml
import { readdirSync, readFileSync } from 'fs';
import * as Fuse from 'fuse.js';
import { safeLoad } from 'js-yaml';
import { join } from 'path';
import {
CompletionItem,
CompletionItemKind,
CompletionItemProvider,
Position,
SnippetString,
TextDocument
} from 'vscode';
import { reporter } from '../helper/telemetry';
import { SNIPPETS_ROOT_PATH } from './yaml-constant';
/// Internal representation of a yaml code snippet corresponding to CompletionItemProvider
export interface CodeSnippet {
    /** Identifier used as the fuzzy-search key (see filterCodeSnippets). */
    readonly name: string;
    /** Text shown in the completion list. */
    readonly label: string;
    /** Shown as the completion item's documentation. */
    readonly description: string;
    /** Snippet body inserted into the document (vscode SnippetString syntax). */
    readonly body: string;
}
/**
 * A docs-yaml completion provider provides yaml code snippets for docs-yaml, eg: Achievements, Module.
 */
export class DocsYamlCompletionProvider implements CompletionItemProvider {
    // All yaml code snippets loaded from the snippets folder at construction time.
    private snippets: CodeSnippet[] = [];

    public constructor() {
        this.loadCodeSnippets();
    }

    // Provide code snippets for vscode, fuzzy-filtered by the word at the cursor.
    public provideCompletionItems(doc: TextDocument, pos: Position) {
        // NOTE(review): event name `yaml.schemaError` looks copy-pasted from
        // error reporting — confirm it is the intended event for completions.
        reporter.sendTelemetryEvent(`yaml.schemaError`);
        const wordPos = doc.getWordRangeAtPosition(pos);
        const word = doc.getText(wordPos);
        return this.filterCodeSnippets(word).map(
            (snippet: CodeSnippet): CompletionItem => {
                const item = new CompletionItem(snippet.label, CompletionItemKind.Snippet);
                item.insertText = new SnippetString(snippet.body);
                item.documentation = snippet.description;
                return item;
            }
        );
    }

    // Load yaml code snippets from the snippets folder.
    private loadCodeSnippets(): void {
        // FIX: the listing previously came from `__dirname` while files were
        // read from SNIPPETS_ROOT_PATH — list and read the same directory.
        this.snippets = readdirSync(SNIPPETS_ROOT_PATH)
            .filter((filename: string): boolean => filename.endsWith('.yaml'))
            .map(
                (filename: string): CodeSnippet =>
                    this.readYamlCodeSnippet(join(SNIPPETS_ROOT_PATH, filename))
            );
    }

    // Filter all internal code snippets by fuzzy-matching `word` against `name`.
    private filterCodeSnippets(word: string): CodeSnippet[] {
        const searcher = new Fuse(this.snippets, { keys: ['name'] });
        return searcher.search(word.toLowerCase());
    }

    // Parse a yaml snippet file into a CodeSnippet.
    private readYamlCodeSnippet(filename: string): CodeSnippet {
        return safeLoad(readFileSync(filename, 'utf-8')) as CodeSnippet;
    }
}
|
class GraphRelationship:
    """A directed, typed edge from ``start_node`` to ``end_node``."""

    def __init__(self, start_node, end_node, relationship_type):
        self.start_node = start_node
        self.end_node = end_node
        self.relationship_type = relationship_type

    def get_start_node(self):
        """Return the node the relationship starts from."""
        return self.start_node

    def set_start_node(self, start_node):
        """Replace the node the relationship starts from."""
        self.start_node = start_node

    def get_end_node(self):
        """Return the node the relationship points to."""
        return self.end_node

    def set_end_node(self, end_node):
        """Replace the node the relationship points to."""
        self.end_node = end_node

    def get_relationship_type(self):
        """Return the relationship's type label."""
        return self.relationship_type

    def set_relationship_type(self, relationship_type):
        """Replace the relationship's type label."""
        self.relationship_type = relationship_type

    def to_dict(self):
        """Serialize the relationship into a plain dict."""
        return dict(
            start_node=self.start_node,
            end_node=self.end_node,
            relationship_type=self.relationship_type,
        )
# Example usage — guarded so importing this module no longer prints as a
# side effect; the demo only runs when the file is executed directly.
if __name__ == '__main__':
    relationship = GraphRelationship('node1', 'node2', 'CONNECTED_TO')
    # {'start_node': 'node1', 'end_node': 'node2', 'relationship_type': 'CONNECTED_TO'}
    print(relationship.to_dict())
<gh_stars>0
/* ssln_gt.h - includes files needed by ssln_gt.c

   ENUMS:
   TYPEDEFS:
   DEFINITIONS:
   MACROS:
*/
#ifndef SSLN_GT_H
/* Guard defined immediately so nested/recursive inclusion is blocked before
   the #include below.  NOTE(review): the macro expands to NULL, which is
   unusual for a guard; kept in case something references SSLN_GT_H as a
   value — confirm, then simplify to a plain `#define SSLN_GT_H`. */
#define SSLN_GT_H NULL

#include <ssln_cmn.h>

/* Release both component buffers.  Wrapped in do/while(0) so the macro acts
   as a single statement: the previous brace-only form broke
   `if (cond) FREEALL(); else ...` (dangling semicolon). */
#define FREEALL() do { ssln_rid(cmpnum1); ssln_rid(cmpnum2); } while (0)

#endif /* SSLN_GT_H */
|
import React from 'react';
import { render, fireEvent } from '@testing-library/react';
import '@testing-library/jest-dom/extend-expect';
import { Wrapper } from '../../../test/test.helpers';
import * as A from '../index';
/**
* Tabs components
*/
const AccordionComponent = (props = {}) => {
const {
onOpen,
onClose,
initialAccordion,
animate,
headProps = {}
} = props;
return (
<Wrapper>
<A.Accordion
data-testid="accordionSwitcher"
initialAccordion={initialAccordion}
onOpen={onOpen}
onClose={onClose}
animate={animate}
>
<A.AccordionHead
id="accordion-1"
className="my-custom-class"
data-testid="accordionHead-1"
{...headProps}
>
{(open, close, isOpen) => (
<h2
data-testid="accordion-1"
onClick={() => !isOpen ? open() : close()}
>
Accordion head 1
</h2>
)}
</A.AccordionHead>
<A.AccordionHead id="accordion-2" {...headProps}>
{(open, close, isOpen) => (
<h2
data-testid="accordion-2"
onClick={() => !isOpen ? open() : close()}
>
Accordion head 2
</h2>
)}
</A.AccordionHead>
<A.AccordionHead data-testid="accordion-3" id="accordion-3" {...headProps}>
Accordion head 3
</A.AccordionHead>
<A.AccordionContent
data-testid="accordionPanel-1"
className="my-custom-class"
whenActive="accordion-1"
>
{(close) => (
<>
<h2>Accordion panel 1</h2>
<button onClick={close} data-testid="close-by-content">close me</button>
</>
)}
</A.AccordionContent>
<A.AccordionContent
data-testid="accordionPanel-2"
whenActive="accordion-2"
>
{(close) => (
<>
<h2>Accordion panel 2</h2>
<button onClick={close} data-testid="close-by-content">close me</button>
</>
)}
</A.AccordionContent>
<A.AccordionContent
data-testid="accordionPanel-3"
whenActive="accordion-3"
>
Accordion panel 3
</A.AccordionContent>
</A.Accordion>
</Wrapper>
);
};
describe('Accordion ui component tests', () => {
  describe('Accordion functionality tests', () => {
    it('Should call onOpen and onClose callback to change accordion', () => {
      const onOpen = jest.fn();
      const onClose = jest.fn();
      const { getByTestId } = render(AccordionComponent({onOpen, onClose}));
      expect(onOpen).toHaveBeenCalledTimes(0);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(getByTestId('accordion-1'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(0);
      // NOTE(review): these expectations pin the current behavior — switching
      // to another head does not re-fire onOpen/onClose on the Accordion.
      fireEvent.click(getByTestId('accordion-2'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(getByTestId('accordion-1'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(getByTestId('accordion-1'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(1);
    });
    it('Should close accordion by click on close button on its content', () => {
      const onOpen = jest.fn();
      const onClose = jest.fn();
      const { getByTestId } = render(AccordionComponent({onOpen, onClose}));
      expect(onOpen).toHaveBeenCalledTimes(0);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(getByTestId('accordion-1'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(getByTestId('close-by-content'));
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(1);
    });
    it('Should show accordion panel on click Tab', () => {
      const { getByTestId, queryByTestId } = render(AccordionComponent());
      const accordionSwitcher = getByTestId('accordionSwitcher');
      fireEvent.click(getByTestId('accordion-1'));
      expect(accordionSwitcher).toContainElement(getByTestId('accordionPanel-1'));
      fireEvent.click(getByTestId('accordion-2'));
      expect(accordionSwitcher).toContainElement(getByTestId('accordionPanel-2'));
      // Clicking the active head again closes its panel.
      fireEvent.click(getByTestId('accordion-2'));
      expect(queryByTestId('accordionPanel-2')).toBeFalsy();
    });
    it('Should call onOpen and onClose on onClick event of accordion header', () => {
      const onClick = jest.fn();
      const onOpen = jest.fn();
      const onClose = jest.fn();
      const { getByTestId } = render(AccordionComponent({
        headProps: {
          onClick: (open, close, isActive) => {
            onClick();
            if (!isActive) {
              open()
            }
            else {
              close()
            }
          },
          onOpen,
          onClose,
        }
      }));
      const accordionHeader = getByTestId('accordion-3');
      expect(onClick).toHaveBeenCalledTimes(0);
      expect(onOpen).toHaveBeenCalledTimes(0);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(accordionHeader);
      expect(onClick).toHaveBeenCalledTimes(1);
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(0);
      fireEvent.click(accordionHeader);
      expect(onClick).toHaveBeenCalledTimes(2);
      expect(onOpen).toHaveBeenCalledTimes(1);
      expect(onClose).toHaveBeenCalledTimes(1);
    });
    it('Should call onOpen on open event of accordion header', () => {
      const onOpen = jest.fn();
      const { getByTestId } = render(AccordionComponent({
        headProps: {
          onOpen,
        }
      }));
      const accordionHeader = getByTestId('accordion-3');
      expect(onOpen).toHaveBeenCalledTimes(0);
      fireEvent.click(accordionHeader);
      expect(onOpen).toHaveBeenCalledTimes(1);
    });
  });
  describe('Accordion props tests', () => {
    it('Should add class to element', () => {
      const { getByTestId } = render(AccordionComponent({initialAccordion: 'accordion-1'}));
      expect(getByTestId('accordionHead-1')).toHaveClass('my-custom-class');
      expect(getByTestId('accordionPanel-1')).toHaveClass('my-custom-class');
    });
    test('should have default prop functions', () => {
      expect(A.Accordion.defaultProps.onClose).toBeDefined();
      expect(A.Accordion.defaultProps.onOpen).toBeDefined();
      expect(A.Accordion.defaultProps.animate).toBeDefined();
      expect(A.Accordion.defaultProps.initialAccordion).toBeDefined();
      expect(typeof A.Accordion.defaultProps.onClose).toBe('function');
      expect(typeof A.Accordion.defaultProps.onOpen).toBe('function');
      expect(typeof A.Accordion.defaultProps.animate).toBe('boolean');
      expect(typeof A.Accordion.defaultProps.initialAccordion).toBe('string');
    });
    it('Should import exported module', () => {
      expect(A).toHaveProperty('Accordion');
      expect(A).toHaveProperty('AccordionHead');
      expect(A).toHaveProperty('AccordionContent');
    });
    // FIX: this test was a copy-paste duplicate titled 'Should import
    // exported module'; renamed to describe what it actually checks.
    it('Should apply hideAnimation class when animate is false', () => {
      const { getByTestId } = render(AccordionComponent({animate: false, initialAccordion: 'accordion-1'}));
      expect(getByTestId('accordionPanel-1')).toHaveClass('hideAnimation')
    });
  });
});
|
"""Module to Integration tests."""
|
require 'spec_helper'
require 'yt/models/policy'
# Unit specs for Yt::Policy: each example constructs a policy directly from a
# parsed-response data hash and verifies the corresponding reader method.
describe Yt::Policy do
  subject(:policy) { Yt::Policy.new data: data }

  describe '#id' do
    context 'given fetching a policy returns an id' do
      let(:data) { {"id"=>"S123456789"} }
      it { expect(policy.id).to eq 'S123456789' }
    end
  end

  describe '#name' do
    context 'given fetching a policy returns a name' do
      let(:data) { {"name"=>"Block in all countries"} }
      it { expect(policy.name).to eq 'Block in all countries' }
    end
  end

  describe '#description' do
    context 'given fetching a policy returns a description' do
      let(:data) { {"description"=>"Block videos in every country"} }
      it { expect(policy.description).to eq 'Block videos in every country' }
    end
  end

  # The API field is "timeUpdated"; the model exposes it as #updated_at.
  describe '#updated_at' do
    context 'given fetching a policy returns a timeUpdated' do
      let(:data) { {"timeUpdated"=>"1970-01-16T20:33:03.675Z"} }
      it { expect(policy.updated_at.year).to be 1970 }
    end
  end

  describe '#rules' do
    context 'given fetching a policy returns rules' do
      let(:data) { {"rules"=>[{"action"=>"track"},{"action"=>"track"}]} }
      it { expect(policy.rules.size).to be 2 }
    end
  end
end
<gh_stars>10-100
var RadioPage = require('../../test/fixtures/radioPageObject.po');
describe('radios', function () {
beforeEach(function () {
browser.get('http://localhost:3000/radio.html');
});
it('should generate functional radio methods', function () {
var radioPage = new RadioPage();
radioPage.colorByIndexShouldBeVisible(0);
radioPage.colorByIndexShouldBeVisible(1);
radioPage.colorByIndexShouldBeVisible(2);
radioPage.colorByIndexShouldBeEnabled(0);
radioPage.colorByIndexShouldBeEnabled(1);
radioPage.colorByIndexShouldBeEnabled(2);
radioPage.colorByIndexShouldNotBeEnabled(3);
radioPage.colorByIndexShouldNotBeSelected(0);
radioPage.colorByIndexShouldNotBeSelected(1);
radioPage.colorByIndexShouldNotBeSelected(2);
radioPage.colorByValueShouldNotBeSelected('red');
radioPage.colorByValueShouldNotBeSelected('yellow');
radioPage.colorByValueShouldNotBeSelected('blue');
radioPage.clickColorByIndex(0);
radioPage.colorByIndexShouldBeSelected(0);
radioPage.colorByValueShouldBeSelected('red');
radioPage.colorByIndexShouldNotBeSelected(1);
radioPage.colorByIndexShouldNotBeSelected(2);
radioPage.colorByValueShouldNotBeSelected('yellow');
radioPage.colorByValueShouldNotBeSelected('blue');
radioPage.clickColorByIndex(1);
radioPage.colorByIndexShouldNotBeSelected(0);
radioPage.colorByIndexShouldBeSelected(1);
radioPage.colorByValueShouldBeSelected('blue');
radioPage.colorByIndexShouldNotBeSelected(2);
radioPage.colorByValueShouldNotBeSelected('red');
radioPage.colorByValueShouldNotBeSelected('yellow');
radioPage.clickColorByValue('yellow');
radioPage.colorByIndexShouldNotBeSelected(0);
radioPage.colorByIndexShouldNotBeSelected(1);
radioPage.colorByIndexShouldBeSelected(2);
radioPage.colorByValueShouldBeSelected('yellow');
radioPage.colorByValueShouldNotBeSelected('red');
radioPage.colorByValueShouldNotBeSelected('blue');
radioPage.clickColorByValue('red');
radioPage.colorByIndexShouldBeSelected(0);
radioPage.colorByValueShouldBeSelected('red');
radioPage.colorByIndexShouldNotBeSelected(1);
radioPage.colorByIndexShouldNotBeSelected(2);
radioPage.colorByValueShouldNotBeSelected('yellow');
radioPage.colorByValueShouldNotBeSelected('blue');
radioPage.widthByIndexShouldBeVisible(0);
radioPage.widthByIndexShouldBeVisible(1);
radioPage.widthByIndexShouldNotBeVisible(2);
radioPage.widthByIndexShouldBeEnabled(0);
radioPage.widthByIndexShouldBeEnabled(1);
radioPage.widthByIndexShouldBeEnabled(2);
radioPage.widthByIndexShouldNotBeSelected(0);
radioPage.widthByIndexShouldNotBeSelected(1);
radioPage.widthByIndexShouldNotBeSelected(2);
radioPage.widthByValueShouldNotBeSelected('10cm');
radioPage.widthByValueShouldNotBeSelected('20cm');
radioPage.widthByValueShouldNotBeSelected('30cm');
radioPage.clickWidthByIndex(0);
radioPage.widthByIndexShouldBeSelected(0);
radioPage.widthByValueShouldBeSelected('10cm');
radioPage.widthByIndexShouldNotBeSelected(1);
radioPage.widthByIndexShouldNotBeSelected(2);
radioPage.widthByValueShouldNotBeSelected('20cm');
radioPage.widthByValueShouldNotBeSelected('30cm');
radioPage.clickWidthByIndex(1);
radioPage.widthByIndexShouldNotBeSelected(0);
radioPage.widthByIndexShouldBeSelected(1);
radioPage.widthByValueShouldBeSelected('20cm');
radioPage.widthByIndexShouldNotBeSelected(2);
radioPage.widthByValueShouldNotBeSelected('10cm');
radioPage.widthByValueShouldNotBeSelected('30cm');
radioPage.clickWidthByValue('10cm');
radioPage.widthByIndexShouldNotBeSelected(1);
radioPage.widthByIndexShouldNotBeSelected(2);
radioPage.widthByIndexShouldBeSelected(0);
radioPage.widthByValueShouldBeSelected('10cm');
radioPage.widthByValueShouldNotBeSelected('20cm');
radioPage.widthByValueShouldNotBeSelected('30cm');
radioPage.clickWidthByValue('20cm');
radioPage.widthByIndexShouldBeSelected(1);
radioPage.widthByValueShouldBeSelected('20cm');
radioPage.widthByIndexShouldNotBeSelected(0);
radioPage.widthByIndexShouldNotBeSelected(2);
radioPage.widthByValueShouldNotBeSelected('10cm');
radioPage.widthByValueShouldNotBeSelected('30cm');
radioPage.selectedItemByIndexShouldBeVisible(0);
radioPage.selectedItemByIndexShouldBeVisible(1);
radioPage.selectedItemByIndexShouldBeVisible(2);
radioPage.selectedItemByIndexShouldBeEnabled(0);
radioPage.selectedItemByIndexShouldBeEnabled(1);
radioPage.selectedItemByIndexShouldBeEnabled(2);
radioPage.selectedItemByIndexShouldNotBeSelected(0);
radioPage.selectedItemByIndexShouldNotBeSelected(1);
radioPage.selectedItemByIndexShouldNotBeSelected(2);
radioPage.selectedItemByValueShouldNotBeSelected('1');
radioPage.selectedItemByValueShouldNotBeSelected('2');
radioPage.selectedItemByValueShouldNotBeSelected('3');
radioPage.clickSelectedItemByIndex(0);
radioPage.selectedItemByIndexShouldBeSelected(0);
radioPage.selectedItemByValueShouldBeSelected('1');
radioPage.selectedItemByIndexShouldNotBeSelected(1);
radioPage.selectedItemByIndexShouldNotBeSelected(2);
radioPage.selectedItemByValueShouldNotBeSelected('2');
radioPage.selectedItemByValueShouldNotBeSelected('3');
radioPage.clickSelectedItemByIndex(1);
radioPage.selectedItemByIndexShouldNotBeSelected(0);
radioPage.selectedItemByIndexShouldBeSelected(1);
radioPage.selectedItemByValueShouldBeSelected('2');
radioPage.selectedItemByIndexShouldNotBeSelected(2);
radioPage.selectedItemByValueShouldNotBeSelected('1');
radioPage.selectedItemByValueShouldNotBeSelected('3');
radioPage.clickSelectedItemByValue('3');
radioPage.selectedItemByIndexShouldNotBeSelected(0);
radioPage.selectedItemByIndexShouldNotBeSelected(1);
radioPage.selectedItemByIndexShouldBeSelected(2);
radioPage.selectedItemByValueShouldBeSelected('3');
radioPage.selectedItemByValueShouldNotBeSelected('1');
radioPage.selectedItemByValueShouldNotBeSelected('2');
radioPage.clickSelectedItemByValue('1');
radioPage.selectedItemByIndexShouldBeSelected(0);
radioPage.selectedItemByValueShouldBeSelected('1');
radioPage.selectedItemByIndexShouldNotBeSelected(1);
radioPage.selectedItemByIndexShouldNotBeSelected(2);
radioPage.selectedItemByValueShouldNotBeSelected('2');
radioPage.selectedItemByValueShouldNotBeSelected('3');
radioPage.nestedRepeaterRadioFieldByIndexShouldBeVisible(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeVisible(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeVisible(2, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeEnabled(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeEnabled(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeEnabled(2, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 2);
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '3');
radioPage.clickNestedRepeaterRadioFieldByIndex(0, 0);
radioPage.clickNestedRepeaterRadioFieldByIndex(1, 1);
radioPage.clickNestedRepeaterRadioFieldByIndex(2, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(2, 2);
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(0, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(1, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(2, '3');
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 1);
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '2');
radioPage.clickNestedRepeaterRadioFieldByIndex(0, 2);
radioPage.clickNestedRepeaterRadioFieldByIndex(1, 0);
radioPage.clickNestedRepeaterRadioFieldByIndex(2, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(0, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(1, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(2, 0);
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(0, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(1, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(2, '1');
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 2);
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '3');
radioPage.clickNestedRepeaterRadioFieldByValue(0, '1');
radioPage.clickNestedRepeaterRadioFieldByValue(1, '2');
radioPage.clickNestedRepeaterRadioFieldByValue(2, '3');
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(2, 2);
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(0, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(1, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(2, '3');
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 1);
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '2');
radioPage.clickNestedRepeaterRadioFieldByValue(0, '3');
radioPage.clickNestedRepeaterRadioFieldByValue(1, '1');
radioPage.clickNestedRepeaterRadioFieldByValue(2, '1');
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(0, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(1, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldBeSelected(2, 0);
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(0, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(1, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldBeSelected(2, '1');
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 0);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(0, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(1, 2);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 1);
radioPage.nestedRepeaterRadioFieldByIndexShouldNotBeSelected(2, 2);
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '1');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(0, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(1, '3');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '2');
radioPage.nestedRepeaterRadioFieldByValueShouldNotBeSelected(2, '3');
radioPage.nested2RepeaterRadioField2ByIndexShouldBeVisible(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeVisible(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeVisible(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeEnabled(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeEnabled(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeEnabled(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(0, 0, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(1, 1, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(2, 2, 'false');
radioPage.clickNested2RepeaterRadioField2ByIndex(0, 0, 0);
radioPage.clickNested2RepeaterRadioField2ByIndex(1, 1, 1);
radioPage.clickNested2RepeaterRadioField2ByIndex(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(0, 0, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(1, 1, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(2, 2, 'true');
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(0, 0, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(1, 1, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(2, 2, 1);
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(0, 0, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(1, 1, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(2, 2, 'false');
radioPage.clickNested2RepeaterRadioField2ByIndex(0, 0, 1);
radioPage.clickNested2RepeaterRadioField2ByIndex(1, 1, 0);
radioPage.clickNested2RepeaterRadioField2ByIndex(2, 2, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(0, 0, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(1, 1, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(2, 2, 1);
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(0, 0, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(1, 1, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(2, 2, 'false');
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(0, 0, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(1, 1, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(2, 2, 'true');
radioPage.clickNested2RepeaterRadioField2ByValue(0, 0, 'true');
radioPage.clickNested2RepeaterRadioField2ByValue(1, 1, 'false');
radioPage.clickNested2RepeaterRadioField2ByValue(2, 2, 'true');
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(0, 0, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(1, 1, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(2, 2, 'true');
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(0, 0, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(1, 1, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(2, 2, 1);
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(0, 0, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(1, 1, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(2, 2, 'false');
radioPage.clickNested2RepeaterRadioField2ByValue(0, 0, 'false');
radioPage.clickNested2RepeaterRadioField2ByValue(1, 1, 'true');
radioPage.clickNested2RepeaterRadioField2ByValue(2, 2, 'false');
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(0, 0, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(1, 1, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldBeSelected(2, 2, 1);
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(0, 0, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(1, 1, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldBeSelected(2, 2, 'false');
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(0, 0, 0);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(1, 1, 1);
radioPage.nested2RepeaterRadioField2ByIndexShouldNotBeSelected(2, 2, 0);
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(0, 0, 'true');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(1, 1, 'false');
radioPage.nested2RepeaterRadioField2ByValueShouldNotBeSelected(2, 2, 'true');
radioPage.selected2ItemByIndexShouldBeVisible(0, 0);
radioPage.selected2ItemByIndexShouldBeVisible(1, 1);
radioPage.selected2ItemByIndexShouldBeVisible(2, 0);
radioPage.selected2ItemByIndexShouldBeEnabled(0, 0);
radioPage.selected2ItemByIndexShouldBeEnabled(1, 1);
radioPage.selected2ItemByIndexShouldBeEnabled(2, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(0, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(1, 1);
radioPage.selected2ItemByIndexShouldNotBeSelected(2, 0);
radioPage.selected2ItemByValueShouldNotBeSelected(0, 'true');
radioPage.selected2ItemByValueShouldNotBeSelected(1, 'false');
radioPage.selected2ItemByValueShouldNotBeSelected(2, 'true');
radioPage.clickSelected2ItemByIndex(0, 0);
radioPage.clickSelected2ItemByIndex(1, 1);
radioPage.clickSelected2ItemByIndex(2, 0);
radioPage.selected2ItemByIndexShouldBeSelected(0, 0);
radioPage.selected2ItemByIndexShouldBeSelected(1, 1);
radioPage.selected2ItemByIndexShouldBeSelected(2, 0);
radioPage.selected2ItemByValueShouldBeSelected(0, 'true');
radioPage.selected2ItemByValueShouldBeSelected(1, 'false');
radioPage.selected2ItemByValueShouldBeSelected(2, 'true');
radioPage.selected2ItemByIndexShouldNotBeSelected(0, 1);
radioPage.selected2ItemByIndexShouldNotBeSelected(1, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(2, 1);
radioPage.selected2ItemByValueShouldNotBeSelected(0, 'false');
radioPage.selected2ItemByValueShouldNotBeSelected(1, 'true');
radioPage.selected2ItemByValueShouldNotBeSelected(2, 'false');
radioPage.clickSelected2ItemByIndex(0, 1);
radioPage.clickSelected2ItemByIndex(1, 0);
radioPage.clickSelected2ItemByIndex(2, 1);
radioPage.selected2ItemByIndexShouldBeSelected(0, 1);
radioPage.selected2ItemByIndexShouldBeSelected(1, 0);
radioPage.selected2ItemByIndexShouldBeSelected(2, 1);
radioPage.selected2ItemByValueShouldBeSelected(0, 'false');
radioPage.selected2ItemByValueShouldBeSelected(1, 'true');
radioPage.selected2ItemByValueShouldBeSelected(2, 'false');
radioPage.selected2ItemByIndexShouldNotBeSelected(0, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(1, 1);
radioPage.selected2ItemByIndexShouldNotBeSelected(2, 0);
radioPage.selected2ItemByValueShouldNotBeSelected(0, 'true');
radioPage.selected2ItemByValueShouldNotBeSelected(1, 'false');
radioPage.selected2ItemByValueShouldNotBeSelected(2, 'true');
radioPage.clickSelected2ItemByValue(0, 'true');
radioPage.clickSelected2ItemByValue(1, 'false');
radioPage.clickSelected2ItemByValue(2, 'true');
radioPage.selected2ItemByIndexShouldBeSelected(0, 0);
radioPage.selected2ItemByIndexShouldBeSelected(1, 1);
radioPage.selected2ItemByIndexShouldBeSelected(2, 0);
radioPage.selected2ItemByValueShouldBeSelected(0, 'true');
radioPage.selected2ItemByValueShouldBeSelected(1, 'false');
radioPage.selected2ItemByValueShouldBeSelected(2, 'true');
radioPage.selected2ItemByIndexShouldNotBeSelected(0, 1);
radioPage.selected2ItemByIndexShouldNotBeSelected(1, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(2, 1);
radioPage.selected2ItemByValueShouldNotBeSelected(0, 'false');
radioPage.selected2ItemByValueShouldNotBeSelected(1, 'true');
radioPage.selected2ItemByValueShouldNotBeSelected(2, 'false');
radioPage.clickSelected2ItemByValue(0, 'false');
radioPage.clickSelected2ItemByValue(1, 'true');
radioPage.clickSelected2ItemByValue(2, 'false');
radioPage.selected2ItemByIndexShouldBeSelected(0, 1);
radioPage.selected2ItemByIndexShouldBeSelected(1, 0);
radioPage.selected2ItemByIndexShouldBeSelected(2, 1);
radioPage.selected2ItemByValueShouldBeSelected(0, 'false');
radioPage.selected2ItemByValueShouldBeSelected(1, 'true');
radioPage.selected2ItemByValueShouldBeSelected(2, 'false');
radioPage.selected2ItemByIndexShouldNotBeSelected(0, 0);
radioPage.selected2ItemByIndexShouldNotBeSelected(1, 1);
radioPage.selected2ItemByIndexShouldNotBeSelected(2, 0);
radioPage.selected2ItemByValueShouldNotBeSelected(0, 'true');
radioPage.selected2ItemByValueShouldNotBeSelected(1, 'false');
radioPage.selected2ItemByValueShouldNotBeSelected(2, 'true');
});
}); |
def build_shell_command(shell_param1, shell_param2, shell_param3, shell_remote):
    """Assemble a three-part shell command string.

    Each of the three parts is included verbatim when truthy; a falsy part
    is replaced by the literal placeholder ``$null``.  The third part is
    wrapped in double quotes because it may contain spaces.

    :param shell_param1: first command token (e.g. the executable), or falsy
    :param shell_param2: second command token (e.g. a subcommand), or falsy
    :param shell_param3: trailing argument string (quoted in output), or falsy
    :param shell_remote: whether remote execution is enabled; supplying any
        command part while this is False is an error
    :returns: the assembled command string, e.g. ``'cmd sub "arg"'``
    :raises ValueError: if a command part was given but ``shell_remote``
        is False
    """
    parts = [
        shell_param1 if shell_param1 else '$null',
        shell_param2 if shell_param2 else '$null',
        f'"{shell_param3}"' if shell_param3 else '$null',
    ]
    # BUG FIX: the original guard tested `shell_remote and not
    # shell_command_base.strip()`, but the assembled string always contains
    # at least the '$null' placeholders, so the check could never fire.
    # Test the inputs directly, matching the error message's intent.
    if (shell_param1 or shell_param2 or shell_param3) and not shell_remote:
        raise ValueError("A remote command was specified but shell_remote was set to False")
    return ' '.join(parts)
package net.natroutter.hubcore.handlers;
import net.natroutter.hubcore.Handler;
import net.natroutter.hubcore.HubCore;
import net.natroutter.hubcore.events.AdminModeToggleEvent;
import net.natroutter.hubcore.features.SelectorItems.SelectorItemHandler;
import net.natroutter.hubcore.files.Translations;
import net.natroutter.natlibs.handlers.Database.YamlDatabase;
import net.natroutter.natlibs.handlers.LangHandler.language.LangManager;
import net.natroutter.natlibs.utilities.StringHandler;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
public class AdminModeHandler {

    // Backing services resolved once from the shared Handler.
    private final YamlDatabase database;
    private final LangManager lang;
    private final SelectorItemHandler selectorItemHandler;

    public AdminModeHandler(Handler handler) {
        database = handler.getYamlDatabase();
        lang = handler.getLang();
        selectorItemHandler = handler.getSelectorItemHandler();
    }

    /** @return whether the player currently has admin mode enabled. */
    public boolean isAdmin(Player p) {
        return database.getBoolean(p, "Adminmode");
    }

    /**
     * Enable or disable admin mode for a player.
     * Fires a cancellable {@link AdminModeToggleEvent} first; when the event
     * is cancelled nothing is persisted or sent.
     */
    public void setAdminMode(Player p, boolean state, boolean silent) {
        AdminModeToggleEvent toggleEvent = new AdminModeToggleEvent(p, state);
        Bukkit.getPluginManager().callEvent(toggleEvent);
        if (toggleEvent.isCancelled()) {
            return;
        }
        database.save(p, "Adminmode", state);
        StringHandler feedback = new StringHandler(lang.get(Translations.AdminModeToggle));
        feedback.setPrefix(lang.get(Translations.Prefix));
        feedback.replaceAll("{state}", lang.get(state
                ? Translations.ToggleStates_on
                : Translations.ToggleStates_off));
        if (!state) {
            // Leaving admin mode restores the selector items.
            selectorItemHandler.update(p);
        }
        if (!silent) {
            feedback.send(p);
        }
    }

    public void ToggleAdmin(Player p) {
        ToggleAdmin(p, false);
    }

    /** Flip the stored admin-mode flag for the player. */
    public void ToggleAdmin(Player p, boolean silent) {
        setAdminMode(p, !database.getBoolean(p, "Adminmode"), silent);
    }
}
|
"""Unconference Project Settings"""
import dj_database_url
from os import environ
from unipath import FSPath as Path
# Helper for gracefully degrading env variables. Adapted from
# http://rdegges.com/devops-django-part-3-the-heroku-way
# NOTE: dict.has_key() was removed in Python 3; environ.get() is the
# equivalent and works on both Python 2 and 3.
def env(e, d):
    """Return environment variable ``e``, or the default ``d`` if unset."""
    return environ.get(e, d)

# EventBrite API Info
EB_APPKEY = env('EB_APPKEY', None)
EB_USERKEY = env('EB_USERKEY', None)
EB_OAUTHKEY = env('EB_OAUTHKEY', None)
EB_EVENTID = env('EB_EVENTID', None)

# Google Analytics Information
GA_ID = env('GA_ID', None)
GA_DOMAIN = env('SITE_DOMAIN', None)
BASE = Path(__file__).absolute().ancestor(2)
APP = Path(__file__).absolute().ancestor(1)
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('<NAME>', '<EMAIL>'),
)
MANAGERS = ADMINS
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
MEDIA_URL = '/media/'
# Use S3 for static file storage when an AWS bucket is configured in the
# environment; fall back to a local static URL otherwise.
# NOTE: dict.has_key() was removed in Python 3; the `in` operator is the
# portable equivalent.
if 'AWS_STORAGE_BUCKET' in environ:
    AWS_STORAGE_BUCKET_NAME = environ['AWS_STORAGE_BUCKET']
    AWS_ACCESS_KEY_ID = environ['AWS_ACCESS_KEY_ID']
    AWS_SECRET_ACCESS_KEY = environ['AWS_SECRET_ACCESS_KEY']
    STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
    DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
    S3_URL = 'http://%s.s3.amazonaws.com/' % AWS_STORAGE_BUCKET_NAME
    STATIC_URL = S3_URL
    ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
else:
    STATIC_URL = '/static/'

MEDIA_ROOT = BASE.child('media')
STATIC_ROOT = BASE.child('static')
STATICFILES_DIRS = (
BASE.child('design'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = env('SECRET_KEY', 'lo7i8ko)i00be5!%45*l2i6_1$5ylbkv-w1nk87#ge9f^)(cv@')
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.i18n",
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.media',
'django.core.context_processors.static',
'thewall.session.processors.general',
'thewall.session.processors.session_times',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'thewall.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'thewall.wsgi.application'
TEMPLATE_DIRS = [APP.child('templates')]
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Django Admin Related Apps
'django.contrib.admin',
'django.contrib.admindocs',
# Heroku Specific Apps Here
'gunicorn',
# 3rd Party Apps We Need
'djangorestframework',
'storages',
'boto',
'ajax_select',
# Our Apps
'thewall'
)
# Third party configuration
# define the lookup channels in use on the site
AJAX_LOOKUP_CHANNELS = {
# pass a dict with the model and the field to search against
'participant' : {'model':'participants.participant', 'search_field':'name'}
}
# magically include jqueryUI/js/css
AJAX_SELECT_BOOTSTRAP = True
AJAX_SELECT_INLINES = 'inline'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
# Import from localsettings
try:
from thewall.localsettings import *
except ImportError:
pass
|
import { Wrapper } from "../Wrapper";
import { StyledHeader, StyledNav, StyledList } from "./styles";
import { Title, Subtitle, Link } from "../Text";
function Header() {
return (
<StyledHeader>
<Wrapper.Container>
<StyledNav>
<Link href="/">
<Title>Treetech Alarmes</Title>
</Link>
<StyledList>
<li>
<Link href="/alarmes">Alarmes</Link>
</li>
<li>
<Link href="/equipamentos">Equipamentos</Link>
</li>
</StyledList>
</StyledNav>
</Wrapper.Container>
</StyledHeader>
)
}
export default Header; |
import React, {Component} from 'react'
import {Form, Divider} from 'semantic-ui-react'
import {connect} from 'react-redux'
import {newUser, auth} from '../store/user'
// Controlled sign-up form backed by semantic-ui-react. All field values are
// held in local component state; submitting dispatches the `addUser` action
// (wired up via mapDispatchToProps below) and then clears the form.
class UserForm extends Component {
  constructor() {
    super()
    // One key per input; each key matches the corresponding input's `name`
    // attribute so handleChange can update any field generically.
    this.state = {
      userName: '',
      firstName: '',
      lastName: '',
      email: '',
      password: ''
    }
  }

  // Generic change handler: the input's `name` attribute is the state key.
  handleChange = evt =>
    this.setState({
      [evt.target.name]: evt.target.value
    })

  // Dispatch the collected values, then reset every field to empty.
  handleSubmit = evt => {
    evt.preventDefault()
    const {userName, firstName, lastName, email, password} = this.state
    const newUserInfo = {userName, firstName, lastName, email, password}
    this.props.addUser(newUserInfo)
    this.setState({
      userName: '',
      firstName: '',
      lastName: '',
      email: '',
      password: ''
    })
  }

  render() {
    const {userName, firstName, lastName, email, password} = this.state
    // Signup error surfaced from the Redux store (see mapStateToProps).
    const error = this.props.error
    return (
      <div className="signup">
        <Form onSubmit={this.handleSubmit} className="signupForm">
          {/* Show the backend's error text when the last signup failed. */}
          {error && error.response && <div> {error.response.data} </div>}
          <Divider horizontal>User Name</Divider>
          <Form.Input
            placeholder="Username"
            name="userName"
            value={userName}
            onChange={this.handleChange}
          />
          <Divider horizontal>First Name</Divider>
          <Form.Input
            placeholder="First Name"
            name="firstName"
            value={firstName}
            onChange={this.handleChange}
          />
          <Divider horizontal>Last Name</Divider>
          <Form.Input
            placeholder="Last Name"
            name="lastName"
            value={lastName}
            onChange={this.handleChange}
          />
          <Divider horizontal>Email</Divider>
          <Form.Input
            placeholder="email"
            name="email"
            value={email}
            onChange={this.handleChange}
          />
          <Divider horizontal>Password</Divider>
          <Form.Input
            placeholder="Password"
            name="password"
            value={password}
            onChange={this.handleChange}
          />
          <Divider horizontal />
          <Form.Button color="teal" content="Submit" />
        </Form>
      </div>
    )
  }
}
// Expose any signup error from the user slice so the form can display it.
const mapStateToProps = ({user}) => ({
  error: user.error
})

// Bind the account-creation action for use as `this.props.addUser`.
const mapDispatchToProps = dispatch => ({
  addUser: user => dispatch(newUser(user))
})

export default connect(mapStateToProps, mapDispatchToProps)(UserForm)
|
<gh_stars>1-10
package prefix_sum;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 20159번: 동작 그만, 밑장 빼기냐?
*
* @see https://www.acmicpc.net/problem/20159
*
*/
public class Boj20159 {

    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));

        // First input line: N, the number of cards.
        int N = Integer.parseInt(br.readLine());
        // Second input line: the N card values in deck order.
        StringTokenizer st = new StringTokenizer(br.readLine());

        int[] cards = new int[N];
        for(int i = 0; i < N; i++) {
            cards[i] = Integer.parseInt(st.nextToken());
        }
        System.out.println(flip(N, cards));
    }

    /**
     * Try every position i (1..n) at which the deal pattern can switch and
     * return the best achievable total, using the alternating prefix sums
     * from {@link #prefixSum(int[])}.
     *
     * NOTE(review): odd i adds cards[n - 1] (the bottom card) — this looks
     * like the "pull from the bottom" move of the problem statement; confirm
     * against https://www.acmicpc.net/problem/20159.
     *
     * @param n     number of cards (assumed even — TODO confirm against the
     *              problem's input constraints; size below relies on it)
     * @param cards card values in deck order
     * @return maximum obtainable sum
     */
    private static int flip(int n, int[] cards) {
        int[][] arr = prefixSum(cards);
        int max = 0;
        // Last valid index into each parity prefix-sum row.
        int size = n / 2 - 1;
        for(int i = 1; i <= n; i++) { // find sum by prefix sum
            int div = i / 2;
            // Even split point: pure combination of the two parity rows.
            // Odd split point: same, plus the bottom card cards[n - 1].
            if(i % 2 == 0) max = Math.max(arr[1][div] + (arr[0][size] - arr[0][div - 1]), max);
            else max = Math.max(arr[1][div] + (arr[0][size] - arr[0][div]) + cards[n - 1], max);
        }
        return max;
    }

    /**
     * Build two one-based prefix-sum rows over alternating card positions:
     * arr[1] accumulates cards at even indices (0, 2, ...) and arr[0]
     * accumulates cards at odd indices (1, 3, ...). Index 0 of each row is
     * the empty sum.
     */
    private static int[][] prefixSum(int[] c) {
        int[][] arr = new int[2][c.length / 2 + 1];
        for (int i = 0; i < c.length; i++) {
            int div = i / 2;
            if(i % 2 != 0) arr[0][div + 1] = arr[0][div] + c[i];
            else arr[1][div + 1] = arr[1][div] + c[i];
        }
        return arr;
    }
}
|
#!/bin/bash
# Generate a LEF view from a .mag layout with Magic, then copy the GDS_*
# properties from each mag cell into the matching maglef cell.
#
# Usage: mag2lef-mag.sh <cellname>    (loads <cellname>.mag)
#
# NOTE(review): the original file had shell fragments interleaved with
# pasted example output; the loop below is the reconstructed intent and the
# examples are preserved as comments. Verify against the original workflow.

MAGTYPE=mag magic -dnull -noconsole <<EOF
drc off
load $1.mag
select top cell
expand
lef write zcomm.lef -hide
quit -noprompt
EOF

# cd $PDKPATH/libs.ref/mag/efs8_pads/
# for x in `ls *.mag`; do echo $x; /home/mk/foss/tools/utils/mag2lef-mag.sh ${x%.*} ; done
# mv *.lef ../../lef/efs8_pads/
# cd ../../lef/efs8_pads/
# for x in `ls *.lef` ; do echo $x ; /home/mk/foss/tools/utils/lef2maglef-mag.sh ${x%.*} ; done
# cd ../../maglef/efs8_pads/

# For every mag cell with a matching maglef view, append the mag file's
# GDS_* property lines into the maglef's "<< properties >>" block.
#
# Example maglef properties block (before):
#   << properties >>
#   string LEFclass BLOCK
#   string FIXED_BBOX 0 0 16000 33000
#   string LEFview TRUE
#   << end >>
#
# Example mag properties block (source of the GDS_* lines):
#   << properties >>
#   string FIXED_BBOX 0 0 16000 33000
#   string GDS_FILE /ef/tech/SW/EFS8A/libs.ref/gds/efs8_pads/efs8_pads.gds
#   string GDS_END 19606640
#   string GDS_START 19506986
#   << end >>
for xmag in ./mag/efs8_pads/*.mag; do
  filename=$(basename "$xmag")
  xmaglef="./maglef/efs8_pads/$filename"
  if [ -f "$xmaglef" ]; then
    echo "Modifying $xmaglef"
    # GDS_* property lines from the mag cell's properties section.
    properties="$(sed -ne '/ properties /,$ p' "$xmag" | grep "GDS_")"
    while IFS= read -r prop; do
      # Insert each property line right after the "properties" marker.
      sed -i "/properties/a$prop" "$xmaglef"
    done <<< "$properties"
  fi
done
|
# Build the gradunwarp Docker image from the Dockerfile in the current directory.
docker build -t gradunwarp .
<gh_stars>0
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Options specific to resources managed by Keystone (Domain, User, etc)."""
import six
from keystone.common import validation
from keystone.i18n import _
def _validator(value):
    """Default no-op validator: accepts any value without complaint."""
    return
def boolean_validator(value):
    """Raise TypeError unless ``value`` compares equal to True or False.

    NOTE: the membership test uses ``==``, so the integers 1 and 0 are
    accepted as well (they compare equal to True/False).
    """
    if value in (True, False):
        return
    raise TypeError(_('Expected boolean value, got %r') % type(value))
def ref_mapper_to_dict_options(ref):
    """Convert the values in _resource_option_mapper to an options dict.

    NOTE: this is to be called from the relevant `to_dict` methods or
    similar and must be called from within the active session context.

    :param ref: the DB model ref to extract options from
    :returns: dict of options, keyed by registered option name, as expected
        under the ``options`` key of `to_dict` output.
    """
    registry = ref.resource_options_registry
    result = {}
    for stored in ref._resource_option_mapper.values():
        # Skip anything that is no longer a registered option.
        if stored.option_id not in registry.option_ids:
            continue
        registered = registry.get_option_by_id(stored.option_id)
        if registered is not None:
            result[registered.option_name] = stored.option_value
    return result
def resource_options_ref_to_mapper(ref, option_class):
    """Flush the staged ``_resource_options`` dict into the attribute map.

    The model must expose the option mapper as ``_resource_option_mapper``
    and the option registry as ``resource_options_registry``. Staged values
    are read from ``ref._resource_options`` (key: option id, value: option
    value); a value of None unsets the option. Options stored in the mapper
    that are no longer registered are removed.

    NOTE: This function MUST be called within the active writer session
    context!

    :param ref: the DB model reference that is actually stored to the
        backend.
    :param option_class: class used to store a resource option in the DB.
    """
    staged = getattr(ref, '_resource_options', None)
    if staged is None:
        # Nothing staged; work from an empty option set.
        staged = {}
    else:
        # Drop the staging attribute so no stale reference lingers.
        delattr(ref, '_resource_options')

    # Use .keys() explicitly: the attribute mapper has some oddities when
    # iterated directly, this guarantees we work with plain keys.
    stored_ids = set(ref._resource_option_mapper.keys())

    # Stored options that are no longer registered get slated for deletion
    # by mapping them to None (overriding any staged value).
    dropped = stored_ids.difference(ref.resource_options_registry.option_ids)
    staged.update(dict.fromkeys(dropped))

    for opt_id, opt_value in staged.items():
        if opt_value is None:
            # None means "unset": remove from the mapper if present,
            # ignore options that were never stored.
            ref._resource_option_mapper.pop(opt_id, None)
        else:
            ref._resource_option_mapper[opt_id] = option_class(
                option_id=opt_id,
                option_value=opt_value)
class ResourceOptionRegistry(object):
    """Collection of ResourceOption definitions for one resource type.

    Options are keyed by their immutable 4-character option id; option
    names must also be unique within a registry.
    """

    def __init__(self, registry_name):
        # option_id -> ResourceOption
        self._registered_options = {}
        # Human-readable registry label, used in error messages.
        self._registry_type = registry_name

    @property
    def option_names(self):
        """Set of all registered option names."""
        return {opt.option_name for opt in self.options}

    @property
    def options_by_name(self):
        """Dict mapping option name -> ResourceOption."""
        return {opt.option_name: opt
                for opt in self._registered_options.values()}

    @property
    def options(self):
        """All registered ResourceOption objects."""
        return self._registered_options.values()

    @property
    def option_ids(self):
        """Set of all registered option ids."""
        return set(self._registered_options.keys())

    def get_option_by_id(self, opt_id):
        """Return the option registered under ``opt_id``, or None."""
        return self._registered_options.get(opt_id, None)

    def get_option_by_name(self, name):
        """Return the option registered under ``name``, or None."""
        # Reuse the name index instead of scanning values manually.
        return self.options_by_name.get(name)

    @property
    def json_schema(self):
        """JSON schema validating the full options dict for this registry."""
        schema = {'type': 'object',
                  'properties': {},
                  'additionalProperties': False}
        for opt in self.options:
            if opt.json_schema is not None:
                # NOTE(notmorgan): All options are nullable. Null indicates
                # the option should be reset and removed from the DB store.
                schema['properties'][opt.option_name] = validation.nullable(
                    opt.json_schema)
            else:
                # NOTE(notmorgan): without 'type' being specified, this
                # can be of any-type. We are simply specifying no interesting
                # values beyond that the property may exist here.
                schema['properties'][opt.option_name] = {}
        return schema

    def register_option(self, option):
        """Add ``option`` to the registry.

        Re-registering the identical option object is a no-op; registering
        a different option with a duplicate id or name raises ValueError.
        """
        if option in self.options:
            # Re-registering the exact same option does nothing.
            return
        if option.option_id in self._registered_options:
            raise ValueError(_('Option %(option_id)s already defined in '
                               '%(registry)s.') %
                             {'option_id': option.option_id,
                              'registry': self._registry_type})
        if option.option_name in self.option_names:
            raise ValueError(_('Option %(option_name)s already defined in '
                               '%(registry)s') %
                             {'option_name': option.option_name,
                              'registry': self._registry_type})
        self._registered_options[option.option_id] = option
class ResourceOption(object):

    def __init__(self, option_id, option_name, validator=_validator,
                 json_schema_validation=None):
        """The base object to define the option(s) to be stored in the DB.

        :param option_id: The ID of the option. This will be used to lookup
                          the option value from the DB and should not be
                          changed once defined as the values will no longer
                          be correctly mapped to the keys in the user_ref when
                          retrieving the data from the DB.
        :type option_id: str
        :param option_name: The name of the option. This value will be used
                            to map the value from the user request on a
                            resource update to the correct option id to be
                            stored in the database. This value should not be
                            changed once defined as it will change the
                            resulting keys in the user_ref.
        :type option_name: str
        :param validator: A callable that raises TypeError if the value to be
                          persisted is incorrect. A single argument of the
                          value to be persisted will be passed to it. No return
                          value is expected.
        :type validator: callable
        :param json_schema_validation: Dictionary defining the JSON schema
                                       validation for the option itself. This
                                       is used to generate the JSON Schema
                                       validator(s) used at the API layer
        :type json_schema_validation: dict
        """
        # BUG FIX: the original guard read
        #   if not isinstance(option_id, six.string_types) \
        #           and len(option_id) == 4:
        # so the TypeError only fired for non-strings that happened to have
        # length 4, and unsized non-strings (e.g. int) crashed inside len()
        # with an unrelated error. Validate the type first, then the length.
        if not isinstance(option_id, six.string_types):
            raise TypeError(_('`option_id` must be a string, got %r')
                            % option_id)
        if len(option_id) != 4:
            raise ValueError(_('`option_id` must be 4 characters in '
                               'length. Got %r') % option_id)
        if not isinstance(option_name, six.string_types):
            raise TypeError(_('`option_name` must be a string. '
                              'Got %r') % option_name)
        self._option_id = option_id
        self._option_name = option_name
        self.validator = validator
        self._json_schema_validation = json_schema_validation

    @property
    def json_schema(self):
        """JSON schema dict for this option's value, or None if undefined."""
        return self._json_schema_validation or None

    @property
    def option_name(self):
        # NOTE(notmorgan) Option names should never be set outside of
        # definition time.
        return self._option_name

    @property
    def option_id(self):
        # NOTE(notmorgan) Option IDs should never be set outside of definition
        # time.
        return self._option_id
|
#!/bin/sh
# Download daily GDELT event exports and stage them in S3.
#
# AWS CLI MUST be installed to run this script
# For more info, please visit:
# https://linuxhint.com/install_aws_cli_ubuntu/
#
# To run this file:
# $ sh gdelt-to-s3.sh <startdate> <enddate>
#
# Example: To process files from September
# $ sh gdelt-to-s3.sh 20200901 20200930
#
# Credentials are taken from the environment:
# export EC2_ACCESS_KEY=<Access Key>
# export EC2_SECRET_KEY=<Secret Key>

START_FILE=${1:?usage: gdelt-to-s3.sh <startdate> <enddate>}  # First date stamp (YYYYMMDD)
END_FILE=${2:?usage: gdelt-to-s3.sh <startdate> <enddate>}    # Last date stamp (YYYYMMDD)
MY_BUCKET='gelt-folder/gdelt-data/'

# Download one day's export, transfer it to S3, then delete the local copies.
# $1 - the YYYYMMDD date stamp of the file to transfer.
# (The original body read the global THIS_FILE instead of its argument;
# it now uses the parameter it is given.)
transfer_data(){
    day=$1
    curl -O "http://data.gdeltproject.org/events/${day}.export.CSV.zip"
    unzip "${day}.export.CSV.zip"
    echo "Uploading ${day}.export.CSV to s3..."
    # Copy file to S3
    aws s3 cp "${day}.export.CSV" "s3://${MY_BUCKET}"
    echo 'Uploaded to s3...'
    echo 'Deleting .zip file...'
    rm -f "${day}.export.CSV.zip"
    echo 'Deleted .zip file...'
    echo 'Deleting .csv file...'
    rm -f "${day}.export.CSV"
    echo 'Deleted .csv file...'
    echo "Done with ${day}"
}

# Loop over the date range provided.
# NOTE(review): seq iterates plain integers, so impossible date stamps
# (e.g. 20200932..20200999) are also requested; curl/unzip fail harmlessly
# on those, but a real calendar loop would be cleaner.
for stamp in $(seq "$START_FILE" "$END_FILE")
do
    transfer_data "$stamp"
done

# Print out number of objects in bucket as well as bucket size
aws s3 ls --summarize --human-readable --recursive s3://gelt-folder | tail -n 2 | awk -F" " '{print $1 $2 $3 $4}'
|
<reponame>infamousSs/zod
package com.infamous.zod.ftp.um.config;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.infamous.zod.ftp.FTPServerConfigProperties;
import com.infamous.zod.ftp.model.FTPUser;
import com.infamous.zod.ftp.um.FTPDataStore;
import com.infamous.zod.ftp.um.FTPUserManager;
import java.util.Collections;
import javax.persistence.EntityManager;
import org.apache.ftpserver.ftplet.User;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
import org.springframework.boot.ExitCodeEvent;
import org.springframework.boot.ExitCodeGenerator;
import org.springframework.context.ApplicationContext;
class AddAdminUserConfigurationTest {
private AddAdminUserConfiguration m_addAdminUserConfiguration;
private FTPServerConfigProperties m_serverConfigProperties;
private FTPUserManager m_ftpUserManager;
private FTPDataStore m_ds;
private ApplicationContext m_applicationContext;
@BeforeEach
public void setup() {
m_serverConfigProperties = mock(FTPServerConfigProperties.class);
m_ftpUserManager = mock(FTPUserManager.class);
m_ds = mock(FTPDataStore.class);
m_applicationContext = mock(ApplicationContext.class);
mockFtpServerConfig();
mockAppContext();
when(m_ftpUserManager.hashPassword("password")).thenReturn("<PASSWORD>");
m_addAdminUserConfiguration = new AddAdminUserConfiguration(m_serverConfigProperties, m_ftpUserManager, m_ds,
m_applicationContext);
}
public void mockFtpServerConfig() {
when(m_serverConfigProperties.getUsername()).thenReturn("admin");
when(m_serverConfigProperties.getPassword()).thenReturn("password");
when(m_serverConfigProperties.getRootFolder()).thenReturn("src/test/resources/ftp");
when(m_serverConfigProperties.getWorkspace()).thenReturn("wanderer");
}
public void mockAppContext() {
when(m_applicationContext.getBeansOfType(ExitCodeGenerator.class)).thenReturn(Collections.emptyMap());
}
@Test
public void testDoPostConstruct_WhenDBIsNotConnected() {
mockOpenOrCloseDB(false);
assertFalse(m_ds.isOpen());
assertDoesNotThrow(() -> m_addAdminUserConfiguration.doPostConstruct());
verify(m_applicationContext).getBeansOfType(eq(ExitCodeGenerator.class));
verify(m_applicationContext).publishEvent(any(ExitCodeEvent.class));
}
public void mockOpenOrCloseDB(boolean isOpen) {
EntityManager mockEm = mock(EntityManager.class);
when(m_ds.isOpen()).thenReturn(isOpen);
}
@Test
public void testDoPostConstruct_ExceptionOccurWhenSavingUser() {
mockOpenOrCloseDB(true);
assertTrue(m_ds.isOpen());
when(m_serverConfigProperties.getPassword()).thenReturn("12");
assertDoesNotThrow(() -> m_addAdminUserConfiguration.doPostConstruct());
verify(m_applicationContext).getBeansOfType(eq(ExitCodeGenerator.class));
verify(m_applicationContext).publishEvent(any(ExitCodeEvent.class));
}
@Test
public void testDoPostConstruct_Successfully() throws Exception {
mockOpenOrCloseDB(true);
assertTrue(m_ds.isOpen());
assertDoesNotThrow(() -> m_addAdminUserConfiguration.doPostConstruct());
verify(m_ftpUserManager).hashPassword(eq("password"));
verify(m_ftpUserManager).save(argThat(user -> user instanceof FTPUser
&& user.getName().equals("admin")
&& !user.getPassword().equals("password")
&& ((FTPUser) user).isAdmin()
&& user.getHomeDirectory().equals("src/test/resources/ftp/wanderer")));
verify(m_applicationContext, never()).getBeansOfType(eq(ExitCodeGenerator.class));
verify(m_applicationContext, never()).publishEvent(any(ExitCodeEvent.class));
}
} |
<reponame>madebymany/fog<gh_stars>1-10
# Shindo request tests for Fog's OpenStack compute tenant endpoints.
Shindo.tests('Fog::Compute[:openstack] | tenant requests', ['openstack']) do
  # Expected shape of a single tenant hash in the #list_tenants response body.
  @tenant_format = {
    'id' => String,
    'name' => String,
    'enabled' => Fog::Boolean,
    'description' => String
  }

  tests('success') do
    # The response body must be {'tenants' => [...]} where every element
    # matches @tenant_format.
    tests('#list_tenants').formats({'tenants' => [@tenant_format]}) do
      Fog::Compute[:openstack].list_tenants.body
    end

    # Switching the active tenant to "admin" should succeed without raising.
    tests('#set_tenant("admin")').succeeds do
      Fog::Compute[:openstack].set_tenant("admin")
    end
  end
end
|
# Prompt: include the hostname (%m) when connected over SSH so remote shells
# are distinguishable; otherwise show only the last 3 path components (%3~)
# plus git info and the privilege marker (%#).
if [[ -n $SSH_CONNECTION ]]; then
  export PS1='%m:%3~$(git_info_for_prompt)%# '
else
  export PS1='%3~$(git_info_for_prompt)%# '
fi

# BSD/macOS ls colors; CLICOLOR enables colorized ls output.
export LSCOLORS="exfxcxdxbxegedabagacad"
export CLICOLOR=true

# Mark every file under $DOTFILES/functions for autoload by basename —
# the (:t) glob modifier strips the directory portion of each match.
autoload -U $DOTFILES/functions/*(:t)

setopt NO_HUP            # don't HUP background jobs when the shell exits
setopt NO_LIST_BEEP      # no beep on ambiguous completion listings
setopt LOCAL_OPTIONS # allow functions to have local options
setopt LOCAL_TRAPS # allow functions to have local traps
setopt PROMPT_SUBST      # expand $(...) inside PS1 (required for git info above)
setopt CORRECT           # offer spelling correction for mistyped commands
setopt COMPLETE_IN_WORD  # complete from the cursor position, not only word end
setopt IGNORE_EOF        # don't let Ctrl-D at an empty prompt exit the shell

# don't expand aliases _before_ completion has finished
# like: git comm-[tab]
setopt complete_aliases

# Register the custom newtab function as a zle widget so it can be bound.
zle -N newtab

# Key bindings. NOTE(review): the raw escape sequences below are
# terminal-emulator dependent — verify they match your terminal's keymap.
bindkey '^[^[[D' backward-word
bindkey '^[^[[C' forward-word
bindkey '^[[5D' beginning-of-line
bindkey '^[[5C' end-of-line
bindkey '^[[3~' delete-char
bindkey '^[^N' newtab
bindkey '^?' backward-delete-char
|
package com.segmentify.segmentifysdk;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Button;
import com.google.firebase.iid.FirebaseInstanceId;
import com.segmentify.segmentifyandroidsdk.SegmentifyManager;
import com.segmentify.segmentifyandroidsdk.model.BannerGroupViewModel;
import com.segmentify.segmentifyandroidsdk.model.BannerOperationsModel;
import com.segmentify.segmentifyandroidsdk.model.CheckoutModel;
import com.segmentify.segmentifyandroidsdk.model.InternalBannerModel;
import com.segmentify.segmentifyandroidsdk.model.NotificationModel;
import com.segmentify.segmentifyandroidsdk.model.NotificationType;
import com.segmentify.segmentifyandroidsdk.model.PageModel;
import com.segmentify.segmentifyandroidsdk.model.ProductModel;
import com.segmentify.segmentifyandroidsdk.model.RecommendationModel;
import com.segmentify.segmentifyandroidsdk.utils.SegmentifyCallback;
import java.util.ArrayList;
import java.util.Arrays;
public class EventActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
PageModel model = new PageModel();
model.setCategory("Home Page");
SegmentifyManager.INSTANCE.sendPageView(model, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
PageModel pageModel = new PageModel();
pageModel.setCategory("Product Page");
pageModel.setLang("EN");
pageModel.setRegion("EU");
SegmentifyManager.INSTANCE.sendPageView(pageModel, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
ProductModel productModel = new ProductModel();
ArrayList<String> categories = new ArrayList<String>();
categories.add("Shop");
categories.add("Toys");
productModel.setProductId("30000-1");
productModel.setTitle("Toys Forky");
productModel.setUrl("https://ihalilaltun.me/toys-forky/");
productModel.setImage("https://cdn11.bigcommerce.com/s-5ylnei6or5/images/stencil/500x500/products/1982/5015/2929_Forky_TS4_23_wModel__61743.1559248389.jpg?c=2");
productModel.setCategories(categories);
productModel.setPrice(45.75);
SegmentifyManager.INSTANCE.sendProductView(productModel, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
ArrayList<ProductModel> productList = new ArrayList<>();
ProductModel productPurchaseModel = new ProductModel();
productPurchaseModel.setPrice(45.75);
productPurchaseModel.setQuantity(2.3);
productPurchaseModel.setProductId("30000-1");
productList.add(productPurchaseModel);
CheckoutModel checkoutModel = new CheckoutModel();
checkoutModel.setProductList(productList);
checkoutModel.setTotalPrice(156.0);
SegmentifyManager.INSTANCE.sendViewBasket(checkoutModel, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
SegmentifyManager.INSTANCE.sendPageView("Checkout Success Page", null, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
ArrayList<ProductModel> checkOutProductList = new ArrayList<>();
ProductModel checkOutProductModel = new ProductModel();
checkOutProductModel.setPrice(78.0);
checkOutProductModel.setQuantity(2.75);
checkOutProductModel.setProductId("30000-1");
checkOutProductList.add(productModel);
CheckoutModel checkoutModel1 = new CheckoutModel();
checkoutModel1.setProductList(productList);
checkoutModel1.setTotalPrice(156.0);
checkoutModel1.setOrderNo("order1");
SegmentifyManager.INSTANCE.sendPurchase(checkoutModel1, new SegmentifyCallback<ArrayList<RecommendationModel>>() {
@Override
public void onDataLoaded(ArrayList<RecommendationModel> data) {
if (data != null) {
System.out.println(data);
}
}
});
BannerOperationsModel bannerImpressionOperationModel = new BannerOperationsModel();
bannerImpressionOperationModel.setBannerType("impression");
bannerImpressionOperationModel.setTitle("New Season Men Shoes");
bannerImpressionOperationModel.setGroup("Home Page Slider");
bannerImpressionOperationModel.setOrder(1);
SegmentifyManager.INSTANCE.sendBannerImpressionEvent(bannerImpressionOperationModel);
BannerOperationsModel bannerClickOperationModel = new BannerOperationsModel();
bannerClickOperationModel.setBannerType("click");
bannerClickOperationModel.setTitle("New Season Women Shoes");
bannerClickOperationModel.setGroup("Home Page Slider");
bannerClickOperationModel.setOrder(2);
SegmentifyManager.INSTANCE.sendBannerClickEvent(bannerClickOperationModel);
BannerOperationsModel bannerUpdateOperationModel = new BannerOperationsModel();
bannerUpdateOperationModel.setBannerType("update");
bannerUpdateOperationModel.setTitle("New Season Women Shoes");
bannerUpdateOperationModel.setGroup("Home Page Slider");
bannerUpdateOperationModel.setOrder(3);
SegmentifyManager.INSTANCE.sendBannerUpdateEvent(bannerUpdateOperationModel);
BannerGroupViewModel bannerGroupViewModel = new BannerGroupViewModel();
bannerGroupViewModel.setGroup("Home Page Slider");
ArrayList<InternalBannerModel> internalBannerModels = new ArrayList<>();
InternalBannerModel internalBannerModel = new InternalBannerModel();
internalBannerModel.setTitle("Gorgeous Duo T-Shirt & Trousers");
internalBannerModel.setOrder(1);
internalBannerModel.setImage("https://www.example.com/gorgeous-duo-tshirt-trousers.jpg");
internalBannerModel.setUrls(new ArrayList<>(Arrays.asList("https://www.example.com/gorgeous-duo-tshirt-trousers")));
internalBannerModels.add(internalBannerModel);
internalBannerModel = new InternalBannerModel();
internalBannerModel.setTitle("Ready to Renew");
internalBannerModel.setOrder(2);
internalBannerModel.setImage("https://www.example.com/ready-to-renew.jpg");
internalBannerModel.setUrls(new ArrayList<>(Arrays.asList("https://www.example.com/ready-to-renew")));
internalBannerModels.add(internalBannerModel);
SegmentifyManager.INSTANCE.sendBannerGroupViewEvent(bannerGroupViewModel);
Button subscribeButton = findViewById(R.id.button);
subscribeButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
NotificationModel model = new NotificationModel();
model.setDeviceToken(FirebaseInstanceId.getInstance().getToken());
model.setType(NotificationType.PERMISSION_INFO);
SegmentifyManager.INSTANCE.sendNotification(model);
}
});
}
} |
import React from 'react'
import ProjectBlock from '../projectBlock'
import { links } from '../../../data/links'
import ProjectImage from '../../../images/Lush Delay/Lush Delay UI.svg'
import { Typography } from '@material-ui/core'
export default () => {
const containerStyle = {
width: '100%',
height: '100%',
display: 'flex',
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'hsl(4, 35%, 25%)'
}
const imageStyle = {
width: '80%',
maxWidth: '30rem'
}
const projectImage = (
<div style={containerStyle}>
<img src={ProjectImage} style={imageStyle} alt="Lush Delay"/>
</div>
)
return (
<ProjectBlock name="Lush Delay" Image={projectImage} url={links.lushDelay.home} coreTechnology="C++ / JUCE" githubUrl={links.lushDelay.github}>
<Typography component="h4" variant="h5" gutterBottom>
A Delay Effect Plugin with lots of Versatility
</Typography>
<Typography gutterBottom>
Quickly alternate between timed delays, doubling effects, slap delays, modulation, and more.
</Typography>
</ProjectBlock>
)
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.