text stringlengths 1 1.05M |
|---|
<filename>packages/staker/src/i18n.js
// Locale string table for the validator-balance skill.
// One object per locale code; `props` is an (empty) list whose exact
// consumer is not visible in this file — presumably the bot's i18n
// helper; TODO confirm.
const VALIDATOR_EN = {
  needAddr: 'Please pass the index/address or define SAIHUBOT_VALIDATOR first',
  query: 'Query validator\'s balance...',
  accountBalance: 'Account Balance',
  validatorBalance: '(Eth2) Validator Balance',
  xdaiBalance: 'xDai Chain Balance',
};
const VALIDATOR_ZH_TW = {
  needAddr: '請傳入索引/地址,或是預先定義 SAIHUBOT_VALIDATOR 參數',
  query: '查詢驗證器餘額中...',
  accountBalance: '地址餘額',
  validatorBalance: '(Eth2) 驗證者餘額',
  xdaiBalance: 'xDai 鏈上餘額',
};
export const i18nValidator = {
  'en': VALIDATOR_EN,
  'zh_TW': VALIDATOR_ZH_TW,
  props: [],
};
// Locale string table for the address-explorer skill.
// One object per locale code; `props` is an (empty) list whose exact
// consumer is not visible in this file — TODO confirm.
const ADDR_EN = {
  needAddr: 'Please pass the address or define SAIHUBOT_ADDR first',
  pick: 'pick address explorer from the list',
  random: 'Random',
};
const ADDR_ZH_TW = {
  needAddr: '請傳入地址,或是預先定義 SAIHUBOT_ADDR 參數',
  pick: '從列表中選取合適的地址探索工具',
  random: '隨機',
};
export const i18nAddr = {
  'en': ADDR_EN,
  'zh_TW': ADDR_ZH_TW,
  props: [],
};
// Locale string table for the balance-query skill.
// One object per locale code; `props` is an (empty) list whose exact
// consumer is not visible in this file — TODO confirm.
const BALANCE_EN = {
  query: 'Query current balance...',
  addr: 'Address',
  token: 'Symbol',
  balance: 'Balance',
  source: 'Source',
  needAddr: 'Please pass the address or define SAIHUBOT_ADDR first',
};
const BALANCE_ZH_TW = {
  query: '查詢餘額中...',
  addr: '地址',
  token: '幣種',
  balance: '餘額',
  source: '來源',
  needAddr: '請傳入地址或是預先定義 SAIHUBOT_ADDR 參數',
};
export const i18nBalance = {
  'en': BALANCE_EN,
  'zh_TW': BALANCE_ZH_TW,
  props: [],
};
|
package interpreter.interpreter;
import interpreter.ast.Node;
/**
 * Contract for interpreters that execute a parsed AST.
 *
 * Created by Thomas on 2-3-2015.
 */
public interface Interpreter {
    /**
     * Interprets (executes) the given AST node.
     *
     * @param node the root of the AST subtree to interpret;
     *             null handling is implementation-defined —
     *             NOTE(review): confirm with implementations
     */
    void interpret(final Node node);
}
|
import React, {Component} from "react";
import Fold from "pawjs/src/components/fold";
import ReactLogo from "src/resources/images/reactjs.svg";
import SmallImage from "src/resources/images/mario-large.png?sizes=100w+200w+400w+800w&placeholder";
import Picture from "pawjs/src/components/picture/picture";
// import axios from "axios";
export default class Home extends Component {
// initialize state
state = {
selectedFile: null
}
fileSelectedHandler = event => {
console.log("file chosen:");
// select first file in array
console.log(event.target.files[0]);
this.setState({
selectedFile: event.target.files[0]
});
}
fileSelectedHandler = () => {
console.log("POST");
}
render() {
return (
<div className="mt-4">
<img style={{ maxWidth: "150px" }} className="img-fluid mx-auto d-block" src={ReactLogo} alt="ReactJS"/>
<h1 className="h2 text-center mt-3">React PWA</h1>
<input type = "file" onChange={this.fileSelectedHandler}/>
<button onClick={this.fileSelectedHandler}>Upload</button>
<Fold placeholder={() => <div className="text-center mt-3 mb-3">Loading folded content.. Please wait..</div>}>
<p>Image sample converted to webp, uploaded/coded as jpg</p>
<Picture
pictureClassName="d-inline-block my-4"
image={SmallImage}
alt="Small Image"
imgClassName="mw-100"
/>
</Fold>
</div>
);
}
} |
<reponame>desrtfx/AdventOfCode2017
/**
*
*/
/**
* @author desrtfx
*
*/
package day05; |
#!/usr/bin/env bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
echo "Repo uses 'go mod'."

# Step 1: tidy the module files; any failure is a hard error.
if ! (set -x; env GO111MODULE=on go mod tidy); then
  echo "ERROR: vendor check failed."
  exit 1
fi

# Step 2: 'go mod tidy' must not have modified go.mod / go.sum.
if [ "$(git status --porcelain -- go.mod go.sum | wc -l)" -gt 0 ]; then
  echo "ERROR: go module files *not* up-to-date, they did get modified by 'GO111MODULE=on go mod tidy':"
  git diff -- go.mod go.sum
  exit 1
fi

# Step 3: only repos that actually vendor dependencies need the vendor/ check.
if [ ! -d vendor ]; then
  echo "Go dependencies up-to-date."
else
  if ! (set -x; env GO111MODULE=on go mod vendor); then
    echo "ERROR: vendor check failed."
    exit 1
  fi
  if [ "$(git status --porcelain -- vendor | wc -l)" -gt 0 ]; then
    echo "ERROR: vendor directory *not* up-to-date, it did get modified by 'GO111MODULE=on go mod vendor':"
    git status -- vendor
    git diff -- vendor
    exit 1
  fi
  echo "Go dependencies and vendor directory up-to-date."
fi
|
#!/usr/bin/env bash
# Pacing constant (seconds). NOTE(review): nothing else is visible in this
# chunk — presumably sourced/used by a sibling script; confirm the consumer.
DELAY=0.02
|
#!/bin/bash -eE
# Provision a fresh TON node from scratch:
#   1. wipe and recreate the TON work directory,
#   2. detect this host's public IPv4 address,
#   3. install the global network config and generate the initial local
#      config with validator-engine,
#   4. create server / liteserver / client keypairs,
#   5. splice control and liteserver sections into db/config.json.
# Relies on variables provided by env.sh (TON_WORK_DIR, KEYS_DIR, UTILS_DIR,
# TON_BUILD_DIR, CONFIGS_DIR, NETWORK_TYPE, ADNL_PORT).
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)
# shellcheck source=env.sh
. "${SCRIPT_DIR}/env.sh"
echo "INFO: setup TON node..."
# Remember the invoking user/group so sudo-created dirs can be handed back.
SETUP_USER="$(id --user)"
SETUP_GROUP="$(id --group)"
echo "INFO: Getting my public IP..."
# Retry until MY_ADDR contains exactly three dots (a dotted-quad IPv4);
# set +e / set -e tolerate transient curl failures inside this -eE script.
until [ "$(echo "${MY_ADDR}" | grep "\." -o | wc -l)" -eq 3 ] ; do
set +e
MY_ADDR="$(curl -sS ipv4bot.whatismyipaddress.com)":${ADNL_PORT}
set -e
done
echo "INFO: MY_ADDR = ${MY_ADDR}"
# Start from a clean work directory owned by the current user.
sudo rm -rf "${TON_WORK_DIR}"
sudo mkdir -p "${TON_WORK_DIR}"
sudo chown "${SETUP_USER}:${SETUP_GROUP}" "${TON_WORK_DIR}"
mkdir -p "${TON_WORK_DIR}/etc"
mkdir -p "${TON_WORK_DIR}/db"
# Install the network-wide (global) config for the selected network type.
cp "${CONFIGS_DIR}/${NETWORK_TYPE}/ton-global.config.json" "${TON_WORK_DIR}/etc/ton-global.config.json"
echo "INFO: generate initial ${TON_WORK_DIR}/db/config.json..."
# First run of validator-engine only writes the initial local db/config.json.
"${TON_BUILD_DIR}/validator-engine/validator-engine" -C "${TON_WORK_DIR}/etc/ton-global.config.json" --db "${TON_WORK_DIR}/db" --ip "${MY_ADDR}"
# Key material lives in KEYS_DIR: directory mode 700, key files mode 600.
sudo mkdir -p "${KEYS_DIR}"
sudo chown "${SETUP_USER}:${SETUP_GROUP}" "${KEYS_DIR}"
chmod 700 "${KEYS_DIR}"
cd "${KEYS_DIR}"
# Each generate-random-id run writes key files here and one line of id info
# to keys_{s,l,c}; the first column is used below as the keyring filename —
# NOTE(review): confirm the exact output format of generate-random-id.
"${UTILS_DIR}/generate-random-id" -m keys -n server > "${KEYS_DIR}/keys_s"
"${UTILS_DIR}/generate-random-id" -m keys -n liteserver > "${KEYS_DIR}/keys_l"
"${UTILS_DIR}/generate-random-id" -m keys -n client > "${KEYS_DIR}/keys_c"
chmod 600 "${KEYS_DIR}"/*
find "${KEYS_DIR}"
# Private keys move into the node keyring, named by column 1 of the id files.
mv "${KEYS_DIR}/server" "${TON_WORK_DIR}/db/keyring/$(awk '{print $1}' "${KEYS_DIR}/keys_s")"
mv "${KEYS_DIR}/liteserver" "${TON_WORK_DIR}/db/keyring/$(awk '{print $1}' "${KEYS_DIR}/keys_l")"
# awk reads four files in order: keys_s, keys_c, keys_l (one record each,
# NR 1-3: capture the ids from column 2), then db/config.json (NR > 3:
# echo each line, inserting a control-server entry after the "control" key
# and a liteserver entry after "liteservers").
# NOTE(review): ports 3030 (control) and 3031 (liteserver) are hard-coded.
awk '{
if (NR == 1) {
server_id = $2
} else if (NR == 2) {
client_id = $2
} else if (NR == 3) {
liteserver_id = $2
} else {
print $0;
if ($1 == "\"control\"") {
print " {";
print " \"id\": \"" server_id "\","
print " \"port\": 3030,"
print " \"allowed\": ["
print " {";
print " \"id\": \"" client_id "\","
print " \"permissions\": 15"
print " }";
print " ]"
print " }";
} else if ($1 == "\"liteservers\"") {
print " {";
print " \"id\": \"" liteserver_id "\","
print " \"port\": 3031"
print " }";
}
}
}' "${KEYS_DIR}/keys_s" "${KEYS_DIR}/keys_c" "${KEYS_DIR}/keys_l" "${TON_WORK_DIR}/db/config.json" > "${TON_WORK_DIR}/db/config.json.tmp"
# Replace the config via tmp file so a failed awk does not truncate it.
mv "${TON_WORK_DIR}/db/config.json.tmp" "${TON_WORK_DIR}/db/config.json"
find "${TON_WORK_DIR}"
echo "INFO: setup TON node... DONE"
|
Advanced Bash-Scripting Guide:
Prev Chapter 16. External Filters, Programs and Commands Next
16.4. Text Processing Commands
Commands affecting text and text files
sort
File sort utility, often used as a filter in a pipe. This command sorts a text stream or file forwards or backwards, or according to various keys or character positions. Using the -m option, it merges presorted input files. The info page lists its many capabilities and options. See Example 11-10, Example 11-11, and Example A-8.
tsort
Topological sort, reading in pairs of whitespace-separated strings and sorting according to input patterns. The original purpose of tsort was to sort a list of dependencies for an obsolete version of the ld linker in an "ancient" version of UNIX.
The results of a tsort will usually differ markedly from those of the standard sort command, above.
uniq
This filter removes duplicate lines from a sorted file. It is often seen in a pipe coupled with sort.
cat list-1 list-2 list-3 | sort | uniq > final.list
# Concatenates the list files,
# sorts them,
# removes duplicate lines,
# and finally writes the result to an output file.
The useful -c option prefixes each line of the input file with its number of occurrences.
bash$ cat testfile
This line occurs only once.
This line occurs twice.
This line occurs twice.
This line occurs three times.
This line occurs three times.
This line occurs three times.
bash$ uniq -c testfile
1 This line occurs only once.
2 This line occurs twice.
3 This line occurs three times.
bash$ sort testfile | uniq -c | sort -nr
3 This line occurs three times.
2 This line occurs twice.
1 This line occurs only once.
The sort INPUTFILE | uniq -c | sort -nr command string produces a frequency of occurrence listing on the INPUTFILE file (the -nr options to sort cause a reverse numerical sort). This template finds use in analysis of log files and dictionary lists, and wherever the lexical structure of a document needs to be examined.
Example 16-12. Word Frequency Analysis
#!/bin/bash
# wf.sh: Crude word frequency analysis on a text file.
# This is a more efficient version of the "wf2.sh" script.
# Check for input file on command-line.
ARGS=1
E_BADARGS=85
E_NOFILE=86
if [ $# -ne "$ARGS" ] # Correct number of arguments passed to script?
then
echo "Usage: `basename $0` filename"
exit $E_BADARGS
fi
if [ ! -f "$1" ] # Check if file exists.
then
echo "File \"$1\" does not exist."
exit $E_NOFILE
fi
########################################################
# main ()
sed -e 's/\.//g' -e 's/\,//g' -e 's/ /\
/g' "$1" | tr 'A-Z' 'a-z' | sort | uniq -c | sort -nr
# =========================
# Frequency of occurrence
# Filter out periods and commas, and
#+ change space between words to linefeed,
#+ then shift characters to lowercase, and
#+ finally prefix occurrence count and sort numerically.
# Arun Giridhar suggests modifying the above to:
# . . . | sort | uniq -c | sort +1 [-f] | sort +0 -nr
# This adds a secondary sort key, so instances of
#+ equal occurrence are sorted alphabetically.
# As he explains it:
# "This is effectively a radix sort, first on the
#+ least significant column
#+ (word or string, optionally case-insensitive)
#+ and last on the most significant column (frequency)."
#
# As Frank Wang explains, the above is equivalent to
#+ . . . | sort | uniq -c | sort +0 -nr
#+ and the following also works:
#+ . . . | sort | uniq -c | sort -k1nr -k
########################################################
exit 0
# Exercises:
# ---------
# 1) Add 'sed' commands to filter out other punctuation,
#+ such as semicolons.
# 2) Modify the script to also filter out multiple spaces and
#+ other whitespace.
bash$ cat testfile
This line occurs only once.
This line occurs twice.
This line occurs twice.
This line occurs three times.
This line occurs three times.
This line occurs three times.
bash$ ./wf.sh testfile
6 this
6 occurs
6 line
3 times
3 three
2 twice
1 only
1 once
expand, unexpand
The expand filter converts tabs to spaces. It is often used in a pipe.
The unexpand filter converts spaces to tabs. This reverses the effect of expand.
cut
A tool for extracting fields from files. It is similar to the print $N command set in awk, but more limited. It may be simpler to use cut in a script than awk. Particularly important are the -d (delimiter) and -f (field specifier) options.
Using cut to obtain a listing of the mounted filesystems:
cut -d ' ' -f1,2 /etc/mtab
Using cut to list the OS and kernel version:
uname -a | cut -d" " -f1,3,11,12
Using cut to extract message headers from an e-mail folder:
bash$ grep '^Subject:' read-messages | cut -c10-80
Re: Linux suitable for mission-critical apps?
MAKE MILLIONS WORKING AT HOME!!!
Spam complaint
Re: Spam complaint
Using cut to parse a file:
# List all the users in /etc/passwd.
FILENAME=/etc/passwd
for user in $(cut -d: -f1 $FILENAME)
do
echo $user
done
# Thanks, Oleg Philon for suggesting this.
cut -d ' ' -f2,3 filename is equivalent to awk -F'[ ]' '{ print $2, $3 }' filename
Note
It is even possible to specify a linefeed as a delimiter. The trick is to actually embed a linefeed (RETURN) in the command sequence.
bash$ cut -d'
' -f3,7,19 testfile
This is line 3 of testfile.
This is line 7 of testfile.
This is line 19 of testfile.
Thank you, Jaka Kranjc, for pointing this out.
See also Example 16-48.
paste
Tool for merging together different files into a single, multi-column file. In combination with cut, useful for creating system log files.
bash$ cat items
alphabet blocks
building blocks
cables
bash$ cat prices
$1.00/dozen
$2.50 ea.
$3.75
bash$ paste items prices
alphabet blocks $1.00/dozen
building blocks $2.50 ea.
cables $3.75
join
Consider this a special-purpose cousin of paste. This powerful utility allows merging two files in a meaningful fashion, which essentially creates a simple version of a relational database.
The join command operates on exactly two files, but pastes together only those lines with a common tagged field (usually a numerical label), and writes the result to stdout. The files to be joined should be sorted according to the tagged field for the matchups to work properly.
File: 1.data
100 Shoes
200 Laces
300 Socks
File: 2.data
100 $40.00
200 $1.00
300 $2.00
bash$ join 1.data 2.data
File: 1.data 2.data
100 Shoes $40.00
200 Laces $1.00
300 Socks $2.00
Note
The tagged field appears only once in the output.
head
lists the beginning of a file to stdout. The default is 10 lines, but a different number can be specified. The command has a number of interesting options.
Example 16-13. Which files are scripts?
#!/bin/bash
# script-detector.sh: Detects scripts within a directory.
TESTCHARS=2 # Test first 2 characters.
SHABANG='#!' # Scripts begin with a "sha-bang."
for file in * # Traverse all the files in current directory.
do
if [[ `head -c$TESTCHARS "$file"` = "$SHABANG" ]]
# head -c2 #!
# The '-c' option to "head" outputs a specified
#+ number of characters, rather than lines (the default).
then
echo "File \"$file\" is a script."
else
echo "File \"$file\" is *not* a script."
fi
done
exit 0
# Exercises:
# ---------
# 1) Modify this script to take as an optional argument
#+ the directory to scan for scripts
#+ (rather than just the current working directory).
#
# 2) As it stands, this script gives "false positives" for
#+ Perl, awk, and other scripting language scripts.
# Correct this.
Example 16-14. Generating 10-digit random numbers
#!/bin/bash
# rnd.sh: Outputs a 10-digit random number
# Script by Stephane Chazelas.
head -c4 /dev/urandom | od -N4 -tu4 | sed -ne '1s/.* //p'
# =================================================================== #
# Analysis
# --------
# head:
# -c4 option takes first 4 bytes.
# od:
# -N4 option limits output to 4 bytes.
# -tu4 option selects unsigned decimal format for output.
# sed:
# -n option, in combination with "p" flag to the "s" command,
# outputs only matched lines.
# The author of this script explains the action of 'sed', as follows.
# head -c4 /dev/urandom | od -N4 -tu4 | sed -ne '1s/.* //p'
# ----------------------------------> |
# Assume output up to "sed" --------> |
# is 0000000 1198195154\n
# sed begins reading characters: 0000000 1198195154\n.
# Here it finds a newline character,
#+ so it is ready to process the first line (0000000 1198195154).
# It looks at its <range><action>s. The first and only one is
# range action
# 1 s/.* //p
# The line number is in the range, so it executes the action:
#+ tries to substitute the longest string ending with a space in the line
# ("0000000 ") with nothing (//), and if it succeeds, prints the result
# ("p" is a flag to the "s" command here, this is different
#+ from the "p" command).
# sed is now ready to continue reading its input. (Note that before
#+ continuing, if -n option had not been passed, sed would have printed
#+ the line once again).
# Now, sed reads the remainder of the characters, and finds the
#+ end of the file.
# It is now ready to process its 2nd line (which is also numbered '$' as
#+ it's the last one).
# It sees it is not matched by any <range>, so its job is done.
# In a few words, this sed command means:
# "On the first line only, remove any character up to the right-most space,
#+ then print it."
# A better way to do this would have been:
# sed -e 's/.* //;q'
# Here, two <range><action>s (could have been written
# sed -e 's/.* //' -e q):
# range action
# nothing (matches line) s/.* //
# nothing (matches line) q (quit)
# Here, sed only reads its first line of input.
# It performs both actions, and prints the line (substituted) before
#+ quitting (because of the "q" action) since the "-n" option is not passed.
# =================================================================== #
# An even simpler alternative to the above one-line script would be:
# head -c4 /dev/urandom| od -An -tu4
exit
See also Example 16-39.
tail
lists the (tail) end of a file to stdout. The default is 10 lines, but this can be changed with the -n option. Commonly used to keep track of changes to a system logfile, using the -f option, which outputs lines appended to the file.
Example 16-15. Using tail to monitor the system log
#!/bin/bash
filename=sys.log
cat /dev/null > $filename; echo "Creating / cleaning out file."
# Creates the file if it does not already exist,
#+ and truncates it to zero length if it does.
# : > filename and > filename also work.
tail /var/log/messages > $filename
# /var/log/messages must have world read permission for this to work.
echo "$filename contains tail end of system log."
exit 0
Tip
To list a specific line of a text file, pipe the output of head to tail -n 1. For example head -n 8 database.txt | tail -n 1 lists the 8th line of the file database.txt.
To set a variable to a given block of a text file:
var=$(head -n $m $filename | tail -n $n)
# filename = name of file
# m = from beginning of file, number of lines to end of block
# n = number of lines to set variable to (trim from end of block)
Note
Newer implementations of tail deprecate the older tail -$LINES filename usage. The standard tail -n $LINES filename is correct.
See also Example 16-5, Example 16-39 and Example 32-6.
grep
A multi-purpose file search tool that uses Regular Expressions. It was originally a command/filter in the venerable ed line editor: g/re/p -- global - regular expression - print.
grep pattern [file...]
Search the target file(s) for occurrences of pattern, where pattern may be literal text or a Regular Expression.
bash$ grep '[rst]ystem.$' osinfo.txt
The GPL governs the distribution of the Linux operating system.
If no target file(s) specified, grep works as a filter on stdout, as in a pipe.
bash$ ps ax | grep clock
765 tty1 S 0:00 xclock
901 pts/1 S 0:00 grep clock
The -i option causes a case-insensitive search.
The -w option matches only whole words.
The -l option lists only the files in which matches were found, but not the matching lines.
The -r (recursive) option searches files in the current working directory and all subdirectories below it.
The -n option lists the matching lines, together with line numbers.
bash$ grep -n Linux osinfo.txt
2:This is a file containing information about Linux.
6:The GPL governs the distribution of the Linux operating system.
The -v (or --invert-match) option filters out matches.
grep pattern1 *.txt | grep -v pattern2
# Matches all lines in "*.txt" files containing "pattern1",
# but ***not*** "pattern2".
The -c (--count) option gives a numerical count of matches, rather than actually listing the matches.
grep -c txt *.sgml # (number of occurrences of "txt" in "*.sgml" files)
# grep -cz .
# ^ dot
# means count (-c) zero-separated (-z) items matching "."
# that is, non-empty ones (containing at least 1 character).
#
printf 'a b\nc d\n\n\n\n\n\000\n\000e\000\000\nf' | grep -cz . # 3
printf 'a b\nc d\n\n\n\n\n\000\n\000e\000\000\nf' | grep -cz '$' # 5
printf 'a b\nc d\n\n\n\n\n\000\n\000e\000\000\nf' | grep -cz '^' # 5
#
printf 'a b\nc d\n\n\n\n\n\000\n\000e\000\000\nf' | grep -c '$' # 9
# By default, newline chars (\n) separate items to match.
# Note that the -z option is GNU "grep" specific.
# Thanks, S.C.
The --color (or --colour) option marks the matching string in color (on the console or in an xterm window). Since grep prints out each entire line containing the matching pattern, this lets you see exactly what is being matched. See also the -o option, which shows only the matching portion of the line(s).
Example 16-16. Printing out the From lines in stored e-mail messages
#!/bin/bash
# from.sh
# Emulates the useful 'from' utility in Solaris, BSD, etc.
# Echoes the "From" header line in all messages
#+ in your e-mail directory.
MAILDIR=~/mail/* # No quoting of variable. Why?
# Maybe check if-exists $MAILDIR: if [ -d $MAILDIR ] . . .
GREP_OPTS="-H -A 5 --color" # Show file, plus extra context lines
#+ and display "From" in color.
TARGETSTR="^From" # "From" at beginning of line.
for file in $MAILDIR # No quoting of variable.
do
grep $GREP_OPTS "$TARGETSTR" "$file"
# ^^^^^^^^^^ # Again, do not quote this variable.
echo
done
exit $?
# You might wish to pipe the output of this script to 'more'
#+ or redirect it to a file . . .
When invoked with more than one target file given, grep specifies which file contains matches.
bash$ grep Linux osinfo.txt misc.txt
osinfo.txt:This is a file containing information about Linux.
osinfo.txt:The GPL governs the distribution of the Linux operating system.
misc.txt:The Linux operating system is steadily gaining in popularity.
Tip
To force grep to show the filename when searching only one target file, simply give /dev/null as the second file.
bash$ grep Linux osinfo.txt /dev/null
osinfo.txt:This is a file containing information about Linux.
osinfo.txt:The GPL governs the distribution of the Linux operating system.
If there is a successful match, grep returns an exit status of 0, which makes it useful in a condition test in a script, especially in combination with the -q option to suppress output.
SUCCESS=0 # if grep lookup succeeds
word=Linux
filename=data.file
grep -q "$word" "$filename" # The "-q" option
#+ causes nothing to echo to stdout.
if [ $? -eq $SUCCESS ]
# if grep -q "$word" "$filename" can replace lines 5 - 7.
then
echo "$word found in $filename"
else
echo "$word not found in $filename"
fi
Example 32-6 demonstrates how to use grep to search for a word pattern in a system logfile.
Example 16-17. Emulating grep in a script
#!/bin/bash
# grp.sh: Rudimentary reimplementation of grep.
E_BADARGS=85
if [ -z "$1" ] # Check for argument to script.
then
echo "Usage: `basename $0` pattern"
exit $E_BADARGS
fi
echo
for file in * # Traverse all files in $PWD.
do
output=$(sed -n /"$1"/p $file) # Command substitution.
if [ ! -z "$output" ] # What happens if "$output" is not quoted?
then
echo -n "$file: "
echo "$output"
fi # sed -ne "/$1/s|^|${file}: |p" is equivalent to above.
echo
done
echo
exit 0
# Exercises:
# ---------
# 1) Add newlines to output, if more than one match in any given file.
# 2) Add features.
How can grep search for two (or more) separate patterns? What if you want grep to display all lines in a file or files that contain both "pattern1" and "pattern2"?
One method is to pipe the result of grep pattern1 to grep pattern2.
For example, given the following file:
# Filename: tstfile
This is a sample file.
This is an ordinary text file.
This file does not contain any unusual text.
This file is not unusual.
Here is some text.
Now, let's search this file for lines containing both "file" and "text" . . .
bash$ grep file tstfile
# Filename: tstfile
This is a sample file.
This is an ordinary text file.
This file does not contain any unusual text.
This file is not unusual.
bash$ grep file tstfile | grep text
This is an ordinary text file.
This file does not contain any unusual text.
Now, for an interesting recreational use of grep . . .
Example 16-18. Crossword puzzle solver
#!/bin/bash
# cw-solver.sh
# This is actually a wrapper around a one-liner (line 46).
# Crossword puzzle and anagramming word game solver.
# You know *some* of the letters in the word you're looking for,
#+ so you need a list of all valid words
#+ with the known letters in given positions.
# For example: w...i....n
# 1???5????10
# w in position 1, 3 unknowns, i in the 5th, 4 unknowns, n at the end.
# (See comments at end of script.)
E_NOPATT=71
DICT=/usr/share/dict/word.lst
# ^^^^^^^^ Looks for word list here.
# ASCII word list, one word per line.
# If you happen to need an appropriate list,
#+ download the author's "yawl" word list package.
# http://ibiblio.org/pub/Linux/libs/yawl-0.3.2.tar.gz
# or
# http://bash.deta.in/yawl-0.3.2.tar.gz
if [ -z "$1" ] # If no word pattern specified
then #+ as a command-line argument . . .
echo #+ . . . then . . .
echo "Usage:" #+ Usage message.
echo
echo ""$0" \"pattern,\""
echo "where \"pattern\" is in the form"
echo "xxx..x.x..."
echo
echo "The x's represent known letters,"
echo "and the periods are unknown letters (blanks)."
echo "Letters and periods can be in any position."
echo "For example, try: sh cw-solver.sh w...i....n"
echo
exit $E_NOPATT
fi
echo
# ===============================================
# This is where all the work gets done.
grep ^"$1"$ "$DICT" # Yes, only one line!
# | |
# ^ is start-of-word regex anchor.
# $ is end-of-word regex anchor.
# From _Stupid Grep Tricks_, vol. 1,
#+ a book the ABS Guide author may yet get around
#+ to writing . . . one of these days . . .
# ===============================================
echo
exit $? # Script terminates here.
# If there are too many words generated,
#+ redirect the output to a file.
$ sh cw-solver.sh w...i....n
wellington
workingman
workingmen
egrep -- extended grep -- is the same as grep -E. This uses a somewhat different, extended set of Regular Expressions, which can make the search a bit more flexible. It also allows the boolean | (or) operator.
bash $ egrep 'matches|Matches' file.txt
Line 1 matches.
Line 3 Matches.
Line 4 contains matches, but also Matches
fgrep -- fast grep -- is the same as grep -F. It does a literal string search (no Regular Expressions), which generally speeds things up a bit.
Note
On some Linux distros, egrep and fgrep are symbolic links to, or aliases for grep, but invoked with the -E and -F options, respectively.
Example 16-19. Looking up definitions in Webster's 1913 Dictionary
#!/bin/bash
# dict-lookup.sh
# This script looks up definitions in the 1913 Webster's Dictionary.
# This Public Domain dictionary is available for download
#+ from various sites, including
#+ Project Gutenberg (http://www.gutenberg.org/etext/247).
#
# Convert it from DOS to UNIX format (with only LF at end of line)
#+ before using it with this script.
# Store the file in plain, uncompressed ASCII text.
# Set DEFAULT_DICTFILE variable below to path/filename.
E_BADARGS=85
MAXCONTEXTLINES=50 # Maximum number of lines to show.
DEFAULT_DICTFILE="/usr/share/dict/webster1913-dict.txt"
# Default dictionary file pathname.
# Change this as necessary.
# Note:
# ----
# This particular edition of the 1913 Webster's
#+ begins each entry with an uppercase letter
#+ (lowercase for the remaining characters).
# Only the *very first line* of an entry begins this way,
#+ and that's why the search algorithm below works.
if [[ -z $(echo "$1" | sed -n '/^[A-Z]/p') ]]
# Must at least specify word to look up, and
#+ it must start with an uppercase letter.
then
echo "Usage: `basename $0` Word-to-define [dictionary-file]"
echo
echo "Note: Word to look up must start with capital letter,"
echo "with the rest of the word in lowercase."
echo "--------------------------------------------"
echo "Examples: Abandon, Dictionary, Marking, etc."
exit $E_BADARGS
fi
if [ -z "$2" ] # May specify different dictionary
#+ as an argument to this script.
then
dictfile=$DEFAULT_DICTFILE
else
dictfile="$2"
fi
# ---------------------------------------------------------
Definition=$(fgrep -A $MAXCONTEXTLINES "$1 \\" "$dictfile")
# Definitions in form "Word \..."
#
# And, yes, "fgrep" is fast enough
#+ to search even a very large text file.
# Now, snip out just the definition block.
echo "$Definition" |
sed -n '1,/^[A-Z]/p' |
# Print from first line of output
#+ to the first line of the next entry.
sed '$d' | sed '$d'
# Delete last two lines of output
#+ (blank line and first line of next entry).
# ---------------------------------------------------------
exit $?
# Exercises:
# ---------
# 1) Modify the script to accept any type of alphabetic input
# + (uppercase, lowercase, mixed case), and convert it
# + to an acceptable format for processing.
#
# 2) Convert the script to a GUI application,
# + using something like 'gdialog' or 'zenity' . . .
# The script will then no longer take its argument(s)
# + from the command-line.
#
# 3) Modify the script to parse one of the other available
# + Public Domain Dictionaries, such as the U.S. Census Bureau Gazetteer.
Note
See also Example A-41 for an example of speedy fgrep lookup on a large text file.
agrep (approximate grep) extends the capabilities of grep to approximate matching. The search string may differ by a specified number of characters from the resulting matches. This utility is not part of the core Linux distribution.
Tip
To search compressed files, use zgrep, zegrep, or zfgrep. These also work on non-compressed files, though slower than plain grep, egrep, fgrep. They are handy for searching through a mixed set of files, some compressed, some not.
To search bzipped files, use bzgrep.
look
The command look works like grep, but does a lookup on a "dictionary," a sorted word list. By default, look searches for a match in /usr/dict/words, but a different dictionary file may be specified.
Example 16-20. Checking words in a list for validity
#!/bin/bash
# lookup: Does a dictionary lookup on each word in a data file.
file=words.data # Data file from which to read words to test.
echo
echo "Testing file $file"
echo
while [ "$word" != end ] # Last word in data file.
do # ^^^
read word # From data file, because of redirection at end of loop.
look $word > /dev/null # Don't want to display lines in dictionary file.
# Searches for words in the file /usr/share/dict/words
#+ (usually a link to linux.words).
lookup=$? # Exit status of 'look' command.
if [ "$lookup" -eq 0 ]
then
echo "\"$word\" is valid."
else
echo "\"$word\" is invalid."
fi
done <"$file" # Redirects stdin to $file, so "reads" come from there.
echo
exit 0
# ----------------------------------------------------------------
# Code below line will not execute because of "exit" command above.
# Stephane Chazelas proposes the following, more concise alternative:
while read word && [[ $word != end ]]
do if look "$word" > /dev/null
then echo "\"$word\" is valid."
else echo "\"$word\" is invalid."
fi
done <"$file"
exit 0
sed, awk
Scripting languages especially suited for parsing text files and command output. May be embedded singly or in combination in pipes and shell scripts.
sed
Non-interactive "stream editor", permits using many ex commands in batch mode. It finds many uses in shell scripts.
awk
Programmable file extractor and formatter, good for manipulating and/or extracting fields (columns) in structured text files. Its syntax is similar to C.
wc
wc gives a "word count" on a file or I/O stream:
bash $ wc /usr/share/doc/sed-4.1.2/README
13 70 447 README
[13 lines 70 words 447 characters]
wc -w gives only the word count.
wc -l gives only the line count.
wc -c gives only the byte count.
wc -m gives only the character count.
wc -L gives only the length of the longest line.
Using wc to count how many .txt files are in current working directory:
$ ls *.txt | wc -l
# Will work as long as none of the "*.txt" files
#+ have a linefeed embedded in their name.
# Alternative ways of doing this are:
# find . -maxdepth 1 -name \*.txt -print0 | grep -cz .
# (shopt -s nullglob; set -- *.txt; echo $#)
# Thanks, S.C.
Using wc to total up the size of all the files whose names begin with letters in the range d - h
bash$ wc [d-h]* | grep total | awk '{print $3}'
71832
Using wc to count the instances of the word "Linux" in the main source file for this book.
bash$ grep Linux abs-book.sgml | wc -l
138
See also Example 16-39 and Example 20-8.
Certain commands include some of the functionality of wc as options.
... | grep foo | wc -l
# This frequently used construct can be more concisely rendered.
... | grep -c foo
# Just use the "-c" (or "--count") option of grep.
# Thanks, S.C.
tr
character translation filter.
Caution
Must use quoting and/or brackets, as appropriate. Quotes prevent the shell from reinterpreting the special characters in tr command sequences. Brackets should be quoted to prevent expansion by the shell.
Either tr "A-Z" "*" <filename or tr A-Z \* <filename changes all the uppercase letters in filename to asterisks (writes to stdout). On some systems this may not work, but tr A-Z '[**]' will.
The -d option deletes a range of characters.
echo "abcdef" # abcdef
echo "abcdef" | tr -d b-d # aef
tr -d 0-9 <filename
# Deletes all digits from the file "filename".
The --squeeze-repeats (or -s) option deletes all but the first instance of a string of consecutive characters. This option is useful for removing excess whitespace.
bash$ echo "XXXXX" | tr --squeeze-repeats 'X'
X
The -c "complement" option inverts the character set to match. With this option, tr acts only upon those characters not matching the specified set.
bash$ echo "acfdeb123" | tr -c b-d +
+c+d+b++++
Note that tr recognizes POSIX character classes. [1]
bash$ echo "abcd2ef1" | tr '[:alpha:]' -
----2--1
Example 16-21. toupper: Transforms a file to all uppercase.
#!/bin/bash
# Changes a file to all uppercase.
E_BADARGS=85
if [ -z "$1" ] # Standard check for command-line arg.
then
echo "Usage: `basename $0` filename"
exit $E_BADARGS
fi
tr a-z A-Z <"$1"
# Same effect as above, but using POSIX character set notation:
# tr '[:lower:]' '[:upper:]' <"$1"
# Thanks, S.C.
# Or even . . .
# cat "$1" | tr a-z A-Z
# Or dozens of other ways . . .
exit 0
# Exercise:
# Rewrite this script to give the option of changing a file
#+ to *either* upper or lowercase.
# Hint: Use either the "case" or "select" command.
Example 16-22. lowercase: Changes all filenames in working directory to lowercase.
#!/bin/bash
#
# Changes every filename in working directory to all lowercase.
#
# Inspired by a script of John Dubois,
#+ which was translated into Bash by Chet Ramey,
#+ and considerably simplified by the author of the ABS Guide.
for filename in * # Traverse all files in directory.
do
fname=`basename $filename`
n=`echo $fname | tr A-Z a-z` # Change name to lowercase.
if [ "$fname" != "$n" ] # Rename only files not already lowercase.
then
mv $fname $n
fi
done
exit $?
# Code below this line will not execute because of "exit".
#--------------------------------------------------------#
# To run it, delete script above line.
# The above script will not work on filenames containing blanks or newlines.
# Stephane Chazelas therefore suggests the following alternative:
for filename in * # Not necessary to use basename,
# since "*" won't return any file containing "/".
do n=`echo "$filename/" | tr '[:upper:]' '[:lower:]'`
# POSIX char set notation.
# Slash added so that trailing newlines are not
# removed by command substitution.
# Variable substitution:
n=${n%/} # Removes trailing slash, added above, from filename.
[[ $filename == $n ]] || mv "$filename" "$n"
# Checks if filename already lowercase.
done
exit $?
Example 16-23. du: DOS to UNIX text file conversion.
#!/bin/bash
# Du.sh: DOS to UNIX text file converter.
E_WRONGARGS=85
if [ -z "$1" ]
then
echo "Usage: `basename $0` filename-to-convert"
exit $E_WRONGARGS
fi
NEWFILENAME=$1.unx
CR='\015' # Carriage return.
# 015 is octal ASCII code for CR.
# Lines in a DOS text file end in CR-LF.
# Lines in a UNIX text file end in LF only.
tr -d $CR < $1 > $NEWFILENAME
# Delete CR's and write to new file.
echo "Original DOS text file is \"$1\"."
echo "Converted UNIX text file is \"$NEWFILENAME\"."
exit 0
# Exercise:
# --------
# Change the above script to convert from UNIX to DOS.
Example 16-24. rot13: ultra-weak encryption.
#!/bin/bash
# rot13.sh: Classic rot13 algorithm,
# encryption that might fool a 3-year old
# for about 10 minutes.
# Usage: ./rot13.sh filename
# or ./rot13.sh <filename
# or ./rot13.sh and supply keyboard input (stdin)
cat "$@" | tr 'a-zA-Z' 'n-za-mN-ZA-M' # "a" goes to "n", "b" to "o" ...
# The cat "$@" construct
#+ permits input either from stdin or from files.
exit 0
Example 16-25. Generating "Crypto-Quote" Puzzles
#!/bin/bash
# crypto-quote.sh: Encrypt quotes
# Will encrypt famous quotes in a simple monoalphabetic substitution.
# The result is similar to the "Crypto Quote" puzzles
#+ seen in the Op Ed pages of the Sunday paper.
key=ETAOINSHRDLUBCFGJMQPVWZYXK
# The "key" is nothing more than a scrambled alphabet.
# Changing the "key" changes the encryption.
# The 'cat "$@"' construction gets input either from stdin or from files.
# If using stdin, terminate input with a Control-D.
# Otherwise, specify filename as command-line parameter.
cat "$@" | tr "a-z" "A-Z" | tr "A-Z" "$key"
# | to uppercase | encrypt
# Will work on lowercase, uppercase, or mixed-case quotes.
# Passes non-alphabetic characters through unchanged.
# Try this script with something like:
# "Nothing so needs reforming as other people's habits."
# --Mark Twain
#
# Output is:
# "CFPHRCS QF CIIOQ MINFMBRCS EQ FPHIM GIFGUI'Q HETRPQ."
# --BEML PZERC
# To reverse the encryption:
# cat "$@" | tr "$key" "A-Z"
# This simple-minded cipher can be broken by an average 12-year old
#+ using only pencil and paper.
exit 0
# Exercise:
# --------
# Modify the script so that it will either encrypt or decrypt,
#+ depending on command-line argument(s).
Of course, tr lends itself to code obfuscation.
#!/bin/bash
# jabh.sh
x="wftedskaebjgdBstbdbsmnjgz"
echo $x | tr "a-z" 'oh, turtleneck Phrase Jar!'
# Based on the Wikipedia "Just another Perl hacker" article.
tr variants
The tr utility has two historic variants. The BSD version does not use brackets (tr a-z A-Z), but the SysV one does (tr '[a-z]' '[A-Z]'). The GNU version of tr resembles the BSD one.
fold
A filter that wraps lines of input to a specified width. This is especially useful with the -s option, which breaks lines at word spaces (see Example 16-26 and Example A-1).
fmt
Simple-minded file formatter, used as a filter in a pipe to "wrap" long lines of text output.
Example 16-26. Formatted file listing.
#!/bin/bash
WIDTH=40 # 40 columns wide.
b=`ls /usr/local/bin` # Get a file listing...
echo $b | fmt -w $WIDTH
# Could also have been done by
# echo $b | fold - -s -w $WIDTH
exit 0
See also Example 16-5.
Tip
A powerful alternative to fmt is Kamil Toman's par utility, available from http://www.cs.berkeley.edu/~amc/Par/.
col
This deceptively named filter removes reverse line feeds from an input stream. It also attempts to replace whitespace with equivalent tabs. The chief use of col is in filtering the output from certain text processing utilities, such as groff and tbl.
column
Column formatter. This filter transforms list-type text output into a "pretty-printed" table by inserting tabs at appropriate places.
Example 16-27. Using column to format a directory listing
#!/bin/bash
# colms.sh
# A minor modification of the example file in the "column" man page.
(printf "PERMISSIONS LINKS OWNER GROUP SIZE MONTH DAY HH:MM PROG-NAME\n" \
; ls -l | sed 1d) | column -t
# ^^^^^^ ^^
# The "sed 1d" in the pipe deletes the first line of output,
#+ which would be "total N",
#+ where "N" is the total number of files found by "ls -l".
# The -t option to "column" pretty-prints a table.
exit 0
colrm
Column removal filter. This removes columns (characters) from a file and writes the file, lacking the range of specified columns, back to stdout. colrm 2 4 <filename removes the second through fourth characters from each line of the text file filename.
Caution
If the file contains tabs or nonprintable characters, this may cause unpredictable behavior. In such cases, consider using expand and unexpand in a pipe preceding colrm.
nl
Line numbering filter: nl filename lists filename to stdout, but inserts consecutive numbers at the beginning of each non-blank line. If filename omitted, operates on stdin.
The output of nl is very similar to cat -b, since, by default nl does not list blank lines.
Example 16-28. nl: A self-numbering script.
#!/bin/bash
# line-number.sh
# This script echoes itself twice to stdout with its lines numbered.
echo " line number = $LINENO" # 'nl' sees this as line 4
# (nl does not number blank lines).
# 'cat -n' sees it correctly as line #6.
nl `basename $0`
echo; echo # Now, let's try it with 'cat -n'
cat -n `basename $0`
# The difference is that 'cat -n' numbers the blank lines.
# Note that 'nl -ba' will also do so.
exit 0
# -----------------------------------------------------------------
pr
Print formatting filter. This will paginate files (or stdout) into sections suitable for hard copy printing or viewing on screen. Various options permit row and column manipulation, joining lines, setting margins, numbering lines, adding page headers, and merging files, among other things. The pr command combines much of the functionality of nl, paste, fold, column, and expand.
pr -o 5 --width=65 fileZZZ | more gives a nice paginated listing to screen of fileZZZ with margins set at 5 and 65.
A particularly useful option is -d, forcing double-spacing (same effect as sed -G).
gettext
The GNU gettext package is a set of utilities for localizing and translating the text output of programs into foreign languages. While originally intended for C programs, it now supports quite a number of programming and scripting languages.
The gettext program works on shell scripts. See the info page.
msgfmt
A program for generating binary message catalogs. It is used for localization.
iconv
A utility for converting file(s) to a different encoding (character set). Its chief use is for localization.
# Convert a string from UTF-8 to UTF-16 and print to the BookList
function write_utf8_string {
STRING=$1
BOOKLIST=$2
echo -n "$STRING" | iconv -f UTF8 -t UTF16 | \
cut -b 3- | tr -d \\n >> "$BOOKLIST"
}
# From Peter Knowles' "booklistgen.sh" script
#+ for converting files to Sony Librie/PRS-50X format.
# (http://booklistgensh.peterknowles.com)
recode
Consider this a fancier version of iconv, above. This is a very versatile utility for converting a file to a different encoding scheme. Note that recode is not part of the standard Linux installation.
TeX, gs
TeX and Postscript are text markup languages used for preparing copy for printing or formatted video display.
TeX is Donald Knuth's elaborate typesetting system. It is often convenient to write a shell script encapsulating all the options and arguments passed to one of these markup languages.
Ghostscript (gs) is a GPL-ed Postscript interpreter.
texexec
Utility for processing TeX and pdf files. Found in /usr/bin on many Linux distros, it is actually a shell wrapper that calls Perl to invoke Tex.
texexec --pdfarrange --result=Concatenated.pdf *pdf
# Concatenates all the pdf files in the current working directory
#+ into the merged file, Concatenated.pdf . . .
# (The --pdfarrange option repaginates a pdf file. See also --pdfcombine.)
# The above command-line could be parameterized and put into a shell script.
enscript
Utility for converting plain text file to PostScript
For example, enscript filename.txt -p filename.ps produces the PostScript output file filename.ps.
groff, tbl, eqn
Yet another text markup and display formatting language is groff. This is the enhanced GNU version of the venerable UNIX roff/troff display and typesetting package. Manpages use groff.
The tbl table processing utility is considered part of groff, as its function is to convert table markup into groff commands.
The eqn equation processing utility is likewise part of groff, and its function is to convert equation markup into groff commands.
Example 16-29. manview: Viewing formatted manpages
#!/bin/bash
# manview.sh: Formats the source of a man page for viewing.
# This script is useful when writing man page source.
# It lets you look at the intermediate results on the fly
#+ while working on it.
E_WRONGARGS=85
if [ -z "$1" ]
then
echo "Usage: `basename $0` filename"
exit $E_WRONGARGS
fi
# ---------------------------
groff -Tascii -man $1 | less
# From the man page for groff.
# ---------------------------
# If the man page includes tables and/or equations,
#+ then the above code will barf.
# The following line can handle such cases.
#
# gtbl < "$1" | geqn -Tlatin1 | groff -Tlatin1 -mtty-char -man
#
# Thanks, S.C.
exit $? # See also the "maned.sh" script.
See also Example A-39.
lex, yacc
The lex lexical analyzer produces programs for pattern matching. This has been replaced by the nonproprietary flex on Linux systems.
The yacc utility creates a parser based on a set of specifications. This has been replaced by the nonproprietary bison on Linux systems.
Notes
[1]
This is only true of the GNU version of tr, not the generic version often found on commercial UNIX systems.
Prev Home Next
Time / Date Commands Up File and Archiving Commands |
<filename>C2CRIBuildDir/projects/C2C-RI/src/jameleon-test-suite-3_3-RC1-C2CRI/jameleon-core/src/java/net/sf/jameleon/util/TemplateProcessor.java<gh_stars>0
/*
Jameleon - An automation testing tool..
Copyright (C) 2003-2007 <NAME> (<EMAIL>)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.jameleon.util;
import org.apache.velocity.Template;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.io.VelocityWriter;
import org.apache.velocity.app.Velocity;
import java.io.*;
import java.util.Iterator;
import java.util.Map;
/**
 * Merges a Velocity template with a map of named parameters, writing the
 * result to a file, a String, or an arbitrary Writer. Templates are
 * resolved from the classpath via VelocityClasspathResourceLoader.
 */
public class TemplateProcessor {

    // The template last loaded by transformToWriter(); cached as a field
    // only for reuse by the original design, not read elsewhere here.
    protected Template template;
    // Classpath-relative name of the template to merge.
    protected String templateName;

    /**
     * Configures Velocity to load templates from the classpath and
     * initializes the engine.
     *
     * @param templateName classpath-relative name of the template to merge
     */
    public TemplateProcessor(String templateName){
        Velocity.setProperty("resource.loader", "classpath");
        Velocity.setProperty("classpath.resource.loader.class", "net.sf.jameleon.util.VelocityClasspathResourceLoader");
        this.templateName = templateName;
        initVelocity();
    }

    /**
     * Initializes the Velocity runtime. Failures are logged rather than
     * propagated so the tests can still run; only result output is lost.
     */
    private void initVelocity(){
        try{
            Velocity.init();
        }catch(Exception rnfe){
            // Message fixed: the failure here is engine initialization,
            // not writing a results file (the old text was misleading).
            System.err.println("Velocity engine initialization failed; results may not be written: ");
            rnfe.printStackTrace();
        }
    }

    /**
     * Merges the template with the given parameters and writes the result
     * to {@code toFile}, creating missing parent directories first.
     * IOExceptions are logged, not thrown; the writer is always closed.
     *
     * @param toFile destination file for the merged output
     * @param params template variables, keyed by name
     */
    public void transform(File toFile, Map params){
        JameleonUtility.createDirStructure(toFile.getParentFile());
        VelocityWriter vw = null;
        try{
            vw = new VelocityWriter(new FileWriter(toFile));
            transformToWriter(vw, params);
        }catch(IOException ioe){
            ioe.printStackTrace();
        }finally{
            if (vw != null){
                try {
                    vw.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Merges the template with the given parameters and returns the
     * result as a String.
     *
     * @param params template variables, keyed by name
     * @return the merged template output
     */
    public String transformToString(Map params){
        StringWriter sw = new StringWriter();
        transformToWriter(sw, params);
        return sw.getBuffer().toString();
    }

    /**
     * Merges the template with the given parameters onto {@code w}.
     * Any failure (template not found, parse error, I/O) is logged to
     * stderr rather than propagated.
     *
     * @param w writer that receives the merged output; not closed here
     * @param params template variables, keyed by name
     */
    public void transformToWriter(Writer w, Map params){
        try{
            VelocityContext context = new VelocityContext();
            // Iterate entries directly instead of keySet()+get(): one map
            // lookup per parameter instead of two.
            for (Iterator it = params.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry entry = (Map.Entry) it.next();
                context.put((String) entry.getKey(), entry.getValue());
            }
            template = Velocity.getTemplate(templateName);
            template.merge( context, w );
        }catch(Exception e){
            System.err.println("ERROR: Loading Velocity Template: " + templateName);
            e.printStackTrace();
        }
    }
}
|
#!/bin/sh
# Rebuild Transmit's CustomS3UploadHeaders2 preference: per-extension S3
# upload headers (storage class, Content-Type, optional gzip encoding,
# X-UA-Compatible). Every entry gets REDUCED_REDUNDANCY storage.

PLIST="$HOME/Library/Preferences/com.panic.Transmit.plist"

# Run a single PlistBuddy command against the Transmit prefs file
# (same argument order as the original invocations).
pb() {
    /usr/libexec/PlistBuddy "$PLIST" -c "$1"
}

# Begin upload-header entry $1 for file extension $2.
start_entry() {
    pb "Add :CustomS3UploadHeaders2:$1:extension string \"$2\""
    pb "Add :CustomS3UploadHeaders2:$1:headers array"
}

# Add header $2 to entry $1: enabled=$3, name=$4, value=$5.
set_header() {
    pb "Add :CustomS3UploadHeaders2:$1:headers:$2:enabled bool $3"
    pb "Add :CustomS3UploadHeaders2:$1:headers:$2:name string \"$4\""
    pb "Add :CustomS3UploadHeaders2:$1:headers:$2:value string \"$5\""
}

# Shorthand: header $2 of entry $1 is the reduced-redundancy storage class.
rr() {
    set_header "$1" "$2" true x-amz-storage-class REDUCED_REDUNDANCY
}

# Start from a clean slate (the Delete simply errors harmlessly if absent).
pb 'Delete :CustomS3UploadHeaders2'
pb 'Add :CustomS3UploadHeaders2 array'

# 0: fallback entry for any extension without a specific rule.
start_entry 0 '--$$S3DefaultExtension$$--'
rr 0 0

# 1-2: markup pages — typed, gzip prepared but disabled, IE compat header.
start_entry 1 html
rr 1 0
set_header 1 1 true Content-Type 'text/html; charset=UTF-8'
set_header 1 2 false Content-Encoding gzip
set_header 1 3 true X-UA-Compatible 'IE=edge,chrome=1'

start_entry 2 xhtml
rr 2 0
set_header 2 1 true Content-Type 'application/xhtml+xml; charset=UTF-8'
set_header 2 2 false Content-Encoding gzip
set_header 2 3 true X-UA-Compatible 'IE=edge,chrome=1'

# 3-8: other text/XML assets — typed, gzip prepared but disabled.
start_entry 3 xml
rr 3 0
set_header 3 1 true Content-Type 'application/xml; charset=UTF-8'
set_header 3 2 false Content-Encoding gzip

start_entry 4 atom
rr 4 0
set_header 4 1 true Content-Type 'application/atom+xml; charset=UTF-8'
set_header 4 2 false Content-Encoding gzip

start_entry 5 rss
rr 5 0
set_header 5 1 true Content-Type 'application/rss+xml; charset=UTF-8'
set_header 5 2 false Content-Encoding gzip

start_entry 6 svg
rr 6 0
set_header 6 1 true Content-Type 'image/svg+xml; charset=UTF-8'
set_header 6 2 false Content-Encoding gzip

start_entry 7 css
rr 7 0
set_header 7 1 true Content-Type 'text/css; charset=UTF-8'
set_header 7 2 false Content-Encoding gzip

start_entry 8 js
rr 8 0
set_header 8 1 true Content-Type 'application/javascript; charset=UTF-8'
set_header 8 2 false Content-Encoding gzip

# 9-11: font files — typed only, no encoding header.
start_entry 9 eot
rr 9 0
set_header 9 1 true Content-Type 'application/vnd.ms-fontobject'

start_entry 10 woff
rr 10 0
set_header 10 1 true Content-Type 'application/font-woff'

start_entry 11 ttf
rr 11 0
set_header 11 1 true Content-Type 'application/x-font-ttf'

# 12: images — entry continues below this chunk; emit exactly the
# commands present here (storage-class header begun, value follows later).
start_entry 12 gif
pb 'Add :CustomS3UploadHeaders2:12:headers:0:enabled bool true'
pb 'Add :CustomS3UploadHeaders2:12:headers:0:name string "x-amz-storage-class"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:12:headers:0:value string "REDUCED_REDUNDANCY"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:12:headers:1:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:12:headers:1:name string "Content-Type"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:12:headers:1:value string "image/gif"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:extension string "jpg"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers array'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:0:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:0:name string "x-amz-storage-class"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:0:value string "REDUCED_REDUNDANCY"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:1:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:1:name string "Content-Type"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:13:headers:1:value string "image/jpeg"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:extension string "png"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers array'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:0:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:0:name string "x-amz-storage-class"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:0:value string "REDUCED_REDUNDANCY"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:1:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:1:name string "Content-Type"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:14:headers:1:value string "image/png"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:extension string "ico"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers array'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:0:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:0:name string "x-amz-storage-class"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:0:value string "REDUCED_REDUNDANCY"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:1:enabled bool true'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:1:name string "Content-Type"'
/usr/libexec/PlistBuddy "$HOME/Library/Preferences/com.panic.Transmit.plist" -c 'Add :CustomS3UploadHeaders2:15:headers:1:value string "image/vnd.microsoft.icon"'
|
//===============================================================================
// @ IvCatmullRom.cpp
//
// CatmullRom spline
// ------------------------------------------------------------------------------
// Copyright (C) 2008-2015 by <NAME> and <NAME>.
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
//===============================================================================
//-------------------------------------------------------------------------------
//-- Dependencies ---------------------------------------------------------------
//-------------------------------------------------------------------------------
#include "IvCatmullRom.h"
#include <IvAssert.h>
#include <IvMath.h>
#include <IvRendererHelp.h>
#include <IvRenderer.h>
#include <IvResourceManager.h>
#include <IvVertexFormats.h>
#include <IvVertexBuffer.h>
#include <float.h>
//-------------------------------------------------------------------------------
//-- Static Members -------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-- Methods --------------------------------------------------------------------
//-------------------------------------------------------------------------------
//-------------------------------------------------------------------------------
// @ IvCatmullRom::IvCatmullRom()
//-------------------------------------------------------------------------------
// Constructor
//-------------------------------------------------------------------------------
// Default constructor: creates an empty spline. All members are zeroed;
// Initialize() must be called with sample data before the spline is usable.
IvCatmullRom::IvCatmullRom() :
    mPositions( 0 ),        // sample positions (allocated in Initialize)
    mTimes( 0 ),            // parameter value at each sample
    mLengths( 0 ),          // arc length of each curve segment
    mTotalLength( 0.0f ),   // sum of all segment lengths
    mCount( 0 ),            // number of samples; 0 means "not initialized"
    mCurveVertices( 0 ),    // vertex buffer used to draw the curve
    mPointVertices( 0 )     // vertex buffer used to draw the sample points
{
}   // End of IvCatmullRom::IvCatmullRom()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::IvCatmullRom()
//-------------------------------------------------------------------------------
// Destructor
//-------------------------------------------------------------------------------
// Destructor: releases sample arrays and GPU vertex buffers via Clean().
IvCatmullRom::~IvCatmullRom()
{
    Clean();
}   // End of IvCatmullRom::~IvCatmullRom()
//-------------------------------------------------------------------------------
// @ operator<<()
//-------------------------------------------------------------------------------
// Text output for debugging
//-------------------------------------------------------------------------------
// Debug text output: writes the sample count, then one "time:position"
// pair per line.
IvWriter&
operator<<(IvWriter& out, const IvCatmullRom& source)
{
    out << source.mCount << eol;
    unsigned int index = 0;
    while ( index < source.mCount )
    {
        out << source.mTimes[index] << ':' << source.mPositions[index] << eol;
        ++index;
    }
    return out;
}   // End of operator<<()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Initialize()
//-------------------------------------------------------------------------------
// Set up sample points
//-------------------------------------------------------------------------------
// Set up sample points.
//
// Copies 'count' positions and their parameter values, precomputes the
// arc length of every segment, and builds the render buffers. Requires
// at least four samples. Fails (returns false) if the spline is already
// initialized or the inputs are invalid.
bool
IvCatmullRom::Initialize( const IvVector3* positions,
                          const float* times,
                          unsigned int count )
{
    // refuse to initialize twice
    if ( mCount != 0 )
        return false;

    // need valid arrays and at least four samples for Catmull-Rom
    if ( !positions || !times || count < 4 )
        return false;

    // allocate and copy sample data
    mCount = count;
    mPositions = new IvVector3[count];
    mTimes = new float[count];
    for ( unsigned int j = 0; j < count; ++j )
    {
        mPositions[j] = positions[j];
        mTimes[j] = times[j];
    }

    // precompute per-segment arc lengths and their total
    mLengths = new float[count-1];
    mTotalLength = 0.0f;
    for ( unsigned int j = 0; j + 1 < count; ++j )
    {
        mLengths[j] = SegmentArcLength( j, 0.0f, 1.0f );
        mTotalLength += mLengths[j];
    }

    return RebuildVertexBuffers();
}   // End of IvCatmullRom::Initialize()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Clean()
//-------------------------------------------------------------------------------
// Clean out data
//-------------------------------------------------------------------------------
// Clean out data.
//
// Releases the GPU vertex buffers and the sample arrays, and returns the
// object to its just-constructed (uninitialized) state so Initialize()
// can be called again safely.
void IvCatmullRom::Clean()
{
    if (mCurveVertices)
    {
        IvRenderer::mRenderer->GetResourceManager()->Destroy(mCurveVertices);
        mCurveVertices = 0;
    }
    if (mPointVertices)
    {
        IvRenderer::mRenderer->GetResourceManager()->Destroy(mPointVertices);
        mPointVertices = 0;
    }
    // BUGFIX: null the array pointers after deletion (they were left
    // dangling, and the destructor-after-Clean path or a double Clean()
    // would have deleted them again); also use a consistent delete[] style
    delete [] mPositions;
    mPositions = 0;
    delete [] mTimes;
    mTimes = 0;
    delete [] mLengths;
    mLengths = 0;
    mTotalLength = 0.0f;
    mCount = 0;
}   // End of IvCatmullRom::Clean()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Evaluate()
//-------------------------------------------------------------------------------
// Evaluate spline
//-------------------------------------------------------------------------------
// Evaluate spline position at parameter t.
//
// Clamps t to [mTimes[0], mTimes[mCount-1]]. End segments use a quadratic
// Catmull-Rom (only three neighbors available); interior segments use the
// standard cubic form.
IvVector3
IvCatmullRom::Evaluate( float t )
{
    // make sure data is valid
    ASSERT( mCount >= 2 );
    if ( mCount < 2 )
        return IvVector3::origin;

    // handle boundary conditions: clamp to the endpoint positions
    if ( t <= mTimes[0] )
        return mPositions[0];
    else if ( t >= mTimes[mCount-1] )
        return mPositions[mCount-1];

    // find segment and parameter
    unsigned int i;  // segment #
    for ( i = 0; i < mCount-1; ++i )
    {
        if ( t <= mTimes[i+1] )
        {
            break;
        }
    }
    // BUGFIX: 'i >= 0' was a tautology (i is unsigned, so the old assert
    // could never fire on underflow); assert the bound that actually
    // protects the mTimes[i+1]/mPositions[i+1] accesses below
    ASSERT( i < mCount-1 );
    float t0 = mTimes[i];
    float t1 = mTimes[i+1];
    float u = (t - t0)/(t1 - t0);   // local parameter in [0,1]

    // quadratic Catmull-Rom for Q_0
    if (i == 0)
    {
        IvVector3 A = mPositions[0] - 2.0f*mPositions[1] + mPositions[2];
        IvVector3 B = 4.0f*mPositions[1] - 3.0f*mPositions[0] - mPositions[2];

        return mPositions[0] + (0.5f*u)*(B + u*A);
    }
    // quadratic Catmull-Rom for Q_n-1
    else if (i >= mCount-2)
    {
        i = mCount-2;
        IvVector3 A = mPositions[i-1] - 2.0f*mPositions[i] + mPositions[i+1];
        IvVector3 B = mPositions[i+1] - mPositions[i-1];

        return mPositions[i] + (0.5f*u)*(B + u*A);
    }
    // cubic Catmull-Rom for interior segments
    else
    {
        // evaluate
        IvVector3 A = 3.0f*mPositions[i]
                    - mPositions[i-1]
                    - 3.0f*mPositions[i+1]
                    + mPositions[i+2];
        IvVector3 B = 2.0f*mPositions[i-1]
                    - 5.0f*mPositions[i]
                    + 4.0f*mPositions[i+1]
                    - mPositions[i+2];
        IvVector3 C = mPositions[i+1] - mPositions[i-1];

        return mPositions[i] + (0.5f*u)*(C + u*(B + u*A));
    }
}   // End of IvCatmullRom::Evaluate()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Velocity()
//-------------------------------------------------------------------------------
// Evaluate derivative at parameter t
//-------------------------------------------------------------------------------
// Evaluate first derivative (velocity) at parameter t.
//
// Unlike Evaluate(), out-of-range t is clamped rather than returned early,
// so endpoint derivatives are still computed from the segment formulas.
IvVector3
IvCatmullRom::Velocity( float t )
{
    // make sure data is valid
    ASSERT( mCount >= 2 );
    if ( mCount < 2 )
        return IvVector3::origin;

    // handle boundary conditions
    // BUGFIX: clamp to mTimes[0], not the literal 0.0f — the old code was
    // only correct when the time array happened to start at zero
    if ( t <= mTimes[0] )
        t = mTimes[0];
    else if ( t > mTimes[mCount-1] )
        t = mTimes[mCount-1];

    // find segment and parameter
    unsigned int i;
    for ( i = 0; i < mCount-1; ++i )
    {
        if ( t <= mTimes[i+1] )
        {
            break;
        }
    }
    float t0 = mTimes[i];
    float t1 = mTimes[i+1];
    float u = (t - t0)/(t1 - t0);   // local parameter in [0,1]

    // evaluate
    // quadratic Catmull-Rom for Q_0: Q(u) = P0 + 0.5u(B + uA), so Q'(u) = 0.5B + uA
    if (i == 0)
    {
        IvVector3 A = mPositions[0] - 2.0f*mPositions[1] + mPositions[2];
        IvVector3 B = 4.0f*mPositions[1] - 3.0f*mPositions[0] - mPositions[2];

        return 0.5f*B + u*A;
    }
    // quadratic Catmull-Rom for Q_n-1
    else if (i >= mCount-2)
    {
        i = mCount-2;
        IvVector3 A = mPositions[i-1] - 2.0f*mPositions[i] + mPositions[i+1];
        IvVector3 B = mPositions[i+1] - mPositions[i-1];

        return 0.5f*B + u*A;
    }
    // cubic Catmull-Rom for interior segments:
    // Q(u) = Pi + 0.5u(C + u(B + uA)) => Q'(u) = 0.5C + u(B + 1.5uA)
    else
    {
        // evaluate
        IvVector3 A = 3.0f*mPositions[i]
                    - mPositions[i-1]
                    - 3.0f*mPositions[i+1]
                    + mPositions[i+2];
        IvVector3 B = 2.0f*mPositions[i-1]
                    - 5.0f*mPositions[i]
                    + 4.0f*mPositions[i+1]
                    - mPositions[i+2];
        IvVector3 C = mPositions[i+1] - mPositions[i-1];

        return 0.5f*C + u*(B + 1.5f*u*A);
    }
}   // End of IvCatmullRom::Velocity()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Acceleration()
//-------------------------------------------------------------------------------
// Evaluate second derivative at parameter t
//-------------------------------------------------------------------------------
// Evaluate second derivative (acceleration) at parameter t.
//
// Quadratic end segments have constant second derivative A; interior cubic
// segments give Q''(u) = B + 3uA (from Q(u) = Pi + 0.5u(C + u(B + uA))).
IvVector3
IvCatmullRom::Acceleration( float t )
{
    // make sure data is valid
    ASSERT( mCount >= 2 );
    if ( mCount < 2 )
        return IvVector3::origin;

    // handle boundary conditions
    // BUGFIX: clamp to mTimes[0], not the literal 0.0f — the old code was
    // only correct when the time array happened to start at zero
    if ( t <= mTimes[0] )
        t = mTimes[0];
    else if ( t > mTimes[mCount-1] )
        t = mTimes[mCount-1];

    // find segment and parameter
    unsigned int i;
    for ( i = 0; i < mCount-1; ++i )
    {
        if ( t <= mTimes[i+1] )
        {
            break;
        }
    }
    float t0 = mTimes[i];
    float t1 = mTimes[i+1];
    float u = (t - t0)/(t1 - t0);   // local parameter in [0,1]

    // evaluate
    // quadratic Catmull-Rom for Q_0 (second derivative is the constant A)
    if (i == 0)
    {
        return mPositions[0] - 2.0f*mPositions[1] + mPositions[2];
    }
    // quadratic Catmull-Rom for Q_n-1
    else if (i >= mCount-2)
    {
        i = mCount-2;
        return mPositions[i-1] - 2.0f*mPositions[i] + mPositions[i+1];
    }
    // cubic Catmull-Rom for interior segments
    else
    {
        // evaluate
        IvVector3 A = 3.0f*mPositions[i]
                    - mPositions[i-1]
                    - 3.0f*mPositions[i+1]
                    + mPositions[i+2];
        IvVector3 B = 2.0f*mPositions[i-1]
                    - 5.0f*mPositions[i]
                    + 4.0f*mPositions[i+1]
                    - mPositions[i+2];

        return B + (3.0f*u)*A;
    }
}   // End of IvCatmullRom::Acceleration()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::FindParameterByDistance()
//-------------------------------------------------------------------------------
// Find parameter s distance in arc length from Q(t1)
// Returns max float if can't find it
//
// This extends the approach in the text and uses a mixture of bisection and
// Newton-Raphson to find the root. The result is more stable than Newton-
// Raphson alone because a) we won't end up with a situation where we divide by
// zero in the Newton-Raphson step and b) the end result converges faster.
//
// See Numerical Recipes or http://www.essentialmath.com/blog for more details.
//-------------------------------------------------------------------------------
// Find parameter p such that the arc length from Q(t1) to Q(p) equals s.
// Returns FLT_MAX if the root is not found within the iteration budget.
float
IvCatmullRom::FindParameterByDistance( float t1, float s )
{
    // initialize bisection endpoints: the root is bracketed in [t1, end]
    float a = t1;
    float b = mTimes[mCount-1];

    // ensure that we remain within valid parameter space
    if ( s >= ArcLength(t1, b) )
        return b;
    if ( s <= 0.0f )
        return a;

    // make first guess: assume roughly constant speed along the whole curve
    float p = t1 + s*(mTimes[mCount-1]-mTimes[0])/mTotalLength;

    // iterate and look for zeros (hybrid bisection/Newton-Raphson, max 32 steps)
    for ( UInt32 i = 0; i < 32; ++i )
    {
        // compute function value and test against zero
        float func = ArcLength(t1, p) - s;
        if ( ::IvAbs(func) < 1.0e-03f )
        {
            return p;
        }

        // update bisection endpoints; arc length grows monotonically with p,
        // so the sign of func tells us which side of the root p lies on
        if ( func < 0.0f )
        {
            a = p;
        }
        else
        {
            b = p;
        }

        // get speed along curve (derivative of arc length w.r.t. p)
        float speed = Velocity(p).Length();

        // if result will lie outside [a,b]
        // (this test also avoids the division below when speed is near zero)
        if ( ((p-a)*speed - func)*((p-b)*speed - func) > -1.0e-3f )
        {
            // do bisection
            p = 0.5f*(a+b);
        }
        else
        {
            // otherwise Newton-Raphson
            p -= func/speed;
        }
    }

    // done iterating, return failure case
    return FLT_MAX;
}   // End of IvCatmullRom::FindParameterByDistance()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::ArcLength()
//-------------------------------------------------------------------------------
// Find length of curve between parameters t1 and t2
//-------------------------------------------------------------------------------
// Find length of curve between parameters t1 and t2.
// Both parameters are clamped to the valid time range; returns 0 when t2 <= t1.
float
IvCatmullRom::ArcLength( float t1, float t2 )
{
    if ( t2 <= t1 )
        return 0.0f;

    // clamp both parameters to the sampled time range
    if ( t1 < mTimes[0] )
        t1 = mTimes[0];
    if ( t2 > mTimes[mCount-1] )
        t2 = mTimes[mCount-1];

    // find segment and local parameter for t1
    unsigned int seg1;
    for ( seg1 = 0; seg1 < mCount-1; ++seg1 )
    {
        if ( t1 <= mTimes[seg1+1] )
        {
            break;
        }
    }
    float u1 = (t1 - mTimes[seg1])/(mTimes[seg1+1] - mTimes[seg1]);

    // find segment and local parameter for t2
    unsigned int seg2;
    for ( seg2 = 0; seg2 < mCount-1; ++seg2 )
    {
        if ( t2 <= mTimes[seg2+1] )
        {
            break;
        }
    }
    float u2 = (t2 - mTimes[seg2])/(mTimes[seg2+1] - mTimes[seg2]);

    float result;
    // both parameters lie in one segment: integrate just that piece
    if ( seg1 == seg2 )
    {
        result = SegmentArcLength( seg1, u1, u2 );
    }
    // parameters cross segments: partial first segment + whole middle
    // segments (precomputed in mLengths) + partial last segment
    else
    {
        result = SegmentArcLength( seg1, u1, 1.0f );
        for ( UInt32 i = seg1+1; i < seg2; ++i )
            result += mLengths[i];
        result += SegmentArcLength( seg2, 0.0f, u2 );
    }

    return result;
}   // End of IvCatmullRom::ArcLength()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::SegmentArcLength()
//-------------------------------------------------------------------------------
// Find length of curve segment between parameters u1 and u2
//-------------------------------------------------------------------------------
// Find length of curve segment i between local parameters u1 and u2
// (each in [0,1]), using 5-point Gauss-Legendre quadrature on the
// speed |Q'(u)|.
float
IvCatmullRom::SegmentArcLength( UInt32 i, float u1, float u2 )
{
    // 5-point Gauss-Legendre abscissas (on [-1,1]) and weights
    static const float x[] =
    {
        0.0000000000f, 0.5384693101f, -0.5384693101f, 0.9061798459f, -0.9061798459f
    };
    static const float c[] =
    {
        0.5688888889f, 0.4786286705f, 0.4786286705f, 0.2369268850f, 0.2369268850f
    };

    // BUGFIX: 'i >= 0' was a tautology (i is unsigned); keep only the
    // meaningful upper-bound check
    ASSERT( i < mCount-1 );

    if ( u2 <= u1 )
        return 0.0f;
    if ( u1 < 0.0f )
        u1 = 0.0f;
    if ( u2 > 1.0f )
        u2 = 1.0f;

    // use Gaussian quadrature
    float sum = 0.0f;
    // set up coefficients for the segment's derivative (same polynomials as
    // Velocity(); end segments are quadratic, interior segments cubic)
    IvVector3 A, B, C;
    // quadratic Catmull-Rom for Q_0
    if (i == 0)
    {
        A = mPositions[0] - 2.0f*mPositions[1] + mPositions[2];
        B = 4.0f*mPositions[1] - 3.0f*mPositions[0] - mPositions[2];
    }
    // quadratic Catmull-Rom for Q_n-1
    else if (i >= mCount-2)
    {
        i = mCount-2;
        A = mPositions[i-1] - 2.0f*mPositions[i] + mPositions[i+1];
        B = mPositions[i+1] - mPositions[i-1];
    }
    // cubic Catmull-Rom for interior segments
    else
    {
        A = 3.0f*mPositions[i]
            - mPositions[i-1]
            - 3.0f*mPositions[i+1]
            + mPositions[i+2];
        B = 2.0f*mPositions[i-1]
            - 5.0f*mPositions[i]
            + 4.0f*mPositions[i+1]
            - mPositions[i+2];
        C = mPositions[i+1] - mPositions[i-1];
    }

    for ( UInt32 j = 0; j < 5; ++j )
    {
        // map quadrature abscissa from [-1,1] into [u1,u2]
        float u = 0.5f*((u2 - u1)*x[j] + u2 + u1);
        IvVector3 derivative;
        if ( i == 0 || i >= mCount-2)
            derivative = 0.5f*B + u*A;
        else
            derivative = 0.5f*C + u*(B + 1.5f*u*A);
        sum += c[j]*derivative.Length();
    }
    sum *= 0.5f*(u2-u1);   // scale by half the interval width

    return sum;
}   // End of IvCatmullRom::SegmentArcLength()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::RebuildVertexBuffers()
//-------------------------------------------------------------------------------
// Rebuilds vertex buffer rendering data for newly created spline
// Uses direct evaluation
//-------------------------------------------------------------------------------
// Rebuilds vertex buffer rendering data for newly created spline.
// Uses direct evaluation of the curve at fixed parameter steps.
// Returns false if loading either vertex buffer fails.
bool
IvCatmullRom::RebuildVertexBuffers()
{
    // build Catmull-Rom spline
    // make sure the vertex buffer is appropriate for current curve data

    // count verts
    // NOTE(review): this counting loop and the fill loop below must step t
    // identically (same start, same 0.1f float increment) so the counts
    // match — keep them in lockstep if either is changed
    UInt32 numverts = 1;    // for final vertex
    for (float t = mTimes[0]; t <= mTimes[mCount-1]; t += 0.1f)
    {
        ++numverts;
    }
    // recreate the curve buffer if its size no longer matches
    if ( mCurveVertices && mCurveVertices->GetVertexCount() != numverts )
    {
        IvRenderer::mRenderer->GetResourceManager()->Destroy(mCurveVertices);
        mCurveVertices = 0;
    }
    if ( mCurveVertices == 0 )
    {
        mCurveVertices = IvRenderer::mRenderer->GetResourceManager()->CreateVertexBuffer(kCPFormat, numverts,
                                                                                         nullptr, kDefaultUsage);
    }

    // fill curve buffer: one sampled point every 0.1 in parameter space,
    // plus an explicit final vertex at the end time
    // NOTE(review): BeginLoadData's return is not checked for null here —
    // presumably it cannot fail in this engine; confirm
    IvCPVertex* curveDataPtr = (IvCPVertex*) mCurveVertices->BeginLoadData();
    UInt32 currentVertex = 0;
    for (float t = mTimes[0]; t <= mTimes[mCount-1]; t += 0.1f)
    {
        curveDataPtr[currentVertex].position = Evaluate( t );
        curveDataPtr[currentVertex++].color = kOrange;
    }
    curveDataPtr[currentVertex].position = Evaluate( mTimes[mCount-1] );
    curveDataPtr[currentVertex++].color = kOrange;
    if (!mCurveVertices->EndLoadData())
        return false;

    // set up sample points
    // make sure the vertex buffer is appropriate for current curve data
    if ( mPointVertices && mPointVertices->GetVertexCount() != mCount )
    {
        IvRenderer::mRenderer->GetResourceManager()->Destroy(mPointVertices);
        mPointVertices = 0;
    }
    if ( mPointVertices == 0 )
    {
        mPointVertices = IvRenderer::mRenderer->GetResourceManager()->CreateVertexBuffer(kCPFormat, mCount,
                                                                                         nullptr, kDefaultUsage);
    }
    // fill point buffer: one red vertex per control point
    IvCPVertex *pointDataPtr = (IvCPVertex*) mPointVertices->BeginLoadData();
    for ( UInt32 i = 0; i < mCount; ++i )
    {
        pointDataPtr[i].position = mPositions[i];
        pointDataPtr[i].color = kRed;
    }
    if (!mPointVertices->EndLoadData())
        return false;

    return true;
}   // End of IvCatmullRom::RebuildVertexBuffers()
//-------------------------------------------------------------------------------
// @ IvCatmullRom::Render()
//-------------------------------------------------------------------------------
// Render spline
//-------------------------------------------------------------------------------
// Render spline: draws the sampled curve as a line strip and the control
// points as a point list, using the buffers built in RebuildVertexBuffers().
void
IvCatmullRom::Render()
{
    IvRenderer::mRenderer->Draw( kLineStripPrim, mCurveVertices );
    IvRenderer::mRenderer->Draw( kPointListPrim, mPointVertices );
}   // End of IvCatmullRom::Render()
|
package com.didi.drouter.page;
import static androidx.fragment.app.FragmentPagerAdapter.BEHAVIOR_SET_USER_VISIBLE_HINT;
import static com.didi.drouter.page.IPageRouter.IPageObserver.CHANGED_BY_SHOW;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.fragment.app.Fragment;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentPagerAdapter;
import androidx.fragment.app.FragmentTransaction;
import androidx.viewpager.widget.PagerAdapter;
import androidx.viewpager.widget.ViewPager;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Created by gaowei on 2020/4/1
*/
/**
 * A page router backed by a {@link ViewPager}, where each page is a Fragment
 * created from a page uri. Pages are supplied via {@link #update} and can be
 * selected programmatically via {@link #showPage}.
 */
public class RouterPageViewPager extends RouterPageAbs {

    private final ViewPager viewPager;
    private final FragmentManager fragmentManager;
    private final ViewPagerAdapter adapter;
    /** Page uris currently backing the pager; list index == page position. */
    private final List<String> curUriList = new ArrayList<>();
    /** Page beans matching curUriList; list index == page position. */
    private final List<IPageBean> curInfoList = new ArrayList<>();
    /** True while a change is driven by update()/showPage() rather than a user scroll. */
    private boolean changeByShow = false;

    @Deprecated
    public RouterPageViewPager(FragmentManager manager, ViewPager container) {
        this(manager, container, BEHAVIOR_SET_USER_VISIBLE_HINT);
    }

    public RouterPageViewPager(FragmentManager manager, ViewPager container, int behavior) {
        fragmentManager = manager;
        adapter = new ViewPagerAdapter(manager, behavior);
        viewPager = container;
        viewPager.setAdapter(adapter);
        viewPager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() {
            @Override
            public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            }

            @Override
            public void onPageSelected(int position) {
                // position will change but except empty to non-empty at first time.
                notifyPageChangedFromIndex(position, false,
                        changeByShow ? CHANGED_BY_SHOW : IPageObserver.CHANGED_BY_SCROLL);
            }

            @Override
            public void onPageScrollStateChanged(int state) {
            }
        });
    }

    public void update(IPageBean... beanList) {
        update(Arrays.asList(beanList));
    }

    /**
     * Replaces the full page list. Fragments whose uri changed at the same
     * position are removed from the FragmentManager so they get recreated.
     * A bean uri may be empty.
     */
    public void update(@NonNull List<IPageBean> uriList) {
        // snapshot the old uris before replacing them, so clearFmCache can diff
        List<String> lastUriList = new ArrayList<>(curUriList);
        curUriList.clear();
        curInfoList.clear();
        for (int i = 0; i < uriList.size(); i++) {
            curUriList.add(uriList.get(i).getPageUri());
            curInfoList.add(uriList.get(i));
        }
        clearFmCache(lastUriList);
        int lastPosition = viewPager.getCurrentItem();
        changeByShow = true;
        adapter.notifyDataSetChanged();
        changeByShow = false;
        int curPosition = viewPager.getCurrentItem();
        // notifyDataSetChanged is a sync method for getCurrentItem, instantiateItem, onPageSelected,
        // If showing position not changed, no trigger onPageSelected, so active it by self.
        if (lastPosition == curPosition) {
            // although position is not changed, but fragment(uri) maybe has changed, so check it.
            notifyPageChangedFromIndex(viewPager.getCurrentItem(), true, CHANGED_BY_SHOW);
        }
    }

    private void notifyPageChangedFromIndex(int position, boolean filter, int changeType) {
        IPageBean toBean = curInfoList.get(position);
        notifyPageChanged(toBean, changeType, filter);
    }

    @Override
    // It works as long as uri match success.
    public void showPage(@NonNull IPageBean bean) {
        int position;
        if ((position = curUriList.indexOf(bean.getPageUri())) != -1) {
            // if same with last, no trigger onPageSelected.
            changeByShow = true;
            // setCurrentItem is a sync method for onPageSelected.
            viewPager.setCurrentItem(position, false);
            changeByShow = false;
        }
    }

    /** Adapter that builds fragments lazily from curUriList/curInfoList. */
    private class ViewPagerAdapter extends FragmentPagerAdapter {

        ViewPagerAdapter(FragmentManager fm, int behavior) {
            super(fm, behavior);
        }

        @NonNull
        @Override
        public Fragment getItem(int position) {
            Fragment fragment = createFragment(curUriList.get(position));
            Bundle info = null;
            if (curInfoList.get(position) != null && curInfoList.get(position).getPageInfo() != null) {
                info = curInfoList.get(position).getPageInfo();
            }
            putArgsForFragment(fragment, info);
            return fragment;
        }

        @Override
        public int getCount() {
            return curUriList.size();
        }

        @Override
        public int getItemPosition(@NonNull Object object) {
            // force rebind on every data change so uri swaps take effect
            return PagerAdapter.POSITION_NONE;
        }
    }

    private void clearFmCache(List<String> last) {
        // Update difference uri (page), for the same position. We should remove it first.
        FragmentTransaction transaction = fragmentManager.beginTransaction();
        boolean hasRemoval = false;
        for (int position = 0; position < last.size(); position++) {
            if (position < curUriList.size() && curUriList.get(position).equals(last.get(position))) {
                continue;
            }
            String name = makeFragmentName(viewPager.getId(), position);
            Fragment fragment = fragmentManager.findFragmentByTag(name);
            if (fragment != null) {
                transaction.remove(fragment);
                hasRemoval = true;
            }
        }
        // BUGFIX: a FragmentTransaction may only be committed once; the old code
        // called commitNowAllowingStateLoss() inside the loop and would throw
        // IllegalStateException when more than one fragment needed removal.
        // Commit the whole batch once, and only if something was removed.
        if (hasRemoval) {
            transaction.commitNowAllowingStateLoss();
        }
    }

    /** Mirrors FragmentPagerAdapter's internal tag scheme to locate cached fragments. */
    private static String makeFragmentName(int viewId, long position) {
        return "android:switcher:" + viewId + ":" + position;
    }
}
|
/*******************************************************************************
* Copyright 2016 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package uk.org.kano.insuranceportal.service;
import java.util.Set;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import uk.org.kano.insuranceportal.model.domain.AbstractPolicyRole;
import uk.org.kano.insuranceportal.model.domain.Policy;
import uk.org.kano.insuranceportal.repository.PolicyRepository;
/**
 * A default {@link PolicyService}.
*
* @author timh
*
*/
@Transactional
@Service
public class PolicyServiceImpl implements PolicyService {

    @Autowired
    private PolicyRepository policyRepository;

    /** Look up a single policy by its policy ID. */
    @Override
    public Policy findPolicy(String id) {
        Assert.notNull(id, "Invalid policy ID");
        return policyRepository.findByPolicyId(id);
    }

    /** All policies associated with the given person. */
    @Override
    public Set<Policy> findPolicyByPerson(String id) {
        Assert.notNull(id, "Invalid person ID");
        return policyRepository.findByPersonId(id);
    }

    /** All policies where the given person holds the given role. */
    @Override
    public Set<Policy> findPolicyByPersonRole(String id, Class<? extends AbstractPolicyRole> role) {
        Assert.notNull(id, "Invalid person ID");
        Assert.notNull(role, "Invalid role");
        return policyRepository.findByPersonIdAndRole(id, role);
    }

    /** All policies associated with any of the given persons. */
    @Override
    public Set<Policy> findPolicyByPersons(Set<String> ids) {
        Assert.notNull(ids, "Invalid person ID");
        Assert.notEmpty(ids, "Invalid person ID");
        return policyRepository.findByPersonIds(ids);
    }

    /** All policies where any of the given persons holds the given role. */
    @Override
    public Set<Policy> findPolicyByPersonsRole(Set<String> ids, Class<? extends AbstractPolicyRole> role) {
        Assert.notNull(ids, "Invalid person ID");
        // BUGFIX: was a duplicated notNull check; mirror findPolicyByPersons
        // and reject empty ID sets as well.
        Assert.notEmpty(ids, "Invalid person ID");
        Assert.notNull(role, "Invalid role");
        return policyRepository.findByPersonIdsAndRole(ids, role);
    }
}
|
def generate_combinations(letters):
    """Return every permutation of ``letters``, each joined into one string.

    Results are ordered as the original recursive version produced them
    (first element varies slowest). An empty input returns an empty list.

    BUGFIX: the old recursive implementation built the sub-list with
    ``[x for x in letters if x != letter]``, which removes *all* copies of
    the chosen letter — inputs containing duplicates lost permutations
    entirely (e.g. ['a', 'a'] produced []). itertools.permutations treats
    positions as distinct, restoring the correct count.

    Assumes the elements of ``letters`` are strings (they are joined with
    ``''.join``) — TODO confirm no caller passes non-string elements.
    """
    if not letters:
        # preserve original behavior: no letters -> no combinations
        return []
    return [''.join(perm) for perm in itertools.permutations(letters)]
require 'fog/core/model'

module Fog
  module OracleCloud
    class Database
      # A single backup of an Oracle Cloud database instance.
      class Backup < Fog::Model
        identity :db_tag, :aliases => 'dbTag'

        attribute :backup_complete_date, :aliases => 'backupCompleteDate'
        attribute :status
        attribute :database_id

        # True once the service reports the backup finished.
        def completed?
          'COMPLETED' == status
        end

        private

        # Had to override reload as we need to pass the database_id.
        def reload
          requires :identity, :database_id

          begin
            data = collection.get(database_id, identity)
          rescue Excon::Errors::SocketError
            data = nil
          end
          return unless data

          merge_attributes(data.attributes)
          self
        end
      end
    end
  end
end
|
<filename>projects/ngx-magic-table/src/public_api.ts
// NOTE(review): this 'from' import appears unused in this re-export barrel —
// confirm nothing relies on the side effect and remove it.
import { from } from 'rxjs';
/*
 * Public API Surface of ngx-magic-table
 */

// Components, module, and model types exposed to library consumers.
export * from './lib/ngx-magic-table/ngx-magic-table.component';
export * from './lib/ngx-column-template/ngx-column-template.component';
export * from './lib/ngx-magic-table.module';
export * from './lib/models/cells-info';
export * from './lib/models/interface';
#!/bin/sh
# Emit a GHCi session that JSON-encodes the Plutus game LockParams.
# The quoted heredoc delimiter suppresses all shell expansion, so the
# lines are written out verbatim, exactly as the old echo chain did.
cat <<'EOF'
import Plutus.Contracts.Game
import Ledger.Ada
args = LockParams { secretWord = "eagle", amount = lovelaceValueOf 90 }
import Data.Aeson
import Data.ByteString.Lazy.Char8 as BSL
BSL.putStrLn $ encode args
EOF
# Module-level configuration (Python 2 script).
# GEVENT flag: presumably toggles a gevent-based execution path elsewhere —
# TODO confirm; it is only defined here, not read in this chunk.
GEVENT = False

import multiprocessing
from optparse import OptionParser

# Command-line options controlling the aggregation run.
arg_parser = OptionParser()
arg_parser.add_option("-a", "--agency", dest="agency", action="store", type="string", default=None, help="Specify an agency to which to limit the dump.")
arg_parser.add_option("-d", "--docket", dest="docket", action="store", type="string", default=None, help="Specify a docket to which to limit the dump.")
arg_parser.add_option("-A", "--all", dest="process_all", action="store_true", default=False, help="Replace existing MR data with new data.")
arg_parser.add_option("-p", "--pretend", dest="pretend", action="store_true", default=False, help="Don't actually write anything to the database.")
arg_parser.add_option("-n", "--no-children", dest="no_children", action="store_true", default=False, help="Don't spawn child processes.")
arg_parser.add_option("-r", "--resume", dest="resume_db", action="store", type="string", default=None, help="Resume a previous aggregation task (HERE BE DRAGONS)")
# -m defaults to one worker per CPU core.
arg_parser.add_option("-m", "--multi", dest="multi", action="store", type="int", default=multiprocessing.cpu_count(), help="Set number of worker processes. Defaults to number of cores if not specified.")
def run_client():
    # Worker entry point: keep trying to attach a mincemeat map/reduce client
    # to the local server until it connects (or hits a non-retryable error).
    # NOTE: Python 2 print statements -- this module targets Python 2.
    from mincemeat import Client, DEFAULT_PORT
    import time
    import socket
    import os
    print "[%s] Starting worker" % os.getpid()
    while True:
        # Give the server a moment to come up between attempts.
        time.sleep(2)
        try:
            client = Client()
            client.password = ""
            # conn() blocks while the client serves work; done when it returns.
            client.conn('localhost', DEFAULT_PORT)
            return
        except socket.error as v:
            if v.errno == 54:
                # errno 54 -- presumably ECONNRESET (BSD/macOS numbering);
                # treated as transient, so retry. TODO confirm platform.
                print "[%s] Caught a socket error 54; resetting worker" % os.getpid()
            else:
                print "[%s] Caught a socket error %s; giving up" % (os.getpid(), v.errno)
                return
def run(options, args):
    # Run the aggregation job: spawn a pool of worker clients (unless
    # --no-children was given) and execute run_aggregates in this process.
    # Returns a small status dict for the caller.
    print 'Running aggregates...'
    num_workers = options.multi
    pool = multiprocessing.Pool(num_workers)
    if not options.no_children:
        for i in range(num_workers):
            pool.apply_async(run_client)
    from aggregates import run_aggregates
    run_aggregates(options)
    # Workers are fire-and-forget; terminate the pool once aggregation is done.
    pool.terminate()
    print "Aggregates complete."
    return {'success': True}
|
/*
* Copyright (C) 2012-2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package info.archinnov.achilles.internal.persistence.operations;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.SimpleStatement;
import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Update;
import info.archinnov.achilles.internal.validation.Validator;
public class NativeQueryValidator {

    /** Asserts that the given statement mutates data: INSERT, UPDATE or DELETE. */
    public void validateUpsertOrDelete(RegularStatement regularStatement) {
        final boolean mutating = isUpsertStatement(regularStatement) || isDeleteStatement(regularStatement);
        Validator.validateTrue(
                mutating,
                "The statement '%s' should be an INSERT, an UPDATE or a DELETE",
                regularStatement.getQueryString()
        );
    }

    /** Asserts that the given statement reads data (a SELECT). */
    public void validateSelect(RegularStatement regularStatement) {
        Validator.validateTrue(isSelectStatement(regularStatement), "The statement '%s' should be a SELECT", regularStatement.getQueryString());
    }

    /** @return whether the statement is a SELECT (raw "select ..." text or a Select builder). */
    public boolean isSelectStatement(RegularStatement regularStatement) {
        if (!isSimpleStatement(regularStatement)) {
            return regularStatement instanceof Select || regularStatement instanceof Select.Where;
        }
        return queryStartsWith(regularStatement, "select ");
    }

    /** @return whether the statement is an INSERT (raw "insert into ..." text or an Insert builder). */
    public boolean isInsertStatement(RegularStatement regularStatement) {
        if (!isSimpleStatement(regularStatement)) {
            return regularStatement instanceof Insert || regularStatement instanceof Insert.Options;
        }
        return queryStartsWith(regularStatement, "insert into ");
    }

    /** @return whether the statement is an UPDATE (raw "update ..." text or an Update builder). */
    public boolean isUpdateStatement(RegularStatement regularStatement) {
        if (!isSimpleStatement(regularStatement)) {
            return regularStatement instanceof Update.Where || regularStatement instanceof Update.Options;
        }
        return queryStartsWith(regularStatement, "update ");
    }

    /** @return whether the statement is a DELETE (raw "delete ..." text or a Delete builder). */
    public boolean isDeleteStatement(RegularStatement regularStatement) {
        if (!isSimpleStatement(regularStatement)) {
            return regularStatement instanceof Delete.Where || regularStatement instanceof Delete.Options;
        }
        return queryStartsWith(regularStatement, "delete ");
    }

    /** @return whether the statement is an INSERT or an UPDATE. */
    public boolean isUpsertStatement(RegularStatement regularStatement) {
        return isInsertStatement(regularStatement) || isUpdateStatement(regularStatement);
    }

    /** @return whether the statement is a plain textual statement (not built via QueryBuilder). */
    public boolean isSimpleStatement(RegularStatement regularStatement) {
        return regularStatement instanceof SimpleStatement;
    }

    // Case-insensitive prefix test on the trimmed query text of a simple statement.
    private static boolean queryStartsWith(RegularStatement statement, String prefix) {
        return statement.getQueryString().toLowerCase().trim().startsWith(prefix);
    }

    /** Enum-based singleton holder for a shared validator instance. */
    public static enum Singleton {
        INSTANCE;
        private final NativeQueryValidator instance = new NativeQueryValidator();
        public NativeQueryValidator get() {
            return instance;
        }
    }
}
|
<filename>src/test/sdg/equations/NonLinear3.java
package sdg.equations;
import sdg.Equation;
/**
 * Represents the SDE:
 * dX(t) = [sin(X(t))^3 * cos(X(t)) - sin(X(t))^2] dt - sin(X(t))^2 dW(t)
 */
public class NonLinear3 extends Equation {

    public NonLinear3(double x0, double t0, double tN) {
        super(x0, t0, tN);
    }

    /** Drift coefficient a(X) = sin(X)^3 * cos(X) - sin(X)^2. */
    @Override
    public double drift(double X, double t) {
        final double sinX = Math.sin(X);
        return Math.pow(sinX, 3) * Math.cos(X) - Math.pow(sinX, 2);
    }

    /** Derivative of the drift w.r.t. X: sin(X) * (sin(3X) - 2 cos(X)). */
    @Override
    public double driftPrime(double X, double t) {
        return Math.sin(X) * (Math.sin(3 * X) - 2 * Math.cos(X));
    }

    /** Diffusion coefficient b(X) = -sin(X)^2. */
    @Override
    public double diffusion(double X, double t) {
        return -Math.pow(Math.sin(X), 2);
    }

    /** Derivative of the diffusion w.r.t. X: -2 sin(X) cos(X). */
    @Override
    public double diffusionPrime(double X, double t) {
        return -2 * Math.sin(X) * Math.cos(X);
    }

    /** Second derivative of the diffusion w.r.t. X: -2 cos(2X). */
    @Override
    public double diffusionDoublePrime(double X, double t) {
        return -2 * Math.cos(2 * X);
    }

    /** Closed-form solution: X(t) = pi/2 - atan(1/tan(x0) + t + W(t)). */
    @Override
    public double exactSolution(double t, double totalNoise) {
        return Math.PI/2 - Math.atan(1.0 / Math.tan(this.x0) + t + totalNoise);
    }
}
|
/*
 * @Description: 运维管理
* @version: 1.0.0
* @Author: LSC
* @Date: 2021-06-17 10:31:01
* @LastEditors: LSC
* @LastEditTime: 2021-06-17 10:33:18
*/
import view from '@/components/view.vue'
// Route table for the O&M (operations & maintenance) management section.
// Each child view is lazy-loaded through webpack's require([...], resolve)
// callback form, so the chunk is only fetched when the route is visited.
export default {
  title: '运维管理',
  path: 'mochaITOM',
  name: 'mochaITOM',
  component: view,
  children: [
    {
      // Fault alarm view
      title: '故障报警',
      path: '/mochaITOM/faultAlarm',
      name: 'faultAlarm',
      component: (resolve) =>
        require(['@/views/smartElectricity/mochaITOM/faultAlarm.vue'], resolve)
    },
    {
      // Hidden-danger (hazard) management view
      title: '隐患管理',
      path: '/mochaITOM/hiddenDange',
      name: 'hiddenDange',
      component: (resolve) =>
        require(['@/views/smartElectricity/mochaITOM/hiddenDange.vue'], resolve)
    },
    {
      // Intelligent inspection view
      title: '智能巡检',
      path: '/mochaITOM/IntelligentIns',
      name: 'IntelligentIns',
      component: (resolve) =>
        require(['@/views/smartElectricity/mochaITOM/IntelligentIns.vue'], resolve)
    }
  ]
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.configuration.internal;
import java.util.function.BiFunction;
import java.util.function.Supplier;
import org.apache.ignite.configuration.ConfigurationChanger;
import org.apache.ignite.configuration.ConfigurationTree;
import org.apache.ignite.configuration.RootKey;
import org.apache.ignite.configuration.storage.ConfigurationStorage;
import org.apache.ignite.configuration.tree.InnerNode;
/** Root configuration key backed by supplier/creator functions. */
public class RootKeyImpl<T extends ConfigurationTree<VIEW, ?>, VIEW> extends RootKey<T, VIEW> {
    /** Name of the configuration root. */
    private final String rootName;
    /** Type of the configuration storage this root belongs to. */
    private final Class<? extends ConfigurationStorage> storageType;
    /** Factory producing a fresh internal root node. */
    private final Supplier<InnerNode> rootSupplier;
    /** Factory creating the public configuration tree bound to this key and a changer. */
    private final BiFunction<RootKey<T, VIEW>, ConfigurationChanger, T> publicRootCreator;
    /**
     * Constructor.
     *
     * @param rootName name of the configuration root
     * @param storageType storage type the root is persisted in
     * @param rootSupplier supplier of a fresh internal root node
     * @param publicRootCreator factory for the public root bound to a changer
     */
    public RootKeyImpl(
        String rootName,
        Class<? extends ConfigurationStorage> storageType,
        Supplier<InnerNode> rootSupplier,
        BiFunction<RootKey<T, VIEW>, ConfigurationChanger, T> publicRootCreator
    ) {
        this.rootName = rootName;
        this.storageType = storageType;
        this.rootSupplier = rootSupplier;
        this.publicRootCreator = publicRootCreator;
    }
    /** {@inheritDoc} */
    @Override public String key() {
        return rootName;
    }
    /** {@inheritDoc} */
    @Override protected Class<? extends ConfigurationStorage> getStorageType() {
        return storageType;
    }
    /** {@inheritDoc} */
    @Override protected InnerNode createRootNode() {
        return rootSupplier.get();
    }
    /** {@inheritDoc} */
    @Override protected T createPublicRoot(ConfigurationChanger changer) {
        return publicRootCreator.apply(this, changer);
    }
}
|
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import UserPlusSvg from '@rsuite/icon-font/lib/legacy/UserPlus';
// Icon component definition consumed by the rsuite icon factory.
// (Generated file -- keep hand edits out of the metadata below.)
const UserPlus = createSvgIcon({
  as: UserPlusSvg,
  ariaLabel: 'user plus',
  category: 'legacy',
  displayName: 'UserPlus'
});
export default UserPlus;
|
#!/bin/bash
# Deploy the SmokeTestCSharp Service Fabric application:
# upload the package, provision the application type, then perform a
# monitored upgrade to the version passed as the first argument ($1).
cd `dirname $0`
sfctl application upload --path SmokeTestCSharp --show-progress
sfctl application provision --application-type-build-path SmokeTestCSharp
sfctl application upgrade --app-id fabric:/SmokeTestCSharp --app-version $1 --parameters "{}" --mode Monitored
# Return to the directory we started from.
cd -
<filename>index.js
var crypto = require('crypto')
var randomBytesClassic = crypto.randomBytes
module.exports = function (seed) {
randomBytes.seed = seed = seed || randomBytesClassic(32)
randomBytes.currentSeed = seed
return randomBytes
function randomBytes (n) {
var result = Buffer.allocUnsafe(n)
var used = 0
while (used < result.length) {
randomBytes.currentSeed = seed = next(seed)
seed.copy(result, used)
used += seed.length
}
return result
}
}
// One step of the generator: the next seed is the SHA-256 digest (32 bytes)
// of the current seed.
function next (seed) {
  const hash = crypto.createHash('sha256')
  hash.update(seed)
  return hash.digest()
}
|
def selection_sort(arr):
    """Sort ``arr`` in place, ascending, using selection sort (O(n^2))."""
    for start in range(len(arr)):
        # Find the index of the smallest element in arr[start:].
        smallest = start
        for candidate in range(start + 1, len(arr)):
            if arr[candidate] < arr[smallest]:
                smallest = candidate
        # Swap it into its final position.
        arr[start], arr[smallest] = arr[smallest], arr[start]
# Demo: sort a sample list in place and print the result.
arr = [4, 5, 13, 2, 9, 7]
selection_sort(arr)
print(arr)
# Output: [2, 4, 5, 7, 9, 13]
<reponame>kathesama/gaugemeter-app
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { IonicModule } from '@ionic/angular';
import { DispositivoPageRoutingModule } from './dispositivo-routing.module';
import { DispositivoPage } from './dispositivo.page';
import { UnidadPipe } from '../pipes/unidad.pipe';
import { ActuadorPipe } from '../pipes/actuador.pipe';
// Feature module for the "dispositivo" (device) page: declares the page
// component together with the unit/actuator formatting pipes, and pulls in
// Ionic, forms and the page's routing module.
@NgModule({
  imports: [
    CommonModule,
    FormsModule,
    IonicModule,
    DispositivoPageRoutingModule
  ],
  declarations: [DispositivoPage, UnidadPipe, ActuadorPipe]
})
export class DispositivoPageModule {}
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.vtn.table.impl;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.slf4j.LoggerFactory.getLogger;
import org.onlab.osgi.DefaultServiceDirectory;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.packet.EthType.EtherType;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.DeviceId;
import org.onosproject.net.PortNumber;
import org.onosproject.net.behaviour.ExtensionTreatmentResolver;
import org.onosproject.net.driver.DriverHandler;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.instructions.ExtensionTreatment;
import org.onosproject.net.flow.instructions.ExtensionTreatmentType;
import org.onosproject.net.flowobjective.DefaultForwardingObjective;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.flowobjective.ForwardingObjective.Flag;
import org.onosproject.net.flowobjective.Objective;
import org.onosproject.net.flowobjective.Objective.Operation;
import org.onosproject.vtn.table.ArpService;
import org.onosproject.vtnrsc.SegmentationId;
import org.slf4j.Logger;
/**
 * ArpTable class providing the rules in ARP table.
 */
public class ArpServiceImpl implements ArpService {
    private final Logger log = getLogger(getClass());
    // Maximum flow priority so ARP-reply rules win over other rules.
    private static final int ARP_PRIORITY = 0xffff;
    // ARP operation code 2 = reply.
    private static final short ARP_RESPONSE = 0x2;
    private static final EtherType ARP_TYPE = EtherType.ARP;
    private final FlowObjectiveService flowObjectiveService;
    private final ApplicationId appId;
    /**
     * Construct a ArpServiceImpl object.
     *
     * @param appId the application id of vtn
     */
    public ArpServiceImpl(ApplicationId appId) {
        this.appId = checkNotNull(appId, "ApplicationId can not be null");
        ServiceDirectory serviceDirectory = new DefaultServiceDirectory();
        this.flowObjectiveService = serviceDirectory.get(FlowObjectiveService.class);
    }
    /**
     * Installs or removes a flow rule that answers ARP requests for dstIP on
     * the given device: the matched request is rewritten in place into a reply
     * carrying dstMac and sent back out of the port it arrived on.
     *
     * @param hander driver handler used to resolve Nicira extension treatments
     * @param deviceId device to program
     * @param dstIP target IP whose ARP requests should be answered
     * @param srcVni tunnel/segmentation id to match
     * @param dstMac MAC address used in the generated reply
     * @param type ADD installs the rule; any other operation removes it
     */
    @Override
    public void programArpRules(DriverHandler hander, DeviceId deviceId,
                                IpAddress dstIP, SegmentationId srcVni,
                                MacAddress dstMac, Operation type) {
        // Match: ARP packets asking for dstIP within the given tunnel id.
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchEthType(ARP_TYPE.ethType().toShort())
                .matchArpTpa(Ip4Address.valueOf(dstIP.toString()))
                .matchTunnelId(Long.parseLong(srcVni.segmentationId())).build();
        // Nicira move extensions copy request fields into the reply positions.
        ExtensionTreatmentResolver resolver = hander
                .behaviour(ExtensionTreatmentResolver.class);
        ExtensionTreatment ethSrcToDst = resolver
                .getExtensionInstruction(ExtensionTreatmentType.ExtensionTreatmentTypes
                        .NICIRA_MOV_ETH_SRC_TO_DST.type());
        ExtensionTreatment arpShaToTha = resolver
                .getExtensionInstruction(ExtensionTreatmentType.ExtensionTreatmentTypes
                        .NICIRA_MOV_ARP_SHA_TO_THA.type());
        ExtensionTreatment arpSpaToTpa = resolver
                .getExtensionInstruction(ExtensionTreatmentType.ExtensionTreatmentTypes
                        .NICIRA_MOV_ARP_SPA_TO_TPA.type());
        // Treatment: set reply opcode, fill in dstMac/dstIP as the sender, and
        // reflect the packet out of the ingress port.
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .extension(ethSrcToDst, deviceId)
                .setEthSrc(dstMac).setArpOp(ARP_RESPONSE)
                .extension(arpShaToTha, deviceId)
                .extension(arpSpaToTpa, deviceId)
                .setArpSha(dstMac).setArpSpa(dstIP)
                .setOutput(PortNumber.IN_PORT).build();
        ForwardingObjective.Builder objective = DefaultForwardingObjective
                .builder().withTreatment(treatment).withSelector(selector)
                .fromApp(appId).withFlag(Flag.SPECIFIC)
                .withPriority(ARP_PRIORITY);
        if (type.equals(Objective.Operation.ADD)) {
            log.debug("PrivateArpRules-->ADD");
            flowObjectiveService.forward(deviceId, objective.add());
        } else {
            log.debug("PrivateArpRules-->REMOVE");
            flowObjectiveService.forward(deviceId, objective.remove());
        }
    }
}
|
// Use parseInt() in the convertToInteger function so it converts the input string str into an integer, and returns it.
// convertToInteger should use the parseInt() function
// convertToInteger("56") should return a number
// convertToInteger("56") should return 56
// convertToInteger("77") should return 77
// convertToInteger("JamesBond") should return NaN

/**
 * Convert a base-10 numeric string to an integer.
 * @param {string} str - The string to convert.
 * @returns {number} The parsed integer, or NaN when str is not numeric.
 */
function convertToInteger(str) {
  // Always pass the radix: without it, inputs like "0x1A" are parsed as hex.
  return parseInt(str, 10);
}
convertToInteger("56");
import { Component, getComponentByDomNode, startComponents } from '@gondel/core';
import { createElement } from 'react';
import { createGondelReactLoader, GondelReactComponent } from './GondelReactComponent';
/**
 * Build a detached DOM tree holding a <script type="text/json"> child whose
 * body is the serialized initial component state (as Gondel reads it).
 */
const createComponentStateHTML = (initialState: object = {}) => {
  const wrapper = document.createElement('div');
  const stateScript = document.createElement('script');
  stateScript.type = 'text/json';
  stateScript.innerHTML = JSON.stringify(initialState);
  wrapper.appendChild(stateScript);
  return wrapper;
};
// Test suite for GondelReactComponent / createGondelReactLoader covering
// construction, JSON <script> config, state handling and React rendering.
describe('@gondel/plugin-react', () => {
  describe('GondelReactComponent', () => {
    describe('constructor', () => {
      it('should be constructable', () => {
        const root = document.createElement('div');
        expect(() => new GondelReactComponent<{}>(root, 'example')).not.toThrow();
      });
      it('should expose gondel lifecycle methods', () => {
        const root = document.createElement('div');
        const c = new GondelReactComponent(root, 'example');
        expect((c as any).start).toBeDefined();
        expect((c as any).stop).toBeDefined();
      });
      it('should read child script config', () => {
        // Initial state comes from the embedded <script type="text/json"> tag.
        const root = createComponentStateHTML({ theme: 'light', loaded: true });
        class TestComponent extends GondelReactComponent<{
          theme: 'light' | 'dark';
          loaded: boolean;
        }> {
          _componentName = 'TestComponent';
          darken = () => this.setState({ theme: 'dark' });
          lighten = () => this.setState({ theme: 'light' });
          unload = () => this.setState({ loaded: false });
        }
        const component = new TestComponent(root, 'test');
        expect(component.state.theme).toEqual('light');
        expect(component.state.loaded).toBe(true);
      });
    });
    describe('state', () => {
      it('should expose an initial default state', () => {
        const root = document.createElement('div');
        class TestComponent extends GondelReactComponent {
          _componentName = 'TestComponent';
        }
        const component = new TestComponent(root, 'stub');
        // check if state & setter are defined
        expect(component.state).toBeDefined();
        expect(component.setState).toBeDefined();
        // check initial state
        expect(component.state).toEqual({});
        // check state updates
        component.setState({ a: 10, b: 'test' });
        expect(component.state).toEqual({
          a: 10,
          b: 'test',
        });
      });
      it('should read the custom initial state correctly', () => {
        class TestComponent extends GondelReactComponent<{ username: string }> {
          _componentName = 'TestComponent';
          state = { username: 'max' };
        }
        const root = document.createElement('div');
        const c = new TestComponent(root, 'test');
        expect(c.state).toEqual({ username: 'max' });
      });
      it('should re-render on state changes', () => {
        let renderCount = 0;
        class TestComponent extends GondelReactComponent<{ username: string }> {
          _componentName = 'TestComponent';
          state = { username: 'max' };
          render() {
            renderCount++;
            return `${this.state.username}`;
          }
        }
        const root = document.createElement('div');
        const c = new TestComponent(root, 'test');
        expect(renderCount).toEqual(0);
        expect(c.state).toEqual({ username: 'max' });
        // initial paint
        const output = c.render();
        expect(renderCount).toEqual(1);
        expect(output).toEqual('max');
        // update paint
        c.setState({ username: 'lisa' });
        const outputAfterUpdate = c.render();
        expect(renderCount).toEqual(2);
        expect(c.state.username).toEqual('lisa');
        expect(outputAfterUpdate).toEqual('lisa');
      });
    });
    // Rendering: sync/async loaders, lifecycle hooks, and error cases.
    describe('render', () => {
      it('should be able to render React apps syncronously', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"><script type="text/json">{ "title": "Hello World"}</script></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title);
          }
          const loader = () => TestTitleSpan;
          const GondelReactLoaderComponent = createGondelReactLoader(loader);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            start() {}
            componentDidMount() {
              resolve();
            }
          }
          startComponents(root);
        });
        expect(root.innerHTML).toBe('<div data-g-name="Greeter"><span>Hello World</span></div>');
      });
      it('should be able to render React apps asyncronously', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"><script type="text/json">{ "title": "Hello World"}</script></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title);
          }
          const loader = async () => TestTitleSpan;
          const GondelReactLoaderComponent = createGondelReactLoader(loader);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            componentDidMount() {
              resolve();
            }
          }
          startComponents(root);
        });
        expect(root.innerHTML).toBe('<div data-g-name="Greeter"><span>Hello World</span></div>');
      });
      it('should be able to render React apps named asyncronously', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"><script type="text/json">{ "title": "Hello World"}</script></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title);
          }
          const loader = async () => ({ TestTitleSpan } as const);
          const GondelReactLoaderComponent = createGondelReactLoader(loader, 'TestTitleSpan');
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            componentDidMount() {
              resolve();
            }
          }
          startComponents(root);
        });
        expect(root.innerHTML).toBe('<div data-g-name="Greeter"><span>Hello World</span></div>');
      });
      it('should execute hooks during rendering', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"></div>`;
        const hooks: string[] = [];
        await new Promise<void>((resolve) => {
          const loader = () => () => createElement('span', null, 'Hello World');
          const GondelReactLoaderComponent = createGondelReactLoader(loader);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            componentDidMount() {
              hooks.push('componentDidMount');
              setTimeout(() => {
                this.stop();
              });
            }
            componentWillUnmount() {
              hooks.push('componentWillUnmount');
              resolve();
            }
          }
          startComponents(root);
        });
        expect(hooks).toEqual(['componentDidMount', 'componentWillUnmount']);
      });
      it('should render after the start method is done', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title);
          }
          const loader = () => TestTitleSpan;
          const GondelReactLoaderComponent = createGondelReactLoader(loader);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            start() {
              return new Promise<void>((resolve) => {
                setTimeout(() => {
                  this.setState({
                    title: 'Lazy loaded data',
                  });
                  resolve();
                });
              });
            }
            componentDidMount() {
              resolve();
            }
          }
          startComponents(root);
        });
        expect(root.innerHTML).toBe('<div data-g-name="Greeter"><span>Lazy loaded data</span></div>');
      });
      it('should render after the start method is done using a callback', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title);
          }
          const loader = () => TestTitleSpan;
          const GondelReactLoaderComponent = createGondelReactLoader(loader);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            start(resolve: () => void) {
              setTimeout(() => {
                this.setState({
                  title: 'Lazy loaded data',
                });
                resolve();
              });
            }
            componentDidMount() {
              resolve();
            }
          }
          startComponents(root);
        });
        expect(root.innerHTML).toBe('<div data-g-name="Greeter"><span>Lazy loaded data</span></div>');
      });
      it('should rerender once setState is called', async () => {
        const root = document.createElement('div');
        root.innerHTML = `<div data-g-name="Greeter"></div>`;
        await new Promise<void>((resolve) => {
          function TestTitleSpan(props: { title: string }) {
            return createElement('span', null, props.title || '');
          }
          const GondelReactLoaderComponent = createGondelReactLoader(() => TestTitleSpan);
          @Component('Greeter')
          class Greeter extends GondelReactLoaderComponent {
            componentDidMount() {
              resolve();
            }
            componentDidUpdate() {}
            shouldComponentUpdate() {
              return true;
            }
          }
          startComponents(root);
        });
        const component = getComponentByDomNode<any>(root.firstElementChild!);
        component.setState({ title: 'update using getComponentByDomNode' });
        expect(root.innerHTML).toBe(
          '<div data-g-name="Greeter"><span>update using getComponentByDomNode</span></div>'
        );
      });
      it('base class should throw an error if no app provided', () => {
        const root = document.createElement('div');
        const c = new GondelReactComponent<{}>(root, 'test');
        expect(() => c.render()).toThrow('undefined could not render please add a render method');
      });
      it('custom class should throw an error if no render method is provided', () => {
        class TestComponent extends GondelReactComponent<{}> {
          _componentName = 'TestComponent';
        }
        const root = document.createElement('div');
        const c = new TestComponent(root, 'test');
        expect(() => c.render()).toThrow('TestComponent could not render please add a render method');
      });
      it('custom class should throw an error if invalid App provided', () => {
        class TestComponent extends GondelReactComponent<{}> {
          App = null as any; // fake invalid react component
          _componentName = 'TestComponent';
        }
        const root = document.createElement('div');
        const c = new TestComponent(root, 'test');
        expect(() => {
          c.render();
        }).toThrow('TestComponent could not render ensure that you are returning a React component');
      });
      it('custom class should not throw an error if invalid App and custom render provided', () => {
        class TestComponent extends GondelReactComponent<{}> {
          App = null as any; // fake invalid react component
          _componentName = 'TestComponent';
          render() {
            return `Value: ${this.App}`;
          }
        }
        const root = document.createElement('div');
        const c = new TestComponent(root, 'test');
        expect(() => c.render()).not.toThrow();
        expect(c.render()).toEqual('Value: null');
      });
    });
  });
});
|
package org.mammon.messaging;
/**
 * Converts an {@link Identifiable} object for publication under a new identity.
 *
 * @param <T> the type of object this converter accepts
 */
public interface PublicationConverter<T extends Identifiable> {
    /**
     * Convert the given object using the supplied new identity.
     *
     * @param object the object to convert
     * @param newIdentity the identity to publish the converted object under
     * @return the converted object
     */
    Identifiable convert(T object, String newIdentity);
}
|
#!/bin/bash
# This script deploys a dev version of the extension to the dev environment for development / testing purposes.
# It also shares / installs it into the given Azure organization.
# Arguments:
# $1 - Extension version
# $2 - Organization that will be shared the extension
# Handle arguments
INPUT_PARAM_AZ_EXT_NEW_VERSION="$1"
INPUT_PARAM_AZ_ORG="$2"
# Check if the Azure CLI is already installed. If not, install it.
az -v >/dev/null 2>&1
if [[ ! $? -eq 0 ]]; then
    echo "Intalling AZ Cli..."
    platform=$OSTYPE
    echo "Platform: ${platform}"
    # NOTE(review): this exact-match test misses variants such as
    # "linux-gnueabihf"; a pattern match ("linux-gnu"*) may be intended.
    if [[ $platform == "linux-gnu" ]]; then
        curl -sL https://aka.ms/InstallAzureCLIDeb | sudo bash
        az extension add --name azure-devops
    elif [[ $platform == "darwin"* ]]; then
        brew -v >/dev/null 2>&1
        if [[ $? -eq 0 ]]; then
            brew update && brew install azure-cli
        else
            echo "You need to have brew or install AZ Cli manually"
            exit 1
        fi
    else
        echo "Platform ${platform} not supported"
        exit 1
    fi
fi
# See if the extension is already installed in the target organization.
az devops extension show \
    --publisher-name $DEV_AZ_PUBLISHER \
    --extension-name $DEV_AZ_EXTENSION_ID \
    --organization "https://dev.azure.com/${INPUT_PARAM_AZ_ORG}/"
if [[ $? -eq 0 ]]; then
    echo "Extension is installed in org ${INPUT_PARAM_AZ_ORG}... uninstall it"
    # Uninstall the extension if it is already installed in this organization.
    # This may not be required, but it works much more consistently with it.
    echo "Uninstall extension..."
    az devops extension uninstall \
        --publisher-name $DEV_AZ_PUBLISHER \
        --extension-name $DEV_AZ_EXTENSION_ID \
        --organization "https://dev.azure.com/${INPUT_PARAM_AZ_ORG}/" --yes
    echo "Extension uninstalled"
else
    echo "Extension not already installed."
fi
echo "About to deploy to dev environment using:"
echo "INPUT_PARAM_AZ_EXT_NEW_VERSION: ${INPUT_PARAM_AZ_EXT_NEW_VERSION}"
echo "DEV_AZ_PUBLISHER: ${DEV_AZ_PUBLISHER}"
echo "DEV_AZ_EXTENSION_ID: ${DEV_AZ_EXTENSION_ID}"
echo "DEV_AZ_TASK_NAME: ${DEV_AZ_TASK_NAME}"
echo "DEV_AZ_TASK_FRIENDLY_NAME: ${DEV_AZ_TASK_FRIENDLY_NAME}"
echo "INPUT_PARAM_AZ_ORG: ${INPUT_PARAM_AZ_ORG}"
echo "DEV_AZ_TASK_ID: ${DEV_AZ_TASK_ID}"
# Updating version in task.json file
node "${PWD}/scripts/update-task-json-dev.js" ${INPUT_PARAM_AZ_EXT_NEW_VERSION}
# Override version
OVERRIDE_JSON="{ \"name\": \"${DEV_AZ_EXTENSION_NAME}\", \"version\": \"${INPUT_PARAM_AZ_EXT_NEW_VERSION}\" }"
# See if the snykTask/dist and snykTask/node_modules folders are present
echo "Checking for snykTask/dist folder..."
ls -la snykTask/dist
echo "checking snykTask/node_modules..."
ls -la snykTask/node_modules
# Publishing and sharing extension
echo "Publishing and sharing extension..."
echo "OVERRIDE_JSON: ${OVERRIDE_JSON}"
echo "About to call \`tfx extension publish...\`"
# NOTE(review): $OVERRIDE_JSON is unquoted and contains spaces, so it is
# word-split here -- verify tfx receives the intended JSON; quoting
# ("$OVERRIDE_JSON") is likely what is wanted.
tfx extension publish --manifest-globs vss-extension-dev.json \
    --version $INPUT_PARAM_AZ_EXT_NEW_VERSION \
    --share-with $INPUT_PARAM_AZ_ORG \
    --extension-id $DEV_AZ_EXTENSION_ID \
    --publisher $DEV_AZ_PUBLISHER \
    --override $OVERRIDE_JSON \
    --token $DEV_AZURE_DEVOPS_EXT_PAT
publish_exit_code=$?
# NOTE(review): inside [[ ]], the bare name works because -eq forces arithmetic
# evaluation, but "$publish_exit_code" would be the conventional spelling.
if [[ publish_exit_code -eq 0 ]]; then
    echo "Extension published and shared with Azure org"
else
    echo "Extension failed to pubish with exit code ${publish_exit_code}"
    exit ${publish_exit_code}
fi
# re-install all dependencies. The dev deps were pruned off in ci-build.sh
echo "reinstalling all dependencies..."
npm install
echo "Run script to install the dev extension into the dev org in Azure DevOps..."
node ./ops/deploy/dist/install-extension-to-dev-org.js "${INPUT_PARAM_AZ_EXT_NEW_VERSION}"
if [[ ! $? -eq 0 ]]; then
    echo "failed installing dev extension at correct version"
    exit 1
fi
# Restore the original task.json version
node "${PWD}/scripts/recovery-task-json-dev.js"
echo "Extension installed"
|
<reponame>abin1525/rose-edg<gh_stars>1-10
#ifndef PAR_PRO_SPOT_TGBA_HH
#define PAR_PRO_SPOT_TGBA_HH
#include <cassert>
//CodeThorn includes
#include "ParProSpotState.h"
#include "ParProSpotSuccIter.h"
#include "StateRepresentations.h"
// BOOST includes
#include "boost/unordered_set.hpp"
//SPOT includes
#include "tgba/tgba.hh"
#include "tgba/succiter.hh"
#include "ltlvisit/apcollect.hh"
#include "tgba/bdddict.hh"
#include "bdd.h"
#include "tgba/state.hh"
#include "ltlast/atomic_prop.hh"
using namespace std;
namespace CodeThorn {
// An adapter which takes a CodeThorn ParProTransitionGraph and adheres to SPOT's TGBA interface
class ParProSpotTgba : public spot::tgba {
public:
    // Wrap "ctstg"; "sap" is the set of atomic propositions of interest and
    // "dic" the shared BDD dictionary used for transition conditions.
    ParProSpotTgba(ParProTransitionGraph& ctstg, const spot::ltl::atomic_prop_set& sap,
                   spot::bdd_dict& dic);
    ~ParProSpotTgba();
    // return the initial state of this tgba
    spot::state* get_init_state() const;
    //return an iterator over "local_state"'s successors
    spot::tgba_succ_iterator* succ_iter (const spot::state* local_state,
                                         const spot::state*, const spot::tgba*) const;
    //return the dictionary used to store the atomic propositions used for transitions
    spot::bdd_dict* get_dict() const;
    //return a string representation of the given state
    std::string format_state(const spot::state* state) const;
    // returns a string representing the condition of the transition that "iter" is currently pointing at
    std::string transition_annotation(const spot::tgba_succ_iterator* iter) const;
    //returns bdd::false, no particular accepting states exist. All (partial) paths through
    // the automaton define valid runs
    bdd all_acceptance_conditions() const;
    //returns bdd_true, see above
    bdd neg_acceptance_conditions() const;
protected:
    //two functions to help the SPOT library reducing the number of successor states traversed
    // currently not implemented (returns bdd::true, no filtering takes place)
    bdd compute_support_conditions(const spot::state* state) const;
    bdd compute_support_variables(const spot::state* state) const;
private:
    // CodeThorn's TransitionGraph (adaptee)
    ParProTransitionGraph& stg;
    // the dictionary of atomic propositions used by this automaton
    spot::bdd_dict& dict;
    //map atomic propositions as used in the LTL formulae to their
    // corresponding slots in the bdd_dict dictionary table.
    boost::unordered_map<std::string, int> propName2DictNum;
};
} //end of namespace CodeThorn
#endif
|
/*
 * Greedy coin count: returns how many coins are needed to make amount M,
 * taking as many of the largest denomination as possible first.
 *
 * NOTE(review): assumes `denominations` is sorted in ascending order and that
 * the coin system is canonical (greedy-optimal) -- confirm at call sites.
 * Despite the name, this computes the greedy (minimal) number of coins.
 */
int maxCoin(int M, int denominations[], int n)
{
    int total = 0;
    /* Walk from the largest denomination down, consuming M as we go. */
    for (int i = n - 1; i >= 0; i--) {
        total += M / denominations[i]; /* coins of this denomination */
        M = M % denominations[i];      /* remainder still to cover */
    }
    return total;
}
#!/bin/sh
# Verify that the vendored Go dependencies are up to date.
# Inside the container: re-vendor and fail (non-zero diff) if anything changed.
# Outside: re-run this same script inside an origin-release golang container.
if [ "$IS_CONTAINER" != "" ]; then
  go mod vendor
  go mod verify
  git diff --exit-code
else
  podman run --rm \
    --env IS_CONTAINER=TRUE \
    --volume "${PWD}:/go/src/github.com/openshift/installer:z" \
    --workdir /go/src/github.com/openshift/installer \
    docker.io/openshift/origin-release:golang-1.13 \
    ./hack/verify-vendor.sh "${@}"
fi
|
var execSync = require('child_process').execSync;
var execAsync = require('child_process').exec;
var fs = require('fs');
var md5 = require('js-md5');
var async = require('async');
var http = require('http');
var https = require('https');
var path = require('path');
var urlencode = require('urlencode');
const debug = require('debug')('doc-processor-server')
let downloadHelper = require('./lib/tools.js').downloadHelper;
exports.download = function download(hash, dlname, conf, res, next) {
downloadHelper("zip", "zip", hash, dlname, conf, res, next);
}
// Download the remote files listed in `job.files` into a temp directory,
// zip them, and answer either with the zip itself (`result === 'DOWNLOAD'`)
// or with a JSON status containing a download link (`result === 'STATUS'`).
// `conf` supplies: tmpFolder, failFast, deleteFilesEvenOnErrors,
// keepFilesForDebugging, allowedEncodings, serverSourceEncoding, server.
exports.zip = function zip(result, conf, job, res, next) {
    debug("zip process started");
    // Random suffix keeps concurrent jobs with identical payloads apart.
    let rnd = Math.floor(Math.random() * 1000)
    let hash = md5(JSON.stringify(job));
    let nonce = hash + "-" + rnd
    //Mkdirs
    let jobdir = conf.tmpFolder + "/job-zip-" + nonce;
    var indir = jobdir + "/in"
    fs.mkdirSync(jobdir);
    fs.mkdirSync(indir);
    //Download
    // Fetch up to `threads` source files in parallel. NOTE: the inner
    // `next` is the per-task callback of async.eachLimit and shadows the
    // outer route-level `next`.
    var threads = 15;
    async.eachLimit(job.files, threads, function (task, next) {
        if (!fs.existsSync(indir + "/" + task.folder)) {
            fs.mkdirSync(indir + "/" + task.folder);
        }
        // URL-encode only the basename; the directory part of the URI is
        // used verbatim.
        var filename = path.basename(task.uri);
        var urlencodedFilename = urlencode(path.basename(task.uri));
        var urlprefix = path.dirname(task.uri);
        var url = urlprefix + "/" + urlencodedFilename;
        var file = fs.createWriteStream(indir + "/" + task.folder + "/" + filename);
        debug("go for " + url);
        if ((task.uri.startsWith("https"))) {
            var request = https.get(url, function (response) {
                if (response.statusCode === 200) {
                    response.pipe(file);
                    file.on('finish', function () {
                        file.close();
                        next();
                    });
                } else {
                    // NOTE(review): `e` is created but never used below.
                    let e = new Error("At least one document could not be retrieved.");
                    if (conf.deleteFilesEvenOnErrors) {
                        debug("remove " + jobdir);
                        execSync("rm -rf " + jobdir);
                    }
                    debug("Error retrieving (failFast="+conf.failFast+") "+url);
                    // res.writeHead(e.code);
                    // res.end(e.message);
                    if (conf.failFast===true) {
                        // NOTE(review): in fail-fast mode the per-task callback is
                        // never invoked, so eachLimit's final callback will not run
                        // for this job and the write stream stays open -- confirm
                        // this is intentional.
                        res.send(new Error("Error retrieving "+url));
                    }
                    else {
                        // Best effort: skip the missing file and keep zipping.
                        file.close();
                        next();
                    }
                }
            });
        } else {
            // Same flow as above for plain-http URIs.
            var request = http.get(url, function (response) {
                if (response.statusCode === 200) {
                    response.pipe(file);
                    file.on('finish', function () {
                        file.close();
                        next();
                    });
                } else {
                    // NOTE(review): `e` is created but never used below.
                    let e = {
                        code: 500,
                        message: "At least one document could not be retrieved."
                    };
                    // res.writeHead(e.code);
                    //res.end(e.message);
                    if (conf.failFast===true) {
                        res.send(new Error("Error retrieving "+url));
                    }
                    else {
                        file.close();
                        next();
                    }
                }
            });
        }
    }, function (err) {
        debug(job.files.length + ' downloads finished');
        //Zip the results
        if (!err) {
            //Only changes the encoding of the filenames
            // Optional convmv pass converts filename encodings before zipping;
            // only encodings whitelisted in conf.allowedEncodings are honored.
            let encodingCMD="";
            if (job.encoding && conf.allowedEncodings.indexOf(job.encoding)!=-1) {
                encodingCMD="convmv --notest -r -f "+conf.serverSourceEncoding+" -t "+job.encoding+" * && ";
            }
            var cmd = encodingCMD+"zip -r -X ../out.zip *"
            execAsync(cmd, {
                "cwd": indir
            }, function (error, stdout, stderr) {
                if (error) {
                    // NOTE(review): `e` is created but never used below.
                    let e = {
                        code: 500,
                        message: "Error within the zip command."
                    };
                    if (conf.deleteFilesEvenOnErrors) {
                        debug("remove " + jobdir);
                        execSync("rm -rf " + jobdir);
                    }
                    //res.writeHead(e.code);
                    //res.end(e.message);
                    debug(error);
                    // NOTE(review): Error(message, options) expects an options
                    // object with a `cause` key; the second argument here is
                    // effectively ignored -- confirm intended.
                    res.send(new Error("Error within the zip command.",error));
                } else {
                    //return the result
                    var filepath = jobdir + "/out.zip";
                    fs.readFile(filepath, function (err, data) {
                        if (err) {
                            if (conf.deleteFilesEvenOnErrors) {
                                debug("remove " + jobdir);
                                execSync("rm -rf " + jobdir);
                            }
                            // NOTE(review): `e` is created but never used below.
                            let e = {
                                code: 500,
                                message: "Could not find the output file."
                            };
                            // res.writeHead(e.code);
                            // res.end(e.message);
                            res.send(new Error("Could not find the output file.",err));
                            return;
                        }
                        if (result === 'DOWNLOAD') {
                            // Stream the archive straight back to the client.
                            res.writeHead(200, {
                                "Content-Disposition":"filename=" + job.name + ".zip",
                                "Content-Type":"application/zip"
                            });
                            res.end(data);
                            if (!conf.keepFilesForDebugging) {
                                debug("remove " + jobdir);
                                execSync("rm -rf " + jobdir);
                            }
                            return next();
                        } else if (result === 'STATUS') {
                            // Leave the archive on disk and hand back a link to
                            // the /api/download endpoint (see exports.download).
                            res.contentType = 'application/json'
                            //res.writeHead(200);
                            res.send(200, {
                                status: 200,
                                id: nonce,
                                href: conf.server+"/api/download/zip/"+nonce+"/"+job.name
                            });
                            return next();
                        } else {
                            // Programming error by the caller; unreachable next().
                            throw {
                                error: "result has to be either DOWNLOAD or STATUS (it was " + result + ")"
                            };
                            return next();
                        }
                    });
                }
            });
        } else {
            if (conf.deleteFilesEvenOnErrors) {
                debug("remove " + jobdir);
                execSync("rm -rf " + jobdir);
            }
            debug("Zipping skipped due to an error", err);
            res.send(new Error("Zipping skipped due to an error", err));
            return next();
        }
    });
}
# Restore the default word-splitting behaviour in case IFS was inherited.
unset IFS
# errexit: abort on any command failure; nounset: treat unset variables as
# errors; noglob: disable pathname expansion.
set -o errexit -o nounset -o noglob
# Minimal, predictable PATH and C locale for reproducible tool behaviour.
export PATH=/bin:/sbin:/usr/bin:/usr/sbin
export LC_ALL=C
<reponame>uwplse/legato
package edu.washington.cse.instrumentation.analysis;
import java.util.HashSet;
import java.util.Set;
import soot.SootMethod;
import soot.jimple.toolkits.callgraph.Edge;
import soot.jimple.toolkits.callgraph.EdgePredicate;
/**
 * Call-graph edge filter that rejects edges whose target is one of the
 * core Java reflection entry points (Class.newInstance, Class.forName,
 * Method.invoke, Constructor.invoke).
 */
public class ReflectionEdgePredicate implements EdgePredicate {
    /** Soot signatures of the reflection methods to filter out. */
    public static final String[] REFLECTION_SIGS = new String[]{
        "<java.lang.Class: java.lang.Object newInstance()>",
        "<java.lang.Class: java.lang.Class forName(java.lang.String)>",
        "<java.lang.reflect.Method: java.lang.Object invoke(java.lang.Object,java.lang.Object[])>",
        "<java.lang.reflect.Constructor: java.lang.Object invoke(java.lang.Object[])>",
    };

    /** Signature set for O(1) membership checks in want(). */
    private final Set<String> toIgnore = new HashSet<>();

    public ReflectionEdgePredicate() {
        for(int i = 0; i < REFLECTION_SIGS.length; i++) {
            toIgnore.add(REFLECTION_SIGS[i]);
        }
    }

    /** Keep the edge unless its target is a known reflection method. */
    @Override
    public boolean want(final Edge e) {
        final SootMethod target = e.getTgt().method();
        final String signature = target.getSignature();
        return !toIgnore.contains(signature);
    }
}
|
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.auth.models import User
class Profile(models.Model):
    # One-to-one extension of the built-in auth User; deleted with it.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    # Self-referential follower graph. symmetrical=False makes it
    # directional (A following B does not imply B following A); the
    # reverse accessor is `followed_by`.
    follows = models.ManyToManyField(
        "self", related_name="followed_by", symmetrical=False, blank=True
    )
    def __str__(self):
        # Display the profile by its owner's username.
        return self.user.username
@receiver(post_save, sender=User)
def create_profile(sender, instance, created, **kwargs):
    # Signal handler: give every newly created User a matching Profile.
    # `created` is False on subsequent saves, so this runs exactly once
    # per user.
    if created:
        user_profile = Profile(user=instance)
        user_profile.save()
        # Make the user follow themselves (instance.profile resolves via
        # the reverse one-to-one created by the save above).
        user_profile.follows.add(instance.profile)
        # NOTE(review): .add() writes the M2M row directly, so this second
        # save() looks redundant -- confirm before removing.
        user_profile.save()
|
<filename>pkg/pipeline/run.go
/*
Copyright 2021 The Crossplane Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package pipeline
import (
"fmt"
"os/exec"
"path/filepath"
"sort"
"strings"
"github.com/crossplane/terrajet/pkg/config"
"github.com/crossplane/crossplane-runtime/pkg/errors"
)
// Run runs the Terrajet code generation pipelines: it groups the provider's
// resources by API group/version, generates CRD, Terraformed and controller
// code for each resource, emits the register/setup files, and finally runs
// goimports over the generated sources. Any generation failure panics,
// aborting the whole run with a wrapped error.
func Run(pc *config.Provider, rootDir string) { // nolint:gocyclo
	// Note(turkenh): nolint reasoning - this is the main function of the code
	// generation pipeline. We didn't want to split it into multiple functions
	// for better readability considering the straightforward logic here.

	// Group resources based on their Group and API Versions.
	// An example entry in the tree would be:
	// ec2.awsjet.crossplane.io -> v1alpha1 -> aws_vpc
	resourcesGroups := map[string]map[string]map[string]*config.Resource{}
	for name, resource := range pc.Resources {
		// Resources without a ShortGroup land directly in the root group.
		group := pc.RootGroup
		if resource.ShortGroup != "" {
			group = strings.ToLower(resource.ShortGroup) + "." + pc.RootGroup
		}
		if len(resourcesGroups[group]) == 0 {
			resourcesGroups[group] = map[string]map[string]*config.Resource{}
		}
		if len(resourcesGroups[group][resource.Version]) == 0 {
			resourcesGroups[group][resource.Version] = map[string]*config.Resource{}
		}
		resourcesGroups[group][resource.Version][name] = resource
	}

	// Add ProviderConfig API package to the list of API version packages.
	apiVersionPkgList := make([]string, 0)
	for _, p := range pc.BasePackages.APIVersion {
		apiVersionPkgList = append(apiVersionPkgList, filepath.Join(pc.ModulePath, p))
	}
	// Add ProviderConfig controller package to the list of controller packages.
	controllerPkgList := make([]string, 0)
	for _, p := range pc.BasePackages.Controller {
		controllerPkgList = append(controllerPkgList, filepath.Join(pc.ModulePath, p))
	}
	// count tracks how many resources were generated, for the final report.
	count := 0
	for group, versions := range resourcesGroups {
		for version, resources := range versions {
			versionGen := NewVersionGenerator(rootDir, pc.ModulePath, group, version)
			crdGen := NewCRDGenerator(versionGen.Package(), rootDir, pc.ShortName, group, version)
			tfGen := NewTerraformedGenerator(versionGen.Package(), rootDir, group, version)
			ctrlGen := NewControllerGenerator(rootDir, pc.ModulePath, group)

			// Deterministic output: iterate resources in sorted name order.
			for _, name := range sortedResources(resources) {
				paramTypeName, err := crdGen.Generate(resources[name])
				if err != nil {
					panic(errors.Wrapf(err, "cannot generate crd for resource %s", name))
				}
				if err := tfGen.Generate(resources[name], paramTypeName); err != nil {
					panic(errors.Wrapf(err, "cannot generate terraformed for resource %s", name))
				}
				ctrlPkgPath, err := ctrlGen.Generate(resources[name], versionGen.Package().Path())
				if err != nil {
					panic(errors.Wrapf(err, "cannot generate controller for resource %s", name))
				}
				controllerPkgList = append(controllerPkgList, ctrlPkgPath)
				count++
			}

			if err := versionGen.Generate(); err != nil {
				panic(errors.Wrap(err, "cannot generate version files"))
			}
			apiVersionPkgList = append(apiVersionPkgList, versionGen.Package().Path())
		}
	}

	if err := NewRegisterGenerator(rootDir, pc.ModulePath).Generate(apiVersionPkgList); err != nil {
		panic(errors.Wrap(err, "cannot generate register file"))
	}
	if err := NewSetupGenerator(rootDir, pc.ModulePath).Generate(controllerPkgList); err != nil {
		panic(errors.Wrap(err, "cannot generate setup file"))
	}

	// NOTE(muvaf): gosec linter requires that the whole command is hard-coded.
	// So, we set the directory of the command instead of passing in the directory
	// as an argument to "find".
	// Run goimports over all generated (zz_*) files under apis/ ...
	apisCmd := exec.Command("bash", "-c", "goimports -w $(find . -iname 'zz_*')")
	apisCmd.Dir = filepath.Clean(filepath.Join(rootDir, "apis"))
	if out, err := apisCmd.CombinedOutput(); err != nil {
		panic(errors.Wrap(err, "cannot run goimports for apis folder: "+string(out)))
	}
	// ... and under internal/.
	internalCmd := exec.Command("bash", "-c", "goimports -w $(find . -iname 'zz_*')")
	internalCmd.Dir = filepath.Clean(filepath.Join(rootDir, "internal"))
	if out, err := internalCmd.CombinedOutput(); err != nil {
		panic(errors.Wrap(err, "cannot run goimports for internal folder: "+string(out)))
	}
	fmt.Printf("\nGenerated %d resources!\n", count)
}
// sortedResources returns the keys of m in ascending lexical order, so
// code generation is deterministic across runs.
func sortedResources(m map[string]*config.Resource) []string {
	names := make([]string, 0, len(m))
	for name := range m {
		names = append(names, name)
	}
	sort.Strings(names)
	return names
}
|
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.camera.app;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.android.camera.util.Gusterpolator;
import com.android.camera.widget.Cling;
import com.android.camera.widget.ExternalViewerButton;
import com.android.camera2.R;
/**
* Shows controls at the bottom of the screen for editing, viewing a photo
* sphere image and creating a tiny planet from a photo sphere image.
*/
class FilmstripBottomPanel implements CameraAppUI.BottomPanel {
    /** Duration in ms of the slide-in/slide-out animation. */
    private static final int ANIM_DURATION = 150;
    private final AppController mController;
    // Root of the whole bottom panel; animated as one unit by show()/hide().
    private final ViewGroup mLayout;
    // Row containing the edit/view/delete/share buttons.
    private final View mControlLayout;
    // Spacer between edit and viewer buttons; see
    // updateMiddleFillerLayoutVisibility().
    private final View mMiddleFiller;
    private Listener mListener;
    private ImageButton mEditButton;
    private ExternalViewerButton mViewButton;
    private ImageButton mDeleteButton;
    private ImageButton mShareButton;
    // Progress UI (session in flight) and its error counterpart.
    private View mProgressLayout;
    private TextView mProgressText;
    private View mProgressErrorLayout;
    private TextView mProgressErrorText;
    private ProgressBar mProgressBar;
    // When true, tapping edit opens a context menu (tiny-planet option)
    // instead of calling Listener.onEdit() directly.
    private boolean mTinyPlanetEnabled;
    public FilmstripBottomPanel(AppController controller, ViewGroup bottomControlsLayout) {
        mController = controller;
        mLayout = bottomControlsLayout;
        mMiddleFiller = mLayout.findViewById(R.id.filmstrip_bottom_control_middle_filler);
        mControlLayout = mLayout.findViewById(R.id.bottom_control_panel);
        setupEditButton();
        setupViewButton();
        setupDeleteButton();
        setupShareButton();
        setupProgressUi();
    }
    @Override
    public void setListener(Listener listener) {
        mListener = listener;
    }
    @Override
    public void setClingForViewer(int viewerType, Cling cling) {
        mViewButton.setClingForViewer(viewerType, cling);
    }
    @Override
    public void clearClingForViewer(int viewerType) {
        mViewButton.clearClingForViewer(viewerType);
    }
    @Override
    public Cling getClingForViewer(int viewerType) {
        return mViewButton.getClingForViewer(viewerType);
    }
    @Override
    public void setVisible(boolean visible) {
        // INVISIBLE (not GONE): the panel keeps its layout slot when hidden.
        if (visible) {
            mLayout.setVisibility(View.VISIBLE);
        } else {
            mLayout.setVisibility(View.INVISIBLE);
        }
    }
    @Override
    public void setEditButtonVisibility(boolean visible) {
        // GONE so the remaining buttons redistribute the freed space.
        mEditButton.setVisibility(visible ? View.VISIBLE : View.GONE);
        updateMiddleFillerLayoutVisibility();
    }
    @Override
    public void setEditEnabled(boolean enabled) {
        mEditButton.setEnabled(enabled);
    }
    @Override
    public void setViewerButtonVisibility(int state) {
        mViewButton.setState(state);
        updateMiddleFillerLayoutVisibility();
    }
    @Override
    public void setViewEnabled(boolean enabled) {
        mViewButton.setEnabled(enabled);
    }
    @Override
    public void setTinyPlanetEnabled(boolean enabled) {
        mTinyPlanetEnabled = enabled;
    }
    @Override
    public void setDeleteButtonVisibility(boolean visible) {
        mDeleteButton.setVisibility(visible ? View.VISIBLE : View.INVISIBLE);
    }
    @Override
    public void setDeleteEnabled(boolean enabled) {
        mDeleteButton.setEnabled(enabled);
    }
    @Override
    public void setShareButtonVisibility(boolean visible) {
        mShareButton.setVisibility(visible ? View.VISIBLE : View.INVISIBLE);
    }
    @Override
    public void setShareEnabled(boolean enabled) {
        mShareButton.setEnabled(enabled);
    }
    @Override
    public void setProgressText(CharSequence text) {
        mProgressText.setText(text);
    }
    @Override
    public void setProgress(int progress) {
        // Expected range 0..100; the bar's max is set in setupProgressUi().
        mProgressBar.setProgress(progress);
    }
    @Override
    public void showProgressError(CharSequence message) {
        // Error view replaces both the buttons and the progress bar.
        hideControls();
        hideProgress();
        mProgressErrorLayout.setVisibility(View.VISIBLE);
        mProgressErrorText.setText(message);
    }
    @Override
    public void hideProgressError() {
        mProgressErrorLayout.setVisibility(View.INVISIBLE);
    }
    @Override
    public void showProgress() {
        mProgressLayout.setVisibility(View.VISIBLE);
        hideProgressError();
    }
    @Override
    public void hideProgress() {
        mProgressLayout.setVisibility(View.INVISIBLE);
    }
    @Override
    public void showControls() {
        mControlLayout.setVisibility(View.VISIBLE);
    }
    @Override
    public void hideControls() {
        mControlLayout.setVisibility(View.INVISIBLE);
    }
    // Edit button: opens a context menu when tiny-planet is available,
    // otherwise forwards to Listener.onEdit().
    private void setupEditButton() {
        mEditButton = (ImageButton) mLayout.findViewById(R.id.filmstrip_bottom_control_edit);
        mEditButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mTinyPlanetEnabled) {
                    mController.openContextMenu(mEditButton);
                } else if (mListener != null) {
                    mListener.onEdit();
                }
            }
        });
        mController.registerForContextMenu(mEditButton);
        // Context menu is opened explicitly above, not via long-press.
        mEditButton.setLongClickable(false);
    }
    private void setupViewButton() {
        mViewButton = (ExternalViewerButton) mLayout.findViewById(
                R.id.filmstrip_bottom_control_view);
        mViewButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mListener != null) {
                    mListener.onExternalViewer();
                }
            }
        });
    }
    private void setupDeleteButton() {
        mDeleteButton = (ImageButton) mLayout.findViewById(R.id.filmstrip_bottom_control_delete);
        mDeleteButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mListener != null) {
                    mListener.onDelete();
                }
            }
        });
    }
    private void setupShareButton() {
        mShareButton = (ImageButton) mLayout.findViewById(R.id.filmstrip_bottom_control_share);
        mShareButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mListener != null) {
                    mListener.onShare();
                }
            }
        });
    }
    private void setupProgressUi() {
        mProgressLayout = mLayout.findViewById(R.id.bottom_progress_panel);
        mProgressText = (TextView) mLayout.findViewById(R.id.bottom_session_progress_text);
        mProgressBar = (ProgressBar) mLayout.findViewById(R.id.bottom_session_progress_bar);
        mProgressBar.setMax(100);
        mProgressLayout.setVisibility(View.INVISIBLE);
        mProgressErrorText = (TextView) mLayout.findViewById(R.id.bottom_progress_error_text);
        mProgressErrorLayout = mLayout.findViewById(R.id.bottom_progress_error_panel);
        mProgressErrorLayout.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                if (mListener != null) {
                    mListener.onProgressErrorClicked();
                }
            }
        });
    }
    /**
     * Updates the visibility of the middle filler view in the center. The
     * middle filler view should be visible when edit button and viewer buttons
     * are both visible.
     */
    private void updateMiddleFillerLayoutVisibility() {
        // NOTE(review): both buttons visible sets INVISIBLE (occupies space),
        // otherwise GONE (no space) -- "visible" in the javadoc appears to
        // mean "takes up layout space"; confirm against the layout XML.
        if (mEditButton.getVisibility() == View.VISIBLE &&
                mViewButton.getVisibility() == View.VISIBLE) {
            mMiddleFiller.setVisibility(View.INVISIBLE);
        } else {
            mMiddleFiller.setVisibility(View.GONE);
        }
    }
    // Slide the panel up from below; clings are re-shown once the
    // animation completes.
    public void show() {
        ObjectAnimator animator = ObjectAnimator
                .ofFloat(mLayout, "translationY", mLayout.getHeight(), 0.0f);
        animator.setDuration(ANIM_DURATION);
        animator.setInterpolator(Gusterpolator.INSTANCE);
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                mViewButton.updateClingVisibility();
            }
        });
        mViewButton.hideClings();
        animator.start();
    }
    // Slide the panel down out of view, unless it is already fully offset.
    public void hide() {
        int offset = mLayout.getHeight();
        if (mLayout.getTranslationY() < offset) {
            ObjectAnimator animator = ObjectAnimator
                    .ofFloat(mLayout, "translationY", mLayout.getTranslationY(), offset);
            animator.setDuration(ANIM_DURATION);
            animator.setInterpolator(Gusterpolator.INSTANCE);
            mViewButton.hideClings();
            animator.start();
        }
    }
}
|
from tempfile import NamedTemporaryFile
from datetime import datetime, timedelta
from pdfminer.high_level import extract_text
import urllib
import urllib.request
import shutil
import json
import os
import sys
BASE_URL = 'https://oklahoma.gov/content/dam/ok/en/covid19/documents/weekly-epi-report'
REPORT = '%Y.%m.%d Weekly Epi Report.pdf'
TOTAL_TESTS = 'Specimens tested, count'
def atoi(val):
    """Parse an integer that may contain thousands separators.

    Integers pass through unchanged; strings have commas stripped before
    conversion. A malformed string raises ValueError (from int()).
    """
    return val if isinstance(val, int) else int(val.replace(",", ""))
def get_data(filepath):
    """Extract the report date and total-specimens count from the PDF.

    Returns a dict with 'date' (third non-blank line of the report) and,
    when found, the TOTAL_TESTS label mapped to its integer value.
    """
    stripped = [line.strip() for line in extract_text(filepath).splitlines()
                if line.strip()]
    data = {"date": stripped[2]}
    for idx, line in enumerate(stripped):
        if line == TOTAL_TESTS:
            # The value sits a fixed offset below the label (appears to be
            # 7 table columns x 2 lines in the extracted layout).
            data[line] = atoi(stripped[idx + 7 * 2])
            break
    return data
def main(base_dir):
    """Fetch the latest Oklahoma weekly epi report and store its numbers.

    Tries today's report first and walks back up to 7 days (it is a weekly
    report, so at most one should exist in that window). On success the
    parsed numbers are written as JSON to base_dir/ok_pcr_specimens.json.
    404s are expected for days without a report; other HTTP errors are
    printed and the search continues.
    """
    data = {}
    day = datetime.now()
    # we won't always have a document ready, in this case, try to find a previous one
    # but don't go beyond -8 days, it's a weekly report
    for i in range(8):
        day = datetime.now() - timedelta(days=i)
        report_url = day.strftime(REPORT)
        report_url = urllib.parse.quote(report_url)
        url = BASE_URL + "/" + report_url
        print(url)
        req = urllib.request.Request(url)
        try:
            with urllib.request.urlopen(req) as response, NamedTemporaryFile(delete=True) as tmpfile:
                # Spool the PDF to a temp file so pdfminer can read it by path.
                shutil.copyfileobj(response, tmpfile)
                tmpfile.flush()
                data = get_data(tmpfile.name)
                print(data)
                if data:
                    # Fix: use a context manager so the output file handle is
                    # flushed and closed deterministically (the old code
                    # passed an unclosed open() straight to json.dump).
                    out_path = os.path.join(base_dir, "ok_pcr_specimens.json")
                    with open(out_path, 'w') as out_file:
                        json.dump(data, out_file)
                    break
        except urllib.error.HTTPError as e:
            # if it's 404 -- it's ok
            print(day, e.getcode())
            if e.getcode() != 404:
                print(str(e))
if __name__ == "__main__":
    # Output directory defaults to the CWD; first CLI argument overrides it.
    base_dir = sys.argv[1] if len(sys.argv) > 1 else '.'
    main(base_dir)
|
def factorial(num):
    """Return num! for a non-negative integer.

    Implemented iteratively: the original recursive version overflowed the
    call stack for large num and recursed forever (until RecursionError)
    for negative input, since num == 0 was never reached.

    Raises:
        ValueError: if num is negative.
    """
    if num < 0:
        raise ValueError("factorial() not defined for negative values")
    result = 1
    for k in range(2, num + 1):
        result *= k
    return result
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.schema.builder;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.ignite.internal.schema.SchemaTableImpl;
import org.apache.ignite.schema.Column;
import org.apache.ignite.schema.ColumnarIndex;
import org.apache.ignite.schema.IndexColumn;
import org.apache.ignite.schema.PrimaryIndex;
import org.apache.ignite.schema.SchemaBuilders;
import org.apache.ignite.schema.SchemaTable;
import org.apache.ignite.schema.TableIndex;
import org.apache.ignite.schema.builder.SchemaTableBuilder;
import static org.apache.ignite.schema.PrimaryIndex.PRIMARY_KEY_INDEX_NAME;
/**
* Table builder.
*/
/**
 * Table builder: collects columns and indices and produces an immutable
 * {@link SchemaTable} via {@link #build()}.
 */
public class SchemaTableBuilderImpl implements SchemaTableBuilder {
    /** Schema name. */
    private final String schemaName;

    /** Table name. */
    private final String tableName;

    /** Columns keyed by name; insertion order is preserved. */
    private final LinkedHashMap<String, Column> columns = new LinkedHashMap<>();

    /** Indices keyed by index name. */
    private final Map<String, TableIndex> indices = new HashMap<>();

    /**
     * Constructor.
     *
     * @param schemaName Schema name.
     * @param tableName Table name.
     */
    public SchemaTableBuilderImpl(String schemaName, String tableName) {
        this.schemaName = schemaName;
        this.tableName = tableName;
    }

    /** {@inheritDoc} */
    @Override public SchemaTableBuilderImpl columns(Column... columns) {
        for (int i = 0; i < columns.length; i++) {
            if (this.columns.put(columns[i].name(), columns[i]) != null)
                throw new IllegalArgumentException("Column with same name already exists: columnName=" + columns[i].name());
        }

        return this;
    }

    /** {@inheritDoc} */
    @Override public SchemaTableBuilder withIndex(TableIndex index) {
        // The reserved primary-key index name may only be used by a
        // PrimaryIndex, and a PrimaryIndex may not use any other name.
        if (index instanceof PrimaryIndex) {
            if (!PRIMARY_KEY_INDEX_NAME.equals(index.name()))
                throw new IllegalArgumentException("Not valid index name for a primary index: " + index.name());
        }
        else if (PRIMARY_KEY_INDEX_NAME.equals(index.name()))
            throw new IllegalArgumentException("Not valid index name for a secondary index: " + index.name());

        if (indices.put(index.name(), index) != null)
            throw new IllegalArgumentException("Index with same name already exists: " + index.name());

        return this;
    }

    /** {@inheritDoc} */
    @Override public SchemaTableBuilder withPrimaryKey(String colName) {
        // Convenience: a single-column primary key index.
        withIndex(SchemaBuilders.pkIndex().addIndexColumn(colName).done().build());

        return this;
    }

    /** {@inheritDoc} */
    @Override public SchemaTableBuilder withHints(Map<String, String> hints) {
        // No op.
        return this;
    }

    /** {@inheritDoc} */
    @Override public SchemaTable build() {
        // Fix: the old message said "Table name" while checking schemaName;
        // also validate tableName explicitly.
        assert schemaName != null : "Schema name was not specified.";
        assert tableName != null : "Table name was not specified.";
        assert columns.size() >= 2 : "Key or/and value columns was not defined.";

        validateIndices();

        return new SchemaTableImpl(
            schemaName,
            tableName,
            columns,
            Collections.unmodifiableMap(indices)
        );
    }

    /**
     * Validate indices: every indexed column must exist, a primary key must
     * be configured with at least one affinity column, and only columnar
     * indices are supported.
     */
    private void validateIndices() {
        assert indices.values().stream()
            .filter(ColumnarIndex.class::isInstance)
            .map(ColumnarIndex.class::cast)
            .flatMap(idx -> idx.columns().stream())
            .map(IndexColumn::name)
            .allMatch(columns::containsKey) : "Index column doesn't exists in schema.";

        assert indices.containsKey(PRIMARY_KEY_INDEX_NAME) : "Primary key index is not configured.";
        assert !((PrimaryIndex)indices.get(PRIMARY_KEY_INDEX_NAME)).affinityColumns().isEmpty() : "Primary key must have one affinity column at least.";

        // Note: E.g. functional index is not columnar index as it index an expression result only.
        assert indices.values().stream().allMatch(ColumnarIndex.class::isInstance) : "Columnar indices are supported only.";
    }
}
|
/// <summary>
/// Counts how often each whitespace-separated word occurs in <paramref name="s"/>
/// and prints "word, count" lines in descending count order.
/// Fix: split with RemoveEmptyEntries so consecutive, leading or trailing
/// spaces no longer produce bogus empty-string "words" in the counts.
/// </summary>
public static void CountOccurencesInString(string s)
{
    string[] words = s.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    Dictionary<string, int> counts = new Dictionary<string, int>();
    foreach (string w in words)
    {
        if (counts.ContainsKey(w))
            counts[w]++;
        else
            counts[w] = 1;
    }
    // Most frequent words first.
    var sortedDict = from entry in counts orderby entry.Value descending select entry;
    foreach (KeyValuePair<string, int> i in sortedDict)
    {
        Console.WriteLine("{0}, {1}", i.Key, i.Value);
    }
}
// -!- C++ -!- //////////////////////////////////////////////////////////////
//
// System :
// Module :
// Object Name : $RCSfile$
// Revision : $Revision$
// Date : $Date$
// Author : $Author$
// Created By : <NAME>
// Created : Sun Aug 26 20:36:02 2018
// Last Modified : <181124.1406>
//
// Description
//
// Notes
//
// History
//
/////////////////////////////////////////////////////////////////////////////
//
// Copyright (C) 2018 <NAME> D/B/A Deepwoods Software
// 51 Locke Hill Road
// Wendell, MA 01379-9728
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
//
//
//
//////////////////////////////////////////////////////////////////////////////
static const char rcsid[] = "@(#) : $Id$";
#include "openlcb/EventHandlerTemplates.hxx"
#include "openlcb/ConfigRepresentation.hxx"
#include "utils/ConfigUpdateListener.hxx"
#include "utils/ConfigUpdateService.hxx"
#include "openlcb/RefreshLoop.hxx"
#include "SignalRepeat.hxx"
// Reload the three configured event ids (stop/approach/clear). If any of
// them changed, re-register this handler for the new ids and request a
// re-identify of events; otherwise report UPDATED.
ConfigUpdateListener::UpdateAction Signal3::apply_configuration(int fd,
                                                  bool initial_load,
                                                  BarrierNotifiable *done)
{
    // Ensures done->notify() runs on every return path.
    AutoNotify n(done);
    openlcb::EventId cfg_stop_event = cfg_.stop_event().read(fd);
    openlcb::EventId cfg_approach_event = cfg_.approach_event().read(fd);
    openlcb::EventId cfg_clear_event = cfg_.clear_event().read(fd);
    if (cfg_stop_event != stop_ ||
        cfg_approach_event != approach_ ||
        cfg_clear_event != clear_) {
        // Nothing is registered yet on the initial load, so skip unregister.
        if (!initial_load) {
            openlcb::EventRegistry::instance()->unregister_handler(this);
        }
        stop_ = cfg_stop_event;
        approach_ = cfg_approach_event;
        clear_ = cfg_clear_event;
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, stop_), 0);
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, approach_), 0);
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, clear_), 0);
        return REINIT_NEEDED; // Causes events identify.
    }
    return UPDATED;
}
// Drive the three signal lamps from an incoming event report: exactly one
// of red/yellow/green is lit depending on which configured event matched.
// Unmatched events leave the lamps untouched.
void Signal3::handle_event_report(const EventRegistryEntry &entry,
                                  EventReport *event,
                                  BarrierNotifiable *done)
{
    if (event->event == stop_) {
        red_->set();
        yellow_->clr();
        green_->clr();
    } else if (event->event == approach_) {
        red_->clr();
        yellow_->set();
        green_->clr();
    } else if (event->event == clear_) {
        red_->clr();
        yellow_->clr();
        green_->set();
    }
    done->notify();
}
// Answer a global events-identify request with Consumer Identified
// messages for all three configured events.
void Signal3::handle_identify_global(const EventRegistryEntry &registry_entry,
                                     EventReport *event, BarrierNotifiable *done)
{
    // Addressed to some other node: nothing of ours to report.
    // NOTE(review): there is no early return here, so the consumer-identified
    // messages below are still sent after notify() -- confirm whether a
    // `return` was intended.
    if (event->dst_node && event->dst_node != node_)
    {
        done->notify();
    }
    SendAllConsumersIdentified(event,done);
    done->maybe_done();
}
// Answer an addressed identify-consumer request; only replies if the
// queried event is one of the three this signal consumes (see
// SendConsumerIdentified).
void Signal3::handle_identify_consumer(const EventRegistryEntry &registry_entry,
                                       EventReport *event,
                                       BarrierNotifiable *done)
{
    SendConsumerIdentified(event,done);
    done->maybe_done();
}
// Emit Consumer Identified (state unknown) for each of the three
// configured events, one write helper per event; each write holds a
// child of `done` until it completes.
void Signal3::SendAllConsumersIdentified(EventReport *event,BarrierNotifiable *done)
{
    openlcb::Defs::MTI mti_s, mti_a, mti_c;
    mti_s = mti_a = mti_c = openlcb::Defs::MTI_CONSUMER_IDENTIFIED_UNKNOWN;
    event->event_write_helper<1>()->WriteAsync(node_, mti_s, openlcb::WriteHelper::global(),
                                               openlcb::eventid_to_buffer(stop_),
                                               done->new_child());
    event->event_write_helper<2>()->WriteAsync(node_, mti_a, openlcb::WriteHelper::global(),
                                               openlcb::eventid_to_buffer(approach_),
                                               done->new_child());
    event->event_write_helper<3>()->WriteAsync(node_, mti_c, openlcb::WriteHelper::global(),
                                               openlcb::eventid_to_buffer(clear_),
                                               done->new_child());
}
// Emit a single Consumer Identified (state unknown) reply, but only when
// the queried event is one of the three this signal consumes; otherwise
// nothing is written.
void Signal3::SendConsumerIdentified(EventReport *event,BarrierNotifiable *done)
{
    openlcb::Defs::MTI mti = openlcb::Defs::MTI_CONSUMER_IDENTIFIED_UNKNOWN;
    if (event->event == stop_ ||
        event->event == approach_ ||
        event->event == clear_)
        event->event_write_helper<1>()->WriteAsync(node_, mti, openlcb::WriteHelper::global(),
                                                   openlcb::eventid_to_buffer(event->event),
                                                   done->new_child());
}
// Reload the four configured event ids (stop/approach/approach-limited/
// clear). If any changed, re-register this handler for the new ids and
// request a re-identify of events; otherwise report UPDATED.
ConfigUpdateListener::UpdateAction Signal3over2::apply_configuration(int fd,
                                                  bool initial_load,
                                                  BarrierNotifiable *done)
{
    // Ensures done->notify() runs on every return path.
    AutoNotify n(done);
    openlcb::EventId cfg_stop_event = cfg_.stop_event().read(fd);
    openlcb::EventId cfg_approach_event = cfg_.approach_event().read(fd);
    openlcb::EventId cfg_approachLimited_event = cfg_.approachLimited_event().read(fd);
    openlcb::EventId cfg_clear_event = cfg_.clear_event().read(fd);
    if (cfg_stop_event != stop_ ||
        cfg_approach_event != approach_ ||
        cfg_approachLimited_event != approachLimited_ ||
        cfg_clear_event != clear_) {
        // Nothing is registered yet on the initial load, so skip unregister.
        if (!initial_load) {
            openlcb::EventRegistry::instance()->unregister_handler(this);
        }
        stop_ = cfg_stop_event;
        approach_ = cfg_approach_event;
        // BUG FIX: approachLimited_ was never updated from the config before
        // being registered below, so the handler stayed bound to the stale
        // (or uninitialized) event id. Compare Signal3::apply_configuration,
        // which assigns every configured event.
        approachLimited_ = cfg_approachLimited_event;
        clear_ = cfg_clear_event;
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, stop_), 0);
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, approach_), 0);
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, approachLimited_), 0);
        openlcb::EventRegistry::instance()->register_handler(
            openlcb::EventRegistryEntry(this, clear_), 0);
        return REINIT_NEEDED; // Causes events identify.
    }
    return UPDATED;
}
// Drive the two-head (3-over-2) signal lamps from an incoming event:
// upper head shows red/yellow/green, lower head shows red/yellow.
// Unmatched events leave the lamps untouched.
void Signal3over2::handle_event_report(const EventRegistryEntry &entry,
                                       EventReport *event,
                                       BarrierNotifiable *done)
{
    if (event->event == stop_) {
        // Stop: red over red.
        redU_->set();
        yellowU_->clr();
        greenU_->clr();
        redL_->set();
        yellowL_->clr();
    } else if (event->event == approach_) {
        // Approach: yellow over red.
        redU_->clr();
        yellowU_->set();
        greenU_->clr();
        redL_->set();
        yellowL_->clr();
    } else if (event->event == approachLimited_) {
        // Approach limited: red over yellow.
        redU_->set();
        yellowU_->clr();
        greenU_->clr();
        redL_->clr();
        yellowL_->set();
    } else if (event->event == clear_) {
        // Clear: green over red.
        redU_->clr();
        yellowU_->clr();
        greenU_->set();
        redL_->set();
        yellowL_->clr();
    }
    done->notify();
}
// Answer a global events-identify request with Consumer Identified
// messages for all four configured events.
void Signal3over2::handle_identify_global(const EventRegistryEntry &registry_entry,
                                          EventReport *event, BarrierNotifiable *done)
{
    // Addressed to some other node: nothing of ours to report.
    // NOTE(review): no early return here, mirroring Signal3 -- the
    // consumer-identified messages below are still sent after notify();
    // confirm whether a `return` was intended.
    if (event->dst_node && event->dst_node != node_)
    {
        done->notify();
    }
    SendAllConsumersIdentified(event,done);
    done->maybe_done();
}
// Answer an addressed identify-consumer request; only replies if the
// queried event is one of the four this signal consumes (see
// SendConsumerIdentified).
void Signal3over2::handle_identify_consumer(const EventRegistryEntry &registry_entry,
                                            EventReport *event,
                                            BarrierNotifiable *done)
{
    SendConsumerIdentified(event,done);
    done->maybe_done();
}
void Signal3over2::SendAllConsumersIdentified(EventReport *event,BarrierNotifiable *done)
{
    // Emit a Consumer Identified (validity unknown) message for each of the
    // four aspect events. A distinct write helper index is used per message
    // so each pending send has its own buffer; every send registers a child
    // on the barrier, which the caller completes via maybe_done().
    const openlcb::Defs::MTI mti = openlcb::Defs::MTI_CONSUMER_IDENTIFIED_UNKNOWN;
    event->event_write_helper<1>()->WriteAsync(node_, mti,
        openlcb::WriteHelper::global(), openlcb::eventid_to_buffer(stop_),
        done->new_child());
    event->event_write_helper<2>()->WriteAsync(node_, mti,
        openlcb::WriteHelper::global(), openlcb::eventid_to_buffer(approach_),
        done->new_child());
    event->event_write_helper<3>()->WriteAsync(node_, mti,
        openlcb::WriteHelper::global(), openlcb::eventid_to_buffer(approachLimited_),
        done->new_child());
    event->event_write_helper<4>()->WriteAsync(node_, mti,
        openlcb::WriteHelper::global(), openlcb::eventid_to_buffer(clear_),
        done->new_child());
}
void Signal3over2::SendConsumerIdentified(EventReport *event,BarrierNotifiable *done)
{
    // Queue a Consumer Identified (validity unknown) reply when the queried
    // event is one of this signal's four aspect events; otherwise do nothing
    // and leave barrier completion to the caller.
    const openlcb::EventId id = event->event;
    const bool is_ours =
        id == stop_ || id == approach_ || id == approachLimited_ || id == clear_;
    if (!is_ours)
    {
        return;
    }
    event->event_write_helper<1>()->WriteAsync(node_,
        openlcb::Defs::MTI_CONSUMER_IDENTIFIED_UNKNOWN,
        openlcb::WriteHelper::global(), openlcb::eventid_to_buffer(id),
        done->new_child());
}
|
<filename>models/chims_models/evaluation.js
const mongoose = require('mongoose');

/**
 * Evaluation schema: one evaluation record tied to a case and patient, with
 * the patient name fields denormalized onto the document.
 * (Improvements: `const` instead of `let`, documented `new mongoose.Schema`
 * form, and a single direct export instead of the chained
 * `let X = module.exports = ...` assignment.)
 */
const evaluationSchema = new mongoose.Schema({
  EvaluationID: {
    type: Number
  },
  CaseID: {
    type: Number
  },
  PatientID: {
    type: Number
  },
  EvalType: {
    type: String
  },
  EvalDate: {
    type: Date
  },
  FirstName: {
    type: String
  },
  // MI presumably holds the middle initial — confirm against data entry UI.
  MI: {
    type: String
  },
  LastName: {
    type: String
  }
});

// Export the compiled model; callers receive it directly from require().
module.exports = mongoose.model('Evaluation', evaluationSchema);
(function(){
'use strict';

angular
    .module('app')
    .factory('Interceptador', interceptador);

interceptador.$inject = ["$q", "$window", "$location", "toaster"];

/**
 * HTTP interceptor: attaches the stored token to outgoing requests and
 * surfaces response errors via toaster, redirecting to /login on 401.
 */
function interceptador($q, $window, $location, toaster) {
    var service = {
        request : request,
        responseError : responseError
    };
    return service;

    // Add the Bearer token header when a token exists in localStorage.
    function request(config) {
        config.headers = config.headers || {};
        if ($window.localStorage.token) {
            config.headers['Authorization'] = 'Bearer '+ $window.localStorage.token;
        }
        return config;
    }

    // Show exactly one toast per failed response. (Bug fix: a 401 response
    // previously popped both the 401-specific toast and the generic one.)
    // Also guards against responses without a body so error handling cannot
    // itself throw.
    function responseError(response) {
        var mensagem = response.data && response.data.mensagem;
        if (response.status === 401) {
            $location.path('/login');
            toaster.pop('error', "Erro!", mensagem);
        } else {
            toaster.pop('error', "Ocorreu um Erro", mensagem);
        }
        return $q.reject(mensagem);
    }
}
})();
import { NativeTouchEvent, GestureResponderEvent } from 'react-native';
export function extractSingleTouch(e: GestureResponderEvent): (NativeTouchEvent | null) {
const nativeEvent = e.nativeEvent;
const touches = nativeEvent.touches;
const changedTouches = nativeEvent.changedTouches;
const hasTouches = touches && touches.length > 0;
const hasChangedTouches = changedTouches && changedTouches.length > 0;
return !hasTouches && hasChangedTouches
? changedTouches[0]
: hasTouches
? touches[0]
: nativeEvent;
} |
package com.peony.entrance.tcp_protobuf;
import com.peony.core.control.BeanHelper;
import com.peony.core.control.request.RequestService;
import com.peony.core.data.entity.account.MessageSender;
import com.peony.core.net.packet.NettyPBPacket;
import com.peony.common.tool.thread.ThreadPoolHelper;
import io.netty.channel.Channel;
import com.alibaba.fastjson.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.ScheduledExecutorService;
/**
* Created by apple on 16-10-4.
*/
public class NettyPBMessageSender implements MessageSender{
    private static final Logger log = LoggerFactory.getLogger(NettyPBMessageSender.class);
    /** Shared pool used to push messages to clients without blocking callers. */
    private static final ScheduledExecutorService asyncExecutor = ThreadPoolHelper.newScheduledThreadPoolExecutor("NettyPBMessageSender",32);
    /** Channel to the connected client this sender writes to. */
    private Channel channel;
    /** Account id owning the channel; used only for log context. */
    private String accountId;
    /** Translates opcodes to readable command names for logging. */
    private RequestService requestService;

    public NettyPBMessageSender(Channel channel,String accountId){
        this.channel = channel;
        requestService = BeanHelper.getServiceBean(RequestService.class);
        this.accountId = accountId;
    }

    /**
     * Queues the message for asynchronous delivery on the shared executor.
     * Failures are logged with context instead of printed to stderr.
     */
    @Override
    public void sendMessage(final int opcode, final Object data){
        asyncExecutor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    sendMessageSync(opcode,data);
                }catch (Throwable e){
                    // Bug fix: was e.printStackTrace(), which bypassed the
                    // application log and lost the account/opcode context.
                    log.error("send message failed, opcode={}|accountId={}", opcode, accountId, e);
                }
            }
        });
    }

    /**
     * Synchronously writes one packet to the channel. Synchronized so
     * concurrent callers write whole packets one at a time.
     */
    @Override
    public void sendMessageSync(int opcode,Object data){
        synchronized (this) {
            NettyPBPacket nettyPBPacket = new NettyPBPacket();
            nettyPBPacket.setId(-1); // set to -1 when there is no request id
            nettyPBPacket.setData((byte[])data);
            nettyPBPacket.setOpcode(opcode);
            channel.writeAndFlush(nettyPBPacket);
            log.info("send info,cmd = {}|accountId={}",requestService.getOpcodeNames().get(opcode),accountId);
        }
    }
}
|
<gh_stars>1-10
package com.auta;
import com.facebook.react.bridge.NativeModule;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.Callback;
import java.util.Map;
import java.util.HashMap;
import android.util.Base64;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import org.tensorflow.contrib.android.TensorFlowInferenceInterface;
public class ClassifierModule extends ReactContextBaseJavaModule {
private static TensorFlowInferenceInterface inferenceInterface;
private static final String PB_ADDRESS = "frozen_FocusClassifier.pb";
private final String[] LABELS = {"Concentrated", "Depressed", "Distracted", "Sleepy"};
String input = "conv2d_1_input", output = "activation_5/Sigmoid";
private static final int WIDTH = 64, HEIGHT = 64;
final int NUM_OUTPUT_CLASSES = 4;
public ClassifierModule(ReactApplicationContext reactContext){
super(reactContext);
inferenceInterface = new TensorFlowInferenceInterface(reactContext.getAssets(), PB_ADDRESS);
}
@Override
public String getName(){
return "Classifier";
}
@ReactMethod
public void classify(String base64Image, Callback errorCallback, Callback successCallback){
byte[] decodedString = Base64.decode(base64Image, Base64.DEFAULT);
Bitmap bmp = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length);
// Take a bitmap and change it into a float array
assert bmp.getWidth() == WIDTH && bmp.getHeight() == HEIGHT;
int[] pixels = new int[WIDTH*HEIGHT];
float[] brightness = new float[WIDTH*HEIGHT];
float[] r = new float[WIDTH*HEIGHT];
float[] g = new float[WIDTH*HEIGHT];
float[] b = new float[WIDTH*HEIGHT];
bmp.getPixels(pixels, 0, WIDTH, 0, 0, WIDTH, HEIGHT);
for (int i = 0; i < pixels.length; i++) {
r[i] = ((pixels[i]) >> 16 & 0xff)/255.0f;
g[i] = ((pixels[i]) >> 8 & 0xff)/255.0f;
b[i] = ((pixels[i]) & 0xff)/255.0f;
}
float[] inputArray = new float[3*WIDTH*HEIGHT];
for (int i=0;i<WIDTH*HEIGHT;i++) {
inputArray[(3*i)] = r[i];
inputArray[(3*i)+1] = g[i];
inputArray[(3*i)+2] = b[i];
}
// Make a prediction with the image
float[] prediction = predict(inputArray);
// form prediction from labels
float max = 0.0f;
int maxI = 0;
for (int i=0;i<prediction.length;i++){
max = (max > prediction[i]) ? max : prediction[i];
maxI = (max > prediction[i]) ? maxI : i;
}
successCallback.invoke(LABELS[maxI]);
}
private float[] predict(float[] inputArray){
float outputArray[] = new float[NUM_OUTPUT_CLASSES];
// feed the image to the classifier
inferenceInterface.feed(input, inputArray, 1, WIDTH, HEIGHT, 3);
inferenceInterface.run(new String[] {output});
inferenceInterface.fetch(output, outputArray);
// return prediction
return outputArray;
}
} |
<filename>quotes/src/main/java/se/callista/quotes/QuotesApplication.java
package se.callista.quotes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
@EnableDiscoveryClient
@SpringBootApplication
public class QuotesApplication {
    private static final Logger LOG = LoggerFactory.getLogger(QuotesApplication.class);

    /** Version marker reported in the startup log line. */
    private static final int VERSION = 11;

    /**
     * Boots the Spring application, then logs the running version.
     */
    public static void main(String[] args) {
        SpringApplication.run(QuotesApplication.class, args);
        LOG.info("QuotesApplication v{} started", VERSION);
    }
}
|
import React from 'react';
import { Switch, Route, Redirect } from 'react-router-dom';
import viewsRoutes from 'views/routes';
/**
 * Application route table: one exact Route per configured view, with a
 * catch-all redirect to the not-found page.
 */
const Routes = (): JSX.Element => {
  return (
    <Switch>
      {/* Keyed by path (stable and unique per route) instead of the array
          index, so route identity survives reordering of viewsRoutes. */}
      {viewsRoutes.map((item) => (
        <Route key={item.path} exact path={item.path} render={() => item.renderer()} />
      ))}
      <Redirect to={'/not-found'} />
    </Switch>
  );
};
export default Routes;
|
extension UIColor {
    /// Parses a hex color string ("#RRGGBB" or "RRGGBB", surrounding
    /// whitespace tolerated) into an opaque UIColor.
    ///
    /// Returns nil for malformed input. (Bug fix: the optional return type
    /// was never nil before — scan failures were ignored and silently
    /// produced black.)
    static func HexToColor(hexString: String) -> UIColor? {
        var formattedHexString = hexString.trimmingCharacters(in: .whitespacesAndNewlines)
        if formattedHexString.hasPrefix("#") {
            formattedHexString.remove(at: formattedHexString.startIndex)
        }
        // Exactly six hex digits are required for an RRGGBB color.
        guard formattedHexString.count == 6 else {
            return nil
        }
        var rgb: UInt64 = 0
        let scanner = Scanner(string: formattedHexString)
        // scanHexInt64 returns false on non-hex input; isAtEnd rejects
        // trailing garbage such as "12345Z".
        guard scanner.scanHexInt64(&rgb), scanner.isAtEnd else {
            return nil
        }
        let red = CGFloat((rgb & 0xFF0000) >> 16) / 255.0
        let green = CGFloat((rgb & 0x00FF00) >> 8) / 255.0
        let blue = CGFloat(rgb & 0x0000FF) / 255.0
        return UIColor(red: red, green: green, blue: blue, alpha: 1.0)
    }
}
<gh_stars>1-10
import request from "../utils/request";
// const qs = require('querystring')
/** Dashboard statistics endpoint paths. */
const statisApi = {
  statis: "/api/dashboard/static"
};

/**
 * Fetch dashboard statistics.
 * @returns {Promise} result of the shared request helper
 */
export function getStatisApi() {
  const options = {
    url: statisApi.statis,
    method: "get",
    headers: {
      "Content-Type": "application/json;charset=UTF-8"
    }
  };
  return request(options);
}
|
#!/bin/sh
# Launcher for the swagger-codegen CLI: resolves its own real location
# through symlinks, builds the CLI jar if missing, then generates the
# Groovy petstore sample client.

# Follow symlinks until SCRIPT holds the script's real path.
SCRIPT="$0"
while [ -h "$SCRIPT" ] ; do
ls=`ls -ld "$SCRIPT"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
SCRIPT="$link"
else
SCRIPT=`dirname "$SCRIPT"`/"$link"
fi
done

# Default APP_DIR to the script's parent directory when not already set to
# an existing directory by the environment.
if [ ! -d "${APP_DIR}" ]; then
APP_DIR=`dirname "$SCRIPT"`/..
APP_DIR=`cd "${APP_DIR}"; pwd`
fi

# Build the CLI jar on first run.
executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"
if [ ! -f "$executable" ]
then
mvn clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -DloggerPath=conf/log4j.properties"
# Any extra args given to this script are forwarded ahead of the generate command.
ags="$@ generate -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -l groovy -o samples/client/petstore/groovy -DhideGenerationTimestamp=true"
java $JAVA_OPTS -jar $executable $ags
|
#ifndef OOP_ASSIGNMENT2_USERLIST_H
#define OOP_ASSIGNMENT2_USERLIST_H
#include "User.h"
#include "Book.h"
// Fixed-capacity list of User objects with lookup by name or numeric key.
// Holds a heap array of Users (presumably owned — freed in the destructor;
// confirm in the .cpp). Copyable via copy ctor / operator= (rule of three).
class UserList {
private:
User* users_;      // heap array of users (see ownership note above)
int capacity_;     // allocated slots in users_
int usersCount_;   // slots currently in use
public:
UserList(int);              // construct with the given capacity
UserList(const UserList&);  // copy constructor
void addUser(User);// at the end of the array.
User& searchUser(string);   // lookup by string key — behavior when absent not visible here
User& searchUser(int);      // lookup by numeric key — presumably an id; confirm
void deleteUser(int);
void operator=(const UserList&);  // NOTE(review): returns void, so chained assignment (a = b = c) won't compile
friend ostream &operator << (ostream &output, UserList &);
~UserList();
};
#endif
|
/// One MD5 round-1 step: mixes the registers with the boolean function
/// F(b, c, d) = (b & c) | (!b & d) — written here in the equivalent
/// "selector" form d ^ (b & (c ^ d)) — then adds the chaining value `a`,
/// message word `x` and sine constant `t`, and rotates left by `s`.
///
/// NOTE(review): despite the `_i` suffix, the boolean function used is
/// MD5's F (round 1), not I (round 4) — confirm the intended name.
fn md5_i(a: u32, b: u32, c: u32, d: u32, x: u32, s: u32, t: u32) -> u32 {
    let f = d ^ (b & (c ^ d)); // identical to (b & c) | (!b & d)
    f.wrapping_add(a)
        .wrapping_add(x)
        .wrapping_add(t)
        .rotate_left(s)
}
/// MD5 digest context: the running chaining state that process_block folds
/// each 64-byte message block into.
struct MD5 {
    /// Chaining variables (A, B, C, D) as 32-bit words.
    state: [u32; 4],
}
impl MD5 {
    /// Compresses one 64-byte message block into the running state using the
    /// standard four-round, 64-step MD5 transform (RFC 1321).
    fn process_block(&mut self, block: &[u8]) {
        // Decode the block into sixteen little-endian 32-bit message words.
        let mut x = [0u32; 16];
        for i in 0..16 {
            x[i] = u32::from_le_bytes([
                block[i * 4],
                block[i * 4 + 1],
                block[i * 4 + 2],
                block[i * 4 + 3],
            ]);
        }
        let (mut a, mut b, mut c, mut d) = (self.state[0], self.state[1], self.state[2], self.state[3]);
        // MD5 logical functions
        // Per-step left-rotation amounts: four values repeated per round.
        const S: [u32; 64] = [
            7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 5, 9, 14, 20, 5, 9, 14, 20,
            5, 9, 14, 20, 5, 9, 14, 20, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,
            6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21,
        ];
        // Additive constants T[i] = floor(2^32 * |sin(i + 1)|) per RFC 1321.
        const K: [u32; 64] = [
            0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee, 0xf57c0faf, 0x4787c62a, 0xa8304613,
            0xfd469501, 0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be, 0x6b901122, 0xfd987193,
            0xa679438e, 0x49b40821, 0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa, 0xd62f105d,
            0x02441453, 0xd8a1e681, 0xe7d3fbc8, 0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed,
            0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a, 0xfffa3942, 0x8771f681, 0x6d9d6122,
            0xfde5380c, 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70, 0x289b7ec6, 0xeaa127fa,
            0xd4ef3085, 0x04881d05, 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665, 0xf4292244,
            0x432aff97, 0xab9423a7, 0xfc93a039, 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1,
            0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1, 0xf7537e82, 0xbd3af235, 0x2ad7d2bb,
            0xeb86d391,
        ];
        for i in 0..64 {
            // Pick the round's boolean mix function f and message-word index g:
            // round 1: F, sequential words; round 2: G, (5i+1) mod 16;
            // round 3: H, (3i+5) mod 16; round 4: I, (7i) mod 16.
            let (f, g) = match i {
                0..=15 => ((b & c) | ((!b) & d), i),
                16..=31 => ((d & b) | ((!d) & c), (5 * i + 1) % 16),
                32..=47 => (b ^ c ^ d, (3 * i + 5) % 16),
                48..=63 => (c ^ (b | (!d)), (7 * i) % 16),
                _ => unreachable!(),
            };
            // Rotate the working registers; only b receives new data each step.
            let temp = d;
            d = c;
            c = b;
            b = b.wrapping_add(
                a.wrapping_add(f).wrapping_add(K[i]).wrapping_add(x[g]).rotate_left(S[i]),
            );
            a = temp;
        }
        // Fold this block's result back into the chaining state.
        self.state[0] = self.state[0].wrapping_add(a);
        self.state[1] = self.state[1].wrapping_add(b);
        self.state[2] = self.state[2].wrapping_add(c);
        self.state[3] = self.state[3].wrapping_add(d);
    }
}
# Monkey-patch of Chef's `knife ssh` command class: exposes setters for two
# otherwise-internal instance variables so callers can inject them directly.
class Chef::Knife::Ssh
# Override the node list that the command will operate on.
def action_nodes(n)
@action_nodes=n
end
# Force the cached @longest value. NOTE(review): its exact role (presumably
# the longest-hostname width used for output padding) is defined in Chef's
# own Knife::Ssh implementation, not visible here — confirm against source.
def fix_longest(l)
@longest=l
end
end
|
# Remember the current directory, start the Flask development server from
# here, and restore the directory when the server exits.
pushd .
# pip install -r misc/requirements.txt
# Point Flask at the application entry module, then run the dev server
# (blocks until stopped).
export FLASK_APP=app.py
flask run
popd
|
<gh_stars>10-100
/////////////////////////////////////////////////////////////////////////////
// Name: src/gtk1/mdi.cpp
// Purpose:
// Author: <NAME>
// Copyright: (c) 1998 <NAME>
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#if wxUSE_MDI
#include "wx/mdi.h"
#ifndef WX_PRECOMP
#include "wx/intl.h"
#include "wx/menu.h"
#include "wx/dialog.h"
#endif
#include "wx/notebook.h"
#include "wx/gtk1/private.h"
#include <glib.h>
#include <gdk/gdk.h>
#include <gtk/gtk.h>
#include "wx/gtk1/win_gtk.h"
//-----------------------------------------------------------------------------
// constants
//-----------------------------------------------------------------------------
const int wxMENU_HEIGHT = 27;
//-----------------------------------------------------------------------------
// idle system
//-----------------------------------------------------------------------------
extern void wxapp_install_idle_handler();
extern bool g_isIdle;
//-----------------------------------------------------------------------------
// globals
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// "switch_page"
//-----------------------------------------------------------------------------
extern "C" {
// Notebook "switch_page" handler: translates a GTK notebook page change into
// wxWidgets activation events — a deactivate for the previously active MDI
// child followed by an activate for the newly selected one.
static void
gtk_mdi_page_change_callback( GtkNotebook *WXUNUSED(widget),
                              GtkNotebookPage *page,
                              gint WXUNUSED(page_num),
                              wxMDIParentFrame *parent )
{
    if (g_isIdle)
        wxapp_install_idle_handler();

    // send deactivate event to old child

    wxMDIChildFrame *child = parent->GetActiveChild();
    if (child)
    {
        wxActivateEvent event1( wxEVT_ACTIVATE, false, child->GetId() );
        event1.SetEventObject( child);
        child->HandleWindowEvent( event1 );
    }

    // send activate event to new child

    wxMDIClientWindowBase * const client_window = parent->GetClientWindow();
    if ( !client_window )
        return;

    child = NULL;

    // Walk the client window's children to map the GTK page back to the
    // wx child frame that owns it.
    wxWindowList::compatibility_iterator node = client_window->GetChildren().GetFirst();
    while (node)
    {
        wxMDIChildFrame *child_frame = wxDynamicCast( node->GetData(), wxMDIChildFrame );

        // CE: we come here in the destructor with a null child_frame - I think because
        // gtk_signal_connect( GTK_OBJECT(m_widget), "switch_page", (see below)
        // isn't deleted early enough
        if (!child_frame)
            return ;

        if (child_frame->m_page == page)
        {
            child = child_frame;
            break;
        }
        node = node->GetNext();
    }

    if (!child)
        return;

    wxActivateEvent event2( wxEVT_ACTIVATE, true, child->GetId() );
    event2.SetEventObject( child);
    child->HandleWindowEvent( event2 );
}
}
//-----------------------------------------------------------------------------
// wxMDIParentFrame
//-----------------------------------------------------------------------------

wxIMPLEMENT_DYNAMIC_CLASS(wxMDIParentFrame, wxFrame);

// Reset to the "no client window yet, nothing newly inserted" state;
// runs before Create().
void wxMDIParentFrame::Init()
{
    m_justInserted = false;
    m_clientWindow = NULL;
}
// Create the MDI parent: a normal wxFrame plus the client window that hosts
// the child notebook. Returns false when either part fails to initialize.
bool wxMDIParentFrame::Create(wxWindow *parent,
                              wxWindowID id,
                              const wxString& title,
                              const wxPoint& pos,
                              const wxSize& size,
                              long style,
                              const wxString& name )
{
    if ( !wxFrame::Create( parent, id, title, pos, size, style, name ) )
        return false;

    m_clientWindow = OnCreateClient();

    // The frame is only usable if the client window came up as well.
    return m_clientWindow->CreateClient(this, GetWindowStyleFlag());
}
// Handle a GTK resize of the frame: after the base class lays the frame out,
// stretch the active child's menu bar across the new frame width.
void wxMDIParentFrame::GtkOnSize( int x, int y, int width, int height )
{
    wxFrame::GtkOnSize( x, y, width, height );

    // Nothing more to do without an active child holding a realized menu bar.
    wxMDIChildFrame *child_frame = GetActiveChild();
    if (!child_frame) return;

    wxMenuBar *menu_bar = child_frame->m_menuBar;
    if (!menu_bar) return;
    if (!menu_bar->m_widget) return;

    // Pin the child's menu bar to the top edge: full width, fixed height.
    menu_bar->m_x = 0;
    menu_bar->m_y = 0;
    menu_bar->m_width = m_width;
    menu_bar->m_height = wxMENU_HEIGHT;
    gtk_pizza_set_size( GTK_PIZZA(m_mainWidget),
                        menu_bar->m_widget,
                        0, 0, m_width, wxMENU_HEIGHT );
}
// Idle-time housekeeping: raises a freshly inserted child's page, and keeps
// exactly one menu bar visible — the active child's when it has one,
// otherwise the parent frame's own.
void wxMDIParentFrame::OnInternalIdle()
{
    /* if a an MDI child window has just been inserted
       it has to be brought to the top in idle time. we
       simply set the last notebook page active as new
       pages can only be appended at the end */
    if (m_justInserted)
    {
        GtkNotebook *notebook = GTK_NOTEBOOK(m_clientWindow->m_widget);
        gtk_notebook_set_page( notebook, g_list_length( notebook->children ) - 1 );

        /* need to set the menubar of the child */
        wxMDIChildFrame *active_child_frame = GetActiveChild();
        if (active_child_frame != NULL)
        {
            wxMenuBar *menu_bar = active_child_frame->m_menuBar;
            if (menu_bar)
            {
                menu_bar->m_width = m_width;
                menu_bar->m_height = wxMENU_HEIGHT;
                gtk_pizza_set_size( GTK_PIZZA(m_mainWidget),
                                    menu_bar->m_widget,
                                    0, 0, m_width, wxMENU_HEIGHT );
                menu_bar->Attach(active_child_frame);
            }
        }
        m_justInserted = false;
        return;
    }

    wxFrame::OnInternalIdle();

    // Show the active child's menu bar (sized to the frame) and hide/detach
    // the menu bars of all other children.
    wxMDIChildFrame *active_child_frame = GetActiveChild();
    bool visible_child_menu = false;

    wxWindowList::compatibility_iterator node = m_clientWindow->GetChildren().GetFirst();
    while (node)
    {
        wxMDIChildFrame *child_frame = wxDynamicCast( node->GetData(), wxMDIChildFrame );

        if ( child_frame )
        {
            wxMenuBar *menu_bar = child_frame->m_menuBar;
            if ( menu_bar )
            {
                if (child_frame == active_child_frame)
                {
                    if (menu_bar->Show(true))
                    {
                        menu_bar->m_width = m_width;
                        menu_bar->m_height = wxMENU_HEIGHT;
                        gtk_pizza_set_size( GTK_PIZZA(m_mainWidget),
                                            menu_bar->m_widget,
                                            0, 0, m_width, wxMENU_HEIGHT );

                        // Attach() asserts if we call it for an already
                        // attached menu bar so don't do it if we're already
                        // associated with this frame (it would be nice to get
                        // rid of this check and ensure that this doesn't
                        // happen...)
                        if ( menu_bar->GetFrame() != child_frame )
                            menu_bar->Attach( child_frame );
                    }

                    visible_child_menu = true;
                }
                else
                {
                    if (menu_bar->Show(false))
                    {
                        menu_bar->Detach();
                    }
                }
            }
        }

        node = node->GetNext();
    }

    /* show/hide parent menu bar as required */
    if ((m_frameMenuBar) &&
        (m_frameMenuBar->IsShown() == visible_child_menu))
    {
        if (visible_child_menu)
        {
            m_frameMenuBar->Show( false );
            m_frameMenuBar->Detach();
        }
        else
        {
            m_frameMenuBar->Show( true );
            m_frameMenuBar->Attach( this );

            m_frameMenuBar->m_width = m_width;
            m_frameMenuBar->m_height = wxMENU_HEIGHT;
            gtk_pizza_set_size( GTK_PIZZA(m_mainWidget),
                                m_frameMenuBar->m_widget,
                                0, 0, m_width, wxMENU_HEIGHT );
        }
    }
}
// Return the child frame whose notebook page is currently selected, or NULL
// when there is no client window, no selected page, or no matching child.
wxMDIChildFrame *wxMDIParentFrame::GetActiveChild() const
{
    if (!m_clientWindow) return NULL;

    GtkNotebook *notebook = GTK_NOTEBOOK(m_clientWindow->m_widget);
    if (!notebook) return NULL;

    gint i = gtk_notebook_get_current_page( notebook );
    if (i < 0) return NULL;

    GtkNotebookPage* page = (GtkNotebookPage*) (g_list_nth(notebook->children,i)->data);
    if (!page) return NULL;

    // Map the GTK page back to the wx child frame that owns it.
    wxWindowList::compatibility_iterator node = m_clientWindow->GetChildren().GetFirst();
    while (node)
    {
        wxMDIChildFrame *child_frame = wxDynamicCast( node->GetData(), wxMDIChildFrame );

        wxASSERT_MSG( child_frame, wxT("child is not a wxMDIChildFrame") );

        if (child_frame->m_page == page)
            return child_frame;
        node = node->GetNext();
    }

    return NULL;
}

// Select the next notebook page (making that child active), if a client
// window exists.
void wxMDIParentFrame::ActivateNext()
{
    if (m_clientWindow)
      gtk_notebook_next_page( GTK_NOTEBOOK(m_clientWindow->m_widget) );
}

// Select the previous notebook page, if a client window exists.
void wxMDIParentFrame::ActivatePrevious()
{
    if (m_clientWindow)
        gtk_notebook_prev_page( GTK_NOTEBOOK(m_clientWindow->m_widget) );
}
//-----------------------------------------------------------------------------
// wxMDIChildFrame
//-----------------------------------------------------------------------------

wxIMPLEMENT_DYNAMIC_CLASS(wxMDIChildFrame, wxFrame);

// Route activation and menu-highlight events to the handlers below.
wxBEGIN_EVENT_TABLE(wxMDIChildFrame, wxFrame)
    EVT_ACTIVATE(wxMDIChildFrame::OnActivate)
    EVT_MENU_HIGHLIGHT_ALL(wxMDIChildFrame::OnMenuHighlight)
wxEND_EVENT_TABLE()

// Start with no menu bar and no notebook page; both are assigned later
// (SetMenuBar / wxInsertChildInMDI).
void wxMDIChildFrame::Init()
{
    m_menuBar = NULL;
    m_page = NULL;
}
// Create the child as a plain window inside the parent's client notebook.
// The requested position is ignored because the notebook manages layout.
bool wxMDIChildFrame::Create( wxMDIParentFrame *parent,
                              wxWindowID id, const wxString& title,
                              const wxPoint& WXUNUSED(pos), const wxSize& size,
                              long style, const wxString& name )
{
    m_title = title;

    return wxWindow::Create( parent->GetClientWindow(), id, wxDefaultPosition, size, style, name );
}

// The child owns its menu bar (see SetMenuBar) and must free it here.
wxMDIChildFrame::~wxMDIChildFrame()
{
    delete m_menuBar;
}
// Adopt a menu bar for this child. The bar's widget is parented into the MDI
// *parent* frame (not this child) because the shared menu area lives there;
// wxMDIParentFrame::OnInternalIdle() shows/hides it as children activate.
void wxMDIChildFrame::SetMenuBar( wxMenuBar *menu_bar )
{
    wxASSERT_MSG( m_menuBar == NULL, wxT("Only one menubar allowed") );

    m_menuBar = menu_bar;

    if (m_menuBar)
    {
        wxMDIParentFrame *mdi_frame = (wxMDIParentFrame*)m_parent->GetParent();

        m_menuBar->SetParent( mdi_frame );

        /* insert the invisible menu bar into the _parent_ mdi frame */
        gtk_pizza_put( GTK_PIZZA(mdi_frame->m_mainWidget),
                       m_menuBar->m_widget,
                       0, 0, mdi_frame->m_width, wxMENU_HEIGHT );
    }
}

wxMenuBar *wxMDIChildFrame::GetMenuBar() const
{
    return m_menuBar;
}

// Helper: the GTK notebook of the client window this child lives in, or
// NULL (with an assertion) when the parent is not a wxMDIClientWindow.
GtkNotebook *wxMDIChildFrame::GTKGetNotebook() const
{
    wxMDIClientWindow * const
        client = wxStaticCast(GetParent(), wxMDIClientWindow);
    wxCHECK( client, NULL );

    return GTK_NOTEBOOK(client->m_widget);
}
// Bring this child to the front by selecting its notebook page.
void wxMDIChildFrame::Activate()
{
    GtkNotebook * const notebook = GTKGetNotebook();
    wxCHECK_RET( notebook, "no parent notebook?" );
    gint pageno = gtk_notebook_page_num( notebook, m_widget );
    gtk_notebook_set_page( notebook, pageno );
}

// Nothing extra to do on activation; page switching is driven by the
// notebook "switch_page" callback in this file.
void wxMDIChildFrame::OnActivate( wxActivateEvent& WXUNUSED(event) )
{
}

// Show menu help in the status bar, falling back to the MDI parent frame's
// handler when this child has no help text for the item.
void wxMDIChildFrame::OnMenuHighlight( wxMenuEvent& event )
{
#if wxUSE_STATUSBAR
    wxMDIParentFrame *mdi_frame = (wxMDIParentFrame*)m_parent->GetParent();
    if ( !ShowMenuHelp(event.GetMenuId()) )
    {
        // we don't have any help text for this item, but may be the MDI frame
        // does?
        mdi_frame->OnMenuHighlight(event);
    }
#endif // wxUSE_STATUSBAR
}

// Update both the stored title and the notebook tab label, skipping the GTK
// call entirely when the title is unchanged.
void wxMDIChildFrame::SetTitle( const wxString &title )
{
    if ( title == m_title )
        return;

    m_title = title;

    GtkNotebook * const notebook = GTKGetNotebook();
    wxCHECK_RET( notebook, "no parent notebook?" );
    gtk_notebook_set_tab_label_text(notebook, m_widget, wxGTK_CONV( title ) );
}
//-----------------------------------------------------------------------------
// "size_allocate"
//-----------------------------------------------------------------------------

extern "C" {
// GTK "size_allocate" handler for a child's page: propagates the allocation
// to the wx window unless the geometry is already up to date.
static void gtk_page_size_callback( GtkWidget *WXUNUSED(widget), GtkAllocation* alloc, wxWindow *win )
{
    if (g_isIdle) wxapp_install_idle_handler();

    // Skip the SetSize call when nothing actually changed and the size has
    // already been set once.
    if ((win->m_x == alloc->x) &&
        (win->m_y == alloc->y) &&
        (win->m_width == alloc->width) &&
        (win->m_height == alloc->height) &&
        (win->m_sizeSet))
    {
        return;
    }

    win->SetSize( alloc->x, alloc->y, alloc->width, alloc->height );
}
}
//-----------------------------------------------------------------------------
// InsertChild callback for wxMDIClientWindow
//-----------------------------------------------------------------------------

// Append a new MDI child as a notebook page: creates the tab label, hooks
// the page's size_allocate signal, remembers the created GTK page on the
// child, and flags the parent so OnInternalIdle() raises the new page.
static void wxInsertChildInMDI( wxMDIClientWindow* parent, wxMDIChildFrame* child )
{
    wxString s = child->GetTitle();
    if ( s.empty() ) s = _("MDI child");

    GtkWidget *label_widget = gtk_label_new( s.mbc_str() );
    gtk_misc_set_alignment( GTK_MISC(label_widget), 0.0, 0.5 );

    gtk_signal_connect( GTK_OBJECT(child->m_widget), "size_allocate",
      GTK_SIGNAL_FUNC(gtk_page_size_callback), (gpointer)child );

    GtkNotebook *notebook = GTK_NOTEBOOK(parent->m_widget);

    gtk_notebook_append_page( notebook, child->m_widget, label_widget );

    // Pages are only appended, so the new page is the last list entry.
    child->m_page = (GtkNotebookPage*) (g_list_last(notebook->children)->data);

    wxMDIParentFrame *parent_frame = (wxMDIParentFrame*) parent->GetParent();
    parent_frame->m_justInserted = true;
}
//-----------------------------------------------------------------------------
// wxMDIClientWindow
//-----------------------------------------------------------------------------

wxIMPLEMENT_DYNAMIC_CLASS(wxMDIClientWindow, wxWindow);

// Build the client area as a scrollable GTK notebook. Child frames are
// inserted as pages via wxInsertChildInMDI, and page switches are reported
// to the parent through gtk_mdi_page_change_callback.
bool wxMDIClientWindow::CreateClient( wxMDIParentFrame *parent, long style )
{
    m_needParent = true;

    m_insertCallback = (wxInsertChildFunction)wxInsertChildInMDI;

    if (!PreCreation( parent, wxDefaultPosition, wxDefaultSize ) ||
        !CreateBase( parent, wxID_ANY, wxDefaultPosition, wxDefaultSize, style, wxDefaultValidator, wxT("wxMDIClientWindow") ))
    {
        wxFAIL_MSG( wxT("wxMDIClientWindow creation failed") );
        return false;
    }

    m_widget = gtk_notebook_new();

    gtk_signal_connect( GTK_OBJECT(m_widget), "switch_page",
      GTK_SIGNAL_FUNC(gtk_mdi_page_change_callback), (gpointer)parent );

    gtk_notebook_set_scrollable( GTK_NOTEBOOK(m_widget), 1 );

    m_parent->DoAddChild( this );

    PostCreation();

    Show( true );

    return true;
}
#endif
|
package io.opensphere.controlpanels.layers;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.function.Predicate;
import javax.swing.JComboBox;
import javax.swing.JMenuItem;
import javax.swing.JTree;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import io.opensphere.controlpanels.layers.activedata.controller.AvailableDataDataLayerController;
import io.opensphere.controlpanels.layers.activedata.tree.TreeTransferHandler;
import io.opensphere.controlpanels.layers.availabledata.AvailableDataTreeTableTreeCellRenderer;
import io.opensphere.controlpanels.layers.base.AbstractDiscoveryDataPanel;
import io.opensphere.controlpanels.layers.base.DataTreeButtonProvisioner;
import io.opensphere.controlpanels.layers.base.DiscoveryTreeExpansionHelper;
import io.opensphere.controlpanels.layers.base.LayerUtilities;
import io.opensphere.core.Toolbox;
import io.opensphere.core.util.collections.CollectionUtilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.EqualsHelper;
import io.opensphere.core.util.swing.JTreeUtilities;
import io.opensphere.core.util.swing.tree.ListCheckBoxTree;
import io.opensphere.core.util.swing.tree.TreeTableTreeCellRenderer;
import io.opensphere.core.util.swing.tree.TreeTableTreeNode;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.event.DataTypeVisibilityChangeEvent;
import io.opensphere.mantle.data.impl.GroupByNodeUserObject;
/**
* The Class SearchAvailableLayersPanel. This available layers tree has no check
* boxes. It acts as a search only tree with single selection.
*/
@SuppressWarnings("PMD.GodClass")
public class SearchAvailableLayersPanel extends AbstractDiscoveryDataPanel implements ActionListener
{
    /** Serial. */
    private static final long serialVersionUID = 1L;

    /** The data layer controller backing this panel (transient: not serialized). */
    private final transient AvailableDataDataLayerController myDataLayerController;

    /**
     * Predicate used to determine which data type to return within a selected
     * data group.
     */
    private final transient Predicate<? super DataTypeInfo> myDataTypePredicate;

    /** Stores and restores which tree nodes the user has expanded. */
    private final transient DiscoveryTreeExpansionHelper myExpansionHelper;

    /** The selected data groups; replaced wholesale on each selection change (volatile for cross-thread reads). */
    private transient volatile Collection<? extends DataGroupInfo> mySelectedDataGroupInfos = Collections.emptySet();

    /** The selected data types; replaced wholesale on each selection change (volatile for cross-thread reads). */
    private transient volatile Collection<? extends DataTypeInfo> mySelectedDataTypeInfos = Collections.emptySet();

    /** The tree table tree cell renderer. */
    private final transient AvailableDataTreeTableTreeCellRenderer myTreeTableTreeCellRenderer;
    /**
     * Instantiates a new search available layers panel.
     *
     * @param tb the toolbox
     * @param controller the controller, or null to have the panel create its
     *            own private controller
     * @param selectionMode The selection mode for the tree.
     * @param dataTypePredicate Predicate used to determine which data type to
     *            return within a selected data group.
     */
    public SearchAvailableLayersPanel(Toolbox tb, AvailableDataDataLayerController controller, int selectionMode,
            Predicate<? super DataTypeInfo> dataTypePredicate)
    {
        super(tb, false, false, selectionMode);
        myDataTypePredicate = dataTypePredicate;
        // Fall back to a private controller when the caller supplies none.
        if (controller == null)
        {
            myDataLayerController = new AvailableDataDataLayerController(tb, this);
        }
        else
        {
            myDataLayerController = controller;
        }
        myDataLayerController.addListener(this);
        myTreeTableTreeCellRenderer = new AvailableDataTreeTableTreeCellRenderer(getToolbox());
        // Persist tree expansion state in this class's preferences node.
        myExpansionHelper = new DiscoveryTreeExpansionHelper(getToolbox().getPreferencesRegistry().getPreferences(getClass()),
                DiscoveryTreeExpansionHelper.Mode.STORE_EXPANSIONS);
        myExpansionHelper.loadFromPreferences();
        if (getLowerPanel() != null)
        {
            add(getLowerPanel(), BorderLayout.SOUTH);
        }
    }
    /** No-op: this panel has no action components of its own. */
    @Override
    public void actionPerformed(ActionEvent e)
    {
    }

    /** Rebuilds the tree whenever the controller reports changed data groups. */
    @Override
    public void dataGroupsChanged()
    {
        rebuildTree();
    }

    /** No-op: visibility changes do not affect this search-only tree. */
    @Override
    public void dataGroupVisibilityChanged(DataTypeVisibilityChangeEvent event)
    {
    }

    /**
     * Gets the selected data group. This method is thread-safe.
     *
     * @return the selected data group
     */
    public Collection<? extends DataGroupInfo> getSelectedDataGroups()
    {
        return mySelectedDataGroupInfos;
    }

    /**
     * Gets the selected data type. This method is thread-safe.
     *
     * @return the selected data type
     */
    public Collection<? extends DataTypeInfo> getSelectedDataTypes()
    {
        return mySelectedDataTypeInfos;
    }

    @Override
    public void initGuiElements()
    {
        super.initGuiElements();
        // Search tree: disable the hover-row highlight used by other panels.
        getTree().setPaintHoverRow(false);
    }

    /** No-op: tree label refreshes are not needed for this panel. */
    @Override
    public void refreshTreeLabelRequest()
    {
    }
/**
* Selects all leaf nodes.
*/
public void selectAll()
{
setSelectionPaths(new Predicate<TreeNode>()
{
@Override
public boolean test(TreeNode node)
{
return node.isLeaf();
}
});
}
/**
* Sets the selected tree paths to the groups that pass the filter.
*
* @param dataGroupPredicate The filter.
*/
public void setSelectedDataGroups(Predicate<DataGroupInfo> dataGroupPredicate)
{
Predicate<TreeNode> nodeHasKey = new Predicate<TreeNode>()
{
@Override
public boolean test(TreeNode node)
{
if (node.isLeaf() && node instanceof TreeTableTreeNode)
{
DataGroupInfo dataGroup = LayerUtilities.getDataGroup((TreeTableTreeNode)node);
return dataGroupPredicate.test(dataGroup);
}
else
{
return false;
}
}
};
setSelectionPaths(nodeHasKey);
}
    /**
     * Sets the selected tree paths to the groups that have a data type with one
     * of the given type keys.
     *
     * @param typeKeys the type keys; an empty collection leaves the current
     *            selection untouched
     */
    public void setSelectedTypeKeys(final Collection<String> typeKeys)
    {
        if (!typeKeys.isEmpty())
        {
            // A group matches when findMembers locates any member whose type
            // key is requested (the two boolean flags' semantics are defined
            // by DataGroupInfo.findMembers — confirm there).
            setSelectedDataGroups(dataGroup -> !dataGroup
                    .findMembers((Predicate<DataTypeInfo>)dataType -> typeKeys.contains(dataType.getTypeKey()), false, true)
                    .isEmpty());
        }
    }
    /**
     * Captures the current tree selection into the thread-safe
     * mySelectedDataGroupInfos / mySelectedDataTypeInfos snapshots; member
     * data types are filtered through myDataTypePredicate when one was
     * supplied at construction.
     */
    @Override
    public void valueChanged(TreeSelectionEvent e)
    {
        Collection<DataGroupInfo> selectedDataGroupInfos = New.collection();
        Collection<DataTypeInfo> selectedDataTypeInfos = New.collection();
        JTree layerTree = getTree();
        TreePath[] treePaths = layerTree.getSelectionPaths();
        if (treePaths != null)
        {
            for (TreePath treePath : treePaths)
            {
                TreeTableTreeNode treeNode = (TreeTableTreeNode)treePath.getLastPathComponent();
                GroupByNodeUserObject groupByNode = (GroupByNodeUserObject)treeNode.getPayload().getPayloadData();
                if (groupByNode != null && groupByNode.getDataGroupInfo() != null)
                {
                    DataGroupInfo dgi = groupByNode.getDataGroupInfo();
                    selectedDataGroupInfos.add(dgi);
                    Collection<DataTypeInfo> dtis = dgi.getMembers(false);
                    if (!dtis.isEmpty())
                    {
                        for (DataTypeInfo dti : dtis)
                        {
                            // Keep only types accepted by the predicate (if set).
                            if (myDataTypePredicate == null || myDataTypePredicate.test(dti))
                            {
                                selectedDataTypeInfos.add(dti);
                            }
                        }
                    }
                }
            }
        }
        // Publish immutable snapshots so readers never observe a half-built list.
        mySelectedDataGroupInfos = New.unmodifiableCollection(selectedDataGroupInfos);
        mySelectedDataTypeInfos = New.unmodifiableCollection(selectedDataTypeInfos);
    }
@Override
protected void checkBoxActionPerformed(ActionEvent e)
{
    // Intentional no-op: check box actions are not handled by this panel.
}
@Override
protected String getActionContextId()
{
    // This panel registers no action context.
    return null;
}
@Override
protected AvailableDataDataLayerController getController()
{
    // Controller backing this view; also consulted for the view-by type.
    return myDataLayerController;
}
@Override
protected String getDataPanelTitle()
{
    // No title is provided for this data panel.
    return null;
}
@Override
protected DiscoveryTreeExpansionHelper getExpansionHelper()
{
    // Expansion helper instance used by the base class for this tree.
    return myExpansionHelper;
}
@Override
protected JMenuItem getShowLayerDetailsMenuItem(DataGroupInfo dgi)
{
    // No "show layer details" menu item is offered for this panel.
    return null;
}
@Override
protected DataTreeButtonProvisioner getTreeButtonProvisioner()
{
    // No custom tree button provisioner is used.
    return null;
}
@Override
protected TreeTableTreeCellRenderer getTreeTableTreeCellRenderer()
{
    // Renderer used for this panel's tree table cells.
    return myTreeTableTreeCellRenderer;
}
@Override
protected TreeTransferHandler getTreeTransferHandler()
{
    // No transfer handler; drag-and-drop is not customized here.
    return null;
}
@Override
protected Component getViewByComboBox()
{
    // Combo box listing the available "view by" groupings.
    final JComboBox<String> viewByCombo = new JComboBox<>(myDataLayerController.getViewTypes());
    Dimension size = new Dimension(200, 24);
    viewByCombo.setPreferredSize(size);
    viewByCombo.setMinimumSize(size);
    // Pre-select the entry matching the controller's current view-by type
    // (case-insensitive comparison); fall back to the first entry.
    String currentView = getController().getViewByTypeString().toLowerCase();
    int indexToSelect = 0;
    for (int i = 0; i < viewByCombo.getItemCount(); i++)
    {
        if (currentView.equals(viewByCombo.getItemAt(i).toLowerCase()))
        {
            indexToSelect = i;
            break;
        }
    }
    viewByCombo.setSelectedIndex(indexToSelect);
    // On change: collapse the expand/contract toggle and push the new view
    // type into the controller.
    viewByCombo.addActionListener(e ->
    {
        getExpandContractTreeButton().setSelected(false);
        getController().setViewByTypeFromString(viewByCombo.getSelectedItem().toString());
    });
    return viewByCombo;
}
@Override
protected void rebuildTreeComplete(List<TreeTableTreeNode> lastSelectedNodes)
{
    // Nothing was selected before the rebuild, so there is nothing to restore.
    if (!CollectionUtilities.hasContent(lastSelectedNodes))
    {
        return;
    }
    for (TreeTableTreeNode node : lastSelectedNodes)
    {
        final GroupByNodeUserObject selected = node.getPayloadData() instanceof GroupByNodeUserObject
                ? (GroupByNodeUserObject)node.getPayloadData() : null;
        // Re-focus the node whose group id and type key both match the
        // previously selected node's identifiers (either side may be null).
        focusOnNode(value ->
        {
            String selectedGroupId = null;
            String selectedTypeKey = null;
            if (selected != null)
            {
                selectedGroupId = selected.getDataGroupInfo() == null ? null : selected.getDataGroupInfo().getId();
                selectedTypeKey = selected.getDataTypeInfo() == null ? null : selected.getDataTypeInfo().getTypeKey();
            }
            String candidateGroupId = value.getDataGroupInfo() == null ? null : value.getDataGroupInfo().getId();
            String candidateTypeKey = value.getDataTypeInfo() == null ? null : value.getDataTypeInfo().getTypeKey();
            return EqualsHelper.equals(selectedGroupId, candidateGroupId, selectedTypeKey, candidateTypeKey);
        }, null);
    }
}
/**
 * Sets the selection paths to the nodes that pass the filter.
 *
 * @param filter the filter
 */
private void setSelectionPaths(Predicate<? super TreeNode> filter)
{
    TreeNode root = (TreeNode)getTree().getModel().getRoot();
    List<TreePath> matchingPaths = ListCheckBoxTree.mapTreePathsFromNodes(JTreeUtilities.flatten(root, filter));
    getTree().setSelectionPaths(matchingPaths.toArray(new TreePath[matchingPaths.size()]));
}
}
|
#!/bin/bash
# EC2 user-data: install and start Docker, then set the instance hostname.
sudo yum update -y
sudo yum install docker -y
sudo systemctl enable docker
sudo systemctl start docker
# Quote the templated value so a hostname containing whitespace or shell
# metacharacters cannot split into multiple arguments.
sudo hostnamectl set-hostname "${NAME}"
|
#!/bin/sh
# Build the Jekyll docs site, validate links with htmltest, and package the
# result for deployment. Aborts on the first failing command.
set -e
# Install gem dependencies and build the site into _site/.
bundler install
bundler exec jekyll build
# Generate the sitemap from the built site.
ruby sitemap.rb
rm -rf htmltest
mkdir -p htmltest
# our baseUrl is /docs so we need to create that structure for htmltest
# otherwise it would fail to find absolute links like /docs/3.5
cp -a _site htmltest/docs
# do not deploy file generated by jekyll-redirect-from v0.13.0+ plugin
rm -f htmltest/docs/redirects.json
# Validate internal/external links.
htmltest -s
# Package the validated docs tree.
(cd htmltest && tar cvzf docs.tar.gz docs)
|
<gh_stars>10-100
/*
Copyright © 2021 The OpenEBS Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package csipv
import (
"context"
"fmt"
"sync"
"time"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
corev1 "k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
"k8s.io/apimachinery/pkg/util/wait"
coreinformers "k8s.io/client-go/informers/core/v1"
clientset "k8s.io/client-go/kubernetes"
corelisters "k8s.io/client-go/listers/core/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/util/workqueue"
"k8s.io/klog/v2"
)
const (
	// VolumeAnnotation is the annotation key suffix (prefixed with the driver
	// name) used to record the CSI CreateVolume request name on a PVC.
	VolumeAnnotation = "csi-volume-name"
	// LeakProtectionFinalizer is the finalizer suffix (prefixed with the
	// driver name) that blocks PVC deletion until cleanup has run.
	LeakProtectionFinalizer = "csi-leak-protection"
)
// LeakProtectionController gracefully cleans up any orphan volume created
// by csi plugin before external provisioner creates pv for given pvc.
// See https://github.com/kubernetes-csi/external-provisioner/issues/486 for
// more details.
// Note: As a storage vendor, you should be able to lookup your volumes
// uniquely based on csi CreateVolume request name parameter.
type LeakProtectionController struct {
	// driverName is the CSI driver name; it prefixes the finalizer and
	// annotation keys managed by this controller.
	driverName string
	// onPVCDelete is the vendor callback that deletes the backing volume (if
	// any) identified by the recorded CreateVolume request name.
	onPVCDelete func(pvc *corev1.PersistentVolumeClaim, createVolumeName string) error

	// client is used to update PVCs (add/remove finalizer and annotation).
	client clientset.Interface
	// pvcLister/pvcListerSynced read PVCs from the shared informer cache.
	pvcLister corelisters.PersistentVolumeClaimLister
	pvcListerSynced cache.InformerSynced
	// queue holds PVC namespace/name keys pending reconciliation.
	queue workqueue.RateLimitingInterface

	// track set of pending volumes creation (stores pvc namespaced name string).
	// It is used in synchronizing BeginCreateVolume (invoked by csi.CreateVolume)
	// and onPVCDelete which deletes the created volume if any. Since CSI spec
	// doesn't expect create and delete volume rpcs per volume to be concurrent safe,
	// the controller loop here needs to ensure that it doesn't call onPVCDelete
	// method if there is any in-flight create volume rpcs running.
	claimsInProgress *syncSet
}
// NewLeakProtectionController constructs a LeakProtectionController for the
// given CSI driver. onPVCDelete is invoked when a PVC carrying this
// controller's finalizer is deleted before being bound; it must delete the
// backing volume if one was created. Both driverName and onPVCDelete are
// required; an error is returned when either is missing.
func NewLeakProtectionController(
	client clientset.Interface,
	pvcInformer coreinformers.PersistentVolumeClaimInformer,
	driverName string,
	onPVCDelete func(pvc *corev1.PersistentVolumeClaim, createVolumeName string) error,
) (*LeakProtectionController, error) {
	if driverName == "" {
		return nil, fmt.Errorf("empty csi driver name")
	}
	if onPVCDelete == nil {
		return nil, fmt.Errorf("invalid pvc onDelete callback")
	}
	c := &LeakProtectionController{
		driverName: driverName,
		onPVCDelete: onPVCDelete,
		client: client,
		pvcLister: pvcInformer.Lister(),
		pvcListerSynced: pvcInformer.Informer().HasSynced,
		queue: workqueue.NewNamedRateLimitingQueue(
			workqueue.DefaultControllerRateLimiter(), "leak-protection"),
		claimsInProgress: newSyncSet(),
	}
	// Enqueue PVCs on both add and update events; deletes are irrelevant
	// because a deleted PVC no longer needs finalizer handling.
	pvcInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: c.onAddUpdate,
		UpdateFunc: func(old, new interface{}) {
			c.onAddUpdate(new)
		},
	})
	return c, nil
}
// onAddUpdate reacts to pvc added/updated events by enqueueing the PVC's
// namespace/name key for reconciliation.
func (c *LeakProtectionController) onAddUpdate(obj interface{}) {
	pvc, ok := obj.(*corev1.PersistentVolumeClaim)
	if !ok {
		utilruntime.HandleError(fmt.Errorf("pvc informer returned non-pvc object: %#v", obj))
		return
	}
	key, err := cache.MetaNamespaceKeyFunc(pvc)
	if err != nil {
		utilruntime.HandleError(fmt.Errorf("couldn't get key for persistent volume claim %#v: %v", pvc, err))
		return
	}
	klog.V(4).InfoS("received informer event on pvc", "key", key)
	c.queue.Add(key)
}
// Run runs the controller goroutines. It blocks until stopCh is closed,
// starting `workers` parallel queue consumers after the PVC informer cache
// has synced.
func (c *LeakProtectionController) Run(workers int, stopCh <-chan struct{}) {
	defer utilruntime.HandleCrash()
	defer c.queue.ShutDown()
	klog.InfoS("starting up csi pvc controller")
	defer klog.InfoS("shutting down csi pvc provisioning controller")
	if !cache.WaitForNamedCacheSync("CSI Provisioner", stopCh, c.pvcListerSynced) {
		return
	}
	for i := 0; i < workers; i++ {
		go wait.Until(c.runWorker, time.Second, stopCh)
	}
	<-stopCh
}
// runWorker drains the work queue until it is shut down.
func (c *LeakProtectionController) runWorker() {
	for c.processNextWorkItem() {
	}
}
// processNextWorkItem deals with one pvcKey off the queue. It returns false when it's time to quit.
// Failed keys are re-queued with rate limiting; successful ones are forgotten.
func (c *LeakProtectionController) processNextWorkItem() bool {
	pvcKey, quit := c.queue.Get()
	if quit {
		return false
	}
	defer c.queue.Done(pvcKey)
	pvcNamespace, pvcName, err := cache.SplitMetaNamespaceKey(pvcKey.(string))
	if err != nil {
		// Malformed keys can never succeed; report and drop (return true keeps the worker alive).
		utilruntime.HandleError(fmt.Errorf("error parsing pvc key %q: %v", pvcKey, err))
		return true
	}
	err = c.processPVC(pvcNamespace, pvcName)
	if err == nil {
		c.queue.Forget(pvcKey)
		return true
	}
	utilruntime.HandleError(fmt.Errorf("failed to process pvc %v: %v", pvcKey, err))
	c.queue.AddRateLimited(pvcKey)
	return true
}
// processPVC reconciles a single PVC carrying this controller's finalizer:
// once the claim is bound the finalizer is simply removed (the external
// provisioner then guarantees DeleteVolume); when the claim is being deleted
// while still unbound, any orphan volume is deleted via onPVCDelete before
// the finalizer is released.
func (c *LeakProtectionController) processPVC(pvcNamespace, pvcName string) error {
	pvc, err := c.pvcLister.PersistentVolumeClaims(pvcNamespace).Get(pvcName)
	if apierrors.IsNotFound(err) {
		klog.V(4).InfoS("pvc not found, ignoring...", "pvc", klog.KRef(pvcNamespace, pvcName))
		return nil
	}
	// if relevant finalizer doesn't exists, skip processing that pvc.
	if !c.finalizerExists(pvc) {
		return nil
	}
	klog.InfoS("leak controller processing pvc", "pvc", klog.KRef(pvcNamespace, pvcName))
	// if pvc gets bound to a persistent volume, we can safely remove the finalizer
	// since csi external-provisioner guarantees to call csi spec DeleteVolume method.
	if pvc.Status.Phase == corev1.ClaimBound {
		return c.removeFinalizer(pvc)
	}
	// process pvc in case it's marked for deletion.
	if pvc.GetDeletionTimestamp() != nil {
		volumeName, exists := pvc.GetAnnotations()[c.GetAnnotationKey()]
		if !exists {
			return fmt.Errorf("failed to find volume name used by csi create volume request")
		}
		// Guard against a concurrent in-flight CreateVolume for the same claim:
		// claimsInProgress.Add returns true when one is running, in which case
		// the key is re-queued instead of racing the create RPC.
		if err := func() error {
			if alreadyExists := c.claimsInProgress.Add(c.claimsInProgressKey(pvc)); alreadyExists {
				return fmt.Errorf("csi driver already has volume creation in progress, will retry after sometime")
			}
			defer c.claimsInProgress.Remove(c.claimsInProgressKey(pvc))
			return c.onPVCDelete(pvc, volumeName)
		}(); err != nil {
			return fmt.Errorf("failed to finalize pvc deletion: %v", err)
		}
		klog.InfoS("deleted volume via csi driver if exists", "volume", volumeName,
			"driver", c.driverName, "pvc", klog.KRef(pvcNamespace, pvcName))
		return c.removeFinalizer(pvc)
	}
	return nil
}
// claimsInProgressKey returns the "namespace/name" key identifying a PVC in
// the in-progress set.
func (c *LeakProtectionController) claimsInProgressKey(pvc *corev1.PersistentVolumeClaim) string {
	return pvc.Namespace + "/" + pvc.Name
}
// finalizerExists reports whether the PVC already carries this controller's
// finalizer.
func (c *LeakProtectionController) finalizerExists(pvc *corev1.PersistentVolumeClaim) bool {
	finalizers := pvc.GetFinalizers()
	for _, finalizer := range finalizers {
		if finalizer == c.GetFinalizer() {
			return true
		}
	}
	return false
}
// addFinalizer adds this controller's finalizer to the PVC and records the
// CSI CreateVolume request name in an annotation so a later deletion can
// locate the (possibly orphaned) volume. It is a no-op when the finalizer is
// already present.
func (c *LeakProtectionController) addFinalizer(pvc *corev1.PersistentVolumeClaim, volumeName string) error {
	finalizer := c.GetFinalizer()
	if c.finalizerExists(pvc) {
		klog.V(4).InfoS("finalizer already exists, ignoring...",
			"finalizer", finalizer, "pvc", klog.KObj(pvc))
		return nil
	}
	claimClone := pvc.DeepCopy()
	// A PVC without annotations deep-copies with a nil map; assigning into a
	// nil map panics, so initialize it first.
	if claimClone.ObjectMeta.Annotations == nil {
		claimClone.ObjectMeta.Annotations = make(map[string]string)
	}
	claimClone.ObjectMeta.Annotations[c.GetAnnotationKey()] = volumeName
	claimClone.ObjectMeta.Finalizers = append(claimClone.ObjectMeta.Finalizers, finalizer)
	_, err := c.client.CoreV1().PersistentVolumeClaims(claimClone.Namespace).Update(context.TODO(), claimClone, metav1.UpdateOptions{})
	if err != nil {
		klog.ErrorS(err, "failed to add finalizer to pvc", "pvc", klog.KObj(pvc))
		return err
	}
	klog.V(3).InfoS("added finalizer to pvc",
		"finalizer", finalizer, "pvc", klog.KObj(pvc))
	return nil
}
// removeFinalizer removes this controller's finalizer and its volume-name
// annotation from the PVC and persists the change via the API server.
func (c *LeakProtectionController) removeFinalizer(pvc *corev1.PersistentVolumeClaim) error {
	finalizer := c.GetFinalizer()
	claimClone := pvc.DeepCopy()
	// remove the annotation added previously.
	delete(claimClone.ObjectMeta.Annotations, c.GetAnnotationKey())
	// Rebuild the finalizer list without our entry (order of the remaining
	// entries is preserved).
	currFinalizerList := claimClone.ObjectMeta.Finalizers
	newFinalizerList := make([]string, 0, len(currFinalizerList))
	for _, v := range currFinalizerList {
		if v == finalizer {
			continue
		}
		newFinalizerList = append(newFinalizerList, v)
	}
	claimClone.ObjectMeta.Finalizers = newFinalizerList
	_, err := c.client.CoreV1().PersistentVolumeClaims(claimClone.Namespace).Update(context.TODO(), claimClone, metav1.UpdateOptions{})
	if err != nil {
		klog.ErrorS(err, "failed to remove finalizer from PVC",
			"finalizer", finalizer,
			"pvc", klog.KObj(pvc))
		return err
	}
	klog.V(3).InfoS("removed finalizer from PVC",
		"finalizer", finalizer, "pvc", klog.KObj(pvc))
	return nil
}
// BeginCreateVolume add relevant finalizer to the given pvc to avoid potential
// csi volume leak. It must be called from the create volume csi method
// implementation just before actual volume provisioning.
// volumeName param should be same as csi.CreateVolumeRequest Name parameter.
// In case of error, the csi driver should return non-retryable grpc error codes
// to external provisioner.
// Returned finishCreateVolume function must be called (preferably under defer)
// after attempting to provision volume.
// e.g
//  {
//      finishCreateVolume, err := c.BeginCreateVolume("volumeId", "namespace", "name")
//      if err != nil {
//          return nil, status.Errorf(codes.FailedPrecondition, err.Error())
//      }
//      defer finishCreateVolume()
//      ..... start provisioning volume here .....
//  }
func (c *LeakProtectionController) BeginCreateVolume(volumeName,
	pvcNamespace, pvcName string) (func(), error) {
	pvc, err := c.client.CoreV1().PersistentVolumeClaims(pvcNamespace).
		Get(context.TODO(), pvcName, metav1.GetOptions{})
	if err != nil {
		klog.ErrorS(err, "failed to fetch pvc", "pvc", klog.KRef(pvcNamespace, pvcName))
		return nil, status.Errorf(codes.FailedPrecondition, "failed to fetch pvc: %v", err)
	} else if pvc.GetDeletionTimestamp() != nil {
		// if pvc is already marked for deletion, return err.
		err = fmt.Errorf("pvc already marked for deletion")
		klog.ErrorS(err, "", "pvc", klog.KRef(pvcNamespace, pvcName))
		// Use status.Error (not Errorf) here: the message is not a format
		// string, and any '%' in it would otherwise be misinterpreted.
		return nil, status.Error(codes.FailedPrecondition, err.Error())
	}
	// Mark the claim as having an in-flight CreateVolume so the delete path
	// cannot run concurrently; finishCreateVolume releases the mark.
	key := c.claimsInProgressKey(pvc)
	finishCreateVolume := func() {
		c.claimsInProgress.Remove(key)
	}
	alreadyExists := c.claimsInProgress.Add(key)
	if alreadyExists {
		return nil, status.Errorf(codes.Aborted,
			"csi driver already has volume creation in progress")
	}
	if err = c.addFinalizer(pvc, volumeName); err != nil {
		finishCreateVolume() // make sure we clean up on error.
		return nil, err
	}
	return finishCreateVolume, nil
}
// GetFinalizer returns the driver-scoped finalizer managed by this controller.
func (c *LeakProtectionController) GetFinalizer() string {
	return c.driverName + "/" + LeakProtectionFinalizer
}
// GetAnnotationKey returns the driver-scoped annotation key under which the
// CreateVolume request name is stored on the PVC.
func (c *LeakProtectionController) GetAnnotationKey() string {
	return c.driverName + "/" + VolumeAnnotation
}
// syncSet is synchronised set of strings, protected by the embedded mutex.
type syncSet struct {
	sync.Mutex
	m map[string]struct{}
}
// newSyncSet returns an empty, ready-to-use syncSet.
func newSyncSet() *syncSet {
	return &syncSet{
		m: make(map[string]struct{}),
	}
}
// Add inserts k and reports whether it was already present.
func (s *syncSet) Add(k string) bool {
	s.Lock()
	_, ok := s.m[k]
	s.m[k] = struct{}{}
	s.Unlock()
	return ok
}
// Remove deletes k from the set; removing an absent key is a no-op.
func (s *syncSet) Remove(k string) {
	s.Lock()
	delete(s.m, k)
	s.Unlock()
}
|
# Package the extension's deployable assets into deploy.zip.
# The backslash-continued groups are: static asset directories, JavaScript
# files, and HTML entry points. (No comments inside the command — they would
# break the line continuations.)
zip -r deploy.zip \
css \
img \
webfonts \
\
common.js \
config.js \
elm.min.js \
index.js \
live_config.js \
twitch-ext.min.js \
viewer.js \
\
config.html \
live_config.html \
panel.html
|
<gh_stars>0
import React from 'react'
import PropTypes from 'prop-types'
import { makeStyles } from '@material-ui/core/styles'
import { Paper } from '@material-ui/core'
import TextProperty from './TextProperty'
import HostilityProperty from './HostilityProperty'
// Styles for the floating properties card: a two-column CSS grid whose rows
// size to their content; pointer events are re-enabled on the card itself.
const useStyles = makeStyles(theme => ({
  paper: {
    userSelect: 'none',
    padding: theme.spacing(4),
    height: 'auto',
    pointerEvents: 'auto',
    gridArea: 'R',
    display: 'grid',
    gridGap: '0.5em',
    gridTemplateColumns: 'auto auto',
    gridAutoRows: 'min-content'
  },
  // Helper class to make a child span both grid columns.
  twoColumns: { gridColumn: '1 / span 2' }
}))
const PointProperties = props => {
const classes = useStyles()
return (
<Paper
className={ classes.paper }
elevation={ 4 }
>
<TextProperty label='Name' property='name' properties={props.properties} onCommit={props.update} className={classes.twoColumns}/>
<TextProperty label={'Unique Designation'} property={'t'} properties={props.properties} onCommit={props.update} className={ classes.twoColumns } />
<HostilityProperty properties={props.properties} onCommit={props.update} className={classes.twoColumns}/>
<TextProperty label='Date-Time Group' property='w' className={classes.twoColumns} properties={props.properties} onCommit={props.update}/>
<TextProperty label='Staff Comments' property='g' className={classes.twoColumns} properties={props.properties} onCommit={props.update}/>
<TextProperty label={'Additional Information'} property={'h'} properties={props.properties} onCommit={props.update} className={ classes.twoColumns }/>
<TextProperty label={'Altitude/Depth'} property={'x'} properties={props.properties} onCommit={props.update} className={ classes.twoColumns }/>
{/* TODO: ENY property */}
</Paper>
)
}
// Runtime prop contract: the feature's properties object and a commit callback.
PointProperties.propTypes = {
  properties: PropTypes.object.isRequired,
  update: PropTypes.func.isRequired
}
export default PointProperties
|
<reponame>neuroelf/dermodelphi
import React, { Component } from 'react';
import { BLOCKS_ADDCAT, NEWCATEGORY_ACAT_NEW } from '../Constants'
export default class DelphiNewCategoryASelect extends Component {
constructor(props) {
super(props);
this.state = { };
this.handleChange = this.handleChange.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
handleChange(event) {
const { newCategory } = { ...this.props.AppObj.state};
const newNewCategory = Object.assign({}, newCategory);
var newACategory = parseInt(event.target.value);
newNewCategory.acat = newACategory;
this.props.AppObj.setState({ newCategory: newNewCategory });
}
handleSubmit(event) {
event.preventDefault();
}
render() {
const { newCategory } = { ...this.props.AppObj.state };
return (
<select value={newCategory.acat.toString()} onChange={this.handleChange}>
{Object.keys(global.DM_LEVELANAMES)
.map(ANodeId =>
<option value={ANodeId} key={ANodeId}>{global.DM_LEVELANAMES[ANodeId]}</option>)}
<option value={BLOCKS_ADDCAT} key={BLOCKS_ADDCAT}>{NEWCATEGORY_ACAT_NEW}</option>
</select>
);
}
}
|
import * as React from 'react';
import styles from './FunctionBasedStyled.module.scss';
import { IFunctionBasedStyledProps } from './IFunctionBasedStyledProps';
import { Checkbox, PrimaryButton, Toggle, IToggleStyleProps, IToggleStyles, ICheckboxStyleProps, IButtonStyles, ICheckboxStyles } from "office-ui-fabric-react";
/**
 * Demo component showing Fluent UI's function-based `styles` props: a plain
 * style object for the button, and prop-aware style functions (reacting to
 * `props.checked`) for the checkbox and toggles.
 */
export default class FunctionBasedStyled extends React.Component<IFunctionBasedStyledProps, {}> {
  public render(): React.ReactElement<IFunctionBasedStyledProps> {
    // NOTE(review): these style factories are re-created on every render;
    // hoisting them out of render would avoid that — confirm before changing.
    const buttonStyles = (): IButtonStyles => {
      const btnStyles: IButtonStyles = {
        root: { backgroundColor: "black"},
        rootHovered: { backgroundColor: "green"},
      };
      return btnStyles;
    };
    // Text color depends on the checkbox state: purple base, blue when checked.
    const checkboxStyles = (props: ICheckboxStyleProps): ICheckboxStyles => {
      const chkStyles: ICheckboxStyles = {
        text: [
          {color: "purple"},
          props.checked && {
            color: "blue"
          }
        ]
      };
      return chkStyles;
    };
    // Pill color depends on the toggle state: violet base, green when checked.
    const toggleStyles = (props: IToggleStyleProps): IToggleStyles => {
      const tglStyles: IToggleStyles = {
        container: {},
        text: {},
        root: {},
        label: {},
        pill: [
          {backgroundColor: "violet" },
          props.checked && {backgroundColor: "green" }
        ],
        thumb: {}
      };
      return tglStyles;
    };
    return (
      <div>
        <div className={styles.componentSection}>
          <PrimaryButton styles={buttonStyles()} text="Primary Button" />
        </div>
        <div className={styles.componentSection}>
          <Checkbox styles={checkboxStyles} label="Checkbox" />
        </div>
        <div className={styles.componentSection}>
          <Toggle styles={toggleStyles} label="Toggle" />
        </div>
        <div className={styles.componentSection}>
          <Toggle styles={toggleStyles} label="Toggle" checked disabled/>
        </div>
      </div>
    );
  }
}
|
<reponame>Arbolista/spike
/*global module require*/
var webpack_config = require('./client/config/test/webpack');
// Karma configuration: runs the webpack-bundled client tests headlessly in
// PhantomJS using the Jasmine framework.
module.exports = function (config) {
  config.set({
    browsers: ['PhantomJS'],
    files: [
      'test.client.js'
    ],
    basePath: './',
    frameworks: [
      'jasmine'
    ],
    // Bundle the entry file (with source maps) before the tests run.
    preprocessors: {
      'test.client.js': ['webpack', 'sourcemap']
    },
    reporters: ['dots','progress'],
    webpack: webpack_config,
    webpackMiddleware: {
      // info is too chatty - it obscures test information
      noInfo: true
    }
  });
};
|
<filename>WebExtension/config.js
'use strict';
// Global configuration namespace shared by the extension's scripts.
var config = {};
// Maps preference names to the HTML input type used on the options page.
config.map = {
  number: [
    'period', 'resetPeriod', 'initialPeriod', 'notificationTime', 'notificationTruncate',
    'notification.sound.media.default.type', 'notification.sound.media.custom0.type',
    'notification.sound.media.custom1.type', 'notification.sound.media.custom2.type',
    'notification.sound.media.custom3.type', 'notification.sound.media.custom4.type',
    'notification.sound.media.custom0.selector', 'notification.sound.media.custom1.selector',
    'notification.sound.media.custom2.selector', 'notification.sound.media.custom3.selector',
    'notification.sound.media.custom4.selector',
    'soundVolume', 'silentTime', 'oldFashion', 'size', 'fullWidth', 'fullHeight',
    'clrPattern', 'threatAsNew'
  ],
  checkbox: [
    'notification', 'alert', 'combined', 'searchMode', 'ignoreOpens',
    'relatedToCurrent', 'currentTab', 'background', 'useBlankTabs',
    'newWindow', 'keyUp', 'render', 'doReadOnArchive', 'inboxRedirection',
    'alphabetic', 'onGmailNotification', 'minimal', 'welcome', 'badge',
    'plug-in/labels', 'express', 'basic.html', 'smartOpen',
    'notification.buttons.markasread', 'notification.buttons.archive', 'notification.buttons.trash'
  ]
};
// Default values for every stored preference. Keys must match config.map.
config.prefs = {
  // Polling and timing.
  'period': 60, // seconds
  'initialPeriod': 3, // seconds
  'resetPeriod': 0, // minutes
  // Feed slots (empty string = unset).
  'feeds_0': '',
  'feeds_1': '',
  'feeds_2': '',
  'feeds_3': '',
  'feeds_4': '',
  'feeds_5': '',
  'feeds_custom': '',
  // Desktop notifications.
  'notification': true,
  'notificationTime': 8, // seconds
  'notificationFormat': chrome.i18n.getMessage('notification'),
  'notificationTruncate': 70,
  'alert': true,
  // Notification sound sources (type/selector/filter per custom slot).
  'notification.sound.media.default.type': 0,
  'notification.sound.media.custom0.type': 0,
  'notification.sound.media.custom1.type': 0,
  'notification.sound.media.custom2.type': 0,
  'notification.sound.media.custom3.type': 0,
  'notification.sound.media.custom4.type': 0,
  'notification.sound.media.custom0.selector': 0,
  'notification.sound.media.custom1.selector': 0,
  'notification.sound.media.custom2.selector': 0,
  'notification.sound.media.custom3.selector': 0,
  'notification.sound.media.custom4.selector': 0,
  'notification.sound.media.custom0.filter': '',
  'notification.sound.media.custom1.filter': '',
  'notification.sound.media.custom2.filter': '',
  'notification.sound.media.custom3.filter': '',
  'notification.sound.media.custom4.filter': '',
  'soundVolume': 80,
  'silentTime': 10, // minutes
  // Tab/window opening behavior.
  'combined': navigator.userAgent.indexOf('Firefox') !== -1,
  'searchMode': true,
  'ignoreOpens': false,
  'relatedToCurrent': false,
  'currentTab': false,
  'background': false,
  'useBlankTabs': false,
  'newWindow': false,
  // Popup appearance.
  'oldFashion': 0,
  'size': 0,
  'fullWidth': 750,
  'fullHeight': 600,
  'keyUp': false,
  'render': true,
  'doReadOnArchive': true,
  'inboxRedirection': true,
  'alphabetic': false,
  'clrPattern': 0,
  'onGmailNotification': true,
  'threatAsNew': 10, // minutes
  'minimal': true,
  'welcome': true,
  'badge': true,
  'backgroundColor': '#CD3C30',
  'express': false,
  'notification.buttons.markasread': true,
  'notification.buttons.archive': true,
  'notification.buttons.trash': false,
  'basic.html': false,
  'smartOpen': true,
  // plug-ins
  'plug-in/labels': true
};
|
<reponame>ndinakar/Phase4-SCSB-Gateway<gh_stars>1-10
package org.recap.model;
import lombok.Getter;
import lombok.Setter;
import java.util.List;
/**
 * Created by hemalathas on 1/11/16.
 *
 * Response payload describing the outcome of an item request; accessors are
 * generated by Lombok.
 */
@Getter
@Setter
public class ItemResponseInformation {
    /** Barcode of the requesting patron. */
    private String patronBarcode;
    /** Barcodes of the items covered by the request. */
    private List<String> itemBarcodes;
    /** Type of the request that was processed. */
    private String requestType;
    /** Delivery location for the request. */
    private String deliveryLocation;
    /** Institution that made the request. */
    private String requestingInstitution;
    /** Bibliographic record identifier. */
    private String bibliographicId;
    /** Expiration date of the request. */
    private String expirationDate;
    /** Message intended for display to the user. */
    private String screenMessage;
    // True when the request was processed successfully.
    private boolean success;
    /** Email address associated with the request. */
    private String emailAddress;
    /** Title identifier of the requested item. */
    private String titleIdentifier;
}
|
package stereoscope
import (
"github.com/anchore/stereoscope/pkg/image"
)
// config bundles the options applied when handling an image: registry
// settings, additional metadata to attach, and an optional platform override.
type config struct {
	Registry image.RegistryOptions
	AdditionalMetadata []image.AdditionalMetadata
	Platform *image.Platform
}
|
<filename>renard-wx/functions/adminUpUser/index.js
// Cloud function entry file
const cloud = require('wx-server-sdk')
cloud.init()
const db = cloud.database()
const _ = db.command
// Cloud function entry point: sets the userId field on the 'jingzhi-user'
// record identified by event.id.
exports.main = async (event, context) => {
  var id = event.id
  var userId = event.userId
  try {
    return await db.collection('jingzhi-user')
      .where({
        _id: id
      })
      .update({
        data: {
          userId: userId
        },
      })
  } catch (e) {
    // Log the failure; the caller receives undefined in this case.
    console.error(e)
  }
}
|
<filename>v15.4/genEntry.js
"use strict";
const fs = require('fs');
const path = require('path');
const modulePaths = [
'./app/common/js/components',
'./app/common/js/basecomponent'
];
/**
 * List the entries of <__dirname>/<modulePath>, excluding the generated
 * index.js entry file.
 * @param {string} modulePath - directory path relative to this script.
 * @returns {string[]} file/directory names to import from the entry file.
 */
function getDirnames(modulePath) {
  const filenames = fs.readdirSync(path.join(__dirname, modulePath), 'utf8');
  // Filter on the entry name itself. The previous version joined the full
  // absolute path first, which would wrongly exclude every entry whenever any
  // ancestor directory name happened to contain "index.js".
  const dirnames = filenames.filter(filename => filename.indexOf('index.js') === -1);
  return dirnames;
}
/**
 * Build the source text of an entry file that imports every given module.
 * File entries drop their extension for the binding name but keep it in the
 * import path; directory entries are used as-is. When noModule is truthy,
 * side-effect-only imports are emitted instead of named bindings.
 * @param {string} context - base directory (usually __dirname).
 * @param {string} modulePath - module directory relative to context.
 * @param {string[]} [dirs] - entry names inside the module directory.
 * @param {boolean} [noModule] - emit `import './x'` instead of `import x from './x'`.
 * @returns {string} generated entry-file source, ending with a count footer.
 */
function getData(context, modulePath, dirs, noModule) {
  const entries = dirs || [];
  const lines = [];
  for (let dir of entries) {
    const filepath = path.join(context, modulePath, dir);
    let moduleName;
    if (fs.lstatSync(filepath).isFile()) {
      const fileExt = path.extname(dir);
      dir = path.basename(dir, fileExt);
      moduleName = dir + fileExt;
    } else {
      moduleName = dir;
    }
    lines.push(noModule ? `import './${moduleName}';\n` : `import ${dir} from './${moduleName}';\n`);
  }
  lines.push(`\n\n/** 导入模块总数:${entries.length} */\n`);
  // console.log(data);
  return lines.join('');
}
/**
 * Write the generated entry source to <context>/<modulePath>/index.js.
 * @param {string} context - base directory (usually __dirname).
 * @param {string} modulePath - module directory relative to context.
 * @param {string} data - file contents to write.
 */
function writeDataToEntry(context, modulePath, data) {
  const entryFile = path.join(context, modulePath, 'index.js');
  fs.writeFileSync(entryFile, data, 'utf8');
}
// Regenerate the index.js entry file for each configured module directory.
function gen(modulePaths) {
  for (let modulePath of modulePaths) {
    const dirnames = getDirnames(modulePath);
    const data = getData(__dirname, modulePath, dirnames);
    writeDataToEntry(__dirname, modulePath, data);
  }
}
// CommonJS exports (quoted-key form kept from the original).
exports['writeDataToEntry'] = writeDataToEntry;
exports['getData'] = getData;
exports['gen'] = gen;
|
<reponame>landawn/ProjectEuler
package com.landawn.projecteuler._100;
import static org.junit.Assert.assertEquals;
import java.util.Set;
import org.junit.Test;
import com.landawn.abacus.util.Array;
import com.landawn.abacus.util.stream.IntStream;
import com.landawn.projecteuler.TestBase;
/**
*
* @see <a href="https://projecteuler.net/problem=23">Non-abundant sums</a>
*
*/
public class _023 extends TestBase {
    @Override
    @Test
    public void s_1() {
        // Every integer above this bound is known to be a sum of two abundant numbers.
        final int EULER23_MAX = 28123;
        final int MAX_SQRT = (int) Math.sqrt(EULER23_MAX);
        // Precomputed integer square roots, used to bound the divisor sieve below.
        final int[] sqrts = IntStream.rangeClosed(0, EULER23_MAX).map(i -> (int) Math.sqrt(i)).toArray();
        // sums[j] accumulates the sum of proper divisors of j; seeded with 1
        // since 1 divides everything.
        final int[] sums = Array.repeat(1, EULER23_MAX + 1);
        // Sieve divisor pairs (i, j/i) for each i up to sqrt(j); when i is the
        // exact square root, count it only once.
        for (int i = 2; i < MAX_SQRT; i++) {
            for (int j = EULER23_MAX; sqrts[j] >= i; j--) {
                if (j % i == 0) {
                    sums[j] += (j / i == i ? i : (i + j / i));
                }
            }
        }
        // Abundant numbers: proper-divisor sum exceeds the number itself.
        final int[] abundants = IntStream.rangeClosed(1, EULER23_MAX).filter(i -> sums[i] > i).toArray();
        final Set<Integer> abundantSet = IntStream.of(abundants).toSet();
        // Sum every i that is NOT expressible as a sum of two abundant numbers.
        final long result = IntStream.rangeClosed(1, EULER23_MAX) //
                .filter(i -> {
                    for (int j = 0; abundants[j] < i; j++) {
                        if (abundantSet.contains(i - abundants[j])) {
                            return false;
                        }
                    }
                    return true;
                }) //
                .sum();
        assertEquals(4179871, result);
        // IntStream.rangeClosed(1, EULER23_MAX) //
        // .filter(i -> IntStream.of(abundants).noneMatch(j -> abundantSet.contains(i - j))) //
        // .max().ifPresent(i -> N.println("Actually, the max euler 23 number is: " + i));
        // Actually, the max euler 23 number is: 20161
    }
}
|
#! /bin/bash
# Run the Go test suite in the client package; exit immediately if cd fails.
cd client || exit
go test -v . |
use Blackfire\Player\Guzzle\StepConverter;
use Blackfire\Player\Step\FollowStep;
use GuzzleHttp\Client;
use GuzzleHttp\Psr7\Request;
/**
 * Converts Blackfire Player steps into executable PHP snippets. FollowStep
 * instances are performed immediately via Guzzle and rendered as code; other
 * step types are not handled here.
 */
class CustomStepConverter implements StepConverter
{
    /**
     * Convert a step into PHP source code.
     *
     * @param mixed $step the step to convert
     * @return string|null generated PHP code, or null for unhandled step types
     */
    public function convertStep($step)
    {
        if ($step instanceof FollowStep) {
            $url = $step->getUrl();
            $client = new Client();
            $request = new Request('GET', $url);
            // http_errors disabled so 4xx/5xx responses do not throw.
            $response = $client->send($request, ['http_errors' => false]);
            // Handle the response as per the testing scenario requirements
            // For example, check the response status code or content
            // Return the generated PHP code as a string
            return $this->generatePhpCode($request, $response);
        }
        // Handle other types of steps if necessary
        return null;
    }

    /**
     * Render the PHP code that replays the given request.
     *
     * @param Request $request  the request that was sent
     * @param mixed   $response the response received (currently unused)
     * @return string PHP source text
     */
    private function generatePhpCode(Request $request, $response)
    {
        // Generate the PHP code based on the $request and $response
        // For example, construct the code to send the request and handle the response
        $phpCode = "// Generated PHP code to send the request and handle the response\n";
        $phpCode .= "\$client = new Client();\n";
        $phpCode .= "\$request = new Request('GET', '{$request->getUri()}');\n";
        $phpCode .= "\$response = \$client->send(\$request, ['http_errors' => false]);\n";
        $phpCode .= "// Handle the response as per the testing scenario requirements\n";
        $phpCode .= "// For example, check the response status code or content\n";
        return $phpCode;
    }
}
<gh_stars>0
/*
* Telemetry.c
*
* Created on: Dec 18, 2020
* Author: Théo
*/
#include "Telemetry.h"
#include "../Data_Logger/Data_logger.h"
#define MAX_TRANSMIT_SPEED 10000 // Adds a transmit speed limit (useful in a previous application of this module)
#define PERIODE_1_OCTET (1000000 / MAX_TRANSMIT_SPEED) // Period allotted to sending one byte
static telemetry_t * telem ; // We don't use multiple telemetry instances anyway
// Private function returning the number of unread bytes in the RX buffer
uint16_t get_nb_bytes_available(telemetry_t * telem);
// Returns the number of unread bytes in the circular RX buffer, handling
// wrap-around between the write index (advanced by DMA) and the read index.
uint16_t get_nb_bytes_available(telemetry_t * telem){
    uint16_t nb = 0 ;
    if(telem->index_write_rx > telem->index_read_rx){
        nb = telem->index_write_rx - telem->index_read_rx ;
    }
    else if(telem->index_write_rx < telem->index_read_rx){
        // Write index wrapped past the end of the buffer.
        nb = LEN_RX_BUFFER - telem->index_read_rx + telem->index_write_rx ;
    }
    return nb;
}
// Initializes the telemetry module on the given UART: sends a greeting
// message, enables the idle-line interrupt and starts DMA reception.
void TELEMETRY_Init(telemetry_t * telem_, UART_HandleTypeDef * huart)
{
    telem = telem_ ;
    telem->huart = huart ;
    // Send an initial message, mainly so that the first useful bytes are not lost
    static uint8_t hey_msg[] = "Hey it s me Theo...\n";
    HAL_UART_Transmit_DMA(telem->huart, hey_msg, 20);
    // Enable the "idle line" interrupt, which signals the end of a received message
    __HAL_UART_ENABLE_IT(telem->huart, UART_IT_IDLE);
    // Start DMA reception into the RX buffer
    HAL_UART_Receive_DMA(telem->huart, telem->buffer_rx, LEN_RX_BUFFER);
}
// Periodic processing: drains the TX buffer (one DMA transfer at a time) and
// parses any complete '$'-framed message in the RX buffer.
// time_us is currently unused — presumably kept for interface compatibility.
void TELEMETRY_Process(uint32_t time_us){
    // -------------------- Transmission ------------------------
    // The following empties the buffer by transmitting its content while
    // respecting the maximum speed limit
    if(telem->index_write_tx != telem->index_read_tx && telem->transmission_state == TRANSMISSION_IDLE)
    {
        telem->transmission_state = TRANSMISSION_IN_PROGRESS;
        // Transmit the data
        HAL_UART_Transmit_DMA(telem->huart, &telem->buffer_tx[SIZE_PORTION_BUFFER*telem->index_read_tx], telem->len_portion_tx[telem->index_read_tx]);
        // Increment the read index
        telem->index_read_tx ++ ;
        telem->index_read_tx = telem->index_read_tx % NB_PORTION_BUFFER ;
    }
    // --------------------- Reception -------------------------------
    uint16_t bytes_avaibles = get_nb_bytes_available(telem);
    if(bytes_avaibles > 2) // At least three bytes are needed to do anything
    {
        uint16_t bytes_used = 1 ;
        if(telem->buffer_rx[telem->index_read_rx] == '$') // Start-of-frame detected
        {
            // Number of bytes required, including the '$' and the checksum, to use the current frame we are on
            data_t * current_data = DATA_LOGGER_Get_Data(telem->buffer_rx[(telem->index_read_rx+1)%LEN_RX_BUFFER]);
            uint16_t bytes_recquired = (current_data->format / 20) + 3 ;
            if(bytes_recquired > bytes_avaibles)
                return ; // We just wait for the next call with hopefully more data available
            bytes_used = bytes_recquired;
            // Len is id (1 byte) + data (x bytes)
            uint16_t len_frame = bytes_recquired - 2 ;
            // Checksum over id + data, compared modulo 256 with the trailing byte
            uint16_t checksum = 0 ;
            for(uint16_t b = 0; b < len_frame; b++)
            {
                checksum += telem->buffer_rx[(telem->index_read_rx+1+b)%LEN_RX_BUFFER];
            }
            if(checksum % 256 == telem->buffer_rx[(telem->index_read_rx + bytes_recquired-1)%LEN_RX_BUFFER])
            {
                DATA_LOGGER_Reception(&telem->buffer_rx[(telem->index_read_rx+1)%LEN_RX_BUFFER]);
            }
        }
        telem->index_read_rx += bytes_used ;
        telem->index_read_rx = telem->index_read_rx % LEN_RX_BUFFER ;
    }
}
// Frames a payload as '$' + data + (checksum % 256) and queues it for
// transmission. NOTE(review): len + 2 must fit within SIZE_PORTION_BUFFER —
// no bound check is performed here; confirm callers respect this.
void TELEMETRY_Send_Data(uint8_t * data, uint8_t len){
    uint8_t to_send[SIZE_PORTION_BUFFER];
    uint32_t checksum = 0 ;
    to_send[0] = '$';
    for(uint8_t b = 0; b < len; b++){
        to_send[1 + b] = data[b];
        checksum += (uint32_t)to_send[1 + b];
    }
    to_send[1 + len] = (uint8_t)(checksum % 256) ;
    TELEMETRY_Send_Data_Raw(to_send, len+2);
}
/**
 * Copy an already-framed byte sequence into the circular TX buffer and,
 * if the UART is idle, start a DMA transfer immediately.
 *
 * The TX buffer is split into NB_PORTION_BUFFER portions of
 * SIZE_PORTION_BUFFER bytes each; index_write_tx / index_read_tx walk the
 * portions as a circular queue.
 *
 * @param data already-framed bytes to enqueue
 * @param len  number of bytes (must fit in one portion)
 *
 * NOTE(review): index_read_tx and transmission_state are also updated from
 * TELEMETRY_TxCpltCallback (interrupt context); verify this enqueue path
 * cannot race with the ISR — TODO confirm.
 */
void TELEMETRY_Send_Data_Raw(uint8_t * data, uint8_t len){
	//Copy the frame into the current write portion of the TX buffer
	for(uint8_t c = 0; c < len; c++)
		telem->buffer_tx[telem->index_write_tx*SIZE_PORTION_BUFFER + c] = data[c];
	//Record the length of this portion for the DMA transfer
	telem->len_portion_tx[telem->index_write_tx] = len ;
	//Advance the write index (circular over NB_PORTION_BUFFER portions)
	telem->index_write_tx ++ ;
	telem->index_write_tx = telem->index_write_tx % NB_PORTION_BUFFER ;
	//If no transmission is in progress, kick off the DMA now; subsequent
	//portions are chained from the TX-complete callback.
	if(telem->index_write_tx != telem->index_read_tx && telem->transmission_state == TRANSMISSION_IDLE)
	{
		telem->transmission_state = TRANSMISSION_IN_PROGRESS;
		//Transmit the pending portion
		HAL_UART_Transmit_DMA(telem->huart, &telem->buffer_tx[SIZE_PORTION_BUFFER*telem->index_read_tx], telem->len_portion_tx[telem->index_read_tx]);
		//Advance the read index (circular)
		telem->index_read_tx ++ ;
		telem->index_read_tx = telem->index_read_tx % NB_PORTION_BUFFER ;
	}
}
/**
 * UART idle-line hook: refresh the RX write index from the DMA counter.
 *
 * NDTR holds the number of bytes the RX DMA still has to transfer, so
 * LEN_RX_BUFFER - NDTR is how far into the circular RX buffer the DMA
 * has written so far.
 */
void TELEMETRY_Idle_Line_Detection(void){
	uint32_t bytes_remaining = telem->huart->hdmarx->Instance->NDTR;
	telem->index_write_rx = LEN_RX_BUFFER - bytes_remaining;
}
/**
 * DMA TX-complete callback: chain the next queued buffer portion, or mark
 * the transmitter idle when the circular TX queue is empty.
 *
 * Runs in interrupt context (invoked from the HAL TX-complete ISR path).
 */
void TELEMETRY_TxCpltCallback(void){
	//More data still queued for transmission?
	if(telem->index_write_tx != telem->index_read_tx){
		//Transmit the next portion
		HAL_UART_Transmit_DMA(telem->huart, &telem->buffer_tx[SIZE_PORTION_BUFFER*telem->index_read_tx], telem->len_portion_tx[telem->index_read_tx]);
		//Advance the read index (circular)
		telem->index_read_tx ++ ;
		telem->index_read_tx = telem->index_read_tx % NB_PORTION_BUFFER ;
	}
	else
		telem->transmission_state = TRANSMISSION_IDLE ;
}
|
const { resolve } = require('path')
const postcss = require('rollup-plugin-postcss')
module.exports = {
rollup(config) {
config.plugins.push(
postcss({
config: {
path: './postcss.config.js'
},
extensions: ['.css'],
minimize: true,
include: ['./src/styles/index.css'],
extract: resolve('dist/styles/all.css')
}),
postcss({
config: {
path: './postcss.config.js'
},
extensions: ['.css'],
minimize: true,
include: [
'./src/styles/modules/components.css',
'./src/styles/modules/utilities.css'
],
extract: resolve('dist/styles/utilities-only.css')
})
)
return config
}
}
|
/**
 * ST STTS751 temperature Sensor I2C extension for makecode.
 * From microbit/micropython Chinese community.
 * https://github.com/makecode-extensions
 */
/**
 * ST STTS751 temperature Sensor I2C extension
 */
//% weight=100 color=#4090e0 icon="\uf2c7" block="STTS751"
namespace STTS751 {
    // Temperature unit selector for the `temperature` block.
    export enum STTS751_T_UNIT {
        //% block="C"
        C = 0,
        //% block="F"
        F = 1
    }
    // Supported conversion resolutions, in bits of the temperature reading.
    export enum STTS751_RESOLUTION {
        //% block="9 bit"
        BIT9 = 9,
        //% block="10 bit"
        BIT10 = 10,
        //% block="11 bit"
        BIT11 = 11,
        //% block="12 bit"
        BIT12 = 12
    }
    // Pre-shifted values written into the config register for 9..12-bit
    // resolution respectively (combined below with mask 0xF3, i.e. bits 3:2)
    // — presumably the datasheet's resolution encoding; TODO confirm.
    const _STTS751_RESOLUTION = [8, 0, 4, 12]
    // 7-bit I2C address and register numbers used by this driver.
    const STTS751_I2C_ADDR = 0x4A
    const STTS751_REG_STATUS = 1
    const STTS751_REG_CONFIG = 3
    const STTS751_REG_CONRAT = 4
    const STTS751_REG_TEMPVH = 0
    const STTS751_REG_TEMPVL = 2
    const STTS751_REG_ONESHOT = 15
    // Tracks whether one-shot (low power) conversion mode is active.
    let _oneshot = false
    // Default to continuous conversion mode when the extension loads.
    oneshot_mode(false)
    // set dat to reg
    function setreg(reg: number, dat: number): void {
        let tb = pins.createBuffer(2)
        tb[0] = reg
        tb[1] = dat
        pins.i2cWriteBuffer(STTS751_I2C_ADDR, tb)
    }
    // read a Int8LE from reg (write register number, then read one signed byte)
    function getInt8LE(reg: number): number {
        pins.i2cWriteNumber(STTS751_I2C_ADDR, reg, NumberFormat.UInt8BE);
        return pins.i2cReadNumber(STTS751_I2C_ADDR, NumberFormat.Int8LE);
    }
    // read a UInt8LE from reg (write register number, then read one unsigned byte)
    function getUInt8LE(reg: number): number {
        pins.i2cWriteNumber(STTS751_I2C_ADDR, reg, NumberFormat.UInt8BE);
        return pins.i2cReadNumber(STTS751_I2C_ADDR, NumberFormat.UInt8LE);
    }
    // set a mask dat to reg: `mask` selects the bits to KEEP, `dat` the bits to set
    function setreg_mask(reg: number, dat: number, mask: number): void {
        setreg(reg, (getUInt8LE(reg) & mask) | dat)
    }
    // turn number to int16 (two's-complement sign extension of a 16-bit value)
    function int16(n: number): number {
        return (n > 0x7fff) ? n - 65536 : n
    }
    // oneshot mode handle: trigger one conversion, then busy-wait until the
    // status register's top bit (>= 0x80) clears before reading.
    function ONE_SHOT(): void {
        if (_oneshot) {
            setreg(STTS751_REG_ONESHOT, 1)
            while (true) {
                if (getUInt8LE(STTS751_REG_STATUS) < 0x80) return
            }
        }
    }
    /**
     * set oneshot mode to reduce power consumption
     */
    //% block="oneshot mode %oneshot"
    export function oneshot_mode(oneshot: boolean = false) {
        _oneshot = oneshot
        // 0x40 sets the standby bit; mask 0xBF preserves all other config bits.
        let t = (oneshot) ? 0x40 : 0x00
        setreg_mask(STTS751_REG_CONFIG, t, 0xBF)
    }
    /**
     * set temperature sensor resolution
     */
    //% block="resolution %res"
    //% res.defl=STTS751.STTS751_RESOLUTION.BIT12
    export function resolution(res: STTS751.STTS751_RESOLUTION = STTS751.STTS751_RESOLUTION.BIT12) {
        if ((res < 9) || (res > 12)) return
        setreg_mask(STTS751_REG_CONFIG, _STTS751_RESOLUTION[res - 9], 0xF3)
    }
    /**
     * get temperature
     */
    //% block="temperature %u"
    export function temperature(u: STTS751.STTS751_T_UNIT = STTS751.STTS751_T_UNIT.C): number {
        ONE_SHOT()
        // Combine high byte (reg 0) and low byte (reg 2), sign-extend, and
        // divide by 256: the raw reading is a signed 8.8 fixed-point Celsius value.
        let T = int16(getUInt8LE(STTS751_REG_TEMPVH) * 256 + getUInt8LE(STTS751_REG_TEMPVL)) / 256
        if (u == STTS751.STTS751_T_UNIT.F) T = 32 + T * 9 / 5
        return T
    }
}
|
def find_anagrams(word_list):
    """Return all pairs of words in ``word_list`` that are anagrams.

    Each pair is a tuple ``(word_list[i], word_list[j])`` with ``i < j``,
    emitted in input order (same ordering as the original implementation).

    The sorted-character key of every word is computed once up front,
    hoisting the O(k log k) sort out of the O(n^2) pair comparison loop.
    """
    keys = [sorted(word) for word in word_list]
    anagrams = []
    for i in range(len(word_list) - 1):
        for j in range(i + 1, len(word_list)):
            if keys[i] == keys[j]:
                anagrams.append((word_list[i], word_list[j]))
    return anagrams
def mean(values):
    """Return the arithmetic mean of ``values``.

    :param values: non-empty sequence of numbers.
    :returns: the mean as a float (true division).
    :raises ZeroDivisionError: if ``values`` is empty (same as before).

    The original shadowed the built-in name ``list`` for both the
    parameter and the demo variable, and summed with a manual loop;
    this uses the built-in ``sum``.
    """
    return sum(values) / len(values)


if __name__ == "__main__":
    # Demo only runs when executed as a script, not on import.
    samples = [2.3, 3.7, 4.9]
    print(mean(samples))
'use strict';
/*global require*/
var defaultValue = require('terriajs-cesium/Source/Core/defaultValue');
var defined = require('terriajs-cesium/Source/Core/defined');
var knockout = require('terriajs-cesium/Source/ThirdParty/knockout');
/**
 * View model for a single item on the menu bar.
 *
 * @param {Object} options Describes the menu item.
 * @param {String} options.label Text displayed for the item.
 * @param {String} [options.image] URL of the item's icon image.
 * @param {Number} [options.imageWidth] Icon width, in pixels.
 * @param {Number} [options.imageHeight] Icon height, in pixels.
 * @param {String} [options.tooltip] Tooltip text for the item.
 * @param {Function} [options.callback] Invoked with this view model when the item is executed.
 * @param {String} [options.href] Link target for the item.
 * @param {Boolean} [options.observableToToggle] Tracked flag flipped each time the item is executed.
 * @param {Boolean} [options.visible=true] Whether the item is shown.
 */
var MenuBarItemViewModel = function(options) {
    this.label = options.label;
    this.image = options.image;
    this.imageWidth = options.imageWidth;
    this.imageHeight = options.imageHeight;
    this.tooltip = options.tooltip;
    this.callback = options.callback;
    this.href = options.href;
    this.observableToToggle = options.observableToToggle;
    this.visible = defaultValue(options.visible, true);
    // Make these properties knockout-observable.
    // NOTE(review): 'isToggle' is tracked here but never assigned above —
    // confirm whether it should be initialized from options.
    knockout.track(this, ['label', 'image', 'imageWidth', 'imageHeight', 'tooltip', 'href', 'isToggle', 'observableToToggle', 'visible']);
};
/**
 * Execute the menu item: invoke the configured callback (if any), then
 * flip the toggle flag (if one was provided). The toggle is re-checked
 * after the callback runs so a callback may install or remove it.
 */
MenuBarItemViewModel.prototype.execute = function() {
    var callback = this.callback;
    if (defined(callback)) {
        callback.call(this, this);
    }
    if (defined(this.observableToToggle)) {
        this.observableToToggle = !this.observableToToggle;
    }
};
module.exports = MenuBarItemViewModel;
|
#!/bin/sh
#
# Configure a host as a client to the Galaxy CernVM-FS repositories.
#
# USAGE
#
# sudo ./cvmfs-galaxy-client-setup.sh -v <PROXY_ADDRESS>
#
# The above may take about 2 minutes to run. After which the Galaxy
# CernVM-FS repositories can be accessed. For example,
#
# ls /cvmfs/data.galaxyproject.org
#
# The initial access may take about 5 seconds on CentOS 7, but will be
# faster after that. With autofs, the directory won't appear under /cvmfs
# until it has been accessed.
#
# DESCRIPTION
#
# Install CernVM-FS client software and configure it to use the
# configurations from the "cvmfs-config.galaxyproject.org" repository
# for the Galaxy repositories.
#
# Copyright (C) 2021, QCIF Ltd.
#================================================================
PROGRAM='cvmfs-galaxy-client-setup'
VERSION='1.2.0'
# Script name without/with extension, used in user-facing messages.
EXE=$(basename "$0" .sh)
EXE_EXT=$(basename "$0")
#----------------------------------------------------------------
# Constants
# Default port for the proxy cache
DEFAULT_PROXY_PORT=3128
# Default cache size in MiB (should be between 4 GiB and 50 GiB)
DEFAULT_CACHE_SIZE_MB=4096 # 4 GiB
# Minimum value allowed for --size option in MiB
MIN_CACHE_SIZE_MB=1024 # 1 GiB
#----------------
# Header inserted into generated files
PROGRAM_INFO="Created by $PROGRAM $VERSION [$(date '+%F %T %Z')]"
#----------------
# Repository specific
ORG=galaxyproject.org
STRATUM_1_HOSTS="
cvmfs1-mel0.gvl.org.au \
cvmfs1-ufr0.galaxyproject.eu \
cvmfs1-tacc0.galaxyproject.org \
cvmfs1-iu0.galaxyproject.org \
cvmfs1-psu0.galaxyproject.org"
# Above order is significant, especially when not using the Geo API
# For dynamic configuration: the config repository
CONFIG_REPO=cvmfs-config.$ORG
# Public key used to verify the config repository's signatures.
CONFIG_REPO_KEY='-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuJZTWTY3/dBfspFKifv8
TWuuT2Zzoo1cAskKpKu5gsUAyDFbZfYBEy91qbLPC3TuUm2zdPNsjCQbbq1Liufk
uNPZJ8Ubn5PR6kndwrdD13NVHZpXVml1+ooTSF5CL3x/KUkYiyRz94sAr9trVoSx
THW2buV7ADUYivX7ofCvBu5T6YngbPZNIxDB4mh7cEal/UDtxV683A/5RL4wIYvt
S5SVemmu6Yb8GkGwLGmMVLYXutuaHdMFyKzWm+qFlG5JRz4okUWERvtJ2QAJPOzL
mAG1ceyBFowj/r3iJTa+Jcif2uAmZxg+cHkZG5KzATykF82UH1ojUzREMMDcPJi2
dQIDAQAB
-----END PUBLIC KEY-----
'
# For static configuration: the data repository
#
# This script can also be used to statically configure a single
# repository. That is, not use the dynamic configurations from
# the CONFIG_REPO. Normally, this is not recommended.
DATA_REPO=data.$ORG
DATA_REPO_KEY='-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5LHQuKWzcX5iBbCGsXGt
6CRi9+a9cKZG4UlX/lJukEJ+3dSxVDWJs88PSdLk+E25494oU56hB8YeVq+W8AQE
3LWx2K2ruRjEAI2o8sRgs/IbafjZ7cBuERzqj3Tn5qUIBFoKUMWMSIiWTQe2Sfnj
GzfDoswr5TTk7aH/FIXUjLnLGGCOzPtUC244IhHARzu86bWYxQJUw0/kZl5wVGcH
maSgr39h1xPst0Vx1keJ95AH0wqxPbCcyBGtF1L6HQlLidmoIDqcCQpLsGJJEoOs
NVNhhcb66OJHah5ppI1N3cZehdaKyr1XcF9eedwLFTvuiwTn6qMmttT/tHX7rcxT
owIDAQAB
-----END PUBLIC KEY-----
'
# Timezone database
ZONEINFO=/usr/share/zoneinfo
DEFAULT_TIMEZONE=Etc/UTC
#----------------------------------------------------------------
# Error handling
# Exit immediately if a simple command exits with a non-zero status.
# Better to abort than to continue running when something went wrong.
set -e
set -u # fail on attempts to expand undefined environment variables
#----------------------------------------------------------------
# Command line arguments
# Note: parsing does not support combining single letter options (e.g. "-vh")
# These are populated by the option-parsing loop below.
CVMFS_HTTP_PROXY=
STATIC=
CVMFS_QUOTA_LIMIT_MB=$DEFAULT_CACHE_SIZE_MB
USE_GEO_API=
TIMEZONE=
QUIET=
VERBOSE=
SHOW_VERSION=
SHOW_HELP=
# Parse options; any non-option argument is treated as a proxy address
# (optionally with a :port suffix) and appended to CVMFS_HTTP_PROXY.
while [ $# -gt 0 ]
do
  case "$1" in
    -d|--direct)
      # --direct is mutually exclusive with explicit proxies.
      if [ -n "$CVMFS_HTTP_PROXY" ]; then
        echo "$EXE: usage error: do not use --direct with proxies" >&2
        exit 2
      fi
      CVMFS_HTTP_PROXY=DIRECT
      shift
      ;;
    -s|--static-config)
      STATIC=yes
      shift
      ;;
    -c|--cache-size)
      if [ $# -lt 2 ]; then
        echo "$EXE: usage error: $1 missing value" >&2
        exit 2
      fi
      CVMFS_QUOTA_LIMIT_MB="$2"
      shift; shift
      ;;
    -g|--geo-api)
      USE_GEO_API=yes
      shift
      ;;
    -t|--timezone|--tz)
      if [ $# -lt 2 ]; then
        echo "$EXE: usage error: $1 missing value" >&2
        exit 2
      fi
      TIMEZONE="$2"
      shift; shift
      ;;
    -q|--quiet)
      QUIET=yes
      shift
      ;;
    -v|--verbose)
      VERBOSE=yes
      shift
      ;;
    --version)
      SHOW_VERSION=yes
      shift
      ;;
    -h|--help)
      SHOW_HELP=yes
      shift
      ;;
    -*)
      echo "$EXE: usage error: unknown option: $1" >&2
      exit 2
      ;;
    *)
      # Argument
      if [ "$CVMFS_HTTP_PROXY" = 'DIRECT' ]; then
        echo "$EXE: usage error: do not provide proxies with --direct" >&2
        exit 2
      fi
      # Proxies must be bare host[:port] values, not URLs.
      if echo "$1" | grep -q '^http://'; then
        echo "$EXE: usage error: expecting an address, not a URL: \"$1\"" >&2
        exit 2
      fi
      if echo "$1" | grep -q '^https://'; then
        echo "$EXE: usage error: expecting an address, not a URL: \"$1\"" >&2
        exit 2
      fi
      if echo "$1" | grep -q ':'; then
        # Value has a port number
        P="$1"
      else
        # Use default port number
        P="$1:$DEFAULT_PROXY_PORT"
      fi
      # Accumulate proxies as a semicolon-separated list.
      if [ -z "$CVMFS_HTTP_PROXY" ]; then
        CVMFS_HTTP_PROXY="$P"
      else
        CVMFS_HTTP_PROXY="$CVMFS_HTTP_PROXY;$P"
      fi
      shift
      ;;
  esac
done
#----------------
# Help and version options
if [ -n "$SHOW_HELP" ]; then
  cat <<EOF
Usage: $EXE_EXT [options] {proxies}
Options:
  -c | --cache-size NUM  size of cache in MiB (default: $DEFAULT_CACHE_SIZE_MB)
  -g | --geo-api         enable use of the Geo API (default: do not use it)
  -s | --static-config   configure $DATA_REPO only (not recommended)
  -d | --direct          no proxies, connect to Stratum 1 (not recommended)
  -t | --timezone TZ     set the timezone (e.g. Etc/UTC or Australia/Brisbane)
  -q | --quiet           output nothing unless an error occurs
  -v | --verbose         output extra information when running
       --version         display version information and exit
  -h | --help            display this help and exit
proxies:
  IP address of proxy servers with optional port (default: $DEFAULT_PROXY_PORT)
  e.g. 192.168.1.200 192.168.1.201:8080 # examples only: use your local proxy
EOF
  exit 0
fi
if [ -n "$SHOW_VERSION" ]; then
  echo "$PROGRAM $VERSION"
  exit 0
fi
#----------------
# Other options
# Validate option values before doing any work.
if [ -n "$TIMEZONE" ]; then
  # Timezone configuration requested: check value is a valid timezone name
  if [ ! -d "$ZONEINFO" ]; then
    echo "$EXE: cannot set timezone: directory not found: $ZONEINFO" >&2
    exit 3
  fi
  if [ ! -e "$ZONEINFO/$TIMEZONE" ]; then # Note: could be file or symlink
    echo "$EXE: cannot set timezone: unknown timezone: $TIMEZONE" >&2
    exit 1
  fi
fi
if [ -n "$VERBOSE" ] && [ -n "$QUIET" ]; then
  # Verbose overrides quiet, if both are specified
  QUIET=
fi
# Cache size must be a whole number of MiB, and at least the minimum.
if ! echo "$CVMFS_QUOTA_LIMIT_MB" | grep -qE '^[0-9]+$'; then
  echo "$EXE: usage error: invalid cache size: \"$CVMFS_QUOTA_LIMIT_MB\"" >&2
  exit 2
fi
if [ "$CVMFS_QUOTA_LIMIT_MB" -lt $MIN_CACHE_SIZE_MB ]; then
  echo "$EXE: usage error: cache is too small: $CVMFS_QUOTA_LIMIT_MB MiB" >&2
  exit 2
fi
if [ -z "$CVMFS_HTTP_PROXY" ]; then
  # This environment variable should either be a list of proxies (host:port)
  # separated by semicolons, or the value "DIRECT". When not using DIRECT,
  # there must be at least one proxy.
  echo "$EXE: usage error: missing proxies (-h for help)" >&2
  exit 2
fi
#----------------------------------------------------------------
# Detect tested systems
# Best-effort identification of the distribution; only used to warn on
# untested systems, never to abort.
if [ -f '/etc/system-release' ]; then
  # Fedora based
  DISTRO=$(head -1 /etc/system-release)
elif which lsb_release >/dev/null 2>&1; then
  # Debian based
  DISTRO="$(lsb_release --id --short) $(lsb_release --release --short)"
elif which uname >/dev/null 2>&1; then
  # Other
  DISTRO="$(uname -s) $(uname -r)"
else
  DISTRO=unknown
fi
if echo "$DISTRO" | grep -q '^CentOS Linux release 7'; then
  :
elif echo "$DISTRO" | grep -q '^CentOS Linux release 8'; then
  :
elif echo "$DISTRO" | grep -q '^CentOS Stream release 8'; then
  :
elif [ "$DISTRO" = 'Ubuntu 21.04' ]; then
  :
elif [ "$DISTRO" = 'Ubuntu 20.10' ]; then
  :
elif [ "$DISTRO" = 'Ubuntu 20.04' ]; then
  :
elif [ "$DISTRO" = 'Ubuntu 18.04' ]; then
  :
elif [ "$DISTRO" = 'Ubuntu 16.04' ]; then
  :
else
  # Add additional elif-statements for tested systems
  echo "$EXE: warning: untested system: $DISTRO" >&2
fi
#----------------------------------------------------------------
# Check for root privileges
if [ "$(id -u)" -ne 0 ]; then
  echo "$EXE: error: root privileges required" >&2
  exit 1
fi
#----------------------------------------------------------------
# Install CernVM-FS client
# Use LOG file to suppress apt-get messages, only show on error
# Unfortunately, "apt-get -q" and "yum install -q" still produces output.
LOG="/tmp/${PROGRAM}.$$"
#----------------
# Fedora functions
# Succeed (status 0) only when the RPM package $1 is NOT installed.
_yum_not_installed() {
  ! rpm -q "$1" >/dev/null
}
# Succeed (status 0) only when the yum/dnf repository $1 is NOT enabled.
#
# CentOS 7 has yum 3.4.3: no --enabled option, output is "cernvm/7/x86_64..."
# CentOS Stream 8 has yum 4.4.2: has --enabled option, output is "cernvm "
#
# So use old "enabled" argument instead of --enabled option and look for
# slash or space after the repo name.
_yum_no_repo() {
  ! $YUM repolist enabled | grep -q "^$1[/ ]"
}
# Install the CernVM-FS YUM repository definition, if not already enabled.
#
# $1 - name the repository should appear under in `repolist`
# $2 - URL of the release RPM that installs the repository
#
# Exits with status 3 if the RPM installed but the repository still does
# not show up (internal consistency check).
_yum_install_repo() {
  # Install the CernVM-FS YUM repository (if needed)
  local REPO_NAME="$1"
  local URL="$2"
  if _yum_no_repo "$REPO_NAME"; then
    # Repository not installed
    _yum_install "$URL"
    if _yum_no_repo "$REPO_NAME"; then
      echo "$EXE: internal error: $URL did not install repo \"$REPO_NAME\"" >&2
      exit 3
    fi
  else
    if [ -z "$QUIET" ]; then
      echo "$EXE: repository already installed: $REPO_NAME"
    fi
  fi
}
# Install a package (by name or by RPM URL) if it is not already installed.
#
# $1 - package name, or an https:// URL to an .rpm file
#
# Uses the globals $YUM (yum or dnf) and $LOG; exits with status 1 if the
# install fails.
_yum_install() {
  local PKG="$1"
  local PKG_NAME=
  # Fix: the original tested `! echo ... | grep -q /^https:/` — the slashes
  # were part of the pattern and the negation was inverted, so it only
  # worked by accident. Detect URL values explicitly.
  if echo "$PKG" | grep -q '^https://'; then
    # Value is a URL: extract package name from it
    PKG_NAME=$(echo "$PKG" | sed 's/^.*\///') # remove everything up to last /
    PKG_NAME=$(echo "$PKG_NAME" | sed 's/\.rpm$//') # remove .rpm
  else
    # Assume the entire value is the package name
    PKG_NAME="$PKG"
  fi
  if ! rpm -q "$PKG_NAME" >/dev/null ; then
    # Not already installed
    if [ -z "$QUIET" ]; then
      echo "$EXE: $YUM install: $PKG"
    fi
    if ! $YUM install -y "$PKG" >$LOG 2>&1; then
      cat $LOG
      rm $LOG
      echo "$EXE: error: $YUM install: $PKG failed" >&2
      exit 1
    fi
    rm $LOG
  else
    if [ -z "$QUIET" ]; then
      echo "$EXE: package already installed: $PKG"
    fi
  fi
}
#----------------
# Debian functions
# Succeed (status 0) only when the Debian package $1 is NOT installed.
_dpkg_not_installed() {
  ! dpkg-query -s "$1" >/dev/null 2>&1
}
# Download a Debian package file from a URL and install it with dpkg.
#
# $1 - name of the package the download is expected to provide
# $2 - URL of the .deb file
#
# Exits 1 on download/install failure, 3 if the .deb did not actually
# provide the expected package.
_dpkg_download_and_install() {
  # Download a Debian file from a URL and install it.
  local PKG_NAME="$1"
  local URL="$2"
  if _dpkg_not_installed "$PKG_NAME"; then
    # Download it
    if [ -z "$QUIET" ]; then
      echo "$EXE: downloading $URL"
    fi
    DEB_FILE="/tmp/$(basename "$URL").$$"
    if ! wget --quiet -O "$DEB_FILE" "$URL"; then
      rm -f "$DEB_FILE"
      echo "$EXE: error: could not download: $URL" >&2
      exit 1
    fi
    # Install it
    if [ -z "$QUIET" ]; then
      echo "$EXE: dpkg installing download file"
    fi
    if ! dpkg --install "$DEB_FILE" >$LOG 2>&1; then
      cat $LOG
      rm $LOG
      echo "$EXE: error: dpkg install failed" >&2
      exit 1
    fi
    rm -f $LOG
    rm -f "$DEB_FILE"
    if _dpkg_not_installed "$PKG_NAME"; then
      # The package from the URL did not install the expected package
      echo "$EXE: internal error: $URL did not install $PKG_NAME" >&2
      exit 3
    fi
  else
    if [ -z "$QUIET" ]; then
      # Fix: this message referenced the undefined $REPO_NAME (copy-paste
      # from _yum_install_repo), which aborts the script under `set -u`.
      echo "$EXE: package already installed: $PKG_NAME"
    fi
  fi
}
# Run `apt-get update`, hiding its output unless it fails.
#
# Exits with status 1 on failure. Uses the global $LOG scratch file.
_apt_get_update() {
  if [ -z "$QUIET" ]; then
    echo "$EXE: apt-get update"
  fi
  if ! apt-get update >$LOG 2>&1; then
    cat $LOG
    rm $LOG
    echo "$EXE: error: apt-get update failed" >&2
    exit 1
  fi
  # Fix: also remove the log on success (the original left it in /tmp).
  rm $LOG
}
# Install a Debian package if it is not already installed.
#
# $1 - package name
#
# Exits with status 1 if the install fails. Uses the global $LOG file.
_apt_get_install() {
  local PKG="$1"
  if _dpkg_not_installed "$PKG" ; then
    # Not already installed: install it
    if [ -z "$QUIET" ]; then
      echo "$EXE: apt-get install $PKG"
    fi
    if ! apt-get install -y "$PKG" >$LOG 2>&1; then
      cat $LOG
      rm $LOG
      # Fix: the error message hard-coded "cvmfs"; report the package
      # that actually failed.
      echo "$EXE: error: apt-get install $PKG failed" >&2
      exit 1
    fi
    rm $LOG
  else
    if [ -z "$QUIET" ]; then
      echo "$EXE: package already installed: $PKG"
    fi
  fi
}
#----------------
# Shared functions
# Configure the system timezone.
#
# $1 - optional: "DEBIAN_FRONTEND" to also reinstall tzdata noninteractively
#      (needed when running inside a Debian-based Docker container).
#
# Uses/updates the global $TIMEZONE: when the user did not request one,
# an existing configuration is left alone and a missing one is inferred
# from /etc/timezone or DEFAULT_TIMEZONE.
_set_timezone() {
  local EXTRA=
  if [ $# -gt 0 ]; then
    EXTRA="$1"
  fi
  # If the timezone is configured, this code will ALWAYS create the
  # /etc/localtime symbolic link, but will NOT CREATE the
  # /etc/timezone file if it does not exist (ONLY UPDATING it to match
  # the symbolic link, if the file already exists). Some systems have
  # both (e.g. Ubuntu 20.04) and some systems only have the symbolic
  # link (e.g. CentOS 7).
  if [ -z "$TIMEZONE" ]; then
    # User has not asked for the timezone to be configured...
    # ... but if it is not configured, try to configure it to an inferred
    # value or DEFAULT_TIMEZONE.
    # Determine if the timezone symlink needs to be created, and what
    # value to set it to.
    if [ ! -e /etc/localtime ]; then
      # Symlink missing: need to create it
      if [ ! -f /etc/timezone ]; then
        # File does not exist
        TIMEZONE=$DEFAULT_TIMEZONE
      else
        # File exists: use the value from it
        TIMEZONE=$(cat /etc/timezone)
      fi
    fi
    if [ -n "$TIMEZONE" ]; then
      # TIMEZONE is to be configured, because the symlink is missing.
      # Check if the extracted timezone value is usable, since there
      # might have been an invalid value in the /etc/timezone file. If
      # the value is not usable, the TIMEZONE is returned to being the
      # empty string: it is not an error, because the user never asked
      # for the timezone to be changed.
      if [ -d "$ZONEINFO" ]; then
        if [ ! -e "$ZONEINFO/$TIMEZONE" ]; then # Note: file or symlink
          # Bad value: do not configure the timezone
          TIMEZONE=
        fi
      else
        # No zoneinfo directory: do not configure the timezone
        TIMEZONE=
      fi
    fi
    # Note: if the user had explicitly requested the timezone to be set,
    # the value has already been checked when the command line arguments
    # were processed.
  fi
  # Configure the timezone _only_ if TIMEZONE is set (i.e. the user explicitly
  # asked for it, or it has not been already configured).
  if [ -n "$TIMEZONE" ]; then
    # Configure timezone
    # /etc/localtime symlink (mandatory)
    if [ -z "$QUIET" ]; then
      echo "$EXE: timezone: $TIMEZONE: /etc/localtime"
    fi
    ln -s -f "$ZONEINFO/$TIMEZONE" /etc/localtime
    # /etc/timezone file (optional)
    if [ -f /etc/timezone ]; then
      # Update the file, since it already exists (i.e. never create it)
      if [ -z "$QUIET" ]; then
        echo "$EXE: timezone: $TIMEZONE: /etc/timezone"
      fi
      echo "$TIMEZONE" > /etc/timezone
    fi
    # Extra configurations (only if requested)
    if [ "$EXTRA" = DEBIAN_FRONTEND ]; then
      # Additions for Debian (needed when script is run inside Docker).
      # Fix: the original assigned the whole apt-get command line to the
      # DEBIAN_FRONTEND variable instead of executing it. Run it as a
      # best-effort (|| true) so tzdata picks up the new timezone without
      # prompting; failure here is not fatal.
      DEBIAN_FRONTEND=noninteractive \
        apt-get install -y --no-install-recommends tzdata >/dev/null 2>&1 || true
    fi
  fi
}
#----------------
# Install for either Fedora or Debian based distributions
# Prefer dnf over yum when both exist (newer Fedora/CentOS).
YUM=yum
if which dnf >/dev/null 2>&1; then
  YUM=dnf
fi
if which $YUM >/dev/null 2>&1; then
  # Installing for Fedora based distributions
  _set_timezone
  if _yum_not_installed 'cvmfs'; then
    # TODO: additional packages needed when inside a Docker environment
    # Get the CernVM-FS repo
    _yum_install_repo 'cernvm' \
      https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest.noarch.rpm
    _yum_install cvmfs # install CernVM-FS
  else
    if [ -z "$QUIET" ]; then
      echo "$EXE: package already installed: cvmfs"
    fi
  fi
elif which apt-get >/dev/null 2>&1; then
  # Installing for Debian based distributions
  _set_timezone DEBIAN_FRONTEND
  if _dpkg_not_installed 'cvmfs' ; then
    _apt_get_update # first update
    # These are needed when inside a Docker environment
    _apt_get_install apt-utils
    _apt_get_install python3
    _apt_get_install wget
    _apt_get_install distro-info-data
    _apt_get_install lsb-release
    # Get the CernVM-FS repo
    _dpkg_download_and_install 'cvmfs-release' \
      https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest_all.deb
    _apt_get_update # second update MUST be done after cvmfs-release-latest_all
    _apt_get_install cvmfs # install CernVM-FS
  else
    if [ -z "$QUIET" ]; then
      echo "$EXE: package already installed: cvmfs"
    fi
  fi
else
  echo "$EXE: unsupported distribution: no apt-get, yum or dnf" >&2
  exit 3
fi
#----------------------------------------------------------------
# Create directory for storing the organisation's keys
ORG_KEY_DIR="/etc/cvmfs/keys/$ORG"
if [ ! -e "$ORG_KEY_DIR" ]; then
  if ! mkdir "$ORG_KEY_DIR"; then
    echo "$EXE: error: could not create directory: $ORG_KEY_DIR" >&2
    exit 1
  fi
fi
#----------------------------------------------------------------
# Configure CernVM-FS
# Construct the value for CVMFS_SERVER_URL from Stratum 1 replica hosts
# (a semicolon-separated list of URLs; "@fqrn@" is substituted by CernVM-FS
# with the fully qualified repository name).
CVMFS_SERVER_URL=
for HOST in $STRATUM_1_HOSTS; do
  URL="http://$HOST/cvmfs/@fqrn@"
  if [ -z "$CVMFS_SERVER_URL" ]; then
    CVMFS_SERVER_URL=$URL
  else
    CVMFS_SERVER_URL="$CVMFS_SERVER_URL;$URL"
  fi
done
# Write either the dynamic (config-repository driven) or static
# (single data repository) CernVM-FS configuration, removing whichever
# files belong to the other mode.
if [ -z "$STATIC" ]; then
  #----------------
  # Dynamic
  # Add public key for the config-repository
  CONFIG_REPO_KEY_FILE="$ORG_KEY_DIR/$CONFIG_REPO.pub"
  if [ -z "$QUIET" ]; then
    echo "$EXE: creating \"$CONFIG_REPO_KEY_FILE\""
  fi
  echo "$CONFIG_REPO_KEY" > "$CONFIG_REPO_KEY_FILE"
  chmod 644 "$CONFIG_REPO_KEY_FILE"
  # Create configuration for the config-repository
  FILE="/etc/cvmfs/config.d/$CONFIG_REPO.conf"
  if [ -z "$QUIET" ]; then
    echo "$EXE: creating \"$FILE\""
  fi
  cat > "$FILE" <<EOF
# $PROGRAM_INFO
# Dynamic configuration mode
CVMFS_SERVER_URL="$CVMFS_SERVER_URL"
CVMFS_PUBLIC_KEY="$CONFIG_REPO_KEY_FILE"
EOF
  # Configure CernVM-FS to use the configurations from config-repository
  FILE="/etc/cvmfs/default.d/80-$ORG-cvmfs.conf"
  if [ -z "$QUIET" ]; then
    echo "$EXE: creating \"$FILE\""
  fi
  cat > "$FILE" <<EOF
# $PROGRAM_INFO
# Dynamic configuration mode
CVMFS_CONFIG_REPOSITORY="$CONFIG_REPO"
CVMFS_DEFAULT_DOMAIN="$ORG"
EOF
  # Remove static config files, if any
  rm -f "$ORG_KEY_DIR/$DATA_REPO.pub"
  rm -f "/etc/cvmfs/domain.d/${ORG}.conf"
else
  #----------------
  # Static
  # Add public key for the repository
  REPO_PUBKEY_FILE="$ORG_KEY_DIR/$DATA_REPO.pub"
  if [ -z "$QUIET" ]; then
    echo "$EXE: creating \"$REPO_PUBKEY_FILE\""
  fi
  echo "$DATA_REPO_KEY" > "$REPO_PUBKEY_FILE"
  # NOTE(review): static key is written mode 600 while the dynamic key above
  # uses 644 — confirm whether non-root cvmfs processes need to read it.
  chmod 600 "$REPO_PUBKEY_FILE"
  # Create domain.d/org.conf
  FILE=/etc/cvmfs/domain.d/${ORG}.conf
  if [ -z "$QUIET" ]; then
    echo "$EXE: creating \"$FILE\""
  fi
  cat > "$FILE" <<EOF
# $PROGRAM_INFO
# Static configuration mode
CVMFS_SERVER_URL="$CVMFS_SERVER_URL"
CVMFS_KEYS_DIR="/etc/cvmfs/keys/$ORG"
EOF
  # Remove dynamic config files, if any
  rm -f "$ORG_KEY_DIR/$CONFIG_REPO.pub"
  rm -f "/etc/cvmfs/config.d/$CONFIG_REPO.conf"
  rm -f "/etc/cvmfs/default.d/80-$ORG-cvmfs.conf"
fi
#----------------------------------------------------------------
# Local defaults
# Write /etc/cvmfs/default.local with the proxy list, cache quota and
# (optionally) the Geo API setting.
FILE="/etc/cvmfs/default.local"
if [ -z "$QUIET" ]; then
  echo "$EXE: creating \"$FILE\""
fi
if [ -z "$USE_GEO_API" ]; then
  # This is the default, because we've found that the geographic
  # ordering is not always correct. Good or bad results are obtained,
  # depending on which Stratum 1 is queried and the particular
  # client/proxy IP address.
  GC='# ' # Comment out CVMFS_USE_GEOAPI ("no" works, but is not documented)
else
  GC='' # Do not comment it out: i.e. set CVMFS_USE_GEOAPI to "yes"
fi
cat > "$FILE" <<EOF
# $PROGRAM_INFO
CVMFS_HTTP_PROXY=${CVMFS_HTTP_PROXY}
CVMFS_QUOTA_LIMIT=${CVMFS_QUOTA_LIMIT_MB} # cache size in MiB (recommended: 4GB to 50GB)
${GC}CVMFS_USE_GEOAPI=yes
EOF
if [ -n "$STATIC" ]; then
  # Extra config needed for a static repository
  echo "" >> "$FILE"
  echo "CVMFS_REPOSITORIES=\"$DATA_REPO\"" >> "$FILE"
fi
#----------------------------------------------------------------
# Setup
# Check the configuration before committing to it.
if ! cvmfs_config chksetup >/dev/null; then
  # Fix: these two error echoes used `2>&1` (redirect stderr to stdout);
  # error messages belong on stderr (`>&2`), consistent with the rest of
  # this script.
  echo "$EXE: error: bad cvmfs setup (run 'cvmfs_config chksetup')" >&2
  exit 1
fi
# Setup
if [ -z "$QUIET" ]; then
  echo "$EXE: running \"cvmfs_config setup\""
fi
if ! cvmfs_config setup; then
  echo "$EXE: error: cvmfs_config setup failed" >&2
  exit 1
fi
#----------------------------------------------------------------
# Success
if [ -z "$QUIET" ]; then
  echo "$EXE: done"
fi
exit 0
# To reload a repository:
#
# cvmfs_config reload <repository-name>
# Available repositories (as of 2021-02-22):
#
# cvmfs-config.galaxyproject.org
# data.galaxyproject.org
# main.galaxyproject.org
# sandbox.galaxyproject.org
# singularity.galaxyproject.org
# test.galaxyproject.org
# usegalaxy.galaxyproject.org
#EOF
|
"""Farcy class test file."""
from __future__ import print_function
from collections import namedtuple
from datetime import datetime
from farcy import (Config, FARCY_COMMENT_START, Farcy, FarcyException, UTC,
main, no_handler_debug_factory)
from mock import MagicMock, call, patch
from github3.exceptions import ConnectionError
import farcy as farcy_module
import logging
import unittest
from .helper import Struct
# Prevent any real system/user Farcy configuration from affecting tests.
Config.PATH = '/dev/null'  # Don't allow the system config file to load.
farcy_module.APPROVAL_PHRASES = ['Dummy Approval']  # Provide only one option.
# Attribute names carried by the fake pull-request file objects below.
PFILE_ATTRS = ['contents', 'filename', 'patch', 'status']
# Lightweight stand-ins for the objects Farcy reads off github3 responses.
MockInfo = namedtuple('Info', ['decoded'])
MockPFile = namedtuple('PFile', PFILE_ATTRS)
def assert_calls(method, *calls):
    """Assert ``method`` received exactly ``calls``, in order, and no more."""
    expected = list(calls)
    method.assert_has_calls(expected)
    message = "{0} != {1}".format(expected, method.mock_calls)
    assert method.call_count == len(expected), message
def assert_status(farcy, failures=0):
    """Assert the expected pending -> final status pair was posted."""
    if failures:
        suffix = 's' if failures > 1 else ''
        final = call('dummy', 'failure', context='farcy',
                     description='found {0} issue{1}'.format(failures, suffix))
    else:
        final = call('dummy', 'success', context='farcy',
                     description='approves! Dummy Approval!')
    pending = call('dummy', 'pending', context='farcy',
                   description='started investigation')
    assert_calls(farcy.repo.create_status, pending, final)
def mockpfile(**kwargs):
    """Build a MockPFile, defaulting every unspecified attribute to None."""
    values = dict(kwargs)
    for attr in PFILE_ATTRS:
        values.setdefault(attr, None)
    return MockPFile(**values)
class FarcyBaseTest(unittest.TestCase):
    """Shared fixture: silences logging and builds offline Farcy instances."""
    def setUp(self):
        # Suppress all log output while tests run.
        logging.disable(logging.CRITICAL)
        self.logger = logging.getLogger('farcy')
    @patch('farcy.objects.get_session')
    def _farcy_instance(self, mock_get_session, config=None):
        # Build a Farcy with the GitHub session patched out so no network
        # access occurs; defaults to a placeholder repository.
        if config is None:
            config = Config(None)
        if config.repository is None:
            config.repository = 'dummy/dummy'
        farcy = Farcy(config)
        self.assertTrue(mock_get_session.called)
        return farcy
class FarcyTest(FarcyBaseTest):
    """Tests for Farcy._compute_pfile_stats and Farcy.get_issues."""
    @patch('farcy.added_lines')
    def test_compute_pfile_stats__added(self, mock_added_lines):
        # A newly added file counts toward added_files/added_lines.
        mock_added_lines.return_value = {13: 10, 15: 20, 18: 100}
        stats = {'added_files': 10, 'added_lines': 10}
        actual = self._farcy_instance()._compute_pfile_stats(
            mockpfile(patch='', status='added'), stats)
        self.assertTrue(mock_added_lines.called)
        self.assertEqual(mock_added_lines.return_value, actual)
        self.assertEqual({'added_files': 11, 'added_lines': 13}, stats)
    def test_compute_pfile_stats__excluded(self):
        # Files matching exclude_paths are skipped and counted as blacklisted.
        stats = {'blacklisted_files': 10}
        config = Config(None)
        config.exclude_paths = ['tmp/*']
        farcy = self._farcy_instance(config=config)
        self.assertEqual(None, farcy._compute_pfile_stats(
            mockpfile(filename='tmp/foo'), stats))
        self.assertEqual({'blacklisted_files': 11}, stats)
    @patch('farcy.added_lines')
    def test_compute_pfile_stats__modified(self, mock_added_lines):
        # 'modified' and 'renamed' statuses both count as modifications.
        mock_added_lines.return_value = {1: 1, 2: 2}
        for status in ['modified', 'renamed']:
            stats = {'modified_files': 10, 'modified_lines': 10}
            actual = self._farcy_instance()._compute_pfile_stats(
                mockpfile(patch='', status=status), stats)
            self.assertTrue(mock_added_lines.called)
            mock_added_lines.reset_mock()
            self.assertEqual(mock_added_lines.return_value, actual)
            self.assertEqual({'modified_files': 11, 'modified_lines': 12},
                             stats)
    def test_compute_pfile_stats__no_change(self):
        # An added file without a patch contributes no changed lines.
        stats = {'unchanged_files': 10}
        self.assertEqual(None, self._farcy_instance()._compute_pfile_stats(
            mockpfile(status='added'), stats))
        self.assertEqual({'unchanged_files': 11}, stats)
    def test_compute_pfile_stats__removed(self):
        # Removed files are tallied even when their path matches an exclusion.
        stats = {'deleted_files': 10}
        config = Config(None)
        config.exclude_paths = ['tmp/*']
        farcy = self._farcy_instance(config=config)
        self.assertEqual(None, farcy._compute_pfile_stats(
            mockpfile(filename='a/tmp/b', status='removed'), stats))
        self.assertEqual({'deleted_files': 11}, stats)
    def test_compute_pfile_stats__unexpected_status(self):
        # Unknown statuses are logged at critical level and ignored.
        stats = {}
        with patch.object(self.logger, 'critical') as mock_critical:
            self.assertEqual(None, self._farcy_instance()._compute_pfile_stats(
                mockpfile(patch='', status='foobar'), stats))
            self.assertTrue(mock_critical.called)
        self.assertEqual({}, stats)
    def test_get_issues__simple_module(self):
        # A trivially clean Python file yields no issues.
        farcy = self._farcy_instance()
        pfile = mockpfile(contents=lambda: MockInfo(decoded=b'"""A."""\n'),
                          filename='a.py')
        pr = MagicMock(number=444, state='open', user=Struct(login='Dummy'))
        self.assertEqual({}, farcy.get_issues(pfile, pr))
    @patch('farcy.handlers.Rubocop.prepare_directory')
    def test_get_issues__leverage_file_path_in_repo(self,
                                                    mock_prepare_directory):
        # Ruby files are routed through Rubocop with the repo path prepared.
        farcy = self._farcy_instance()
        pfile = mockpfile(
            contents=lambda: MockInfo(
                decoded=b'# frozen_string_literal: true\n'
            ),
            filename='app/controllers/a.rb'
        )
        pr = MagicMock(number=444, state='open', user=Struct(login='Dummy'))
        self.assertEqual({}, farcy.get_issues(pfile, pr))
        self.assertTrue(mock_prepare_directory.called)
    def test_get_issues__no_handlers(self):
        # Files with no matching handler produce an empty issue dict.
        farcy = self._farcy_instance()
        self.assertEqual({}, farcy.get_issues(mockpfile(filename=''), None))
class FarcyHandlePrTest(FarcyBaseTest):
    """Tests for Farcy.handle_pr: status reporting, commenting, filtering."""

    # A review comment shaped like one farcy previously left; used to
    # exercise the existing-comment/limit handling paths.
    DUMMY_COMMENT = Struct(body='_[farcy \n* MatchingError', path='DummyFile',
                           position=16)

    @patch('farcy.Farcy.get_issues')
    @patch('farcy.added_lines')
    def test_handle_pr__exception_from_get_issues(self, mock_added_lines,
                                                  mock_get_issues):
        """A handler exception should produce an 'error' commit status."""
        def side_effect():
            raise Exception()
        mock_added_lines.return_value = {16: 16}
        mock_get_issues.side_effect = side_effect
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        pfile = mockpfile(patch='', status='added')
        pr.files.return_value = [pfile]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'info') as mock_info:
            farcy.handle_pr(pr)
        assert_calls(mock_info, call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: encountered an exception in '
                          'handler. Check log.'))
        mock_added_lines.assert_called_with('')
        mock_get_issues.assert_called_once_with(pfile, pr)
        # First a 'pending' status is set, then 'error' after the exception.
        assert_calls(farcy.repo.create_status,
                     call('dummy', 'pending', context='farcy',
                          description='started investigation'),
                     call('dummy', 'error', context='farcy',
                          description=('encountered an exception in handler. '
                                       'Check log.')))

    def test_handle_pr__pr_closed(self):
        """A closed PR is refreshed once and then skipped."""
        pr = MagicMock(number=180, state='closed')
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'debug') as mock_debug:
            farcy.handle_pr(pr)
        mock_debug.assert_called_with(
            'Skipping PR#180: invalid state (closed)')
        pr.refresh.assert_called_with()

    @patch('farcy.Farcy.get_issues')
    @patch('farcy.added_lines')
    def test_handle_pr__single_failure(self, mock_added_lines,
                                       mock_get_issues):
        """One issue on an added line yields exactly one review comment."""
        mock_added_lines.return_value = {16: 16}
        mock_get_issues.return_value = {16: ['Dummy Failure']}
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        pfile = mockpfile(filename='DummyFile', patch='', status='added')
        pr.files.return_value = [pfile]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'info') as mock_info:
            farcy.handle_pr(pr)
        assert_calls(mock_info,
                     call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: found 1 issue'))
        mock_added_lines.assert_called_with('')
        mock_get_issues.assert_called_once_with(pfile, pr)
        assert_calls(pr.create_review_comment, call(
            '{0}\n* Dummy Failure'.format(FARCY_COMMENT_START),
            'dummy', 'DummyFile', 16))
        assert_status(farcy, failures=1)

    @patch('farcy.Farcy.get_issues')
    @patch('farcy.added_lines')
    def test_handle_pr__single_failure__limit_exceeded(self, mock_added_lines,
                                                       mock_get_issues):
        """With 128 existing farcy comments, the new issue is skipped."""
        mock_added_lines.return_value = {16: 16}
        mock_get_issues.return_value = {16: ['Dummy Failure']}
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        pr.review_comments.return_value = [self.DUMMY_COMMENT] * 128
        pfile = mockpfile(filename='DummyFile', patch='', status='added')
        pr.files.return_value = [pfile]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'debug') as mock_debug:
            with patch.object(self.logger, 'info') as mock_info:
                farcy.handle_pr(pr)
        assert_calls(mock_info,
                     call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: found 1 issue'))
        assert_calls(mock_debug,
                     call('PR#180 added_files: 1'),
                     call('PR#180 added_lines: 1'),
                     call('PR#180 issues: 1'),
                     call('PR#180 skipped_issues: 1'))
        mock_added_lines.assert_called_with('')
        mock_get_issues.assert_called_once_with(pfile, pr)
        # assert_calls with no expected calls: no new comment may be created.
        assert_calls(pr.create_review_comment)
        assert_status(farcy, failures=1)

    @patch('farcy.Farcy.get_issues')
    @patch('farcy.added_lines')
    def test_handle_pr__success(self, mock_added_lines, mock_get_issues):
        """Issues on lines that were not added do not block approval."""
        mock_added_lines.return_value = {16: 16}
        mock_get_issues.return_value = {3: ['Failure on non-modified line.']}
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        pfile = mockpfile(patch='', status='added')
        pr.files.return_value = [pfile]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'info') as mock_info:
            farcy.handle_pr(pr)
        assert_calls(mock_info,
                     call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: approves! Dummy Approval!'))
        mock_added_lines.assert_called_with('')
        mock_get_issues.assert_called_once_with(pfile, pr)
        assert_calls(pr.create_review_comment)
        assert_status(farcy)

    def test_handle_pr__success_without_any_changed_files(self):
        """A PR whose only file has no patch content is approved."""
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        pr.files.return_value = [mockpfile()]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'info') as mock_info:
            farcy.handle_pr(pr)
        assert_calls(mock_info,
                     call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: approves! Dummy Approval!'))
        assert_status(farcy)

    def test_handle_pr__success_without_files(self):
        """A PR with no files at all is approved."""
        pr = MagicMock(number=180, state='open', user=Struct(login='Dummy'))
        pr.commits.return_value = [Struct(sha='dummy')]
        farcy = self._farcy_instance()
        with patch.object(self.logger, 'info') as mock_info:
            farcy.handle_pr(pr)
        assert_calls(mock_info,
                     call('Handling PR#180 by Dummy'),
                     call('PR#180 STATUS: approves! Dummy Approval!'))
        assert_status(farcy)

    def test_handle_pr__user_blacklisted(self):
        """A PR from an excluded user is skipped (note the config lists
        'dummy' while the author is 'Dummy', so matching ignores case)."""
        pr = Struct(number=180, user=Struct(login='Dummy'))
        farcy = self._farcy_instance()
        farcy.config.exclude_users = ['dummy']
        with patch.object(self.logger, 'debug') as mock_debug:
            farcy.handle_pr(pr)
        mock_debug.assert_called_with(
            'Skipping PR#180: Dummy is not allowed')

    def test_handle_pr__user_not_whitelisted(self):
        """A PR from a user outside limit_users is skipped."""
        pr = Struct(number=180, user=Struct(login='Dummy'))
        farcy = self._farcy_instance()
        farcy.config.limit_users = ['bboe']
        with patch.object(self.logger, 'debug') as mock_debug:
            farcy.handle_pr(pr)
        mock_debug.assert_called_with(
            'Skipping PR#180: Dummy is not allowed')
class FarcyEventCallbackTest(FarcyBaseTest):
    """Tests for the event-type callbacks PullRequestEvent and PushEvent."""

    @patch('farcy.Farcy.handle_pr')
    def test_PullRequestEvent__closed_existing(self, mock_handle_pr):
        """Closing a tracked PR removes it from open_prs without handling."""
        instance = self._farcy_instance()
        instance.open_prs = {'DUMMY_BRANCH': None}
        pull_request = Struct(head={'ref': 'DUMMY_BRANCH'}, number=1337)
        event = Struct(payload={'action': 'closed',
                                'pull_request': pull_request})
        instance.PullRequestEvent(event)
        self.assertEqual({}, instance.open_prs)
        self.assertFalse(mock_handle_pr.called)

    @patch('farcy.Farcy.handle_pr')
    def test_PullRequestEvent__closed_non_existing(self, mock_handle_pr):
        """Closing an untracked PR logs a warning and changes nothing."""
        instance = self._farcy_instance()
        instance.log = MagicMock()
        self.assertEqual({}, instance.open_prs)
        pull_request = Struct(head={'ref': 'DUMMY_BRANCH'}, number=1337)
        event = Struct(payload={'action': 'closed',
                                'pull_request': pull_request})
        instance.PullRequestEvent(event)
        self.assertEqual({}, instance.open_prs)
        self.assertFalse(mock_handle_pr.called)
        self.assertTrue(instance.log.warning.called)

    @patch('farcy.Farcy.handle_pr')
    def test_PullRequestEvent__opened(self, mock_handle_pr):
        """Opening a PR tracks it by branch name and handles it."""
        instance = self._farcy_instance()
        self.assertEqual({}, instance.open_prs)
        pull_request = Struct(head={'ref': 'DUMMY_BRANCH'}, number=1337)
        event = Struct(payload={'action': 'opened',
                                'pull_request': pull_request})
        instance.PullRequestEvent(event)
        self.assertEqual({'DUMMY_BRANCH': pull_request}, instance.open_prs)
        self.assertTrue(mock_handle_pr.called)

    @patch('farcy.Farcy.handle_pr')
    def test_PullRequestEvent__reopened(self, mock_handle_pr):
        """Reopening tracks the PR but does not immediately handle it."""
        instance = self._farcy_instance()
        self.assertEqual({}, instance.open_prs)
        pull_request = Struct(head={'ref': 'DUMMY_BRANCH'}, number=1337)
        event = Struct(payload={'action': 'reopened',
                                'pull_request': pull_request})
        instance.PullRequestEvent(event)
        self.assertEqual({'DUMMY_BRANCH': pull_request}, instance.open_prs)
        self.assertFalse(mock_handle_pr.called)

    @patch('farcy.Farcy.handle_pr')
    def test_PushEvent__pr_does_not_exist(self, mock_handle_pr):
        """A push to a branch with no tracked PR is ignored."""
        event = Struct(payload={'ref': 'refs/heads/DUMMY_BRANCH'})
        self._farcy_instance().PushEvent(event)
        self.assertFalse(mock_handle_pr.called)

    @patch('farcy.Farcy.handle_pr')
    def test_PushEvent__pr_exists(self, mock_handle_pr):
        """A push to a branch with a tracked PR re-handles that PR."""
        instance = self._farcy_instance()
        instance.open_prs['DUMMY_BRANCH'] = 0xDEADBEEF
        instance.PushEvent(Struct(payload={'ref': 'refs/heads/DUMMY_BRANCH'}))
        mock_handle_pr.assert_called_with(0xDEADBEEF)
class FarcyEventTest(FarcyBaseTest):
    """Tests for the event polling machinery (_event_loop, events, run)."""

    def test_event_loop__ignore_events_before_start(self):
        """Events created before the instance started are dropped."""
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=1)
        farcy = self._farcy_instance()  # Must have its time set second.
        events = []
        newest_id = farcy._event_loop([event], events)
        self.assertEqual(None, newest_id)
        self.assertEqual([], events)

    def test_event_loop__ignore_old_events(self):
        """Events whose id is not newer than last_event_id are dropped."""
        farcy = self._farcy_instance()  # Must have its time set first.
        farcy.last_event_id = 1
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=1)
        events = []
        newest_id = farcy._event_loop([event], events)
        self.assertEqual(None, newest_id)
        self.assertEqual([], events)

    def test_event_loop__multiple_events(self):
        """Unsupported event types are filtered; newest id is reported."""
        farcy = self._farcy_instance()  # Must have its time set first.
        event_1 = Struct(actor=Struct(login=None), type='PushEvent',
                         created_at=datetime.now(UTC()), id=1)
        event_2 = Struct(actor=Struct(login=None), type='ForkEvent',
                         created_at=datetime.now(UTC()), id=2)
        event_3 = Struct(actor=Struct(login=None), type='PullRequestEvent',
                         created_at=datetime.now(UTC()), id=3)
        event_4 = Struct(actor=Struct(login=None), type='MemberEvent',
                         created_at=datetime.now(UTC()), id=4)
        events = []
        newest_id = farcy._event_loop([event_4, event_3, event_2, event_1],
                                      events)
        self.assertEqual(4, newest_id)
        # Fork and Member events are filtered out of the collected list.
        self.assertEqual([event_1, event_3], events)

    def test_event_loop__no_events(self):
        """An empty page yields no newest id and collects nothing."""
        events = []
        newest_id = self._farcy_instance()._event_loop([], events)
        self.assertEqual(None, newest_id)
        self.assertEqual([], events)

    def test_event_loop__single_event(self):
        """A single fresh event is collected and its id reported."""
        farcy = self._farcy_instance()  # Must have its time set first.
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=0xDEADBEEF)
        events = []
        newest_id = farcy._event_loop([event], events)
        self.assertEqual(0xDEADBEEF, newest_id)
        self.assertEqual([event], events)

    def test_events__end_loop(self):
        """The events generator stops once `running` is set to False."""
        farcy = self._farcy_instance()
        self.assertEqual(None, farcy.last_event_id)
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=0xDEADBEEF)
        farcy.repo.events.return_value = Struct(
            [event], etag='DUMMY_ETAG',
            last_response=Struct(headers={'X-Poll-Interval': 100}))
        event_itr = farcy.events()
        self.assertEqual(event, next(event_itr))
        farcy.running = False
        self.assertRaises(StopIteration, next, event_itr)

    @patch('farcy.Farcy._event_loop')
    @patch('time.sleep')
    def test_events__network_exception(self, mock_sleep, mock_event_loop):
        """A transient ConnectionError is retried after a sleep."""
        farcy = self._farcy_instance()
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=0xDEADBEEF)
        call_count = [0]

        def side_effect(_, events):
            # First call simulates a network failure; the retry succeeds.
            call_count[0] += 1
            if call_count[0] == 1:
                raise ConnectionError('Foo')
            else:
                events.append(event)
                return event.id
        mock_event_loop.side_effect = side_effect
        self.assertEqual(event, next(farcy.events()))
        self.assertEqual(0xDEADBEEF, farcy.last_event_id)
        self.assertTrue(mock_sleep.called_with(1))

    def test_events__prevent_duplicate_calls(self):
        """Starting a second events generator raises FarcyException."""
        farcy = self._farcy_instance()
        self.assertEqual(None, farcy.last_event_id)
        event = Struct(actor=Struct(login=None), type='PushEvent',
                       created_at=datetime.now(UTC()), id=0xDEADBEEF)
        farcy.repo.events.return_value = Struct([event], etag='DUMMY_ETAG')
        self.assertEqual(event, next(farcy.events()))
        self.assertEqual(0xDEADBEEF, farcy.last_event_id)
        self.assertRaises(FarcyException, next, farcy.events())

    @patch('farcy.Farcy.events')
    @patch('farcy.Farcy.PushEvent')
    def test_run(self, mock_callback, mock_events):
        """run() dispatches each event to its type-named callback in order."""
        event1 = Struct(type='PushEvent', uniq=1)
        event2 = Struct(type='PushEvent', uniq=2)
        self.assertEqual(event1, event1)
        self.assertNotEqual(event1, event2)
        mock_events.return_value = [event1, event2]
        self._farcy_instance().run()
        assert_calls(mock_callback, call(event1), call(event2))
        mock_callback.assert_called_with(event2)

    @patch('farcy.Farcy.handle_pr')
    def test_run__single_pull_request(self, mock_handle_pr):
        """With pull_requests configured, run() force-handles that PR."""
        farcy = self._farcy_instance()
        farcy.repo.pull_request.side_effect = lambda x: x
        farcy.config.pull_requests = '418'
        farcy.run()
        assert_calls(mock_handle_pr, call(418, force=True))

    @patch('farcy.Farcy.handle_pr')
    def test_run__multiple_pull_requests(self, mock_handle_pr):
        """Every configured PR number is force-handled."""
        farcy = self._farcy_instance()
        farcy.repo.pull_request.side_effect = lambda x: x
        farcy.config.pull_requests = '360,180,720'
        farcy.run()
        assert_calls(mock_handle_pr, call(180, force=True),
                     call(360, force=True), call(720, force=True))
class MainTest(unittest.TestCase):
    """Tests for main()'s exit codes under various failure modes."""

    @patch('farcy.Farcy')
    @patch('farcy.Config')
    def test_main__farcy_exception_in_run(self, mock_config, mock_farcy):
        """A FarcyException raised from run() yields exit code 1."""
        def side_effect():
            raise FarcyException
        mock_farcy.return_value.run.side_effect = side_effect
        self.assertEqual(1, main())

    @patch('farcy.Farcy')
    @patch('farcy.Config')
    def test_main__keyboard_interrupt_in_farcy(self, mock_config, mock_farcy):
        """Ctrl-C while constructing Farcy yields exit code 0."""
        def side_effect(_):
            raise KeyboardInterrupt
        mock_farcy.side_effect = side_effect
        self.assertEqual(0, main())

    @patch('farcy.Farcy')
    @patch('farcy.Config')
    def test_main__keyboard_interrupt_in_run(self, mock_config, mock_farcy):
        """Ctrl-C during run() yields exit code 0."""
        def side_effect():
            raise KeyboardInterrupt
        mock_farcy.return_value.run.side_effect = side_effect
        self.assertEqual(0, main())

    @patch('farcy.Config')
    def test_main__no_repo_specified(self, mock_config):
        """A missing repository setting yields exit code 2."""
        mock_config.return_value.repository = None
        self.assertEqual(2, main())

    @patch('farcy.Farcy')
    @patch('farcy.Config')
    def test_main__no_exception(self, mock_config, mock_farcy):
        """The happy path builds Config and Farcy, runs, and returns None."""
        self.assertEqual(None, main())
        self.assertTrue(mock_config.called)
        self.assertTrue(mock_farcy.called)
        self.assertTrue(mock_farcy.return_value.run.called)
class NoHandlerDebugFactory(unittest.TestCase):
    """Tests for no_handler_debug_factory's per-extension log throttling."""

    def setUp(self):
        # The factory-produced function receives a Farcy-like object and
        # logs through its `log` attribute; a MagicMock suffices.
        self.farcy = MagicMock()

    def test_no_handler_factory__cache_response(self):
        """Within the TTL, a repeated extension is only logged once."""
        func = no_handler_debug_factory(1)
        func(self.farcy, '.js')
        func(self.farcy, '.js')
        self.farcy.log.debug.assert_called_once_with(
            'No handlers for extension .js')

    def test_no_handler_factory__output_when_cache_expired(self):
        """With a zero TTL, every call logs again."""
        func = no_handler_debug_factory(0)
        func(self.farcy, '.js')
        func(self.farcy, '.js')
        calls = [call('No handlers for extension .js')] * 2
        assert_calls(self.farcy.log.debug, *calls)

    def test_no_handler_factory__multiple_calls(self):
        """Distinct extensions are throttled independently."""
        func = no_handler_debug_factory(1)
        func(self.farcy, '.js')
        func(self.farcy, '.css')
        assert_calls(self.farcy.log.debug,
                     call('No handlers for extension .js'),
                     call('No handlers for extension .css'))
|
#!/bin/sh
# Create Default RabbitMQ setup.
#
# Starts rabbitmq-server in the foreground while a background subshell
# waits for the broker to accept rabbitmqctl connections and then
# provisions the default user/vhost from the RABBIT_* environment
# variables.
#
# Fixes over the previous version:
#  * "$@" and all variable expansions are quoted so values containing
#    whitespace or glob characters are passed through intact.
#  * Comments no longer sit inside backslash-continued command lines,
#    which silently commented out the line-continuation itself.
(
  # Give the server time to start before provisioning.
  sleep 10

  # Create user: rabbitmqctl add_user <username> <password>
  rabbitmqctl add_user "${RABBIT_USERNAME}" "${RABBIT_PASSWORD}"

  # Set user rights: rabbitmqctl set_user_tags <username> <tag>
  rabbitmqctl set_user_tags "${RABBIT_USERNAME}" administrator

  # Create vhost: rabbitmqctl add_vhost <vhostname>
  rabbitmqctl add_vhost "/${RABBIT_VHOST}"

  # Set vhost permissions:
  # rabbitmqctl set_permissions -p <vhostname> <username> ".*" ".*" ".*"
  rabbitmqctl set_permissions -p "/${RABBIT_VHOST}" "${RABBIT_USERNAME}" ".*" ".*" ".*"

  #rabbitmq-plugins enable rabbitmq_stomp
) &

rabbitmq-server "$@"
|
/** Top-level package containing the Feature Actions plugin classes. */
package io.opensphere.featureactions;
|
def remove_space(string):
    """Collapse runs of whitespace in ``string`` into single spaces.

    Leading and trailing whitespace is dropped as well, because
    ``str.split()`` with no separator discards empty fields.
    """
    return ' '.join(string.split())
# testing: demonstrate whitespace collapsing on a sample sentence.
input_string = "This string has extra whitespace."
result = remove_space(input_string)
print(result)  # Output: "This string has extra whitespace."
import Skillbar from './Skillbar';
export default Skillbar;
|
package Dao;
import models.Word;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.sql2o.Connection;
import org.sql2o.Sql2o;
import static org.junit.Assert.*;
/**
* Created by Guest on 1/31/18.
*/
public class Sql2oWordDaoTest {
private Connection connection;
private Sql2oWordDao wordDao;
@Before
public void setUp() throws Exception {
String connectionString = "jdbc:h2:mem:testing;INIT=RUNSCRIPT from 'classpath:db/create.sql'";
Sql2o sql2o = new Sql2o(connectionString, "", "");
wordDao = new Sql2oWordDao(sql2o);
connection = sql2o.open();
}
@After
public void tearDown() throws Exception {
connection.close();
}
public Word setUpNewWord() {
return new Word("Poop");
}
public Word setUpNewWord1() {
return new Word("Pee");
}
public Word setUpNewWord2() {
return new Word("fart");
}
@Test
public void add() throws Exception {
Word word = setUpNewWord();
int originalId = word.getId();
wordDao.add(word);
assertNotEquals(originalId, word.getId());
}
@Test
public void createRandomWord() throws Exception {
wordDao.createRandomWord();
wordDao.createRandomWord();
assertNotEquals(wordDao.findById(1).getName(), wordDao.findById(2).getName());
System.out.println(wordDao.findById(1).getName());
System.out.println(wordDao.findById(2).getName());
}
@Test
public void findById() throws Exception {
Word word = setUpNewWord();
Word word1 = setUpNewWord1();
wordDao.add(word);
wordDao.add(word1);
assertEquals(word, wordDao.findById(word.getId()));
assertEquals(word1, wordDao.findById(word1.getId()));
}
@Test
public void getAll() throws Exception {
Word word = setUpNewWord();
Word word1 = setUpNewWord1();
wordDao.add(word);
wordDao.add(word1);
assertTrue(wordDao.getAll().contains(word));
assertTrue(wordDao.getAll().contains(word1));
}
@Test
public void update() throws Exception {
Word word = setUpNewWord();
Word word1 = setUpNewWord1();
wordDao.add(word);
wordDao.add(word1);
String originalName = word.getName();
wordDao.update(word.getId(), "New Zealand");
assertNotEquals(originalName, wordDao.findById(word.getId()).getName());
}
@Test
public void deleteById() throws Exception {
Word word = setUpNewWord();
Word word1 = setUpNewWord1();
wordDao.add(word);
wordDao.add(word1);
wordDao.deleteById(word.getId());
assertFalse(wordDao.getAll().contains(word));
assertTrue(wordDao.getAll().contains(word1));
}
@Test
public void deleteAll() throws Exception {
Word word = setUpNewWord();
Word word1 = setUpNewWord1();
wordDao.add(word);
wordDao.add(word1);
wordDao.deleteAll();
assertFalse(wordDao.getAll().contains(word));
assertFalse(wordDao.getAll().contains(word1));
}
} |
package benchmarks.CLEVER.Sub.Eq;
/**
 * CLEVER subtraction-equivalence benchmark: main() returns the result of
 * subtracting fixed operands via the private helper.
 */
public class newV {

    /** Returns {@code b - a}. */
    private int foo(int a, int b) {
        return b - a;
    }

    int main() {
        return foo(900, 5);
    }
}
|
package cn.home1.cloud.config.server.util;
import static lombok.AccessLevel.PRIVATE;
import lombok.NoArgsConstructor;
@NoArgsConstructor(access = PRIVATE)
public abstract class Consts {
  // Root directory for config-server data under the current user's home.
  public static final String DATA_DIRECTORY = System.getProperty("user.home") + "/data/config-server";
  // Conventional filename for environment-variable definition files.
  public static final String DOT_ENV = ".env";
  // Prefix for per-profile privilege entries — presumably matched against
  // environment keys; verify against the consumers of this constant.
  public static final String PRIVILEGE_ENV_PROFILE_ = "PRIVILEGE_ENV_PROFILE_";
  // Wildcard form covering any profile under the privilege prefix.
  public static final String PRIVILEGE_ENV_PROFILE_WILDCARD = PRIVILEGE_ENV_PROFILE_ + "*";
}
|
/* add_numbers: return the sum of the two given integers.
 * Note: overflow of signed int is undefined behavior, as in the original. */
int add_numbers(int a, int b)
{
    int sum = a + b;
    return sum;
}
import { getFlatLayerMock } from './sketch-layer.component.mock';
import { SketchData } from './sketch.service';
/** Builds a minimal SketchData fixture: one empty preview and one flat page. */
export function getSketchDataMock() {
  const mockPages = [getFlatLayerMock(9)];
  const mockData = {
    previews: [{}],
    pages: mockPages
  };
  return mockData as SketchData;
}
|
<gh_stars>1-10
const httpStatus = require('http-status');
const ElasticsearchService = require('../services/elasticsearch.service');
const APICatalogService = require('../services/api-catalog.service');
/**
* Namespace index search
* see https://www.elastic.co/guide/en/elasticsearch/client/javascript-api/current/api-reference.html#api-search
* @param req
* @param res
* @param next
* @returns {*}
*/
exports.searchByNamespace = (req, res, next) => {
const namespace = req.params.namespace;
const searchText = req.query.searchText;
const timestamp = req.query.timestamp;
const rangeType = req.query.rangeType;
const order = req.query.order;
const lowerBoundTimestamp = req.query.lowerBoundTimestamp;
const upperBoundTimestamp = req.query.upperBoundTimestamp;
ElasticsearchService.searchByNamespace(namespace, searchText, timestamp, rangeType, order, lowerBoundTimestamp, upperBoundTimestamp)
.then(logs => {
res.status(httpStatus.OK).json({
result: true,
data: logs
});
})
.catch(() => next(new Error('Error getting ES logs')));
};
exports.getServiceGroups = (req, res, next) => {
const environment = req.params.environment;
APICatalogService.getServiceGroups(environment)
.then(groups => {
res.status(httpStatus.OK).json({
result: true,
data: groups
});
})
.catch(() => next(new Error('Error getting services groups')));
};
exports.searchAPICatalog = (req, res, next) => {
const environment = req.params.environment;
const serviceGroup = req.query.serviceGroup;
const searchTerms = req.query.searchTerms;
APICatalogService.searchAPICatalog(environment, serviceGroup, searchTerms)
.then(apis => {
res.status(httpStatus.OK).json({
result: true,
data: apis
});
})
.catch(() => next(new Error('Error searching API Catalog')));
};
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This script should not be run directly but sourced by the other
# scripts (e.g. sysroot-creator-trusty.sh). Its up to the parent scripts
# to define certain environment variables: e.g.
# DISTRO=ubuntu
# DIST=trusty
# APT_REPO=http://archive.ubuntu.com/ubuntu
# KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
# DEBIAN_PACKAGES="gcc libz libssl"
#@ This script builds a Debian sysroot images for building Google Chrome.
#@
#@ Generally this script is invoked as:
#@ sysroot-creator-<flavour>.sh <mode> <args>*
#@ Available modes are shown below.
#@
#@ List of modes:
######################################################################
# Config
######################################################################
# Fail fast: abort on unset variables and on any command failure.
set -o nounset
set -o errexit
SCRIPT_DIR=$(cd $(dirname $0) && pwd)
# The wrapping sysroot-creator-<flavour>.sh script must define these.
if [ -z "${DIST:-}" ]; then
  echo "error: DIST not defined"
  exit 1
fi
if [ -z "${APT_REPO:-}" ]; then
  echo "error: APT_REPO not defined"
  exit 1
fi
if [ -z "${KEYRING_FILE:-}" ]; then
  echo "error: KEYRING_FILE not defined"
  exit 1
fi
if [ -z "${DEBIAN_PACKAGES:-}" ]; then
  echo "error: DEBIAN_PACKAGES not defined"
  exit 1
fi
readonly REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
readonly REQUIRED_TOOLS="wget"
######################################################################
# Package Config
######################################################################
# Compression extension of the repo's Packages index (bz2 or xz).
PACKAGES_EXT=${PACKAGES_EXT:-bz2}
readonly RELEASE_FILE="Release"
readonly RELEASE_FILE_GPG="Release.gpg"
readonly RELEASE_LIST="${REPO_BASEDIR}/${RELEASE_FILE}"
readonly RELEASE_LIST_GPG="${REPO_BASEDIR}/${RELEASE_FILE_GPG}"
# Per-architecture Packages index paths (relative) and full repo URLs.
readonly PACKAGE_FILE_AMD64="main/binary-amd64/Packages.${PACKAGES_EXT}"
readonly PACKAGE_FILE_I386="main/binary-i386/Packages.${PACKAGES_EXT}"
readonly PACKAGE_FILE_ARM="main/binary-armhf/Packages.${PACKAGES_EXT}"
readonly PACKAGE_FILE_ARM64="main/binary-arm64/Packages.${PACKAGES_EXT}"
readonly PACKAGE_FILE_MIPS="main/binary-mipsel/Packages.${PACKAGES_EXT}"
readonly PACKAGE_LIST_AMD64="${REPO_BASEDIR}/${PACKAGE_FILE_AMD64}"
readonly PACKAGE_LIST_I386="${REPO_BASEDIR}/${PACKAGE_FILE_I386}"
readonly PACKAGE_LIST_ARM="${REPO_BASEDIR}/${PACKAGE_FILE_ARM}"
readonly PACKAGE_LIST_ARM64="${REPO_BASEDIR}/${PACKAGE_FILE_ARM64}"
readonly PACKAGE_LIST_MIPS="${REPO_BASEDIR}/${PACKAGE_FILE_MIPS}"
# Checked-in per-architecture dependency lists.
readonly DEBIAN_DEP_LIST_AMD64="packagelist.${DIST}.amd64"
readonly DEBIAN_DEP_LIST_I386="packagelist.${DIST}.i386"
readonly DEBIAN_DEP_LIST_ARM="packagelist.${DIST}.arm"
readonly DEBIAN_DEP_LIST_ARM64="packagelist.${DIST}.arm64"
readonly DEBIAN_DEP_LIST_MIPS="packagelist.${DIST}.mipsel"
######################################################################
# Helper
######################################################################
# Print a message framed by heavy separator lines.
# Quoting "$*" prevents glob expansion of message text (e.g. a literal '*').
Banner() {
  echo "######################################################################"
  echo "$*"
  echo "######################################################################"
}

# Print a message framed by light separator lines.
SubBanner() {
  echo "----------------------------------------------------------------------"
  echo "$*"
  echo "----------------------------------------------------------------------"
}

# Print the '#@'-prefixed usage text embedded at the top of this script.
# grep -E replaces the deprecated egrep alias; behavior is identical.
Usage() {
  grep -E "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
}
# DownloadOrCopy <source> <dest>
# Fetches <source> via wget when it is an http:// URL (NOTE(review): https
# URLs fall through to the copy branch — confirm whether that is intended),
# otherwise copies it locally. Existing destinations are left untouched.
DownloadOrCopy() {
  if [ -f "$2" ] ; then
    echo "$2 already in place"
    return
  fi

  HTTP=0
  echo "$1" | grep -qs ^http:// && HTTP=1

  if [ "$HTTP" = "1" ]; then
    SubBanner "downloading from $1 -> $2"
    # Download to a temporary name so an interrupted fetch never leaves a
    # truncated file at the final destination. "$2" is quoted to survive
    # paths containing whitespace (was unquoted before).
    wget "$1" -O "${2}.partial"
    mv "${2}.partial" "$2"
  else
    SubBanner "copying from $1"
    cp "$1" "$2"
  fi
}
SetEnvironmentVariables() {
  # Derives ARCH and ARCH_LOWER from the name in $1, which must end with
  # one of: Amd64, I386, Mips, ARM, ARM64. Exits if none match.
  ARCH=""
  echo $1 | grep -qs Amd64$ && ARCH=AMD64
  if [ -z "$ARCH" ]; then
    echo $1 | grep -qs I386$ && ARCH=I386
  fi
  if [ -z "$ARCH" ]; then
    echo $1 | grep -qs Mips$ && ARCH=MIPS
  fi
  if [ -z "$ARCH" ]; then
    echo $1 | grep -qs ARM$ && ARCH=ARM
  fi
  if [ -z "$ARCH" ]; then
    echo $1 | grep -qs ARM64$ && ARCH=ARM64
  fi
  if [ -z "${ARCH}" ]; then
    echo "ERROR: Unable to determine architecture based on: $1"
    exit 1
  fi
  # Lowercase form used in file and directory names.
  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
}
# some sanity checks to make sure this script is run from the right place
# with the right tools
SanityCheck() {
  Banner "Sanity Checks"

  # Build artifacts live under the Chromium checkout's out/ directory,
  # three levels above this script.
  local chrome_dir=$(cd "${SCRIPT_DIR}/../../.." && pwd)
  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
  mkdir -p ${BUILD_DIR}
  echo "Using build directory: ${BUILD_DIR}"

  # Verify that all externally required tools are on PATH.
  for tool in ${REQUIRED_TOOLS} ; do
    if ! which ${tool} > /dev/null ; then
      echo "Required binary $tool not found."
      echo "Exiting."
      exit 1
    fi
  done

  # This is where the staging sysroot is.
  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tgz"

  if ! mkdir -p "${INSTALL_ROOT}" ; then
    echo "ERROR: ${INSTALL_ROOT} can't be created."
    exit 1
  fi
}
ChangeDirectory() {
  # Change directory to where this script is. Quoted so a path with
  # whitespace does not word-split.
  cd "${SCRIPT_DIR}"
}

ClearInstallDir() {
  Banner "Clearing dirs in ${INSTALL_ROOT}"
  # ${INSTALL_ROOT:?} aborts if the variable is empty, preventing an
  # accidental 'rm -rf /*'.
  rm -rf "${INSTALL_ROOT:?}"/*
}

CreateTarBall() {
  Banner "Creating tarball ${TARBALL}"
  tar zcf "${TARBALL}" -C "${INSTALL_ROOT}" .
}
ExtractPackageBz2() {
  # Decompress a Packages index ($1) and keep only the fields needed later
  # (Package, Filename, SHA256), writing the result to $2. The compressor
  # is chosen by the PACKAGES_EXT setting (bz2 or xz).
  if [ "${PACKAGES_EXT}" == "bz2" ]; then
    bzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
  else
    xzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
  fi
}
GeneratePackageListAmd64() {
  # Downloads, verifies, and extracts the amd64 Packages index, then writes
  # the resolved package list (with checksums) to $1.
  local output_file="$1"
  local package_list="${BUILD_DIR}/Packages.${DIST}_amd64.${PACKAGES_EXT}"
  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_amd64"
  DownloadOrCopy "${PACKAGE_LIST_AMD64}" "${package_list}"
  VerifyPackageListing "${PACKAGE_FILE_AMD64}" "${package_list}"
  ExtractPackageBz2 "$package_list" "$tmp_package_list"
  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
    ${DEBIAN_PACKAGES_X86} ${DEBIAN_PACKAGES_AMD64}"
}
GeneratePackageListI386() {
  # Downloads, verifies, and extracts the i386 Packages index, then writes
  # the resolved package list (with checksums) to $1.
  local output_file="$1"
  local package_list="${BUILD_DIR}/Packages.${DIST}_i386.${PACKAGES_EXT}"
  # Bug fix: this scratch file previously reused the amd64 name
  # (Packages.${DIST}_amd64), clobbering the amd64 scratch file and risking
  # generating the i386 list from amd64 data.
  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_i386"
  DownloadOrCopy "${PACKAGE_LIST_I386}" "${package_list}"
  VerifyPackageListing "${PACKAGE_FILE_I386}" "${package_list}"
  ExtractPackageBz2 "$package_list" "$tmp_package_list"
  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
    ${DEBIAN_PACKAGES_X86}"
}
GeneratePackageListARM() {
  # Downloads, verifies, and extracts the armhf Packages index, then writes
  # the resolved package list (with checksums) to $1.
  local output_file="$1"
  local package_list="${BUILD_DIR}/Packages.${DIST}_arm.${PACKAGES_EXT}"
  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_arm"
  DownloadOrCopy "${PACKAGE_LIST_ARM}" "${package_list}"
  VerifyPackageListing "${PACKAGE_FILE_ARM}" "${package_list}"
  ExtractPackageBz2 "$package_list" "$tmp_package_list"
  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
    ${DEBIAN_PACKAGES_ARM}"
}
function GeneratePackageListARM64() {
  # Downloads, verifies, and extracts the arm64 Packages index, then writes
  # the resolved package list (with checksums) to $1.
  local output_file="$1"
  local package_list="${BUILD_DIR}/Packages.${DIST}_arm64.${PACKAGES_EXT}"
  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_arm64"
  DownloadOrCopy "${PACKAGE_LIST_ARM64}" "${package_list}"
  VerifyPackageListing "${PACKAGE_FILE_ARM64}" "${package_list}"
  ExtractPackageBz2 "$package_list" "$tmp_package_list"
  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
    ${DEBIAN_PACKAGES_ARM64}"
}
GeneratePackageListMips() {
  # Downloads, verifies, and extracts the mipsel Packages index, then writes
  # the resolved package list (with checksums) to $1.
  local output_file="$1"
  local package_list="${BUILD_DIR}/Packages.${DIST}_mips.${PACKAGES_EXT}"
  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_mips"
  DownloadOrCopy "${PACKAGE_LIST_MIPS}" "${package_list}"
  VerifyPackageListing "${PACKAGE_FILE_MIPS}" "${package_list}"
  ExtractPackageBz2 "$package_list" "$tmp_package_list"
  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}"
}
StripChecksumsFromPackageList() {
  # Removes the trailing " <sha256>" column from every line of $1, in place.
  local package_file="$1"
  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
}
# VerifyPackageFilesMatch <downloaded> <stored>
# Fails with guidance when the freshly downloaded package list differs from
# the checked-in one.
VerifyPackageFilesMatch() {
  local downloaded_package_file="$1"
  local stored_package_file="$2"
  # Bug fix: under 'set -o errexit' a bare 'diff' that finds differences
  # exits the whole script before '$?' can be inspected, so the error
  # message below was unreachable. Running diff inside 'if !' suppresses
  # errexit for the command and restores the intended behavior.
  if ! diff -u "$downloaded_package_file" "$stored_package_file"; then
    echo "ERROR: downloaded package files does not match $2."
    echo "You may need to run UpdatePackageLists."
    exit 1
  fi
}
######################################################################
#
######################################################################
HacksAndPatchesAmd64() {
  # Post-install fixups for the amd64 sysroot staging tree.
  Banner "Misc Hacks & Patches"
  # these are linker scripts with absolute pathnames in them
  # which we rewrite here
  lscripts="${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libpthread.so \
            ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libc.so"

  # Rewrite linker scripts to use paths relative to the sysroot.
  sed -i -e 's|/usr/lib/x86_64-linux-gnu/||g' ${lscripts}
  sed -i -e 's|/lib/x86_64-linux-gnu/||g' ${lscripts}

  # This is for chrome's ./build/linux/pkg-config-wrapper
  # which overwrites PKG_CONFIG_LIBDIR internally
  SubBanner "Move pkgconfig scripts"
  mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
  mv ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/pkgconfig/* \
    ${INSTALL_ROOT}/usr/lib/pkgconfig

  SubBanner "Adding an additional ld.conf include"
  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
  echo /usr/lib/gcc/x86_64-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
  echo /usr/lib >> "$LD_SO_HACK_CONF"
}
HacksAndPatchesI386() {
  # Post-install fixups for the i386 sysroot staging tree.
  Banner "Misc Hacks & Patches"
  # these are linker scripts with absolute pathnames in them
  # which we rewrite here
  lscripts="${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libpthread.so \
            ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libc.so"

  # Rewrite linker scripts to use paths relative to the sysroot.
  sed -i -e 's|/usr/lib/i386-linux-gnu/||g' ${lscripts}
  sed -i -e 's|/lib/i386-linux-gnu/||g' ${lscripts}

  # This is for chrome's ./build/linux/pkg-config-wrapper
  # which overwrites PKG_CONFIG_LIBDIR internally
  SubBanner "Move pkgconfig scripts"
  mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
  mv ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/pkgconfig/* \
    ${INSTALL_ROOT}/usr/lib/pkgconfig

  SubBanner "Adding an additional ld.conf include"
  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
  echo /usr/lib/gcc/i486-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
  echo /usr/lib >> "$LD_SO_HACK_CONF"
}
HacksAndPatchesARM() {
  # Post-install fixups for the armhf sysroot staging tree.
  Banner "Misc Hacks & Patches"
  # these are linker scripts with absolute pathnames in them
  # which we rewrite here
  lscripts="${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libpthread.so \
            ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libc.so"

  # Rewrite linker scripts to use paths relative to the sysroot.
  sed -i -e 's|/usr/lib/arm-linux-gnueabihf/||g' ${lscripts}
  sed -i -e 's|/lib/arm-linux-gnueabihf/||g' ${lscripts}

  # This is for chrome's ./build/linux/pkg-config-wrapper
  # which overwrites PKG_CONFIG_LIBDIR internally
  SubBanner "Move pkgconfig files"
  mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
  mv ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/pkgconfig/* \
    ${INSTALL_ROOT}/usr/lib/pkgconfig
}
# Post-extraction fixups for the ARM64 sysroot: relocatable linker scripts and
# consolidated pkg-config files (mirrors HacksAndPatchesARM for aarch64).
function HacksAndPatchesARM64() {
Banner "Misc Hacks & Patches"
# these are linker scripts with absolute pathnames in them
# which we rewrite here
lscripts="${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/libpthread.so \
${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/libc.so"
# Rewrite linker scripts: drop absolute multiarch prefixes so they resolve
# relative to the sysroot.
sed -i -e 's|/usr/lib/aarch64-linux-gnu/||g' ${lscripts}
sed -i -e 's|/lib/aarch64-linux-gnu/||g' ${lscripts}
# This is for chrome's ./build/linux/pkg-config-wrapper
# which overwrites PKG_CONFIG_LIBDIR internally
SubBanner "Move pkgconfig files"
mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
mv ${INSTALL_ROOT}/usr/lib/aarch64-linux-gnu/pkgconfig/* \
${INSTALL_ROOT}/usr/lib/pkgconfig
}
# Post-extraction fixups for the MIPS (mipsel) sysroot: relocatable linker
# scripts and consolidated pkg-config files (mirrors HacksAndPatchesARM).
HacksAndPatchesMips() {
Banner "Misc Hacks & Patches"
# these are linker scripts with absolute pathnames in them
# which we rewrite here
lscripts="${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libpthread.so \
${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libc.so"
# Rewrite linker scripts: drop absolute multiarch prefixes so they resolve
# relative to the sysroot.
sed -i -e 's|/usr/lib/mipsel-linux-gnu/||g' ${lscripts}
sed -i -e 's|/lib/mipsel-linux-gnu/||g' ${lscripts}
# This is for chrome's ./build/linux/pkg-config-wrapper
# which overwrites PKG_CONFIG_LIBDIR internally
SubBanner "Move pkgconfig files"
mkdir -p ${INSTALL_ROOT}/usr/lib/pkgconfig
mv ${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/pkgconfig/* \
${INSTALL_ROOT}/usr/lib/pkgconfig
}
# Download, verify and extract Debian packages into ${INSTALL_ROOT}.
# Arguments are consumed in pairs: <pool-relative .deb path> <sha256sum>.
# Each package is fetched from ${APT_REPO}/pool/, checksum-verified, then
# unpacked via dpkg; finally /usr/share is pruned down to just pkgconfig.
InstallIntoSysroot() {
Banner "Install Libs And Headers Into Jail"
mkdir -p ${BUILD_DIR}/debian-packages
mkdir -p ${INSTALL_ROOT}
while (( "$#" )); do
# First item of the pair: pool-relative path; cache under BUILD_DIR by basename.
local file="$1"
local package="${BUILD_DIR}/debian-packages/${file##*/}"
shift
# Second item of the pair: expected SHA-256 (64 hex chars) of the .deb.
local sha256sum="$1"
shift
if [ "${#sha256sum}" -ne "64" ]; then
echo "Bad sha256sum from package list"
exit 1
fi
Banner "Installing ${file}"
DownloadOrCopy ${APT_REPO}/pool/${file} ${package}
# -s: reject empty/missing downloads before trying to verify or unpack.
if [ ! -s "${package}" ] ; then
echo
echo "ERROR: bad package ${package}"
exit 1
fi
echo "${sha256sum}  ${package}" | sha256sum --quiet -c
SubBanner "Extracting to ${INSTALL_ROOT}"
# dpkg emits the package contents as a tar stream; unpack it into the jail.
dpkg --fsys-tarfile ${package}\
| tar -xf - -C ${INSTALL_ROOT}
done
# Prune /usr/share, leaving only pkgconfig
for name in ${INSTALL_ROOT}/usr/share/*; do
if [ "${name}" != "${INSTALL_ROOT}/usr/share/pkgconfig" ]; then
rm -r ${name}
fi
done
}
# Rewrite absolute symlink targets inside the jail to relative ones so the
# sysroot can be relocated, then re-scan to verify no link was left dangling.
CleanupJailSymlinks() {
Banner "Jail symlink cleanup"
SAVEDPWD=$(pwd)
cd ${INSTALL_ROOT}
local libdirs="lib usr/lib"
# MIPS images ship no lib64 directory; searching it would make find fail.
if [ "${ARCH}" != "MIPS" ]; then
libdirs+=" lib64"
fi
# Emit "<link path> <target>" per symlink; depth of the link's directory
# determines how many "../" components the relative target needs.
find $libdirs -type l -printf '%p %l\n' | while read link target; do
# skip links with non-absolute paths
echo "${target}" | grep -qs ^/ || continue
echo "${link}: ${target}"
case "${link}" in
usr/lib/gcc/*-linux-gnu/4.*/* | usr/lib/gcc/arm-linux-gnueabihf/4.*/* |\
usr/lib/gcc/aarch64-linux-gnu/4.*/*)
# Relativize the symlink.
ln -snfv "../../../../..${target}" "${link}"
;;
usr/lib/*-linux-gnu/* | usr/lib/arm-linux-gnueabihf/*)
# Relativize the symlink.
ln -snfv "../../..${target}" "${link}"
;;
usr/lib/*)
# Relativize the symlink.
ln -snfv "../..${target}" "${link}"
;;
lib64/* | lib/*)
# Relativize the symlink.
ln -snfv "..${target}" "${link}"
;;
esac
done
find $libdirs -type l -printf '%p %l\n' | while read link target; do
# Make sure we catch new bad links.
# NOTE(review): this "exit 1" runs inside the pipeline's subshell, so it
# aborts the whole script only if errexit (set -e) is enabled earlier in
# the file — confirm the script's top-of-file options.
if [ ! -r "${link}" ]; then
echo "ERROR: FOUND BAD LINK ${link}"
ls -l ${link}
exit 1
fi
done
cd "$SAVEDPWD"
}
#@
#@ BuildSysrootAmd64
#@
#@ Build everything and package it
BuildSysrootAmd64() {
ClearInstallDir
# Scratch file holding "<pool path> <sha256>" pairs for every amd64 package.
local package_file="$BUILD_DIR/package_with_sha256sum_amd64"
GeneratePackageListAmd64 "$package_file"
local files_and_sha256sums="$(cat ${package_file})"
# Strip checksums so the file can be diffed against the checked-in dep list.
StripChecksumsFromPackageList "$package_file"
VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_AMD64"
InstallIntoSysroot ${files_and_sha256sums}
CleanupJailSymlinks
HacksAndPatchesAmd64
CreateTarBall
}
#@
#@ BuildSysrootI386
#@
#@ Build everything and package it
BuildSysrootI386() {
ClearInstallDir
# Scratch file holding "<pool path> <sha256>" pairs for every i386 package.
local package_file="$BUILD_DIR/package_with_sha256sum_i386"
GeneratePackageListI386 "$package_file"
local files_and_sha256sums="$(cat ${package_file})"
# Strip checksums so the file can be diffed against the checked-in dep list.
StripChecksumsFromPackageList "$package_file"
VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_I386"
InstallIntoSysroot ${files_and_sha256sums}
CleanupJailSymlinks
HacksAndPatchesI386
CreateTarBall
}
#@
#@ BuildSysrootARM
#@
#@ Build everything and package it
BuildSysrootARM() {
ClearInstallDir
# Scratch file holding "<pool path> <sha256>" pairs for every arm package.
local package_file="$BUILD_DIR/package_with_sha256sum_arm"
GeneratePackageListARM "$package_file"
local files_and_sha256sums="$(cat ${package_file})"
StripChecksumsFromPackageList "$package_file"
VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM"
# Allow an environment override of the apt repo for ARM downloads.
# NOTE(review): "APR_REPO_ARM" looks like a typo for APT_REPO_ARM (same
# spelling appears in the ARM64/MIPS variants); kept as-is since the
# variable name is part of the script's environment interface — confirm.
APT_REPO=${APR_REPO_ARM:=$APT_REPO}
InstallIntoSysroot ${files_and_sha256sums}
CleanupJailSymlinks
HacksAndPatchesARM
CreateTarBall
}
#@
#@ BuildSysrootARM64
#@
#@ Build everything and package it
function BuildSysrootARM64() {
ClearInstallDir
# Scratch file holding "<pool path> <sha256>" pairs for every arm64 package.
local package_file="$BUILD_DIR/package_with_sha256sum_arm64"
GeneratePackageListARM64 "$package_file"
local files_and_sha256sums="$(cat ${package_file})"
StripChecksumsFromPackageList "$package_file"
VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM64"
# Allow an environment override of the apt repo for ARM64 downloads.
# NOTE(review): "APR_REPO_ARM64" looks like a typo for APT_REPO_ARM64; kept
# as-is since the variable name is part of the environment interface — confirm.
APT_REPO=${APR_REPO_ARM64:=$APT_REPO}
InstallIntoSysroot ${files_and_sha256sums}
CleanupJailSymlinks
HacksAndPatchesARM64
CreateTarBall
}
#@
#@ BuildSysrootMips
#@
#@ Build everything and package it
BuildSysrootMips() {
ClearInstallDir
# Scratch file holding "<pool path> <sha256>" pairs for every mips package.
# Bug fix: this was "package_with_sha256sum_arm" — a copy/paste slip from
# BuildSysrootARM. The file is generated and consumed within this function,
# so the rename is safe and matches the naming used by every sibling.
local package_file="$BUILD_DIR/package_with_sha256sum_mips"
GeneratePackageListMips "$package_file"
local files_and_sha256sums="$(cat ${package_file})"
StripChecksumsFromPackageList "$package_file"
VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS"
# Allow an environment override of the apt repo for MIPS downloads.
# NOTE(review): "APR_REPO_MIPS" looks like a typo for APT_REPO_MIPS; kept
# as-is since the variable name is part of the environment interface — confirm.
APT_REPO=${APR_REPO_MIPS:=$APT_REPO}
InstallIntoSysroot ${files_and_sha256sums}
CleanupJailSymlinks
HacksAndPatchesMips
CreateTarBall
}
#@
#@ BuildSysrootAll
#@
#@ Build sysroot images for all architectures
BuildSysrootAll() {
RunCommand BuildSysrootAmd64
RunCommand BuildSysrootI386
RunCommand BuildSysrootARM
# Bug fix: ARM64 was missing here even though UploadSysrootAll and
# UpdatePackageListsAll both cover it.
RunCommand BuildSysrootARM64
RunCommand BuildSysrootMips
}
# Upload the already-built ${TARBALL} to the Chrome sysroot GCS bucket under
# the given revision. $1: revision string used as the destination directory.
UploadSysroot() {
local rev=$1
if [ -z "${rev}" ]; then
echo "Please specify a revision to upload at."
exit 1
fi
# set -x/-+x: echo the gsutil command so the destination URL is logged.
set -x
gsutil cp -a public-read "${TARBALL}" \
"gs://chrome-linux-sysroot/toolchain/$rev/"
set +x
}
#@
#@ UploadSysrootAmd64 <revision>
#@
# Dispatcher entry point for uploading the amd64 tarball; delegates all the
# work (argument validation and the gsutil copy) to UploadSysroot.
function UploadSysrootAmd64() {
UploadSysroot "$@"
}
#@
#@ UploadSysrootI386 <revision>
#@
# Dispatcher entry point for uploading the i386 tarball; forwards its
# arguments unchanged to UploadSysroot.
function UploadSysrootI386() {
UploadSysroot "$@"
}
#@
#@ UploadSysrootARM <revision>
#@
# Dispatcher entry point for uploading the ARM tarball; forwards its
# arguments unchanged to UploadSysroot.
function UploadSysrootARM() {
UploadSysroot "$@"
}
#@
#@ UploadSysrootARM64 <revision>
#@
# Dispatcher entry point for uploading the ARM64 tarball; forwards its
# arguments unchanged to UploadSysroot.
function UploadSysrootARM64() {
UploadSysroot "$@"
}
#@
#@ UploadSysrootMips <revision>
#@
# Dispatcher entry point for uploading the MIPS tarball; forwards its
# arguments unchanged to UploadSysroot.
function UploadSysrootMips() {
UploadSysroot "$@"
}
#@
#@ UploadSysrootAll <revision>
#@
#@ Upload sysroot image for all architectures
# RunCommand re-derives per-arch environment (including TARBALL) from each
# command name before invoking it, so every architecture uploads its own file.
UploadSysrootAll() {
RunCommand UploadSysrootAmd64 "$@"
RunCommand UploadSysrootI386 "$@"
RunCommand UploadSysrootARM "$@"
RunCommand UploadSysrootARM64 "$@"
RunCommand UploadSysrootMips "$@"
}
#
# CheckForDebianGPGKeyring
#
# Make sure the Debian GPG keys exist. Otherwise print a helpful message.
#
CheckForDebianGPGKeyring() {
# KEYRING_FILE is expected to point at the debian-archive-keyring file;
# gpgv (used by VerifyPackageListing) cannot verify Release files without it.
if [ ! -e "$KEYRING_FILE" ]; then
echo "KEYRING_FILE not found: ${KEYRING_FILE}"
echo "Debian GPG keys missing. Install the debian-archive-keyring package."
exit 1
fi
}
#
# VerifyPackageListing
#
# Verifies the downloaded Packages.bz2 file has the right checksums.
#
# $1: path of the Packages file as listed inside the Release file.
# $2: local path of the downloaded Packages file to verify.
VerifyPackageListing() {
local file_path=$1
local output_file=$2
local release_file="${BUILD_DIR}/${RELEASE_FILE}"
local release_file_gpg="${BUILD_DIR}/${RELEASE_FILE_GPG}"
CheckForDebianGPGKeyring
DownloadOrCopy ${RELEASE_LIST} ${release_file}
DownloadOrCopy ${RELEASE_LIST_GPG} ${release_file_gpg}
echo "Verifying: ${release_file} with ${release_file_gpg}"
# Authenticate the Release file against the Debian archive keyring first;
# only then is its checksum for the Packages file trustworthy.
set -x
gpgv --keyring "${KEYRING_FILE}" "${release_file_gpg}" "${release_file}"
set +x
echo "Verifying: ${output_file}"
# Pull the line(s) mentioning the Packages file out of the Release file and
# take the hash field; a SHA-256 is exactly 64 hex characters.
local checksums=$(grep ${file_path} ${release_file} | cut -d " " -f 2)
local sha256sum=$(echo ${checksums} | cut -d " " -f 3)
if [ "${#sha256sum}" -ne "64" ]; then
echo "Bad sha256sum from ${RELEASE_LIST}"
exit 1
fi
echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
}
#
# GeneratePackageList
#
# Looks up package names in ${BUILD_DIR}/Packages and write list of URLs
# to output file.
#
# $1: parsed Packages file to search; $2: output file; remaining args are
# Debian package names. Each output line is "<pool-relative path> <sha256>".
GeneratePackageList() {
local input_file="$1"
local output_file="$2"
echo "Updating: ${output_file} from ${input_file}"
/bin/rm -f "${output_file}"
shift
shift
for pkg in $@ ; do
# The line after an exact package-name match carries the pool/... filename.
local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
egrep -o "pool/.*")
if [ -z "${pkg_full}" ]; then
echo "ERROR: missing package: $pkg"
exit 1
fi
local pkg_nopool=$(echo "$pkg_full" | sed "s/^pool\///")
# The SHA256: field appears within a few lines of the same match.
local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
grep ^SHA256: | sed 's/^SHA256: //')
if [ "${#sha256sum}" -ne "64" ]; then
echo "Bad sha256sum from Packages"
exit 1
fi
echo $pkg_nopool $sha256sum >> "$output_file"
done
# sort -o does an in-place sort of this file
sort "$output_file" -o "$output_file"
}
#@
#@ UpdatePackageListsAmd64
#@
#@ Regenerate the package lists such that they contain an up-to-date
#@ list of URLs within the Debian archive. (For amd64)
UpdatePackageListsAmd64() {
# Regenerate the checked-in amd64 dep list, then drop the checksum column
# so the committed file contains only URLs.
GeneratePackageListAmd64 "$DEBIAN_DEP_LIST_AMD64"
StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_AMD64"
}
#@
#@ UpdatePackageListsI386
#@
#@ Regenerate the package lists such that they contain an up-to-date
#@ list of URLs within the Debian archive. (For i386)
UpdatePackageListsI386() {
# Regenerate the checked-in i386 dep list, then drop the checksum column.
GeneratePackageListI386 "$DEBIAN_DEP_LIST_I386"
StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_I386"
}
#@
#@ UpdatePackageListsARM
#@
#@ Regenerate the package lists such that they contain an up-to-date
#@ list of URLs within the Debian archive. (For arm)
UpdatePackageListsARM() {
# Regenerate the checked-in ARM dep list, then drop the checksum column.
GeneratePackageListARM "$DEBIAN_DEP_LIST_ARM"
StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM"
}
#@
#@ UpdatePackageListsARM64
#@
#@ Regenerate the package lists such that they contain an up-to-date
#@ list of URLs within the Debian archive. (For arm64)
function UpdatePackageListsARM64() {
# Regenerate the checked-in ARM64 dep list, then drop the checksum column.
GeneratePackageListARM64 "$DEBIAN_DEP_LIST_ARM64"
StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM64"
}
#@
#@ UpdatePackageListsMips
#@
#@ Regenerate the package lists such that they contain an up-to-date
#@ list of URLs within the Debian archive. (For mips)
UpdatePackageListsMips() {
# Regenerate the checked-in MIPS dep list, then drop the checksum column.
GeneratePackageListMips "$DEBIAN_DEP_LIST_MIPS"
StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS"
}
#@
#@ UpdatePackageListsAll
#@
#@ Regenerate the package lists for all architectures.
# RunCommand re-derives per-arch environment from each command name, so each
# architecture's dep list is regenerated with the right settings.
UpdatePackageListsAll() {
RunCommand UpdatePackageListsAmd64
RunCommand UpdatePackageListsI386
RunCommand UpdatePackageListsARM
RunCommand UpdatePackageListsARM64
RunCommand UpdatePackageListsMips
}
# Run one dispatcher command: set architecture-specific environment based on
# the command's name, sanity-check the setup, then invoke it with its args.
RunCommand() {
SetEnvironmentVariables "$1"
SanityCheck
"$@"
}
# Command-line dispatch: $1 must name a function defined in this file.
if [ $# -eq 0 ] ; then
echo "ERROR: you must specify a mode on the commandline"
echo
Usage
exit 1
elif [ "$(type -t $1)" != "function" ]; then
echo "ERROR: unknown function '$1'." >&2
echo "For help, try:"
echo " $0 help"
exit 1
else
ChangeDirectory
# "*All" commands call RunCommand themselves for each architecture, so they
# are invoked directly; single-arch commands get their environment set here.
if echo $1 | grep -qs "All$"; then
"$@"
else
RunCommand "$@"
fi
fi
|
package com.nike.cerberus.hystrix;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestContext;
import com.nike.riposte.server.http.RequestInfo;
import com.nike.riposte.server.http.ResponseInfo;
import com.nike.riposte.server.http.filter.RequestAndResponseFilter;
import io.netty.channel.ChannelHandlerContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Riposte filter that initializes a {@link HystrixRequestContext} when a request
 * arrives and shuts it down once the response has been produced, so Hystrix
 * request-scoped features work for the duration of each HTTP request.
 */
public class HystrixRequestAndResponseFilter implements RequestAndResponseFilter {
// Request-attribute key under which the per-request HystrixRequestContext is stored.
private static final String HYSTRIX_REQUEST_CONTEXT = "HystrixRequestContext";
private final Logger logger = LoggerFactory.getLogger(this.getClass());
// Runs at the start of every request: create the Hystrix context and stash it
// on the request so filterResponse can shut it down later.
@Override
public <T> RequestInfo<T> filterRequestFirstChunkNoPayload(RequestInfo<T> currentRequestInfo,
ChannelHandlerContext ctx) {
HystrixRequestContext context = HystrixRequestContext.initializeContext();
currentRequestInfo.addRequestAttribute(HYSTRIX_REQUEST_CONTEXT, context);
return currentRequestInfo;
}
@Override
public <T> RequestInfo<T> filterRequestLastChunkWithFullPayload(RequestInfo<T> currentRequestInfo,
ChannelHandlerContext ctx) {
// Nothing to do - the other filterRequest method already handled Hystrix initialization.
// Returning null just means use the passed-in request, which is what we want.
return null;
}
// Runs after the response is built: retrieve the context created in
// filterRequestFirstChunkNoPayload (if any) and shut it down, swallowing and
// logging any error so response processing is never disrupted.
@Override
public <T> ResponseInfo<T> filterResponse(ResponseInfo<T> currentResponseInfo, RequestInfo<?> requestInfo,
ChannelHandlerContext ctx) {
try {
if (requestInfo != null && requestInfo.getRequestAttributes() != null) {
HystrixRequestContext context = (HystrixRequestContext) requestInfo.getRequestAttributes().get(HYSTRIX_REQUEST_CONTEXT);
if (context != null) {
context.shutdown();
}
}
} catch (Throwable t) {
logger.error("An unexpected error occurred trying to shutdown the HystrixRequestContext for this request.", t);
}
// Returning null just means use the passed-in response, which is what we want.
return null;
}
}
<reponame>lt123456/6hc
/**
Name : ajaxPost
Author : kingthy
Email : <EMAIL>
Blog : http://www.cnblogs.com/kingthy/
Version : 1.0.0
License : MIT,GPL licenses.
**/
jQuery.fn.extend({
  /**
   * Submit this form through a hidden <iframe> (a pre-XHR2 technique that
   * also supports file uploads) and hand the server response to callbacks.
   *
   * Usage: $(form).ajaxPost(successFn) or
   *        $(form).ajaxPost({url, dataType, success, error, complete}).
   *
   * @param {Function|Object} settings success callback, or an options object.
   * @returns {jQuery} this, for chaining.
   */
  ajaxPost: function (settings) {
    // Bug fix: `success` was assigned without `var`, leaking an implicit
    // global on every call. It is now a proper local.
    var success = (typeof settings === 'function') ? settings : null;
    settings = jQuery.extend({
      url: null,
      dataType: 'json',
      success: success
    }, settings);
    // Build the hidden, off-screen iframe the form will post into.
    function createIframe(id) {
      var iframeHtml = '<iframe id="' + id + '" name="' + id + '" style="position:absolute; top:-9999px; left:-9999px" src="javascript:false;" />';
      jQuery(iframeHtml).appendTo(document.body);
      return jQuery('#' + id);
    }
    // Convert the raw iframe payload to the requested dataType.
    function postCallbackData(data) {
      var d = settings.dataType === 'xml' ? data.responseXML : data.responseText;
      if (settings.dataType === 'json' && d) {
        return jQuery.parseJSON(d);
      }
      return d;
    }
    var id = 'jAjaxPostFrame' + (new Date().getTime());
    var io = createIframe(id);
    var postCallback = function () {
      // Bug fix: this lookup used the bare `$` alias while the rest of the
      // plugin uses `jQuery`; the alias breaks under jQuery.noConflict().
      var frame = jQuery('#' + id)[0];
      var data = {};
      try {
        if (frame.contentWindow) {
          data.responseText = frame.contentWindow.document.body ? frame.contentWindow.document.body.innerHTML : null;
          data.responseXML = frame.contentWindow.document.XMLDocument ? frame.contentWindow.document.XMLDocument : frame.contentWindow.document;
        } else if (frame.contentDocument) {
          // NOTE(review): legacy fallback kept as-is; `contentDocument.document`
          // looks suspect (contentDocument is already the document) — verify
          // against the browsers this path must support.
          data.responseText = frame.contentDocument.document.body ? frame.contentDocument.document.body.innerHTML : null;
          data.responseXML = frame.contentDocument.document.XMLDocument ? frame.contentDocument.document.XMLDocument : frame.contentDocument.document;
        }
      } catch (e) {
        data.error = e;
      }
      if (data.error) {
        if (settings.error) settings.error(data.error);
      } else {
        try {
          var d = postCallbackData(data);
          if (settings.success) settings.success(d);
        } catch (e) {}
      }
      if (settings.complete) settings.complete(data);
      jQuery(frame).unbind();
      // Delay removal so this load handler finishes before the iframe goes away.
      setTimeout(function () {
        try {
          jQuery(frame).remove();
        } catch (e) {}
      }, 100);
    };
    try {
      // Forms containing file inputs must use multipart encoding.
      if (jQuery('input[type=file]', this).length) {
        this.attr('encoding', 'multipart/form-data');
        this.attr('enctype', 'multipart/form-data');
      } else {
        this.attr('encoding', 'application/x-www-form-urlencoded');
        this.attr('enctype', 'application/x-www-form-urlencoded');
      }
      // Target the hidden iframe and POST, optionally overriding the action URL.
      this.attr('target', id);
      this.attr('method', 'POST');
      if (settings.url) this.attr('action', settings.url);
      io.load(postCallback);
      this[0].submit();
    } catch (e) {}
    return this;
  }
});
def calculate_carry(a, k, initial_carry):
    """Propagate a carry through the base-``k`` digit string ``a``.

    Starting at the least-significant (rightmost) character, each digit is
    added to the running carry and the sum is floor-divided by the base to
    get the carry into the next position.  The per-position digits are
    discarded; only the carry out of the most-significant position matters.

    Args:
        a: String of ASCII digit characters, most-significant first.
        k: Numeric base used to split each digit-plus-carry sum.
        initial_carry: Carry fed into the least-significant position.

    Returns:
        The carry remaining after the most-significant digit is processed
        (``initial_carry`` unchanged when ``a`` is empty).
    """
    carry = initial_carry
    for digit_char in reversed(a):
        carry = (carry + ord(digit_char) - ord('0')) // k
    return carry
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.