text stringlengths 1 1.05M |
|---|
#!/usr/bin/env sh
################################################################################
# ERROR: Let the user know if the script fails
################################################################################
# EXIT trap fires on every exit path; only print the failure banner when the
# last status was non-zero, then propagate that status.
# \e is not portable in printf under sh — use \033 consistently.
trap 'ret=$?; test $ret -ne 0 && printf "\n \033[31m๏ฑ\033[0m Setup failed \033[31m๏ฑ\033[0m\n" >&2; exit $ret' EXIT
set -e
################################################################################
# FUNC: Check for required functions file
################################################################################
# POSIX sh has no `source` builtin; use `.` with an explicit ./ path so the
# lookup does not depend on $PATH.
if [ -e ./functions.sh ]; then
  . ./functions.sh
else
  printf "\n ⚠️ ./functions.sh not found! \n" >&2
  exit 1
fi
################################################################################
# Get in Setup! http://patorjk.com/software/taag/ ( font: Script )
################################################################################
# Banner + summary. Color variables (${bold}, ${green}, …) and the
# get_os / get_os_version helpers come from functions.sh.
printf "
()
/\ _ _|_ _
/ \|/ | | | |/ \_
/(__/|__/|_/ \_/|_/|__/
-------------------------- /| -----------------------
[for macOS 12.0.1] \|
โญโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฎ
โ Okay developers the macOS setup has ${bold}started!${normal}. โ
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โ Safe to run multiple times on the same machine. โ
โ It ${green}installs${reset}, ${blue}upgrades${reset}, or ${yellow}skips${reset} packages based โ
โ on what is already installed on the machine. โ
โฐโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโฏ
${dim}$(get_os) $(get_os_version) ${normal}
"
################################################################################
# CHECK: Internet
################################################################################
# Helper from functions.sh; presumably aborts when offline — TODO confirm.
chapter "Checking internet connectionโฆ"
check_internet_connection
################################################################################
# PROMPT: Password
################################################################################
# Caches sudo credentials up front so later installs do not prompt.
chapter "Caching passwordโฆ"
ask_for_sudo
################################################################################
# PROMPT: SSH Key
################################################################################
chapter 'Checking for SSH keyโฆ'
ssh_key_setup
################################################################################
# INSTALL: Dependencies
################################################################################
chapter "Installing Dependenciesโฆ"
# ------------------------------------------------------------------------------
# XCode
# ------------------------------------------------------------------------------
# Run the helper as a command: `[ xcode_tools_is_installed ]` only tested a
# non-empty string and was therefore always true.
if xcode_tools_is_installed; then
  print_success_muted "Xcode already installed. Skipping."
else
  step "Installing Xcodeโฆ"
  # `&>` is a bashism; this script runs under plain sh.
  xcode-select --install > /dev/null 2>&1
  print_success "Xcode installed!"
fi
# ------------------------------------------------------------------------------
# Homebrew
# ------------------------------------------------------------------------------
if ! [ -x "$(command -v brew)" ]; then
  step "Installing Homebrewโฆ"
  /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
  export PATH="/opt/homebrew/bin:$PATH"
  # Persist the PATH line for future shells. The original wrote a malformed
  # string with a stray leading quote and no trailing newline; single quotes
  # keep $PATH literal so it expands at shell startup, not now.
  printf '\nexport PATH="/opt/homebrew/bin:$PATH"\n' >> "$HOME/.zshrc"
  print_success "Homebrew installed!"
else
  print_success_muted "Homebrew already installed. Skipping."
fi
# ------------------------------------------------------------------------------
# NVM
# ------------------------------------------------------------------------------
if [ ! -d "$NVM_DIRECTORY" ]; then
  step "Installing NVMโฆ"
  curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
  # Never use a variable as the printf format string; pass it as data.
  printf '%s\n' "$NVM_SOURCE_PATH" >> "$HOME/.path"
  printf '%s\n' "$NVM_COMPLETION_PATH" >> "$HOME/.path"
  . "$HOME/.path"
  print_success "NVM installed!"
  step "Installing latest Nodeโฆ"
  nvm install node
  nvm use node
  nvm run node --version
  nodev=$(node -v)
  print_success "Using Node $nodev!"
else
  print_success_muted "NVM/Node already installed. Skipping."
fi
################################################################################
# INSTALL: brews
################################################################################
if [ -e "$cwd/install/brews" ]; then
  chapter "Installing Homebrew formulaeโฆ"
  # `$(<file)` is a bashism; use cat so this works under plain sh.
  # Word-splitting on whitespace is intentional: one formula per word.
  for brew in $(cat "$cwd/install/brews"); do
    install_brews "$brew"
  done
fi
################################################################################
# UPDATE: Homebrew
################################################################################
chapter "Updating Homebrew formulaeโฆ"
brew update
################################################################################
# INSTALL: casks
################################################################################
if [ -e "$cwd/install/casks" ]; then
  chapter "Installing apps via Homebrewโฆ"
  for cask in $(cat "$cwd/install/casks"); do
    install_casks "$cask"
  done
fi
################################################################################
# INSTALL: Mac App Store Apps
################################################################################
chapter "Installing apps from App Storeโฆ"
# Bug fix: `[ -x mas ]` only tested for an executable file literally named
# "mas" in the current directory. What we need is mas-cli on the PATH.
if ! command -v mas > /dev/null 2>&1; then
  print_warning "Please install mas-cli first: brew mas. Skipping."
else
  if [ -e "$cwd/install/apps" ]; then
    if mas_setup; then
      # Workaround for associative array in Bash 3
      # https://stackoverflow.com/questions/6047648/bash-4-associative-arrays-error-declare-a-invalid-option
      # Each line is "<app id>::<app name>"; split on the "::" separator.
      for app in $(cat "$cwd/install/apps"); do
        KEY="${app%%::*}"
        VALUE="${app##*::}"
        install_application_via_app_store "$KEY" "$VALUE"
      done
    else
      print_warning "Please signin to App Store first. Skipping."
    fi
  fi
fi
################################################################################
# CLEAN: Homebrew files
################################################################################
chapter "Cleaning up Homebrew filesโฆ"
brew cleanup 2> /dev/null
################################################################################
# INSTALL: npm packages
################################################################################
if [ -e "$cwd/install/npm" ]; then
  chapter "Installing npm packagesโฆ"
  # `$(<file)` is a bashism; use cat under plain sh.
  # Each entry is "<package>::<binary>" (same convention as the apps list).
  for pkg in $(cat "$cwd/install/npm"); do
    KEY="${pkg%%::*}"
    VALUE="${pkg##*::}"
    install_npm_packages "$KEY" "$VALUE"
  done
fi
################################################################################
# CONFIGURATION: macOS configuration
################################################################################
chapter "Configure macOSโฆ"
step " macOS preferencesโฆ"
if [ -f "$cwd/preferences.sh" ]; then
  if ask "Do you want to apply preferences?" Y; then
    # Typo fix in the user-facing message: "You're" -> "Your".
    . "$cwd/preferences.sh"; printf "\n Your preferences have been updated. ๐ฅ \n";
  else
    print_success_muted "Preferences declined. Skipped.";
  fi
else
  print_warning "No preferences found. Skipping."
fi
################################################################################
# SYMLINK: symbolic link of files
################################################################################
chapter "Symbolic Link of files to rootโฆ"
symlink_files
################################################################################
# ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ๐บ
################################################################################
e_message
|
#!/bin/bash
# Print usage text and exit. NOTE: the here-doc delimiter is unquoted, so
# the body is subject to expansion at runtime; literal dollars are escaped
# where needed (e.g. \$PATH), and "$ command" examples are safe because a
# "$ " followed by a space is not an expansion.
help() {
cat <<EOF
Usage: ./mktest.sh issueXXX.md
[-g : generate a new test case from a .md file]
[-d : diff expected and actual]
[-e : generate TWO html previews, issueXXX.actual.html and issueXXX.expected.html]
[-o : open the HTML preview(s)]
[-p : generate and open an html preview]
[-r : reverse native back to markdown]
[-s : use system pandoc]
For most commands, issueXXX.md may not exist, but it's just a convenient way of
addressing the native test files named tests/issueXXX.*.native.
pandoc-citeproc binary:
The default is to use the built pandoc-citeproc executable (ie $ stack exec pandoc --).
This is the one that $ stack build outputs.
You can use the one on your normal \$PATH with the -s flag, or specify
a particular one:
$ CITEPROC="/usr/local/bin/pandoc-citeproc" ./mktest.sh ...
With your regular path, this might be a previous passes-tests build (ie from
$ make) in ~/.local/bin, so be careful.
The same applies for PANDOC="...".
If you're making a new test, generate the native files with:
$ touch issueXXX.md
# ... edit issueXXX.md
$ ./mktest.sh issueXXX.md -g
You can use
$ ./mktest.sh issueXXX.md -gpo
... to also open up an HTML preview in your browser (using open or xdg-open).
Note that ideally your test case fails without your proposed changes, so you
should see a difference with
$ CITEPROC="/path/to/buggy/pandoc-citeproc" ./mktest.sh issueXXX.md -d
(see -d flag below)
For reproducing a single test diff with colours or in a GUI diff tool, you can
produce the actual and expected outputs:
$ ./mktest.sh issueXXX.md -d
# => ./test-diffs/issueXXX.expected.native and ./test-diffs/issueXXX.actual.native are produced.
# => a diff is shown
You may specify your diff tool, which is 'git diff --no-index --' by default
$ export DIFFTOOL="ksdiff"
$ ...
Similarly, you may want the HTML for both, in case reading diffs is tricky:
$ ./mktest.sh issueXXX.md -e [-o]
# => ./test-diffs/issueXXX.expected.html and ./test-diffs/issueXXX.actual.html are produced.
# => Adding -o will open both in your browser.
If you're trying to understand one of the failing tests and you want some markdown,
(maybe you need to rewrite a test, hopefully not) try "reverse mode":
# given tests/issueXXX.*.native
$ ./mktest.sh issueXXX.md -r
The conversion will be sitting in issueXXX.md.
Also use -po by itself to open the expected output as HTML.
EOF
exit
}
# default to built pandoc
# Resolve the stack-built binaries, but allow PANDOC / CITEPROC environment
# variables to override them (documented in the help text).
stack_pandoc=$(stack exec which pandoc)
PANDOC="${PANDOC:-$stack_pandoc}"
stack_citeproc=$(stack exec which pandoc-citeproc)
CITEPROC="${CITEPROC:-$stack_citeproc}"
# Mode flags; toggled by getopts below.
GENERATE=0
REVERSE=0
SHOULD_PREVIEW=0
DOUBLE_PREVIEW=0
OPEN=0
DIFF=0
# May be a multi-word command line, so it is deliberately expanded unquoted
# at the call site.
DIFFTOOL="${DIFFTOOL:-git diff --no-index --}"
# reext FILE EXT — replace everything after FILE's first dot with EXT
# (e.g. `reext issue42.md .in.native` -> "issue42.in.native").
# Uses parameter expansion instead of the original echo|cut subshells:
# ${1%%.*} strips the longest ".*" suffix, i.e. the first dot-field,
# exactly what `cut -f 1 -d .` produced.
reext() {
  printf '%s%s\n' "${1%%.*}" "$2"
}
# needfile FILE — abort the script unless FILE exists and is non-empty.
needfile() {
  if ! test -s "$1"; then
    # Bug fix: the message previously printed $IN_NATIVE regardless of
    # which file was actually missing; report the argument instead.
    echo "no such file $1" >&2
    exit 1
  fi
}
# openit FILE — open FILE with the platform opener (macOS `open`, otherwise
# `xdg-open`), but only when the -o flag set OPEN=1. Silently does nothing
# when neither opener is available.
openit() {
  [ "$OPEN" -eq 1 ] || return 0
  if hash open 2>/dev/null; then
    open "$1"
  elif hash xdg-open 2>/dev/null; then
    xdg-open "$1"
  fi
}
# The first argument (unless it is -h) is the target markdown file; the
# remaining arguments are parsed as flags below.
MD="none"
if test "$1" != "-h"; then
MD="$1"
shift 1
fi
# Leading ':' in the optstring selects silent error handling; unknown
# options land in the \? arm and show usage.
while getopts ":sgrdpeoh" opt; do
case ${opt} in
s ) PANDOC="$(which pandoc)"; CITEPROC="$(which pandoc-citeproc)" ;;
g ) GENERATE=1 ;;
r ) REVERSE=1 ;;
d ) DIFF=1 ;;
p ) SHOULD_PREVIEW=1 ;;
e ) DOUBLE_PREVIEW=1 ;;
o ) OPEN=1 ;;
h ) help ;;
\? ) help ;;
esac
done
# Drop the parsed flags so any remaining operands shift into place.
shift $((OPTIND -1))
# echo using PANDOC=\""$PANDOC"\"
# echo using CITEPROC=\""$CITEPROC"\"
# Derived paths: the native fixtures live under tests/, generated outputs
# under ./test-diffs. $PANDOC and $DIFFTOOL stay unquoted on purpose: they
# may expand to multi-word command lines.
OUTDIR="./test-diffs"
mkdir -p "$OUTDIR"
IN_NATIVE=tests/$(reext "$MD" .in.native)
EXPECTED_NATIVE=tests/$(reext "$MD" .expected.native)
if test "$GENERATE" -eq 1; then
  needfile "$MD"
  # Build both native fixtures from the markdown source.
  $PANDOC "$MD" -s -o "$IN_NATIVE" && \
    $PANDOC "$IN_NATIVE" -s -F "$CITEPROC" -o "$EXPECTED_NATIVE"
elif test "$REVERSE" -eq 1; then
  needfile "$IN_NATIVE"
  # Bug fix: this previously invoked a bare `pandoc`, silently ignoring the
  # PANDOC=... override documented in the help text.
  $PANDOC "$IN_NATIVE" -f native -s -t markdown -o "$MD"
  exit $?
fi
if test "$DIFF" -eq 1; then
  needfile "$IN_NATIVE"
  needfile "$EXPECTED_NATIVE"
  ACTUAL=$OUTDIR/$(reext "$MD" .actual.native)
  EXPECTED=$OUTDIR/$(reext "$MD" .expected.native)
  $PANDOC "$IN_NATIVE" -F "$CITEPROC" -s -o "$ACTUAL"
  $PANDOC "$EXPECTED_NATIVE" -s -o "$EXPECTED"
  # Bug fix: `2>&1 >/dev/null` left stderr on the terminal; redirect stdout
  # first so both streams are suppressed during the silent equality check.
  if git diff --no-index -- "$EXPECTED" "$ACTUAL" >/dev/null 2>&1; then
    exit 0
  else
    # Re-run with the user's diff tool for readable output.
    $DIFFTOOL "$EXPECTED" "$ACTUAL"
    exit $?
  fi
elif test "$DOUBLE_PREVIEW" -eq 1; then
  needfile "$IN_NATIVE"
  needfile "$EXPECTED_NATIVE"
  ACTUAL=$OUTDIR/$(reext "$MD" .actual.html)
  EXPECTED=$OUTDIR/$(reext "$MD" .expected.html)
  $PANDOC "$IN_NATIVE" -F "$CITEPROC" --metadata title:"$ACTUAL" -s -t html5 -o "$ACTUAL"
  openit "$ACTUAL"
  $PANDOC "$EXPECTED_NATIVE" --metadata title:"$EXPECTED" -s -t html5 -o "$EXPECTED"
  openit "$EXPECTED"
elif test "$SHOULD_PREVIEW" -eq 1; then
  needfile "$EXPECTED_NATIVE"
  EXPECTED=$OUTDIR/$(reext "$MD" .expected.html)
  $PANDOC "$EXPECTED_NATIVE" --metadata title:"$EXPECTED" -s -t html5 -o "$EXPECTED"
  openit "$EXPECTED"
fi
|
<filename>lib/assets/javascripts/dashboard/data/backbone/sync-options.js
var _ = require('underscore');
var Backbone = require('backbone');
(function () {
  // Helper copied from Backbone internals (they are not exported):
  // read object[prop]; if it is a function, call it with `method`.
  var getValue = function (object, prop, method) {
    if (!(object && object[prop])) return null;
    return _.isFunction(object[prop]) ? object[prop](method) : object[prop];
  };

  // Throw an error when a URL is needed, and none is supplied.
  var urlError = function () {
    throw new Error('A "url" property or function must be specified');
  };

  // backbone.sync replacement to control the url prefix.
  Backbone.originalSync = Backbone.sync;
  Backbone.sync = function (method, model, options) {
    var url = options.url || getValue(model, 'url', method) || urlError();
    // A url starting with "http" or "//" is treated as absolute; anything
    // else gets the model's configured base_url prepended.
    var absoluteUrl = url.indexOf('http') === 0 || url.indexOf('//') === 0;
    if (!absoluteUrl) {
      // We need to fix this
      // this comes from cdb.config.prefixUrl
      // NOTE(review): assumes every model syncing a relative url carries a
      // `_config` Backbone model with a base_url attribute — confirm.
      options.url = model._config.get('base_url') + url;
    } else {
      options.url = url;
    }
    if (method !== 'read') {
      // Any write invalidates the cache entries registered under the
      // model's surrogate keys (see Backbone.cachedSync below).
      if (model.surrogateKeys) {
        Backbone.cachedSync.invalidateSurrogateKeys(getValue(model, 'surrogateKeys'));
      }
    }
    return Backbone.originalSync(method, model, options);
  };
  Backbone.currentSync = Backbone.sync;

  // Sync wrapper that forces cross-domain requests with credentials (CORS).
  Backbone.withCORS = function (method, model, options) {
    if (!options) {
      options = {};
    }
    if (!options.crossDomain) {
      options.crossDomain = true;
    }
    if (!options.xhrFields) {
      options.xhrFields = { withCredentials: true };
    }
    return Backbone.currentSync(method, model, options);
  };

  // this method returns a cached version of backbone sync
  // take a look at https://github.com/teambox/backbone.memoized_sync/blob/master/backbone.memoized_sync.js
  // this is the same concept but implemented as a wrapper for ``Backbone.sync``
  // usage:
  //   initialize: function () {
  //     this.sync = Backbone.cachedSync(this.user_name);
  //   }
  Backbone.cachedSync = function (namespace, sync) {
    if (!namespace) {
      throw new Error('cachedSync needs a namespace as argument');
    }
    var surrogateKey = namespace;
    // NOTE(review): assumes the host app populates window.user_data before
    // any model syncs — confirm load order.
    var session = window.user_data && window.user_data.username;
    // no user session, no cache
    // there should be a session to have cache so we avoid
    // cache collision for someone with more than one account
    if (session) {
      namespace += '-' + session;
    } else {
      return Backbone.sync;
    }
    var namespaceKey = 'cdb-cache/' + namespace;
    // saves all the localstore references to the namespace
    // inside localstore. It allows to remove all the references
    // at a time
    var index = {
      // return a list of references for the namespace
      _keys: function () {
        return JSON.parse(localStorage.getItem(namespaceKey) || '{}');
      },
      // add a new reference for the namespace (value is a timestamp)
      add: function (key) {
        var keys = this._keys();
        keys[key] = +new Date();
        localStorage.setItem(namespaceKey, JSON.stringify(keys));
      },
      // remove all the references for the namespace
      invalidate: function () {
        var keys = this._keys();
        _.each(keys, function (v, k) {
          localStorage.removeItem(k);
        });
        localStorage.removeItem(namespaceKey);
      }
    };
    // localstore-like cache wrapper that also records every key in `index`
    // so the whole namespace can be dropped at once.
    var cache = {
      setItem: function (key, value) {
        localStorage.setItem(key, value);
        index.add(key);
        return this;
      },
      // this is async in case the data needs to be compressed
      getItem: function (key, callback) {
        var val = localStorage.getItem(key);
        _.defer(function () {
          callback(val);
        });
      },
      removeItem: function (key) {
        localStorage.removeItem(key);
        index.invalidate();
      }
    };
    // The wrapped sync: reads are answered from localStorage first (calling
    // success early with the cached payload), then refreshed from the
    // network; the network callback is skipped when the fresh response
    // matches the cached one. Non-reads drop the whole namespace.
    var cached = function (method, model, options) {
      var url = options.url || getValue(model, 'url') || urlError();
      var key = namespaceKey + '/' + url;
      if (method === 'read') {
        var success = options.success;
        var cachedValue = null;
        options.success = function (resp, status, xhr) {
          // if cached value is ok, the caller already has this data
          if (cachedValue && xhr.responseText === cachedValue) {
            return;
          }
          cache.setItem(key, xhr.responseText);
          success(resp, status, xhr);
        };
        cache.getItem(key, function (val) {
          cachedValue = val;
          if (val) {
            success(JSON.parse(val), 'success');
          }
        });
      } else {
        cache.removeItem(key);
      }
      return (sync || Backbone.sync)(method, model, options);
    };
    // create a public function to invalidate all the namespace items
    cached.invalidate = function () {
      index.invalidate();
    };
    // for testing and debugging purposes
    cached.cache = cache;
    // have a global namespace -> sync function in order to avoid invalidation
    Backbone.cachedSync.surrogateKeys[surrogateKey] = cached;
    return cached;
  };
  Backbone.cachedSync.surrogateKeys = {};

  // Invalidate the cached namespaces registered for each surrogate key.
  Backbone.cachedSync.invalidateSurrogateKeys = function (keys) {
    _.each(keys, function (k) {
      var s = Backbone.cachedSync.surrogateKeys[k];
      if (s) {
        s.invalidate();
      } else {
        console.error('Backbone sync options: surrogate key not found: ' + k);
      }
    });
  };

  // Sync variant that aborts the model's previous in-flight request first.
  // NOTE(review): the model is read from arguments[1], i.e. this expects
  // the standard sync(method, model, options) call shape.
  Backbone.syncAbort = function () {
    var self = arguments[1];
    if (self._xhr) {
      self._xhr.abort();
    }
    self._xhr = Backbone.sync.apply(this, arguments);
    self._xhr.always(function () { self._xhr = null; });
    return self._xhr;
  };

  // Debounce create/update syncs by `delay` ms; other methods (read,
  // delete) pass straight through to the wrapped sync.
  Backbone.delayedSaveSync = function (sync, delay) {
    var dsync = _.debounce(sync, delay);
    return function (method, model, options) {
      if (method === 'create' || method === 'update') {
        return dsync(method, model, options);
      } else {
        return sync(method, model, options);
      }
    };
  };

  // Model.save variant that aborts a previous in-flight save before
  // starting a new one.
  Backbone.saveAbort = function () {
    var self = this;
    if (this._saving && this._xhr) {
      this._xhr.abort();
    }
    this._saving = true;
    var xhr = Backbone.Model.prototype.save.apply(this, arguments);
    this._xhr = xhr;
    xhr.always(function () { self._saving = false; });
    return xhr;
  };
})();
|
<gh_stars>0
package common
import (
"fmt"
"strings"
"github.com/flant/werf/pkg/storage"
"github.com/flant/werf/pkg/werf"
)
// GetStagesStorageCache resolves the stages-storage cache backend for the
// given synchronization address: the local file cache for the local
// address, or a Kubernetes-backed cache for "kubernetes://<namespace>".
func GetStagesStorageCache(synchronization string) (storage.StagesStorageCache, error) {
	if synchronization == storage.LocalStorageAddress {
		return storage.NewFileStagesStorageCache(werf.GetStagesStorageCacheDir()), nil
	} else if strings.HasPrefix(synchronization, "kubernetes://") {
		ns := strings.TrimPrefix(synchronization, "kubernetes://")
		return storage.NewKubernetesStagesStorageCache(ns), nil
	}
	// The signature already declares an error return; report bad input as an
	// error instead of panicking so callers can show a friendly message.
	return nil, fmt.Errorf("unknown synchronization param %q", synchronization)
}
// GetStorageLockManager resolves the lock manager for the given
// synchronization address: a host-locker-based manager for the local
// address, or a Kubernetes-backed manager for "kubernetes://<namespace>".
func GetStorageLockManager(synchronization string) (storage.LockManager, error) {
	if synchronization == storage.LocalStorageAddress {
		return storage.NewGenericLockManager(werf.GetHostLocker()), nil
	} else if strings.HasPrefix(synchronization, "kubernetes://") {
		ns := strings.TrimPrefix(synchronization, "kubernetes://")
		return storage.NewKubernetesLockManager(ns), nil
	}
	// Report bad input via the declared error return rather than panicking.
	return nil, fmt.Errorf("unknown synchronization param %q", synchronization)
}
|
/**
 * Returns a new array where each element gains a derived `Numero2` field of
 * the form "<Numero>.<MetaId.Numero>".
 * @param {Array<{Numero: number, MetaId: {Numero: number}}>} datos
 * @returns {Array<Object>} new objects; the input elements are not mutated.
 */
function transformData(datos) {
  return datos.map((element) => ({
    // Spread first so the computed Numero2 cannot be clobbered by a
    // pre-existing Numero2 key on the element (the original spread last).
    ...element,
    Numero2: `${element.Numero}.${element.MetaId.Numero}`
  }));
}
// Example usage
// Demonstrates the derived field: e.g. { Numero: 1, MetaId: { Numero: 10 } }
// gains Numero2 === "1.10".
const datos = [
  { Numero: 1, MetaId: { Numero: 10 } },
  { Numero: 2, MetaId: { Numero: 20 } },
  { Numero: 3, MetaId: { Numero: 30 } }
];
const DatosTransformados = transformData(datos);
console.log(DatosTransformados);
#!/bin/bash
# Run a single dieharder RNG test with a fixed seed so the output is
# reproducible.
#   -d 9          : test number 9 (see `dieharder -l` for the test list)
#   -g 45         : generator id 45 (see `dieharder -g -1` for generators)
#   -S 403526862  : explicit seed
# NOTE(review): the exact test/generator these ids map to depends on the
# installed dieharder version — confirm with the flags above.
dieharder -d 9 -g 45 -S 403526862
class CustomEventDispatcher implements EventDispatcher {
private eventListeners: { [eventType: string]: (() => void)[] } = {};
addEventListener(eventType: string, listener: () => void): void {
if (!this.eventListeners[eventType]) {
this.eventListeners[eventType] = [];
}
this.eventListeners[eventType].push(listener);
}
removeEventListener(eventType: string, listener: () => void): void {
const listeners = this.eventListeners[eventType];
if (listeners) {
const index = listeners.indexOf(listener);
if (index !== -1) {
listeners.splice(index, 1);
}
}
}
dispatchEvent(eventType: string): void {
const listeners = this.eventListeners[eventType];
if (listeners) {
for (const listener of listeners) {
listener();
}
}
}
} |
import React from 'react';
import Layout from '../../components/Layout';
// Thank-you confirmation page shown after a successful form submission.
// Purely presentational: static copy plus two outbound call-to-action links.
const Thanks = () => (
  <Layout>
    <section className="section">
      <div className="container mx-auto">
        {/* Card overlaps the hero above it (-mt-64 + z-10). */}
        <div className="flex flex-wrap justify-center bg-white shadow-xl rounded-lg -mt-64 py-16 px-12 relative z-10">
          <div className="w-full text-center lg:w-8/12">
            <p className="text-4xl text-center">
              <span role="img" aria-label="love">
                ๐
              </span>
            </p>
            <h3 className="font-semibold text-3xl">Thank you</h3>
            <p className="text-gray-600 text-lg leading-relaxed mt-4 mb-4">
              Will get back to you ASAP!
            </p>
            {/* Outbound CTAs: template docs and the GitHub repository. */}
            <div className="sm:block flex flex-col mt-10">
              <a
                href="https://www.creative-tim.com/learning-lab/tailwind/react/overview/notus?ref=nr-index"
                target="_blank"
                rel="noreferrer"
                className="get-started text-white font-bold px-6 py-4 rounded outline-none focus:outline-none mr-1 mb-2 bg-blue-500 active:bg-blue-600 uppercase text-sm shadow hover:shadow-lg ease-linear transition-all duration-150"
              >
                Get started
              </a>
              <a
                href="https://github.com/creativetimofficial/notus-react?ref=nr-index"
                target="_blank"
                rel="noreferrer"
                className="github-star sm:ml-1 text-white font-bold px-6 py-4 rounded outline-none focus:outline-none mr-1 mb-1 bg-gray-800 active:bg-gray-700 uppercase text-sm shadow hover:shadow-lg"
              >
                <i className="fab fa-github text-lg mr-1"></i>
                <span>Help With a Star</span>
              </a>
            </div>
            <div className="text-center mt-16"></div>
          </div>
        </div>
      </div>
    </section>
  </Layout>
);
export default Thanks;
|
require 'test_helper'
class OverwatchAPITest < ActiveSupport::TestCase
  # Battletags contain a "#discriminator" and may include non-ASCII
  # characters; profile_url must percent-encode the name and replace the
  # "#" separator with "-".
  test 'battletag is encoded for URL' do
    api = OverwatchAPI.new(battletag: 'Amรฉlie#1234', platform: 'pc')
    assert_equal '/api/v3/u/Am%C3%A9lie-1234/stats?platform=pc', api.profile_url
  end
end
|
<gh_stars>10-100
package fastly
// Coordinates represent the location of a datacenter.
//
// NOTE(review): the exported field name "Longtitude" is misspelled, but it
// is public API — renaming it would break callers. The mapstructure tag is
// spelled correctly, so decoding from the API is unaffected.
type Coordinates struct {
	Latitude   float64 `mapstructure:"latitude"`
	Longtitude float64 `mapstructure:"longitude"`
	X          float64 `mapstructure:"x"`
	Y          float64 `mapstructure:"y"`
}

// Datacenter describes a single datacenter in Fastly's network; a list of
// these is returned by AllDatacenters.
type Datacenter struct {
	Code        string      `mapstructure:"code"`
	Coordinates Coordinates `mapstructure:"coordinates"`
	Group       string      `mapstructure:"group"`
	Name        string      `mapstructure:"name"`
	Shield      string      `mapstructure:"shield"`
}
// AllDatacenters returns the list of datacenters in Fastly's network.
// It issues a single GET to /datacenters and decodes the response body.
func (c *Client) AllDatacenters() ([]Datacenter, error) {
	resp, err := c.Get("/datacenters", nil)
	if err != nil {
		return nil, err
	}
	var datacenters []Datacenter
	if err := decodeBodyMap(resp.Body, &datacenters); err != nil {
		return nil, err
	}
	return datacenters, nil
}
|
package com.vc.easy
object L561 {
  /**
   * LeetCode 561 (Array Partition): after sorting, summing every other
   * element (the even indices) maximizes the sum of per-pair minimums.
   *
   * Note: sorts `nums` in place, matching the original's side effect on
   * the caller's array.
   */
  def arrayPairSum(nums: Array[Int]): Int = {
    scala.util.Sorting.quickSort(nums)
    nums.indices.collect { case i if i % 2 == 0 => nums(i) }.sum
  }
}
|
<filename>examples/plotting/plot_with_matplotlib.py
import numpy as np
import matplotlib.pyplot as plt
from acconeer_utils.clients import SocketClient, SPIClient, UARTClient
from acconeer_utils.clients import configs
from acconeer_utils import example_utils
def main():
    """Stream IQ sweeps from an Acconeer radar sensor and live-plot
    amplitude and phase with matplotlib until Ctrl-C or the plot window
    is closed.
    """
    args = example_utils.ExampleArgumentParser(num_sens=1).parse_args()
    example_utils.config_logging(args)
    # Pick the transport from CLI flags: TCP socket, SPI, or serial UART
    # (auto-detecting the serial port when none was given).
    if args.socket_addr:
        client = SocketClient(args.socket_addr)
    elif args.spi:
        client = SPIClient()
    else:
        port = args.serial_port or example_utils.autodetect_serial_port()
        client = UARTClient(port)
    config = configs.IQServiceConfig()
    config.sensor = args.sensors
    config.range_interval = [0.2, 0.6]  # measured depth span, in meters
    config.sweep_rate = 10  # sweeps per second
    config.gain = 0.6
    info = client.setup_session(config)
    num_points = info["data_length"]
    amplitude_y_max = 0.3  # initial y-limit; grown below when data exceeds it
    # Two stacked axes: amplitude on top, phase below, sharing the depth axis.
    fig, (amplitude_ax, phase_ax) = plt.subplots(2)
    fig.set_size_inches(8, 6)
    fig.canvas.set_window_title("Acconeer matplotlib example")
    for ax in [amplitude_ax, phase_ax]:
        ax.set_xlabel("Depth (m)")
        ax.set_xlim(config.range_interval)
        ax.grid(True)
    amplitude_ax.set_ylabel("Amplitude")
    amplitude_ax.set_ylim(0, 1.1 * amplitude_y_max)
    phase_ax.set_ylabel("Phase")
    example_utils.mpl_setup_yaxis_for_phase(phase_ax)
    xs = np.linspace(*config.range_interval, num_points)
    amplitude_line = amplitude_ax.plot(xs, np.zeros_like(xs))[0]
    phase_line = phase_ax.plot(xs, np.zeros_like(xs))[0]
    fig.tight_layout()
    plt.ion()  # interactive mode: updates without blocking on show()
    plt.show()
    interrupt_handler = example_utils.ExampleInterruptHandler()
    print("Press Ctrl-C to end session")
    client.start_streaming()
    while not interrupt_handler.got_signal:
        info, sweep = client.get_next()
        amplitude = np.abs(sweep)
        phase = np.angle(sweep)
        # Auto-scale the amplitude axis upward when a sweep exceeds it.
        max_amplitude = np.max(amplitude)
        if max_amplitude > amplitude_y_max:
            amplitude_y_max = max_amplitude
            amplitude_ax.set_ylim(0, 1.1 * max_amplitude)
        amplitude_line.set_ydata(amplitude)
        phase_line.set_ydata(phase)
        if not plt.fignum_exists(1):  # Simple way to check if plot is closed
            break
        fig.canvas.flush_events()
    print("Disconnecting...")
    plt.close()
    client.disconnect()


if __name__ == "__main__":
    main()
|
#!/bin/bash
# Ensure that one server and n clients run, and print plausible output.
#
# Usage: $0 n [port]
#   n    - number of ckphaselock clients to launch
#   port - server port (default 4444)
#
# Relies on helpers from test-common.sh (`nuke`, `grepregex`). From their
# use below, grepregex appears to print a count of matching lines —
# TODO confirm against test-common.sh.
. test-common.sh
# The client count argument is mandatory.
if [[ $# -eq 0 ]]; then
exit 1
fi
n=$1
# Optional second argument overrides the default port.
if [[ $# -ge 2 ]]; then
port=$2
else
port=4444
fi
# Per-process temp files: patched config, server log, one log per client.
conf=$(mktemp /tmp/clockkit.conf.XXX)
srv=$(mktemp /tmp/clockkit.srv.XXX)
clis=''
for i in $(seq $n); do
cli[i]=$(mktemp /tmp/clockkit.cli.XXX)
clis+=${cli[i]}
clis+=' '
done
# Clean up after all possible exits.
# (Double quotes are deliberate: the filenames are expanded now, while
# they are known.)
trap "rm -f $conf $srv $clis" 0 2 3 15
# Point the config at the chosen port.
sed "s/^port:.*/port:$port/" < clockkit.conf > $conf
nuke ckserver ckphaselock
./ckserver $port > $srv &
for i in $(seq $n); do
./ckphaselock $conf > ${cli[i]} &
done
# Let the processes exchange time packets, then stop them all.
sleep 4
nuke ckserver ckphaselock
# Expect exactly 10 well-formed "<time ...>" lines at the tail of the
# server log, and 10 time + 10 offset lines in each client log.
a=$(tail -10 $srv | grepregex '<time \d+ +\d+>\s')
if [[ "$a" != "10" ]]; then
exit 1
fi
for i in $(seq $n); do
b=$(tail -20 ${cli[i]} | grepregex '<time \d+ +\d+>')
c=$(tail -20 ${cli[i]} | grepregex 'offset: [-\d]+')
if [[ "$b $c" != "10 10" ]]; then
exit 2
fi
done
|
#!/bin/bash
# Integration test for avn.sh: stub out the `_avn` command, source the
# shell hooks, trigger them via a directory change, and assert on the
# arguments the stub recorded.
typeset __abspath=$(cd ${0%/*} && echo $PWD/${0##*/})
typeset __shelldir=`dirname "${__abspath}"`
typeset __testdir=`dirname "${__shelldir}"`
typeset __tmp=`mktemp /tmp/avn-test.XXXXXX`
typeset __written=""
# Use the fixtures directory as a fake $HOME for the sourced scripts.
export HOME="${__testdir}/fixtures"
# start in a known location
cd "${__testdir}/fixtures/home"
# Stub: append every _avn invocation's arguments instead of running the
# real binary.
function _avn() {
echo "$@" >> ${__tmp}
}
# helpers.sh provides assertEqual; avn.sh defines __avn_debug and hooks.
source "${__shelldir}/helpers.sh"
source "${__testdir}/../bin/avn.sh"
# change to a directory where we can debug
cd "../v0.10.11"
echo "" > ${__tmp} # clear output
__avn_debug
# Flatten the recorded output to a single whitespace-normalized line.
__written=`echo $(cat ${__tmp})`
assertEqual "explain -v ${__testdir}/fixtures/v0.10.11 .node-version" "${__written}" || exit 1
rm ${__tmp}
|
# -----------------------------------------------------------------------------
#
# Package : markdown-it
# Version : 8.4.2
# Source repo : https://github.com/markdown-it/markdown-it
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
# Package under test (see header block above for platform details).
PACKAGE_NAME=markdown-it
PACKAGE_VERSION=8.4.2
PACKAGE_URL=https://github.com/markdown-it/markdown-it
# Install toolchain and build dependencies, then add the RHEL/ppc64le repos.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
# Browser for headless tests, latest node/npm, and global test tooling.
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn
# Pretty OS name used in the report lines below.
OS_NAME=`python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])"`
HOME_DIR=`pwd`
# Clone and pin the tested version. On failure, write the report files and
# exit 0 so the bulk-test harness continues with the next package.
if ! git clone $PACKAGE_URL $PACKAGE_NAME; then
echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
git checkout $PACKAGE_VERSION
# Normalize the version from package.json (the tag may differ).
PACKAGE_VERSION=$(jq -r ".version" package.json)
# run the test command from test.sh
# Bug fix: the original `if ! npm install && npm audit fix ...` applied `!`
# only to `npm install`, so on a successful install the audit fixes were
# short-circuited and never ran, and on a failed install the audit commands
# decided the branch. Group the chain so the negation covers all of it.
if ! { npm install && npm audit fix && npm audit fix --force; }; then
echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
exit 0
fi
cd $HOME_DIR/$PACKAGE_NAME
# Report pass/fail; always exit 0 so the bulk-test harness keeps going.
if ! npm test; then
echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
exit 0
else
echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
echo "$PACKAGE_URL $PACKAGE_NAME"
echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
exit 0
fi
# Creation of routing instances
export ROUTER_NAMESPACE=router
oc new-project $ROUTER_NAMESPACE
oc project $ROUTER_NAMESPACE
# Creation of Service Account
echo \
'{"kind":"ServiceAccount","apiVersion":"v1","metadata":{"name":"router"}}' \
| oc create -f -
# Edit privileged, add to bottom under users: - system:serviceaccount:default:router
# (interactive manual step — `oc edit` opens an editor)
oc edit scc privileged
# Edit Project Node Selector to DMZ
# In the annotations list, add: openshift.io/node-selector: region=dmz
# (also an interactive manual step)
oc edit namespace $ROUTER_NAMESPACE
# Create Routing Instances
# Two router replicas pinned to region=dmz nodes, plus an ipfailover set
# managing the virtual IP for them.
oadm router ha-router \
--replicas=2 \
--selector="region=dmz" --labels="ha-router=dmz" \
--credentials="/etc/openshift/master/openshift-router.kubeconfig" --service-account=router
oadm ipfailover ipf-ha-router --replicas=2 --watch-port=80 \
--selector="region=dmz" --virtual-ips="10.154.1.50" \
--credentials="/etc/openshift/master/openshift-router.kubeconfig" --service-account=router --create
|
#!/bin/bash
# Provision leader/worker VMs on VMware vSphere with docker-machine, form a
# Swarm cluster, create an overlay network, and launch a demo web stack.
# Expects box.sh (pretty-printing) and variables.sh ($leaders, $workers, $t,
# $ESX_USER, $ESX_PASS, $ESX_IP) alongside this script.
source ./box.sh
source ./variables.sh

# create_node <name> — create one docker-machine VM on vSphere.
# Factored out: the leader and worker loops previously duplicated this call.
create_node() {
  docker-machine create \
    --engine-env 'DOCKER_OPTS="-H unix:///var/run/docker.sock"' \
    --driver vmwarevsphere \
    --vmwarevsphere-username="${ESX_USER}" \
    --vmwarevsphere-password="${ESX_PASS}" \
    --vmwarevsphere-vcenter="${ESX_IP}" \
    "$1"
}

box "Starting Docker Machine creation" "green" "blue"
for node in $(seq 1 "$leaders"); do
  box "Node leader $node" "light_purple" "red"
  create_node "leader$node"
done
for node in $(seq 1 "$workers"); do
  box "Node worker $node" "light_purple" "red"
  create_node "worker$node"
done

# Point the docker CLI at leader1 and initialise the swarm there.
eval "$(docker-machine env leader1)"
box "Init Swarm cluster" "light_purple" "blue"
docker swarm init --listen-addr "$(docker-machine ip leader1)" --advertise-addr "$(docker-machine ip leader1)"
token=$(docker swarm join-token -q worker)
# Join every worker to the swarm using the worker token.
for node in $(seq 1 "$workers"); do
  eval "$(docker-machine env worker$node)"
  docker swarm join --token "$token" "$(docker-machine ip leader1):2377"
done
# Back to leader1 for service creation (quoted eval, unlike the original L925).
eval "$(docker-machine env leader1)"
box "Overlay Network creation" "light_purple" "blue"
docker network create -d overlay swarmnet
box "Starting WebStack services" "light_green" "green"
docker service create --name phpfpm --network swarmnet --publish 9000:9000 itwars/phpfpm
docker service create --name web --network swarmnet --publish 8001:8001 itwars/h2o
docker service create --name cache --network swarmnet --publish 8000:8000 itwars/varnish
docker service create --name lb --network swarmnet --publish 80:80 itwars/haproxy
# Give the services a moment to converge before starting the visualizer.
sleep "$t"
docker run -it -d -p 5000:5000 -e HOST="$(docker-machine ip leader1)" -e PORT=5000 -v /var/run/docker.sock:/var/run/docker.sock manomarks/visualizer
box "Open web browser to visualize cluster" "light_purple" "green"
open "http://$(docker-machine ip leader1):5000"
box "To scale type:eval \$(docker-machine env leader1) && docker service scale web=10" "red" "red"
box "To remove swarm cluster and cleanup: ./remove.sh" "red" "red"
|
#!/bin/sh
# shellcheck disable=SC1090
# shellcheck disable=SC1091
# shellcheck disable=SC2039
# _load_config must succeed when CONFIG points at the shipped config file.
test_load_config()
{
CONFIG="../etc/basepkg.conf"
_load_config
local status=$?
assertEquals "0" "$status"
}
# A bogus CONFIG path must make _load_config report "<path> not found".
test_load_config_invalid_path_pattern()
{
CONFIG="xxxxx"
# Split declaration from assignment so the command's status isn't masked.
local result
result="$(_load_config)"
local expected="$CONFIG not found"
# FIX: shunit2's assertEquals takes EXPECTED first, then ACTUAL; the
# original passed them reversed, producing misleading failure messages.
assertEquals "$expected" "$result"
}
# After a successful _load_config every documented config variable must be
# set and non-empty, while an undefined name (INVALID) must be empty.
# Each pair works as: `test -n` sets $?, then assertEquals checks that status,
# so the two lines must stay adjacent in this exact order.
test_value_is_available_by_load_config()
{
CONFIG="../etc/basepkg.conf"
_load_config
test -n "$SRC"
assertEquals "0" "$?"
test -n "$OBJ"
assertEquals "0" "$?"
test -n "$LOG"
assertEquals "0" "$?"
test -n "$HOMEPAGE"
assertEquals "0" "$?"
test -n "$MAINTAINER"
assertEquals "0" "$?"
test -n "$CATEGORY"
assertEquals "0" "$?"
test -n "$PKGDB"
assertEquals "0" "$?"
# INVALID is never set by the config, so `test -n` must fail with status 1.
test -n "$INVALID"
assertEquals "1" "$?"
}
# _usage must print exactly the help text below for the given PROGNAME/OBJ/
# machine values. The expected string is compared verbatim, so do not reflow
# or re-indent the lines inside the quoted block.
test_usage()
{
local PROGNAME="basepkg"
local OBJ="."
local machine="amd64"
local expected="Usage: $PROGNAME [-a architecture] [-c config] [-d destdir] [-m machine] [-r releasedir] operation
Operations:
pkg Create packages.
kern Create kernel package.
clean Clean working directories.
cleanpkg Clean package directories.
Options:
-a Set machine_arch to architecture.
[Default: deduced from \"machine\"]
-c Specify config path
-d Set destdir.
[Default: $OBJ/destdir.$machine]
-m Set machine type for MACHINE_ARCH.
[Default: result of \`uname -m\`]
-r Set RELEASEDIR.
-h Show this message and exit."
local result="$(_usage)"
assertEquals "$expected" "$result"
}
# _getopt must extract the value part of a --key=value style argument.
test_getopt()
{
local arg="--obj=/usr/obj"
local expected="/usr/obj"
# Split declaration from assignment so the status isn't masked, and quote
# $arg (the original's unquoted expansion would word-split on spaces).
local result
result="$(_getopt "$arg")"
assertEquals "$expected" "$result"
}
# With RELEASE set, _check_release_number_could_resolve must emit nothing.
# _bomb is stubbed to echo instead of aborting so the output can be captured.
test_check_release_number_could_resolve()
{
_bomb()
{
echo "$@"
}
RELEASE="9.99.99"
local result
result="$(_check_release_number_could_resolve)"
local expected=""
# FIX: shunit2's assertEquals takes EXPECTED first, then ACTUAL.
assertEquals "$expected" "$result"
}
# With RELEASE empty, the check must report "cannot resolve $RELEASE".
# _bomb is stubbed to echo instead of aborting so the message is captured.
test_check_release_number_could_resolve_fail_pattern()
{
_bomb()
{
echo "$@"
}
RELEASE=""
local result
result="$(_check_release_number_could_resolve)"
local expected="cannot resolve \$RELEASE"
# FIX: shunit2's assertEquals takes EXPECTED first, then ACTUAL.
assertEquals "$expected" "$result"
}
# _check_non_posix_commands must succeed when every non-POSIX tool it probes
# (hostname, mktemp, pkg_create, pkg_admin) is resolvable. Each tool is
# stubbed as a no-op shell function so `command -v` finds it without the
# real binaries being installed; the stubs must be defined BEFORE the check
# runs. _bomb_if_command_not_found is also overridden to return 1 instead
# of aborting the whole test run.
test_check_non_posix_commands()
{
hostname()
{
:
}
mktemp(){
:
}
pkg_create(){
:
}
pkg_admin(){
:
}
_bomb_if_command_not_found()
{
command -v "$1" 2>&1 || return 1
}
_check_non_posix_commands
assertEquals "0" "$?"
}
# Load shared test helpers, then the code under test, and finally shunit2
# itself, which discovers and runs every test_* function defined above.
. ./common.sh
. ../lib/CommandLine
. "$SHUNIT2"
|
'use strict';

// ApplicationStore must expose a writable `dataService` accessor: what is
// assigned to it is what is read back.
const { expect } = require('chai');
const ApplicationStore = require('../lib/application-store');

describe('ApplicationStore', function() {
  describe('#set dataService', function() {
    it('sets the data service', function() {
      const store = ApplicationStore;
      store.dataService = 'test';
      expect(store.dataService).to.equal('test');
    });
  });
});
|
// Given class and methods
// Fenwick tree (binary indexed tree) over element type T, combined with
// `merge` (here: operator+) and identity NEUTRAL.
// NOTE(review): NEUTRAL is referenced but not defined in this file — it is
// presumably a file-scope identity constant; confirm where it is declared.
template <typename T>
class binary_indexed_tree {
  std::vector<T> dat;  // 1-indexed internal storage; dat[0] is unused
  T merge(T a, T b) const { return a + b; }
  // ... other constructors and methods
public:
  // N elements, all initialized to the identity.
  binary_indexed_tree(size_t N) : dat(N + 1, NEUTRAL) {}
  // N elements, each initialized to t (applied via update so the internal
  // prefix structure stays consistent).
  binary_indexed_tree(size_t N, T t) : dat(N + 1, NEUTRAL) {
    for (size_t i = 0; i < N; ++i) {  // FIX: size_t counter avoids the
      update(i, t);                   // signed/unsigned comparison of int vs N
    }
  }
  // Merge val into 0-based position idx.
  void update(int idx, T val) {
    idx++;  // shift to the tree's 1-based indexing
    // FIX: cast the size to int so the loop condition does not mix signed
    // and unsigned operands (original compared int against size_t).
    while (idx < (int)dat.size()) {
      dat[idx] = merge(dat[idx], val);
      idx += idx & -idx;
    }
  }
  // Fold of the first idx elements, i.e. 0-based positions [0, idx).
  // idx is already a count, so no 1-based shift is applied here.
  T query(int idx) const {
    T res = NEUTRAL;
    while (idx > 0) {
      res = merge(res, dat[idx]);
      idx -= idx & -idx;
    }
    return res;
  }
};
// repo: sportsreport2/nodeless-trakt-ts
export type Status =
| "ended"
| "returning series"
| "canceled"
| "in production";
export type Type = "movie" | "show" | "episode" | "person" | "list";
export type ExtendedType =
| "full"
| "noseasons"
| "episodes"
| "full,episodes"
| "metadata";
export type ListPrivacy = "private" | "friends" | "public";
export type IdType = "trakt" | "imdb" | "tmdb" | "tvdb" | "tvrage";
export type Hdr = "dolby_vision" | "hdr10" | "hdr10_plus" | "hlg";
export type MediaType =
| "digital"
| "bluray"
| "hddvd"
| "dvd"
| "vcd"
| "vhs"
| "betamax"
| "laserdisc";
export type ProgressLastActivity = "collected" | "watched";
export type SortHow = "asc" | "desc";
export type RatingsFilter =
| ""
| "/1"
| "/2"
| "/3"
| "/4"
| "/5"
| "/6"
| "/7"
| "/8"
| "/9"
| "/10";
export type AudioChannels =
| "1.0"
| "2.0"
| "2.1"
| "3.0"
| "3.1"
| "4.0"
| "4.1"
| "5.0"
| "5.1"
| "5.1.2"
| "5.1.4"
| "6.1"
| "7.1"
| "7.1.2"
| "7.1.4"
| "9.1"
| "10.1";
export type HistoryType = "movies" | "shows" | "seasons" | "episodes";
export type CommentSortBy =
| "newest"
| "oldest"
| "likes"
| "replies"
| "highest"
| "lowest"
| "plays";
export type ListSortBy =
| "popular"
| "likes"
| "comments"
| "items"
| "added"
| "updated";
export type SortBy =
| "rank"
| "added"
| "title"
| "released"
| "runtime"
| "popularity"
| "percentage"
| "votes"
| "my_rating"
| "random";
export type Audio =
| "lpcm"
| "mp3"
| "mp2"
| "aac"
| "ogg"
| "ogg_opus"
| "wma"
| "flac"
| "dts"
| "dts_ma"
| "dts_hr"
| "dts_x"
| "auro_3d"
| "dolby_digital"
| "dolby_digital_plus"
| "dolby_digital_plus_atmos"
| "dolby_atmos"
| "dolby_truehd"
| "dolby_prologic";
export type Resolution =
| "uhd_4k"
| "hd_1080p"
| "hd_1080i"
| "hd_720p"
| "sd_480p"
| "sd_480i"
| "sd_576p"
| "sd_576i";
export type Rating = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10;
export type ListType =
| "all"
| "personal"
| "official"
| "watchlists"
| "recommendations";
export type CommentType = "all" | "reviews" | "shouts";
export type PeriodType = "daily" | "weekly" | "monthly" | "yearly" | "all";
type SearchIdType = "trakt" | "imdb" | "tmdb" | "tvdb";
export interface PlaybackResponse extends GenericProgress {
id?: number;
paused_at?: string;
type?: string;
action?: string;
sharing?: ShareSettings;
}
export interface DeviceCode {
device_code?: string;
user_code?: string;
verification_url?: string;
expires_in?: number;
interval?: number;
}
export interface SyncShow {
ids?: ShowIds;
seasons?: SyncSeason[];
collected_at?: string;
watched_at?: string;
rated_at?: string;
rating?: Rating;
}
export interface BaseEntity {
title?: string;
overview?: string;
rating?: number;
votes?: number;
updated_at?: string;
available_translations?: string[];
}
export interface Friend {
friends_at?: string;
user?: User;
}
export interface Connections {
facebook?: boolean;
twitter?: boolean;
tumblr?: boolean;
}
export interface EpisodeCheckinResponse extends BaseCheckinResponse {
episode?: Episode;
show?: Show;
}
export interface Account {
timezone?: string;
cover_image?: string;
}
export interface ListItemRank {
rank?: number[];
}
export interface TrendingMovie extends BaseTrendingEntity {
movie?: Movie;
}
export interface SyncEpisode {
season?: number;
number?: number;
ids?: EpisodeIds;
collected_at?: string;
watched_at?: string;
rated_at?: string;
rating?: Rating;
media_type?: MediaType;
resolution?: Resolution;
hdr?: Hdr;
audio?: Audio;
audio_channels?: AudioChannels;
"3d"?: boolean;
}
export interface CastMember {
character?: string;
movie?: Movie;
show?: Show;
person?: Person;
}
export interface ShareSettings {
facebook?: boolean;
twitter?: boolean;
tumblr?: boolean;
}
export interface CalendarMovieEntry {
released?: string;
movie?: Movie;
}
export interface TraktError {
message?: string;
}
export interface BaseIds {
trakt?: number;
imdb?: string;
tmdb?: number;
}
export interface Person {
name?: string;
ids?: PersonIds;
biography?: string;
birthday?: string;
death?: string;
birthplace?: string;
homepage?: string;
}
export interface SyncResponse {
added?: SyncStats;
existing?: SyncStats;
deleted?: SyncStats;
not_found?: SyncErrors;
}
export interface RatedShow extends BaseRatedEntity {
show?: Show;
}
export interface LastActivities {
all?: string;
movies?: LastActivityMore;
episodes?: LastActivityMore;
shows?: LastActivity;
seasons?: LastActivity;
lists?: ListsLastActivity;
}
export interface LastActivity {
rated_at?: string;
watchlisted_at?: string;
commented_at?: string;
}
export interface MovieTranslation extends Translation {
tagline?: string;
}
export interface CheckinError {
expires_at?: string;
}
export interface BaseShow {
show?: Show;
seasons?: BaseSeason[];
last_collected_at?: string;
listed_at?: string;
plays?: number;
last_watched_at?: string;
last_updated_at?: string;
reset_at?: string;
aired?: number;
completed?: number;
hidden_seasons?: Season[];
next_episode?: Episode;
last_episode?: Episode;
}
export interface Show extends BaseEntity {
year?: number;
ids?: ShowIds;
first_aired?: string;
airs?: Airs;
runtime?: number;
certification?: string;
network?: string;
country?: string;
trailer?: string;
homepage?: string;
status?: Status;
language?: string;
genres?: string[];
}
export interface Genre {
name?: string;
slug?: string;
}
export interface SyncItems {
movies?: SyncMovie[];
shows?: SyncShow[];
episodes?: SyncEpisode[];
people?: SyncPerson[];
ids?: number[];
}
export interface Followed {
approved_at?: string;
user?: User;
}
export interface AccessTokenRefreshRequest {
grant_type?: string;
refresh_token?: string;
client_id?: string;
client_secret?: string;
redirect_uri?: string;
}
export interface Follower {
followed_at?: string;
user?: User;
}
export interface ListIds {
trakt?: number;
slug?: string;
}
export interface SyncErrors {
movies?: SyncMovie[];
shows?: SyncShow[];
seasons?: SyncSeason[];
episodes?: SyncEpisode[];
people?: SyncPerson[];
ids?: number[];
}
export interface Crew {
writing?: CrewMember[];
production?: CrewMember[];
directing?: CrewMember[];
"costume & make-up"?: CrewMember[];
art?: CrewMember[];
sound?: CrewMember[];
camera?: CrewMember[];
}
export interface BaseTrendingEntity {
watchers?: number;
}
export interface TrendingShow extends BaseTrendingEntity {
show?: Show;
}
export interface ListsLastActivity {
liked_at?: string;
updated_at?: string;
commented_at?: string;
}
export interface SyncSeason {
number?: number;
episodes?: SyncEpisode[];
collected_at?: string;
watched_at?: string;
rated_at?: string;
rating?: Rating;
}
export interface SearchResult {
type?: string;
score?: number;
movie?: Movie;
show?: Show;
episode?: Episode;
person?: Person;
list?: TraktList;
}
export interface TraktList {
ids?: ListIds;
name?: string;
description?: string;
privacy?: ListPrivacy;
display_numbers?: boolean;
allow_comments?: boolean;
sort_by?: SortBy;
sort_how?: SortHow;
created_at?: string;
updated_at?: string;
item_count?: number;
comment_count?: number;
likes?: number;
user?: User;
}
export interface TraktTrendingList {
like_count: number;
comment_count: number;
list: TraktList;
}
export interface RatedSeason extends RatedShow {
season?: Season;
}
export interface SyncPerson {
ids?: PersonIds;
name?: string;
}
export interface SyncStats {
movies?: number;
shows?: number;
seasons?: number;
episodes?: number;
people?: number;
}
export interface TraktOAuthError {
error?: string;
error_description?: string;
}
export interface SyncMovie {
ids?: MovieIds;
collected_at?: string;
watched_at?: string;
rated_at?: string;
rating?: Rating;
media_type?: MediaType;
resolution?: Resolution;
hdr?: Hdr;
audio?: Audio;
audio_channels?: AudioChannels;
"3d"?: boolean;
}
export interface Episode extends BaseEntity {
season?: number;
number?: number;
ids?: EpisodeIds;
number_abs?: number;
first_aired?: string;
comment_count?: number;
runtime?: number;
}
export interface Settings {
user?: User;
account?: Account;
connections?: Connections;
sharing_text?: SharingText;
}
export interface Translation {
language?: string;
title?: string;
overview?: string;
}
export interface ListReorderResponse {
updated?: number;
skipped_ids?: number[];
}
export interface ScrobbleProgress extends GenericProgress {
app_version?: string;
app_date?: string;
}
export interface DeviceCodeAccessTokenRequest {
code?: string;
client_id?: string;
client_secret?: string;
}
export interface PersonIds extends BaseIds {
slug?: string;
tvrage?: string;
}
export interface EpisodeCheckin extends BaseCheckin {
show?: Show;
episode?: SyncEpisode;
}
export interface Builder {
show?: Show;
episode?: SyncEpisode;
sharing?: ShareSettings;
message?: string;
venue_id?: string;
venue_name?: string;
app_version?: string;
app_date?: string;
}
export interface Ratings {
rating?: number;
votes?: number;
distribution?: {
[key: string]: number;
};
}
export interface CrewMember {
job?: string;
movie?: Movie;
show?: Show;
person?: Person;
}
export interface WatchlistedSeason {
listed_at?: string;
season?: Season;
show?: Show;
}
export interface MovieIds extends BaseIds {
slug?: string;
}
export interface MovieCheckin extends BaseCheckin {
movie?: SyncMovie;
}
export interface Builder {
movie?: SyncMovie;
sharing?: ShareSettings;
message?: string;
venue_id?: string;
venue_name?: string;
app_version?: string;
app_date?: string;
}
export interface Season {
number?: number;
ids?: SeasonIds;
title?: string;
overview?: string;
network?: string;
first_aired?: string;
rating?: number;
votes?: number;
episode_count?: number;
aired_episodes?: number;
episodes?: Episode[];
}
export interface LastActivityMore extends LastActivity {
watched_at?: string;
collected_at?: string;
paused_at?: string;
hidden_at?: string;
}
export interface Credits {
cast?: CastMember[];
crew?: Crew;
}
export interface BaseCheckin {
sharing?: ShareSettings;
message?: string;
venue_id?: string;
venue_name?: string;
app_version?: string;
app_date?: string;
}
export interface ClientId {
client_id?: string;
}
export interface BaseMovie {
movie?: Movie;
collected_at?: string;
last_watched_at?: string;
last_updated_at?: string;
listed_at?: string;
plays?: number;
metadata?: Metadata;
}
export interface Comment {
id?: number;
parent_id?: number;
created_at?: string;
updated_at?: string;
comment?: string;
spoiler?: boolean;
review?: boolean;
replies?: number;
likes?: number;
user_rating?: number;
user?: User;
movie?: Movie;
show?: Show;
episode?: Episode;
}
export interface CommentListItem {
type?: Type;
movie?: Movie;
show?: Show;
season?: Season;
episode?: Episode;
list?: any;
comment?: Comment;
}
export interface RatedMovie extends BaseRatedEntity {
movie?: Movie;
}
export interface ListEntry {
id?: number;
rank?: number;
listed_at?: string;
type?: string;
movie?: Movie;
show?: Show;
episode?: Episode;
person?: Person;
}
export interface Movie extends BaseEntity {
year?: number;
ids?: MovieIds;
certification?: string;
tagline?: string;
released?: string;
runtime?: number;
trailer?: string;
homepage?: string;
language?: string;
genres?: string[];
}
export interface Images {
avatar?: ImageSizes;
}
export interface ImageSizes {
full?: string;
}
export interface EpisodeIds extends BaseIds {
tvdb?: number;
tvrage?: number;
}
export interface Stats {
watchers?: number;
plays?: number;
collectors?: number;
comments?: number;
lists?: number;
votes?: number;
collected_episodes?: number;
}
export interface MovieCheckinResponse extends BaseCheckinResponse {
movie?: Movie;
}
export interface BaseEpisode {
number?: number;
collected_at?: string;
plays?: number;
last_watched_at?: string;
completed?: boolean;
metadata?: Metadata;
}
export interface UserSlug {
ME?: UserSlug;
userSlug?: string;
}
export interface BaseCheckinResponse {
watched_at?: string;
sharing?: ShareSettings;
}
export interface Metadata {
media_type?: MediaType;
resolution?: Resolution;
hdr?: Hdr;
audio?: Audio;
audio_channels?: AudioChannels;
"3d"?: boolean;
}
export interface BaseSeason {
number?: number;
episodes?: BaseEpisode[];
aired?: number;
completed?: number;
}
export interface Airs {
day?: string;
time?: string;
timezone?: string;
}
export interface RatedEpisode extends RatedShow {
episode?: Episode;
}
export interface HistoryEntry {
id?: number;
watched_at?: string;
action?: string;
type?: string;
episode?: Episode;
show?: Show;
movie?: Movie;
}
export interface AccessToken {
access_token?: string;
token_type?: string;
expires_in?: number;
refresh_token?: string;
scope?: string;
created_at?: number;
}
export interface User {
username?: string;
private?: boolean;
name?: string;
vip?: boolean;
vip_ep?: boolean;
ids?: UserIds;
joined_at?: string;
location?: string;
about?: string;
gender?: string;
age?: number;
images?: Images;
}
export interface UserIds {
slug?: string;
}
export interface WatchlistedEpisode {
listed_at?: string;
episode?: Episode;
show?: Show;
}
export interface AccessTokenRequest {
grant_type?: string;
code?: string;
client_id?: string;
client_secret?: string;
redirect_uri?: string;
}
export interface SharingText {
watching?: string;
watched?: string;
}
export interface SeasonIds {
tvdb?: number;
tmdb?: number;
trakt?: number;
tvrage?: number;
}
export interface ShowIds extends BaseIds {
slug?: string;
tvdb?: number;
tvrage?: number;
}
export interface GenericProgress {
episode?: SyncEpisode;
show?: SyncShow;
movie?: SyncMovie;
progress?: number;
}
export interface BaseRatedEntity {
rated_at?: string;
rating?: Rating;
}
export interface CalendarShowEntry {
first_aired?: string;
episode?: Episode;
show?: Show;
}
export interface TraktSettings {
client_id: string;
client_secret: string;
redirect_uri?: string;
debug?: boolean;
endpoint?: string;
}
export interface Sharing {
twitter?: boolean;
tumblr?: boolean;
}
export interface Extended {
extended?: ExtendedType;
}
export interface Calendars extends Extended {
start_date: string;
days: string;
}
export interface CheckinBody {
movie?: Movie;
episode?: Episode;
sharing?: Sharing;
message?: string;
venue_id?: string;
venue_name?: string;
app_version?: string;
app_date?: string;
}
export interface ScrobbleBody {
movie?: Movie;
episode?: Episode;
progress?: number;
app_version?: string;
app_date?: string;
}
export interface CommentBody extends Comment {
movie?: Movie;
show?: Show;
season?: Season;
episode?: Episode;
list?: any;
}
export interface CommentQuery {
comment_type?: CommentType;
type?: "all" | "movies" | "shows" | "seasons" | "episodes" | "lists";
include_replies?: boolean;
}
export interface TypeQuery {
type?: ListType;
}
export interface Id {
id: string | number;
}
export interface PaginationQuery {
page?: number | string;
limit?: number | string;
pagination?: boolean;
}
export interface PeriodQuery {
period?: PeriodType;
}
export interface CommentSortQuery {
sort?: CommentSortBy;
}
export interface ListSortQuery {
sort?: ListSortBy;
}
export interface MovieQuery {
type: "movie";
fields?:
| "title"
| "tagline"
| "overview"
| "people"
| "translations"
| "aliases";
}
// Search parameters for a show text search.
export interface ShowQuery {
  type: "show";
  // FIX: terminate members with semicolons, matching every other interface
  // in this file (the original omitted them on `query` and `years`).
  query: string;
  years?: number;
  fields?: "title" | "overview" | "people" | "translations" | "aliases";
}
export interface EpisodeSearchQuery {
type: "episode";
fields?: "title" | "overview";
}
export interface PersonQuery {
type: "person";
fields?: "name" | "biography";
}
export interface ListQuery {
type: "list";
fields?: "name" | "description";
}
export interface SearchIdQuery {
id_type: SearchIdType;
type?: ListType;
}
// Identifies a season within a show (used intersected with Id elsewhere).
export interface SeasonQuery {
  // FIX: add the semicolon omitted on `id`, matching the file's convention.
  id: number;
  season: number | string;
}
export interface EpisodeQuery {
episode: number | string;
}
export interface Country {
name: string;
code: string;
}
export interface Language {
name: string;
code: string;
}
export interface PlayedMovie {
watcher_count: number;
play_count: number;
collected_count: number;
movie: Movie;
}
export interface AnticipatedMovie {
list_count: number;
movie: Movie;
}
export interface BoxOfficeMovie {
revenue: number;
movie: Movie;
}
export interface UpdatedMovie {
updated_at: number;
movie: Movie;
}
export interface Alias {
title: string;
country: string;
}
export interface PlayedShow {
watcher_count: number;
play_count: number;
collector_count: number;
collected_count: number;
show: Show;
}
export interface AnticipatedShow {
list_count: number;
show: Show;
}
export interface BoxOfficeShow {
revenue: number;
show: Show;
}
export interface UpdatedShow {
updated_at: number;
show: Show;
}
export declare class Trakt {
constructor(settings: TraktSettings, debug?: boolean);
request: {
calendars: {
my: {
shows(params?: Calendars): Object;
new_shows(params?: Calendars): Object;
premieres_shows(params?: Calendars): Object;
movies(params?: Calendars): Object;
dvd(params?: Calendars): Object;
};
all: {
shows(params?: Calendars): Object;
new_shows(params?: Calendars): Object;
premieres_shows(params?: Calendars): Object;
movies(params?: Calendars): Object;
dvd(params?: Calendars): Object;
};
};
checkin: {
add(
params: CheckinBody
): Object;
delete(): Object;
};
certifications(params: { type: string }): Promise<any>;
comments: {
comment: {
add(params: CommentBody): Object;
get(params: Id): Object;
update(params: Id & Comment): Object;
remove(params: Id): Object;
};
replies: {
add(params: Id & Comment): Object;
get(params: Id & PaginationQuery): Object;
update(params: Id & Comment): Object;
remove(params: Id): Object;
};
item(
params: Id & Extended
): Object;
likes(
params: Id & PaginationQuery
): Object;
like: {
add(params: Id): Object;
remove(params: Id): Object;
};
trending(
params?: CommentQuery & Extended & PaginationQuery
): Object;
recent(
params?: CommentQuery & Extended & PaginationQuery
): Object;
updates(
params?: CommentQuery & Extended & PaginationQuery
): Object;
};
countries(params: { type: "movies" | "shows" }): Object;
genres(params: { type: "movies" | "shows" }): Object;
languages(params: { type: "movies" | "shows" }): Object;
lists: {
trending(params?: PaginationQuery): Object;
popular(params?: PaginationQuery): Object;
};
movies: {
trending(params?: PaginationQuery & Extended): Object;
popular(params?: PaginationQuery & Extended): Object;
played(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
watched(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
collected(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
anticipated(
params?: PaginationQuery & Extended
): Object;
boxoffice(params?: Extended): Object;
updates(
params?: { start_date?: string } & PaginationQuery & Extended
): Object;
summary(params: Id & Extended): Object;
aliases(params: Id): Object;
releases(params: Id & { country?: string }): Object;
translations(
params: Id & { language?: string }
): Object;
comments(
params: Id & CommentSortQuery & PaginationQuery
): Object;
lists(params: Id & TypeQuery & ListSortQuery): Object;
people(params: Id & Extended): Object;
ratings(params: Id): Object;
related(params: Id & Extended & PaginationQuery): Object;
stats(params: Id): Object;
watching(params: Id & Extended): Object;
};
networks(): Object;
people: {
summary(params: Id & Extended): Object;
shows(params: Id & Extended): Object;
movies(params: Id & Extended): Object;
lists(params: Id & TypeQuery & ListSortQuery): Object;
};
recommendations: {
movies: {
get(
params: Extended & {
limit?: number | string;
ignore_collected?: boolean;
}
): Object;
hide(params: Id): Object;
};
shows: {
get(
params: Extended & {
limit?: number | string;
ignore_collected?: boolean;
}
): Object;
hide(params: Id): Object;
};
};
scrobble: {
start(params: ScrobbleBody): Object;
pause(params: ScrobbleBody): Object;
stop(params: ScrobbleBody): Object;
};
search: {
text(
params: (
| MovieQuery
| ShowQuery
| EpisodeSearchQuery
| PersonQuery
| ListQuery
) &
PaginationQuery &
Extended
): Object;
id(
params: SearchIdQuery & PaginationQuery & Extended
): Object;
};
shows: {
trending(params?: PaginationQuery & Extended): Object;
popular(params?: PaginationQuery & Extended): Object;
played(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
watched(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
collected(
params?: PeriodQuery & PaginationQuery & Extended
): Object;
anticipated(
params?: PaginationQuery & Extended
): Object;
updates(
params?: { start_date?: string } & PaginationQuery & Extended
): Object;
summary(params: Id & Extended): Object;
aliases(params: Id): Object;
translations(params: Id & { language?: string }): Object;
comments(
params: Id & CommentSortQuery & PaginationQuery
): Object;
lists(params: Id & TypeQuery & ListSortQuery): Object;
progress: {
collection: any;
watched: any;
};
people(params: Id & Extended): Object;
ratings(params: Id): Object;
related(params: Id & Extended & PaginationQuery): Object;
stats(params: Id): Object;
watching(params: Id & Extended): Object;
next_episode(params: Id & Extended): Object;
last_episode(params: Id & Extended): Object;
};
seasons: {
summary(params: Id & Extended): Object;
season(
params?: Id & SeasonQuery & Extended & { translations?: string }
): Object;
comments(
params: Id & SeasonQuery & CommentSortQuery & PaginationQuery
): Object;
lists(
params: Id & SeasonQuery & TypeQuery & ListSortQuery & PaginationQuery
): Object;
people(params: Id & SeasonQuery & Extended): Object;
ratings(params: Id & SeasonQuery): Object;
watching(params: Id & SeasonQuery & Extended): Object;
};
episodes: {
summary(
params: Id & SeasonQuery & EpisodeQuery & Extended
): Object;
translations(
params: Id & SeasonQuery & EpisodeQuery & { language?: string }
): Object;
comments(
params: Id &
SeasonQuery &
EpisodeQuery &
CommentSortQuery &
PaginationQuery
): Object;
lists(
params: Id &
SeasonQuery &
EpisodeQuery &
TypeQuery &
ListSortQuery &
PaginationQuery
): Object;
people(
params: Id & SeasonQuery & EpisodeQuery & { extended?: "guest_stars" }
): Object;
ratings(params: Id & SeasonQuery & EpisodeQuery): Object;
stats(params: Id & SeasonQuery & EpisodeQuery): Object;
watching(
params: Id & SeasonQuery & EpisodeQuery & Extended
): Object;
};
}
calendars: {
my: {
shows(params?: Calendars): Promise<CalendarShowEntry[]>;
new_shows(params?: Calendars): Promise<CalendarShowEntry[]>;
premieres_shows(params?: Calendars): Promise<CalendarShowEntry[]>;
movies(params?: Calendars): Promise<CalendarMovieEntry[]>;
dvd(params?: Calendars): Promise<CalendarMovieEntry[]>;
};
all: {
shows(params?: Calendars): Promise<CalendarShowEntry[]>;
new_shows(params?: Calendars): Promise<CalendarShowEntry[]>;
premieres_shows(params?: Calendars): Promise<CalendarShowEntry[]>;
movies(params?: Calendars): Promise<CalendarMovieEntry[]>;
dvd(params?: Calendars): Promise<CalendarMovieEntry[]>;
};
};
checkin: {
add(
params: CheckinBody
): Promise<MovieCheckinResponse | EpisodeCheckinResponse>;
delete(): Promise<void>;
};
certifications(params: { type: string }): Promise<any>;
comments: {
comment: {
add(params: CommentBody): Promise<Comment>;
get(params: Id): Promise<Comment>;
update(params: Id & Comment): Promise<Comment>;
remove(params: Id): Promise<void>;
};
replies: {
add(params: Id & Comment): Promise<Comment>;
get(params: Id & PaginationQuery): Promise<Comment[]>;
update(params: Id & Comment): Promise<Comment>;
remove(params: Id): Promise<void>;
};
item(
params: Id & Extended
): Promise<Movie | Season | Show | TraktList | Person>;
likes(
params: Id & PaginationQuery
): Promise<{ liked_at: string; user: User }[]>;
like: {
add(params: Id): Promise<void>;
remove(params: Id): Promise<void>;
};
trending(
params?: CommentQuery & Extended & PaginationQuery
): Promise<CommentListItem[]>;
recent(
params?: CommentQuery & Extended & PaginationQuery
): Promise<CommentListItem[]>;
updates(
params?: CommentQuery & Extended & PaginationQuery
): Promise<CommentListItem[]>;
};
countries(params: { type: "movies" | "shows" }): Promise<Country[]>;
genres(params: { type: "movies" | "shows" }): Promise<Genre[]>;
languages(params: { type: "movies" | "shows" }): Promise<Language[]>;
lists: {
trending(params?: PaginationQuery): Promise<TraktTrendingList[]>;
popular(params?: PaginationQuery): Promise<TraktTrendingList[]>;
};
movies: {
trending(params?: PaginationQuery & Extended): Promise<TrendingMovie[]>;
popular(params?: PaginationQuery & Extended): Promise<Movie[]>;
played(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedMovie[]>;
watched(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedMovie[]>;
collected(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedMovie[]>;
anticipated(
params?: PaginationQuery & Extended
): Promise<AnticipatedMovie[]>;
boxoffice(params?: Extended): Promise<BoxOfficeMovie[]>;
updates(
params?: { start_date?: string } & PaginationQuery & Extended
): Promise<UpdatedMovie[]>;
summary(params: Id & Extended): Promise<Movie>;
aliases(params: Id): Promise<Alias[]>;
releases(params: Id & { country?: string }): Promise<any>;
translations(
params: Id & { language?: string }
): Promise<MovieTranslation[]>;
comments(
params: Id & CommentSortQuery & PaginationQuery
): Promise<Comment[]>;
lists(params: Id & TypeQuery & ListSortQuery): Promise<TraktList[]>;
people(params: Id & Extended): Promise<Credits>;
ratings(params: Id): Promise<Ratings>;
related(params: Id & Extended & PaginationQuery): Promise<Movie[]>;
stats(params: Id): Promise<Stats>;
watching(params: Id & Extended): Promise<User[]>;
};
networks(): Promise<{ name: string }[]>;
people: {
summary(params: Id & Extended): Promise<Person>;
shows(params: Id & Extended): Promise<Credits>;
movies(params: Id & Extended): Promise<Credits>;
lists(params: Id & TypeQuery & ListSortQuery): Promise<TraktList[]>;
};
recommendations: {
movies: {
get(
params: Extended & {
limit?: number | string;
ignore_collected?: boolean;
}
): Promise<Movie[]>;
hide(params: Id): Promise<void>;
};
shows: {
get(
params: Extended & {
limit?: number | string;
ignore_collected?: boolean;
}
): Promise<Show[]>;
hide(params: Id): Promise<void>;
};
};
scrobble: {
start(params: ScrobbleBody): Promise<PlaybackResponse>;
pause(params: ScrobbleBody): Promise<PlaybackResponse>;
stop(params: ScrobbleBody): Promise<PlaybackResponse>;
};
search: {
text(
params: (
| MovieQuery
| ShowQuery
| EpisodeSearchQuery
| PersonQuery
| ListQuery
) &
PaginationQuery &
Extended
): Promise<SearchResult[]>;
id(
params: SearchIdQuery & PaginationQuery & Extended
): Promise<SearchResult[]>;
};
shows: {
trending(params?: PaginationQuery & Extended): Promise<TrendingShow[]>;
popular(params?: PaginationQuery & Extended): Promise<Show[]>;
played(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedShow[]>;
watched(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedShow[]>;
collected(
params?: PeriodQuery & PaginationQuery & Extended
): Promise<PlayedShow[]>;
anticipated(
params?: PaginationQuery & Extended
): Promise<AnticipatedShow[]>;
updates(
params?: { start_date?: string } & PaginationQuery & Extended
): Promise<UpdatedShow[]>;
summary(params: Id & Extended): Promise<Show>;
aliases(params: Id): Promise<Alias[]>;
translations(params: Id & { language?: string }): Promise<Translation[]>;
comments(
params: Id & CommentSortQuery & PaginationQuery
): Promise<Comment[]>;
lists(params: Id & TypeQuery & ListSortQuery): Promise<TraktList[]>;
progress: {
collection: any;
watched: any;
};
people(params: Id & Extended): Promise<Credits>;
ratings(params: Id): Promise<Ratings>;
related(params: Id & Extended & PaginationQuery): Promise<Show[]>;
stats(params: Id): Promise<Stats>;
watching(params: Id & Extended): Promise<User[]>;
next_episode(params: Id & Extended): Promise<Show>;
last_episode(params: Id & Extended): Promise<Show>;
};
seasons: {
summary(params: Id & Extended): Promise<Season[]>;
season(
params?: Id & SeasonQuery & Extended & { translations?: string }
): Promise<Episode[]>;
comments(
params: Id & SeasonQuery & CommentSortQuery & PaginationQuery
): Promise<Comment[]>;
lists(
params: Id & SeasonQuery & TypeQuery & ListSortQuery & PaginationQuery
): Promise<TraktList[]>;
people(params: Id & SeasonQuery & Extended): Promise<Credits>;
ratings(params: Id & SeasonQuery): Promise<Ratings>;
watching(params: Id & SeasonQuery & Extended): Promise<User[]>;
};
episodes: {
summary(
params: Id & SeasonQuery & EpisodeQuery & Extended
): Promise<Episode>;
translations(
params: Id & SeasonQuery & EpisodeQuery & { language?: string }
): Promise<Translation[]>;
comments(
params: Id &
SeasonQuery &
EpisodeQuery &
CommentSortQuery &
PaginationQuery
): Promise<Comment[]>;
lists(
params: Id &
SeasonQuery &
EpisodeQuery &
TypeQuery &
ListSortQuery &
PaginationQuery
): Promise<TraktList[]>;
people(
params: Id & SeasonQuery & EpisodeQuery & { extended?: "guest_stars" }
): Promise<Credits>;
ratings(params: Id & SeasonQuery & EpisodeQuery): Promise<Ratings>;
stats(params: Id & SeasonQuery & EpisodeQuery): Promise<Stats>;
watching(
params: Id & SeasonQuery & EpisodeQuery & Extended
): Promise<User[]>;
};
sync: any;
users: any;
get_url(): string;
exchange_code(code: string, state?: string): Promise<AccessToken>;
get_codes(): Promise<DeviceCode>;
poll_access(poll: DeviceCode): Promise<any>;
refresh_token(): Promise<AccessToken>;
import_token(token: AccessToken): Promise<AccessToken>;
export_token(): {
access_token: string;
expires: number;
refresh_token: string;
};
}
|
// src/components/GraduateCheck/index.ts
// Barrel file: re-export the GraduateCheck component as this folder's default.
export { default } from "./GraduateCheck";
|
#!/bin/bash
# ******************************************************************************
# IBM Cloud Kubernetes Service, 5737-D43
# (C) Copyright IBM Corp. 2017, 2022 All Rights Reserved.
#
# SPDX-License-Identifier: Apache2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
set -e
SCRIPT_DIR=$(dirname "${BASH_SOURCE[0]}")
# import shared common bash functions
# shellcheck source=scripts/common_functions.sh
. "${SCRIPT_DIR}/scripts/common_functions.sh"
# The current Kubernetes version is read from the TAG variable in the Makefile.
K8S_CURRENT_VERSION=$(grep "^TAG " Makefile | awk '{ print $3 }')
if [[ -z "${K8S_CURRENT_VERSION}" ]]; then
    echo "FAIL: Unable to determine current Kubernetes version in Makefile."
    exit 1
fi
# $1: Kubernetes update version (optional; resolved automatically on cron runs)
K8S_UPDATE_VERSION="${1}"
if [[ -z "${K8S_UPDATE_VERSION}" ]]; then
    if [[ "${TRAVIS_EVENT_TYPE}" == "cron" ]]; then
        # Trim alpha/beta tag off of current release
        K8S_SHORT_VERSION=${K8S_CURRENT_VERSION%-*}
        # Looking up update version manually for updater cron job.
        # -fsSL so HTTP errors fail quietly instead of yielding an HTML body.
        MAJOR_MINOR=${K8S_SHORT_VERSION%.*}
        K8S_UPDATE_VERSION=$(curl -fsSL https://api.github.com/repos/kubernetes/kubernetes/releases | jq -r .[].name | grep "$MAJOR_MINOR" | head -1 | sed 's/^Kubernetes //g')
        # Validate the lookup result BEFORE using it in further commands.
        # (Previously 'go mod download' ran with a possibly-empty version.)
        if [[ -z "${K8S_UPDATE_VERSION}" ]]; then
            echo "FAIL: Failed to retrieve latest kubernetes version."
            exit 1
        fi
        if [[ "${K8S_UPDATE_VERSION}" == "${K8S_CURRENT_VERSION}" ]]; then
            echo "INFO: No new version available, exiting gracefully"
            exit 0
        fi
        # Ensure the go modules have also been updated. i.e. k8s.io/api v0.20.3
        MOD_VERSION=$(go mod download -json "k8s.io/api@kubernetes-${K8S_UPDATE_VERSION#v}" | jq -r .Version)
        if [[ "${MOD_VERSION}" != "v0.${K8S_UPDATE_VERSION#*.}" ]]; then
            echo "INFO: New go modules are not yet available, exiting gracefully"
            exit 0
        fi
    else
        echo "FAIL: Kubernetes update version not set."
        exit 1
    fi
fi
# Get the IBM current and update versions based on the Kubernetes versions.
IBM_CURRENT_VERSION=$(echo "${K8S_CURRENT_VERSION}" | cut -c 2- | awk -F. '{ print "release-"$1"."$2 }')
IBM_UPDATE_VERSION=$(echo "${K8S_UPDATE_VERSION}" | cut -c 2- | awk -F. '{ print "release-"$1"."$2 }')
echo "INFO: Starting Kubernetes update from version ${K8S_CURRENT_VERSION} to ${K8S_UPDATE_VERSION} ..."
make clean
# Clone (or refresh) an upstream kubernetes checkout used to read build metadata.
# All ${K8S_DIRECTORY} expansions are quoted (shellcheck SC2086).
K8S_DIRECTORY="/tmp/kubernetes"
if [[ ! -e "${K8S_DIRECTORY}" ]]; then
    git clone --depth=1 --no-single-branch https://github.com/kubernetes/kubernetes.git "${K8S_DIRECTORY}"
fi
git -C "${K8S_DIRECTORY}" checkout master && git -C "${K8S_DIRECTORY}" remote update && git -C "${K8S_DIRECTORY}" pull --ff-only origin master
# Determine the current and update golang version.
git -C "${K8S_DIRECTORY}" checkout "${K8S_CURRENT_VERSION}"
K8S_GOLANG_CURRENT_VERSION=$(grep -A 1 "name: \"golang: upstream version" "${K8S_DIRECTORY}/build/dependencies.yaml" | grep "version:" | awk '{ print $2 }')
git -C "${K8S_DIRECTORY}" checkout "${K8S_UPDATE_VERSION}"
K8S_GOLANG_UPDATE_VERSION=$(grep -A 1 "name: \"golang: upstream version" "${K8S_DIRECTORY}/build/dependencies.yaml" | grep "version:" | awk '{ print $2 }')
# Update files based on Kubernetes and IBM release versions.
# Build the candidate file list once, excluding VCS data, this script
# itself and the go module files (those are managed by 'go mod').
ALL_FILES=$(find . \( -path ./.git -o -path ./kube-update.sh -o -path './go.*' \) -prune -o \( -type f -print \))
# shellcheck disable=SC2086
FILES_TO_UPDATE_FOR_K8S_VERSION=$(grep -l -F "${K8S_CURRENT_VERSION}" $ALL_FILES)
# shellcheck disable=SC2086
FILES_TO_UPDATE_FOR_IBM_VERSION=$(grep -l -F "${IBM_CURRENT_VERSION}" $ALL_FILES)
# Rewrite every occurrence of the old Kubernetes version and stage the result.
for FILE_TO_UPDATE_FOR_K8S_VERSION in $FILES_TO_UPDATE_FOR_K8S_VERSION; do
    sed -i -e "s/${K8S_CURRENT_VERSION}/${K8S_UPDATE_VERSION}/g" "${FILE_TO_UPDATE_FOR_K8S_VERSION}"
    git add "${FILE_TO_UPDATE_FOR_K8S_VERSION}"
    echo "INFO: Updated Kubernetes version in ${FILE_TO_UPDATE_FOR_K8S_VERSION}"
done
# Same rewrite for IBM release-branch references (release-X.Y).
for FILE_TO_UPDATE_FOR_IBM_VERSION in $FILES_TO_UPDATE_FOR_IBM_VERSION; do
    sed -i -e "s/${IBM_CURRENT_VERSION}/${IBM_UPDATE_VERSION}/g" "${FILE_TO_UPDATE_FOR_IBM_VERSION}"
    git add "${FILE_TO_UPDATE_FOR_IBM_VERSION}"
    echo "INFO: Updated IBM version in ${FILE_TO_UPDATE_FOR_IBM_VERSION}"
done
# Bump the golang toolchain references only when upstream changed it.
if [[ "${K8S_GOLANG_CURRENT_VERSION}" != "${K8S_GOLANG_UPDATE_VERSION}" ]]; then
    FILES_TO_UPDATE=".travis.yml vagrant-kube-build/Vagrantfile"
    for FILE_TO_UPDATE in $FILES_TO_UPDATE; do
        if [[ "${FILE_TO_UPDATE}" == ".travis.yml" ]]; then
            # .travis.yml uses anchored patterns so only the go-version
            # entries are touched, not arbitrary matching strings.
            sed -i -e "s/^ - ${K8S_GOLANG_CURRENT_VERSION}/ - ${K8S_GOLANG_UPDATE_VERSION}/g" "${FILE_TO_UPDATE}"
            sed -i -e "s/go:\s\+${K8S_GOLANG_CURRENT_VERSION}/go: ${K8S_GOLANG_UPDATE_VERSION}/g" "${FILE_TO_UPDATE}"
        else
            sed -i -e "s/${K8S_GOLANG_CURRENT_VERSION}/${K8S_GOLANG_UPDATE_VERSION}/g" "$FILE_TO_UPDATE"
        fi
        git add "$FILE_TO_UPDATE"
        echo "INFO: Updated golang version in $FILE_TO_UPDATE"
    done
fi
# Create a feature branch containing the staged version bumps.
COMMIT_MESSAGE="Update from ${K8S_CURRENT_VERSION} to ${K8S_UPDATE_VERSION}"
git checkout -b "${K8S_UPDATE_VERSION}-initial"
git commit --no-verify -m "${COMMIT_MESSAGE}"
if [[ $TRAVIS_EVENT_TYPE == "cron" ]]; then
    # Cron runs open a pull request automatically via hub.
    make hub-install
    export GITHUB_TOKEN=${GHE_TOKEN}
    hub pull-request -b "${TRAVIS_BRANCH}" -m "${COMMIT_MESSAGE}" --push
else
    # Otherwise push up branch for manual runs
    git push origin "${K8S_UPDATE_VERSION}-initial"
fi
echo "SUCCESS: Completed Kubernetes update from version ${K8S_CURRENT_VERSION} to ${K8S_UPDATE_VERSION}."
exit 0
|
<reponame>mia-platform/lc39
import { FastifyInstance } from 'fastify'
import { expectType } from 'tsd'
import lc39 from '../'
// Type-level test (checked by tsd, not executed): launching lc39 with a
// valid module path must yield a FastifyInstance.
const server = lc39('../tests/modules/correct-module.js')
expectType<FastifyInstance>(server)
|
#Function to add two numbers
def add(val1, val2):
    """Return the sum of the two operands."""
    total = val1 + val2
    return total
#Function to subtract two numbers
def subtract(val1, val2):
    """Return val1 minus val2."""
    difference = val1 - val2
    return difference
#Function to multiply two numbers
def multiply(val1, val2):
    """Return the product of the two operands."""
    product = val1 * val2
    return product
#Function to divide two numbers
def divide(val1, val2):
    """Return val1 divided by val2 (raises ZeroDivisionError when val2 == 0)."""
    quotient = val1 / val2
    return quotient
# Interactive menu loop. Robustness fixes versus the original:
#  - non-numeric menu/operand input no longer crashes (ValueError handled)
#  - dividing by zero no longer crashes the whole program
while True:
    print("Please provide the operator and operands")
    print("1. Add")
    print("2. Subtract")
    print("3. Multiply")
    print("4. Divide")
    print("5. Exit")
    # Take input from the user; re-prompt instead of crashing on bad text.
    try:
        choice = int(input("Enter your choice of operation: "))
    except ValueError:
        print("Please enter a valid option!")
        continue
    # Check if choice is one of the four arithmetic options
    if choice in (1, 2, 3, 4):
        try:
            num1 = float(input("Enter the first number: "))
            num2 = float(input("Enter the second number: "))
        except ValueError:
            print("Please enter a valid number!")
            continue
        if choice == 1:
            print(num1, "+", num2, "=",
                  add(num1, num2))
        elif choice == 2:
            print(num1, "-", num2, "=",
                  subtract(num1, num2))
        elif choice == 3:
            print(num1, "*", num2, "=",
                  multiply(num1, num2))
        elif choice == 4:
            # Guard against ZeroDivisionError so the loop keeps running.
            if num2 == 0:
                print("Cannot divide by zero!")
            else:
                print(num1, "/", num2, "=",
                      divide(num1, num2))
    elif choice == 5:
        print("Exiting Program!")
        break
    else:
        print("Please enter a valid option!")
<reponame>tylerchen/foss-qdp-project-v4<gh_stars>0
/*******************************************************************************
* Copyright (c) 2017-11-09 @author <a href="mailto:<EMAIL>"><NAME></a>.
* All rights reserved.
*
* Contributors:
* <a href="mailto:<EMAIL>"><NAME></a> - initial API and implementation.
* Auto Generate By foreveross.com Quick Deliver Platform.
******************************************************************************/
package com.foreveross.qdp.application.system.auth;
import com.foreveross.qdp.infra.vo.system.auth.AuthMenuVO;
import org.iff.infra.util.mybatis.plugin.Page;
/**
* AuthMenu Application.
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version 1.0.0
* auto generate by qdp v3.0.
* @since 2017-11-09
*/
public interface AuthMenuApplication {
    /**
     * Load a single AuthMenu using the given VO as an example/filter.
     *
     * @param vo filter values used to locate the menu
     * @return the matching AuthMenuVO (null contract not shown here — TODO confirm)
     */
    AuthMenuVO getAuthMenu(AuthMenuVO vo);
    /**
     * Load a single AuthMenu by its primary key.
     *
     * @param id primary key of the menu
     * @return the matching AuthMenuVO
     */
    AuthMenuVO getAuthMenuById(String id);
    /**
     * Page through AuthMenu records matching the given conditions.
     *
     * @param vo   filter conditions
     * @param page page settings (offset/size)
     * @return the populated Page of AuthMenuVO results
     */
    Page pageFindAuthMenu(AuthMenuVO vo, Page page);
    /**
     * Page through AuthMenu records, returning rows as Maps rather than VOs.
     *
     * @param vo   filter conditions
     * @param page page settings (offset/size)
     * @return the populated Page of Map results
     */
    Page pageFindAuthMenuMap(AuthMenuVO vo, Page page);
    /**
     * Persist a new AuthMenu.
     *
     * @param vo the menu to create
     * @return the created AuthMenuVO
     */
    AuthMenuVO addAuthMenu(AuthMenuVO vo);
    /**
     * Update an existing AuthMenu.
     *
     * @param vo the menu values to apply
     * @return the updated AuthMenuVO
     */
    AuthMenuVO updateAuthMenu(AuthMenuVO vo);
    /**
     * Remove AuthMenu records matching the given conditions.
     *
     * @param vo filter conditions
     */
    void removeAuthMenu(AuthMenuVO vo);
    /**
     * Remove a single AuthMenu by primary key.
     *
     * @param id primary key of the menu to remove
     */
    void removeAuthMenuById(String id);
    /**
     * Remove multiple AuthMenu records by primary key.
     *
     * @param ids primary keys of the menus to remove
     */
    void removeAuthMenuByIds(String[] ids);
    /**
     * Load a single AuthMenu by its unique name.
     *
     * @param name unique menu name
     * @return the matching AuthMenuVO
     */
    AuthMenuVO getByName(String name);
}
|
#!/bin/bash
# Build sqlcipher with the Qt driver patches applied, then build the
# qsqlcipher plugin and its test application.
#   $1 - path to the qsqlcipher sources (patches + install prefix)
#   $2 - path to the sqlcipher sources
set -e

QSQLPATH="$1"
SQLCPATH="$2"

# Fail early with usage rather than patching/configuring in the wrong place.
if [ -z "$QSQLPATH" ] || [ -z "$SQLCPATH" ]; then
    echo "Usage: $0 <qsqlcipher-path> <sqlcipher-path>" >&2
    exit 1
fi

echo "sqlcipher is in $SQLCPATH"
echo "qsqlcipher is in $QSQLPATH"
echo "If either of these is not correct, press [ctrl]+[C] now and try again."
echo "Otherwise, press [enter] to continue building sqlite3-cipher."
read dummy

cd "$SQLCPATH"
patch -p 0 < "$QSQLPATH/Makefile.in.patch"
patch -p 0 < "$QSQLPATH/sqlite3.pc.in.patch"
./configure --enable-tempstore=yes --disable-tcl CFLAGS="-DSQLITE_HAS_CODEC -DSQLITE_TEMP_STORE=2" LDFLAGS="-lcrypto" --prefix="$QSQLPATH/driver"
make
make install
echo "sqlite3-cipher is built and installed."

echo "Press [enter] to continue building qsqlite-cipher."
read dummy
cd "$QSQLPATH"
qmake
make
make install
echo "qsqlite-cipher is built."

echo "Press [enter] to continue building the test application."
read dummy
cd "$QSQLPATH/test"
qmake
make
echo "The test program has been built and is ready to run."
echo "All done."
|
# %matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
import scipy.fftpack
import comms_utils
# Symbol timing derived from the channel bandwidth (tb = 1/B, ts = 2*tb).
bandwidth = 5
tb = 1/bandwidth
ts = tb*2
# Band-limited sinc pulse, truncated to 20 pulse widths.
# NOTE(review): variable is named `rect` but a Sinc pulse is constructed — confirm intent.
rect = comms_utils.pulse.Sinc(ts)
rect.set_max_pulses(20)
# Random multilevel (4-level) symbol sequence of 10 symbols.
message_length = 10
oversampling_factor = 8
ak = comms_utils.ak.AK(n=message_length, levels=4)
comb = comms_utils.comb.Comb(ak, ts*4, oversampling_factor)
# Pulse-shaped transmit signal: symbol comb convolved with the pulse.
signal = comb.convolve(rect)
# signal.add_noise(1)
# signal.plot()
sig_data, sig_time = signal.get_data()
# Number of samplepoints
N = 600
# sample spacing
T = 1.0 / 800.0
x = sig_time
y = sig_data
# One-sided magnitude spectrum of the first N samples.
# NOTE(review): N and T are hard-coded and may not match the actual
# sample count / rate returned by get_data() — confirm against comms_utils.
yf = scipy.fftpack.fft(y)
xf = np.linspace(0.0, 1.0/(2.0*T), 300)
fig, ax = plt.subplots()
ax.plot(xf, 2.0/N * np.abs(yf[:N//2]))
plt.show()
#!/bin/bash
# Build the Docker Swarm visualizer image for ARM and run it as a service
# on a manager node. Cleans up the cloned sources afterwards.
# set -e so a failed clone/cd/build does not cascade into later steps
# running in the wrong directory.
set -e

git clone https://github.com/dockersamples/docker-swarm-visualizer
cd docker-swarm-visualizer
docker build -f Dockerfile.arm -t visualizer-arm:latest .

# The docker.sock bind mount lets the visualizer inspect the swarm.
docker service create \
    --name=viz \
    --publish=8080:8080/tcp \
    --constraint=node.role==manager \
    --mount=type=bind,src=/var/run/docker.sock,dst=/var/run/docker.sock \
    visualizer-arm:latest

# Remove the build checkout; the image keeps everything we need.
cd ..
rm -rf docker-swarm-visualizer
|
<filename>controltool/src/main/java/com/mh/controltool2/serialize/json/FastjsonDataObjectSerialize.java
package com.mh.controltool2.serialize.json;
import com.alibaba.fastjson.JSON;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.mh.controltool2.exceptions.serialize.JsonHandlerException;
import java.lang.reflect.Type;
/**
 * DataObjectSerialize implementation backed by Alibaba Fastjson.
 * (The Jackson imports at the top of this file are unused by this class.)
 */
public class FastjsonDataObjectSerialize implements DataObjectSerialize {
    /** Serialize any object to its JSON string form. */
    @Override
    public String toJson(Object obj) throws JsonHandlerException {
        return JSON.toJSONString(obj);
    }
    /** Parse JSON text into an instance of the given reflective type. */
    @Override
    public <T> T toObject(String data, Type typeOfT) throws JsonHandlerException {
        return JSON.parseObject(data,typeOfT);
    }
    /** Fastjson exposes no underlying serializer object; always returns null. */
    @Override
    public Object getJsonObjectSerialize() {
        return null;
    }
    /** Identifies this implementation as the FastJson strategy. */
    @Override
    public JsonObjTargetEnum getJsonObjectTargetEnum() {
        return JsonObjTargetEnum.FastJson;
    }
}
|
import * as DataDomain from '@redux/Stocks/Types/DataDomain';
/**
 * Data structure for a Stock quote used in the Stocks reducer.
 */
export interface QuoteData {
  /** True while the quote request is in flight. */
  fetching: boolean;
  /** Latest quote payload. */
  data: DataDomain.Quote;
  /** Set when the last fetch failed. */
  error?: Error;
}
/**
 * Data structure for Stock chart points used in the Stocks reducer.
 */
export interface ChartData {
  /** True while the chart request is in flight. */
  fetching: boolean;
  /** Time-ordered chart points. */
  data: DataDomain.Chart[];
  /** Set when the last fetch failed. */
  error?: Error;
}
// export interface StockSearchResultData {
//   fetching: boolean;
//   data?: DataDomain.StockSearchBase[];
//   error?: Error;
// }
/**
 * State of the stock-search box.
 */
export interface StockSearch {
  /** Current search input value. */
  keyword: string;
  // results?: Reducer.StockSearchResultData;
}
/**
 * A data structure wrapper for one Stock (quote + chart).
 */
export interface StockData {
  quote: QuoteData;
  chart: ChartData;
  // metadata?: DataDomain.StockSearchBase;
}
/**
 * Fetch state for the full symbols metadata list.
 */
export interface SymbolsData {
  fetching: boolean;
  data: DataDomain.Symbols[];
  error?: Error;
}
/**
 * Stocks reducer state.
 */
export interface ReducerState {
  /** Per-symbol quote/chart data, keyed by ticker symbol. */
  symbols: {
    [symbol: string]: StockData;
  };
  search: StockSearch;
  symbolsMetadata: SymbolsData;
}
/**
 * A union of Stocks Reducer types.
 */
export type StocksReducerTypes = QuoteData | ChartData | StockData | ReducerState;
|
require 'rails_helper'
RSpec.describe IpAddressMatcher do
  it 'matches a single IP address' do
    matcher = described_class.new('12.34.56.78')
    expect(matcher).to include('12.34.56.78')
    expect(matcher).not_to include('11.22.33.44')
  end
  it 'matches several IP addresses separated by commas' do
    matcher = described_class.new('12.34.56.78,127.0.0.1,8.8.8.8')
    expect(matcher).to include('12.34.56.78')
    expect(matcher).to include('127.0.0.1')
    expect(matcher).to include('8.8.8.8')
    expect(matcher).not_to include('11.22.33.44')
  end
  it 'matches several IP addresses separated by semicolons' do
    matcher = described_class.new('12.34.56.78;8.8.8.8')
    expect(matcher).to include('12.34.56.78')
    expect(matcher).to include('8.8.8.8')
    expect(matcher).not_to include('11.22.33.44')
  end
  it 'matches CIDR ranges' do
    matcher = described_class.new('12.34.0.0/16,127.0.0.0/24')
    expect(matcher).to include('12.34.56.78')
    expect(matcher).to include('127.0.0.1')
    expect(matcher).not_to include('12.33.44.44')
  end
  it 'matches a single IPv6 address' do
    matcher = described_class.new('fdf8:f53e:61e4::18')
    expect(matcher).to include('fdf8:f53e:61e4::18')
    # Fixed: the negative expectation previously used the very address the
    # matcher was built with, contradicting the positive expectation above.
    expect(matcher).not_to include('fdf8:f53e:61e4::19')
  end
  it 'matches both IPv4 and IPv6 addresses' do
    matcher = described_class.new('12.34.56.78,fdf8:f53e:61e4::18')
    expect(matcher).to include('fdf8:f53e:61e4::18')
    expect(matcher).to include('12.34.56.78')
  end
  it 'can match the IPv6 loopback' do
    matcher = described_class.new('::1')
    expect(matcher).to include('::1')
    expect(matcher).not_to include('fdf8:f53e:61e4::18')
  end
end
|
#! /bin/sh
# Link SickRage's mutable state (config, databases, cache) into /config so
# it survives container recreation. Safe to run on every container start.

# Database schema versions SickRage may create during migrations.
DB_VERSIONS="32 33 34 35 36 37 38 39 40 41 42 43 44"

mkdir -p /config
mkdir -p /config/cache
cd /sickrage || exit 1

if [ -f /config/config.ini ]
then
    # Existing install: remove stray copies inside /sickrage and stale
    # migration databases in /config so the symlinks below win.
    rm -rf /sickrage/config.ini
    rm -rf /sickrage/sickbeard.db
    for v in $DB_VERSIONS
    do
        rm -rf "/config/sickbeard.db.v$v"
    done
else
    # First run: seed empty files in /config so the symlinks resolve.
    touch /config/config.ini
    touch /config/sickbeard.db
    for v in $DB_VERSIONS
    do
        touch "/config/sickbeard.db.v$v"
    done
    cp -fr /sickrage/cache/* /config/cache/
    # mv -f /sickbeard/autoprocesstv/autoProcessTV.cfg /config/autoProcessTV.cfg
fi

ln -sf /config/config.ini /sickrage/config.ini
ln -sf /config/sickbeard.db* /sickrage/
ln -sf /config/cache /sickrage/cache
#ln -sf /config/autoProcessTV.cfg sickbeard/autoprocesstv/autoProcessTV.cfg
# NOTE(review): source path is /sickbeard while the app lives in /sickrage —
# confirm this directory actually exists in the image.
cp -ra /sickbeard/autoprocesstv /scripts
/usr/bin/python SickBeard.py
|
<!-- Static demo component: renders a hard-coded list of people as a table. -->
<template>
  <table>
    <thead>
      <tr>
        <th>Name</th><th>Age</th>
      </tr>
    </thead>
    <tbody>
      <!-- One row per person; the name doubles as the v-for key. -->
      <tr v-for="person in persons" :key="person.name">
        <td>{{ person.name }}</td>
        <td>{{ person.age }}</td>
      </tr>
    </tbody>
  </table>
</template>
<script>
export default {
  // Local component state: the rows displayed in the table.
  data: () => ({
    persons: [
      {name: 'John', age: 25},
      {name: 'Jane', age: 22},
      {name: 'Bob', age: 32},
    ],
  })
}
</script>
<filename>src/ui/Clock.js<gh_stars>1-10
import Nanocomponent from 'nanocomponent'
import html from 'nanohtml'
// Toolbar clock with an optional elapsed-time stopwatch.
// Shows the current local time and, once started, the minutes/seconds
// elapsed since `startTime`. Re-renders once a second while mounted.
class Clock extends Nanocomponent {
  constructor() {
    super()
    this._interval = null // timer id from setInterval, cleared on unload
    this.startTime = null // epoch ms when the stopwatch started, or null
    this.handleReset = this.handleReset.bind(this)
    this.handleStart = this.handleStart.bind(this)
  }
  // Build the DOM: [start|reset button] [optional elapsed time] [local time].
  createElement() {
    const now = new Date()
    let timer
    if (this.startTime) {
      const duration = Math.floor((now - this.startTime) / 1000)
      const minutes = Math.floor(duration / 60)
      const seconds = duration - minutes * 60
      timer = html`
        <span style="margin-left: 16px">
          ${minutes}m ${String(seconds).padStart(2, '0')}s
        </span>
      `
    }
    // Button toggles between start and reset depending on stopwatch state.
    const timerButton = this.startTime === null
      ? html`<button onclick=${this.handleStart}>start</button>`
      : html`<button onclick=${this.handleReset}>reset</button>`
    return html`
      <div style="display: flex">
        ${timerButton}
        ${timer}
        <span style="margin-left: 16px">
          ${now.toLocaleTimeString(undefined, {timeStyle: 'short'})}
        </span>
      </div>
    `
  }
  // Always re-render when nanocomponent asks; output depends on wall time.
  update() {
    return true
  }
  handleReset() {
    this.startTime = null
    this.rerender()
  }
  handleStart() {
    this.startTime = Date.now()
    this.rerender()
  }
  // Fired once a second while mounted (see load) to refresh the display.
  handleTick() {
    this.rerender()
  }
  load() {
    this._interval = setInterval(() => this.handleTick(), 1000)
  }
  unload() {
    clearInterval(this._interval)
  }
}
export default Clock
|
#!/bin/sh
# Build the kernel from $1, boot it under QEMU in the background and
# drive GDB with the script in $2. All expansions are quoted so paths
# with spaces do not word-split (shellcheck SC2046/SC2086).
TOOLSDIR=$(dirname "$(realpath "$0")")
SOURCEFILE=$(realpath "$1")
SCRIPTFILE=$(realpath "$2")

# Best-effort teardown of any stray emulator instances.
kill_qemu() { killall -9 qemu-system-arm > /dev/null 2>&1 ;}

cd "$TOOLSDIR"
cd .. ; make -B KMAIN="$SOURCEFILE" || exit 1
cd "$TOOLSDIR"
kill_qemu
./run-qemu.sh > /dev/null 2>&1 &
./run-gdb.sh "$SCRIPTFILE"
kill_qemu
|
import validator from 'is-my-json-valid';
import schema from './User.schema.json';
// Compiled JSON-schema validator for the User shape (see User.schema.json).
const __validate = validator(schema);
/**
 * Domain model for a user profile, constructed from schema-validated JSON.
 */
export default class User {
  constructor(params) {
    this.id = params.id;
    this.key = params.key;
    this.email = params.email;
    this.name = params.name;
    this.imageUrl = params.imageUrl;
    this.bio = params.bio;
  }
  // NOTE(review): Object(this) returns `this` itself, not a plain copy —
  // JSON.stringify works, but mutating the result mutates the instance.
  toJSON() {
    return Object(this);
  }
  /**
   * Resolve with `params` when they satisfy the schema; reject with
   * TypeError('USER_JSON_SHAPE_INVALID') otherwise.
   */
  static isValid(params) {
    return new Promise((resolve, reject) => {
      if (!__validate(params)) {
        return reject(new TypeError('USER_JSON_SHAPE_INVALID'));
      }
      return resolve(params);
    });
  }
  /**
   * Validate raw JSON and build a User from it; rejects on invalid shape.
   */
  static fromJSON(params) {
    return User.isValid(params)
      .then(params => {
        const user = new User(params);
        return Promise.resolve(user);
      });
  }
}
|
class FileCompressor {
    /** @var string|null Name of the active compression algorithm, or null when unset. */
    private $compression;

    public function __construct() {
        $this->compression = null;
    }

    /**
     * Select the compression algorithm to use.
     *
     * @param string $compression
     */
    public function setCompression(string $compression): void {
        $this->compression = $compression;
    }

    /**
     * Report the configured compression algorithm, if any was set.
     *
     * @return string|null
     */
    public function getCompression(): ?string {
        return $this->compression;
    }
}
<reponame>maraboinavamshi/courses
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
''' Read: http://pandas.pydata.org/pandas-docs/stable/api.html#api-dataframe-stats '''
def symbol_to_path(symbol, base_dir = 'data'):
    """Build the CSV file path for a ticker symbol under base_dir."""
    filename = '{}.csv'.format(str(symbol))
    return os.path.join(base_dir, filename)
def dates_creator():
    """Return the fixed 2012-2013 analysis window as a pandas DatetimeIndex."""
    window = ('2012-01-01', '2013-12-31')
    return pd.date_range(*window)
def get_data(symbols, dates):
    """Load 'Adj Close' prices for symbols into one DataFrame indexed by dates.

    SPY is always included as the market reference and rows where SPY has no
    data (non-trading days) are dropped. Unlike the original implementation,
    the caller's ``symbols`` list is no longer mutated in place.
    """
    df = pd.DataFrame(index = dates)
    # Work on a copy so the caller's list is left untouched.
    all_symbols = list(symbols)
    if 'SPY' not in all_symbols: # adding SPY as the main reference
        all_symbols.insert(0, 'SPY')
    for symbol in all_symbols:
        df_temp = pd.read_csv(symbol_to_path(symbol),
                              index_col = 'Date',
                              parse_dates = True,
                              usecols = ['Date', 'Adj Close'],
                              na_values = ['nan'])
        df_temp = df_temp.rename(columns = {'Adj Close': symbol})
        df = df.join(df_temp)
        if symbol == 'SPY':
            # SPY defines the trading calendar; drop non-trading days.
            df = df.dropna(subset = ['SPY'])
    return df
def normalize_data(df):
    """Scale each column so its first row equals 1.0.

    Uses .iloc because DataFrame.ix was deprecated in pandas 0.20 and
    removed in pandas 1.0, so the original ``df.ix[0,:]`` now raises.
    """
    return df / df.iloc[0, :]
def plot(df, title):
    """Plot every DataFrame column on one axis; blocks until the window closes."""
    ax = df.plot(title = title, fontsize = 12)
    ax.set_xlabel('Date')
    ax.set_ylabel(title)
    plt.show()
def get_daily_returns(df):
daily_returns = df.copy()
# Calculating daily returns
daily_returns[1:] = (df / df.shift(1)) - 1
# Setting daily returns for row 0 to 0.
daily_returns.ix[0, :] = 0
return daily_returns
def annualised_sharpe(returns, N=252):
    """
    Annualise the Sharpe ratio of a returns stream.

    N is the number of trading periods per year (default 252, i.e. a
    stream of daily returns). The returns are assumed to already be
    excess returns relative to a benchmark.
    """
    scale = np.sqrt(N)
    return scale * returns.mean() / returns.std()
if __name__ == "__main__":
'''
*********************************************************************************************
Sharpe ratio = (Rp - Rf) / Stdev
in other words:
SR = mean(daily-returns - daily-risk-free-rate) / std((daily-returns - daily-risk-free-rate))
There are few ways to get value of the daily risk free rate:
- LIBOR
- 3 months treasury bill
- 0 %
We can calculate it in more traditional (easy and fast) way:
daily_risk_free_rate = [(1 + BANK_RETURN)^(-252)] - 1
Sharpe ratio can widely depend on the sampling window.
Originally it was meant to be calculated annually.
With that, if we calculate it daily, weekly or monthly we need to adjust it.
SR_annualized = K * SR
where:
K = sqrt(sample_per_year)
- K for daily calcuations will be equal to sqrt(252)
- K for weekly calcuations will be equal to sqrt(52)
- K for weekly calcuations will be equal to sqrt(12).
A Sharp Ratio>1 is considered good, >2 or 3 is what hedge funds look for.
Anything too high (>3 or 4) is suspicious.
*********************************************************************************************
'''
symbols = ['SPY', 'AAPL', 'GOOG', 'IBM', 'TSLA']
dates = dates_creator()
df = get_data(symbols, dates)
daily_returns = get_daily_returns(df)
# Sharpe ratio
annualized_sharpe_ratio = annualised_sharpe(daily_returns, N=252)
print('Annualized Sharpe ratio:')
print(annualized_sharpe_ratio) |
#!/bin/bash
# Open an SSH session on the project bastion host, after loading the
# bastion private key from Vault into a local ssh-agent.
# Environment variables required by the AWS CLI
export AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-us-west-2}
# NOTE(review): CHANGEME placeholders must be overridden via the
# environment; never commit real credentials into this file.
export AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-CHANGEME}
export AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-CHANGEME}
# Environment variables required to identify the correct EC2 instances
PROJECT=${PROJECT:-Weapon-X}
ENVIRONMENT=${ENVIRONMENT:-development}
BASTION_TAG=${BASTION_TAG:-Bastion}
PRIVATE_TAG=${PRIVATE_TAG:-Docker}
# Environment variables needed to contact Hashicorp's Vault
VAULT_ADDR=${VAULT_ADDR:-http://192.168.254.90:8200}
ROLE_ID=${ROLE_ID:-CHANGEME}
SECRET_ID=${SECRET_ID:-CHANGEME}
VAULT_PATH=${VAULT_PATH:-CHANGEME}
function determineBastionAddress() {
    # Look up the public IP of the running bastion instance for this
    # project/environment and store it in the global BASTION variable.
    local STATE_FILTER=Name=instance-state-name,Values=running
    local PROJECT_FILTER=Name=tag:Project,Values=${PROJECT}
    local ENVIRONMENT_FILTER=Name=tag:Environment,Values=${ENVIRONMENT}
    local DUTY_FILTER=Name=tag:Duty,Values=${BASTION_TAG}
    # Build the command as an array so arguments survive intact instead of
    # relying on unquoted word-splitting of a flat string.
    local CMD=(aws ec2 describe-instances
        --filters "${STATE_FILTER}" "${PROJECT_FILTER}" "${ENVIRONMENT_FILTER}" "${DUTY_FILTER}"
        --query 'Reservations[0].Instances[*].[PublicIpAddress]'
        --output text)
    echo "${CMD[@]}"
    BASTION=$("${CMD[@]}")
    # Guard against a missing instance; 'None' is what the CLI prints for null.
    if [[ -z "${BASTION}" || "${BASTION}" == "None" ]]; then
        echo "ERROR: could not determine bastion address" >&2
        return 1
    fi
    echo "Bastion IP address is ${BASTION}"
}
function sshIntoBastion() {
    # Interactive SSH session on the bastion; -A forwards the local
    # ssh-agent so the key loaded by addSshKey is usable for onward hops.
    local CMD=(ssh -A "ec2-user@${BASTION}")
    echo "${CMD[@]}"
    "${CMD[@]}"
}
function addSshKey() {
    # Authenticate to Vault via AppRole, fetch the bastion private key,
    # and load it into a fresh ssh-agent for agent forwarding.
    export VAULT_TOKEN=$(vault write -field=token auth/approle/login role_id=${ROLE_ID} secret_id=${SECRET_ID})
    # NOTE(review): predictable path in /tmp; consider mktemp.
    local KEY_FILE=/tmp/private-key
    rm -f ${KEY_FILE}
    vault read -field=value ${VAULT_PATH} > ${KEY_FILE}
    chmod 0400 ${KEY_FILE}
    eval "$(ssh-agent)"
    ssh-add ${KEY_FILE}
    ssh-add -L
}
# Resolve the bastion address, load the key, then connect.
determineBastionAddress
addSshKey
sshIntoBastion
|
# Handle incrementing the docker host port for instances unless a port range is defined.

# Append one "--publish <host>:<container_port>" fragment to the global
# DOCKER_PUBLISH string.
#   $1 - host port map (either "port" or "ip:port")
#   $2 - container port (80, 443, 8443, ...)
# When the map is a plain (optionally ip-prefixed) port AND DOCKER_NAME
# carries a numeric instance suffix (e.g. "name.2" or "name.2.1"), the host
# port is offset by instance-1 so each instance gets a unique port.
# Otherwise the map is passed through verbatim (e.g. port ranges).
__docker_publish_append() {
    local port_map="${1}"
    local container_port="${2}"
    local host_prefix
    local host_port
    local instance
    if grep -qE \
        '^([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}:)?[1-9][0-9]*$' \
        <<< "${port_map}" \
        && grep -qE \
        '^.+\.[0-9]+(\.[0-9]+)?$' \
        <<< "${DOCKER_NAME}"
    then
        # Optional "ip:" prefix of the map (empty when absent).
        host_prefix="$(grep -o '^[0-9\.]*:' <<< "${port_map}")"
        # First number of DOCKER_NAME's trailing ".N(.M)?" suffix.
        instance="$(grep -oE '([0-9]+)(\.[0-9]+)?$' <<< "${DOCKER_NAME}" | awk -F. '{ print $1; }')"
        host_port="$(( $(grep -oE '[0-9]+$' <<< "${port_map}") + instance - 1 ))"
        printf -v \
            DOCKER_PUBLISH \
            -- '%s --publish %s%s:%s' \
            "${DOCKER_PUBLISH}" \
            "${host_prefix}" \
            "${host_port}" \
            "${container_port}"
    else
        printf -v \
            DOCKER_PUBLISH \
            -- '%s --publish %s:%s' \
            "${DOCKER_PUBLISH}" \
            "${port_map}" \
            "${container_port}"
    fi
}

DOCKER_PUBLISH=
if [[ ${DOCKER_PORT_MAP_TCP_80} != NULL ]]
then
    __docker_publish_append "${DOCKER_PORT_MAP_TCP_80}" 80
fi
# Port 443 is only published when mod_ssl is enabled.
if [[ ${APACHE_MOD_SSL_ENABLED} == true ]] \
    && [[ ${DOCKER_PORT_MAP_TCP_443} != NULL ]]
then
    __docker_publish_append "${DOCKER_PORT_MAP_TCP_443}" 443
fi
if [[ ${DOCKER_PORT_MAP_TCP_8443} != NULL ]]
then
    __docker_publish_append "${DOCKER_PORT_MAP_TCP_8443}" 8443
fi
# Common parameters of create and run targets.
# Inner quotes are escaped because this string is later expanded inside
# another shell command line; DOCKER_PUBLISH is appended last.
DOCKER_CONTAINER_PARAMETERS="--name ${DOCKER_NAME} \
--restart ${DOCKER_RESTART_POLICY} \
--env \"APACHE_CONTENT_ROOT=${APACHE_CONTENT_ROOT}\" \
--env \"APACHE_CUSTOM_LOG_FORMAT=${APACHE_CUSTOM_LOG_FORMAT}\" \
--env \"APACHE_CUSTOM_LOG_LOCATION=${APACHE_CUSTOM_LOG_LOCATION}\" \
--env \"APACHE_ERROR_LOG_LOCATION=${APACHE_ERROR_LOG_LOCATION}\" \
--env \"APACHE_ERROR_LOG_LEVEL=${APACHE_ERROR_LOG_LEVEL}\" \
--env \"APACHE_EXTENDED_STATUS_ENABLED=${APACHE_EXTENDED_STATUS_ENABLED}\" \
--env \"APACHE_HEADER_X_SERVICE_UID=${APACHE_HEADER_X_SERVICE_UID}\" \
--env \"APACHE_LOAD_MODULES=${APACHE_LOAD_MODULES}\" \
--env \"APACHE_MOD_SSL_ENABLED=${APACHE_MOD_SSL_ENABLED}\" \
--env \"APACHE_MPM=${APACHE_MPM}\" \
--env \"APACHE_OPERATING_MODE=${APACHE_OPERATING_MODE}\" \
--env \"APACHE_PUBLIC_DIRECTORY=${APACHE_PUBLIC_DIRECTORY}\" \
--env \"APACHE_RUN_GROUP=${APACHE_RUN_GROUP}\" \
--env \"APACHE_RUN_USER=${APACHE_RUN_USER}\" \
--env \"APACHE_SERVER_ALIAS=${APACHE_SERVER_ALIAS}\" \
--env \"APACHE_SERVER_NAME=${APACHE_SERVER_NAME}\" \
--env \"APACHE_SSL_CERTIFICATE=${APACHE_SSL_CERTIFICATE}\" \
--env \"APACHE_SSL_CIPHER_SUITE=${APACHE_SSL_CIPHER_SUITE}\" \
--env \"APACHE_SSL_PROTOCOL=${APACHE_SSL_PROTOCOL}\" \
--env \"APACHE_SYSTEM_USER=${APACHE_SYSTEM_USER}\" \
--env \"ENABLE_HTTPD_BOOTSTRAP=${ENABLE_HTTPD_BOOTSTRAP}\" \
--env \"ENABLE_HTTPD_WRAPPER=${ENABLE_HTTPD_WRAPPER}\" \
--env \"ENABLE_PHP_FPM_WRAPPER=${ENABLE_PHP_FPM_WRAPPER}\" \
--env \"PHP_OPTIONS_DATE_TIMEZONE=${PHP_OPTIONS_DATE_TIMEZONE}\" \
--env \"PHP_OPTIONS_SESSION_NAME=${PHP_OPTIONS_SESSION_NAME}\" \
--env \"PHP_OPTIONS_SESSION_SAVE_HANDLER=${PHP_OPTIONS_SESSION_SAVE_HANDLER}\" \
--env \"PHP_OPTIONS_SESSION_SAVE_PATH=${PHP_OPTIONS_SESSION_SAVE_PATH}\" \
--env \"SYSTEM_TIMEZONE=${SYSTEM_TIMEZONE}\" \
${DOCKER_PUBLISH}"
|
public static void main(String[] args) {
int size = 10;
// Create an array
int[] array = new int[size];
// Generate numbers in range 0-99
Random random = new Random();
for (int i=0; i<size; i++) {
array[i] = random.nextInt(100);
}
// Find the biggest and smallest number
int min = array[0];
int max = array[0];
for (int i=1; i<array.length; i++) {
if (array[i] < min) {
min = array[i];
}
if (array[i] > max) {
max = array[i];
}
}
System.out.println("The smallest number in the array is: " + min);
System.out.println("The biggest number in the array is: " + max);
} |
<filename>lib/capistrano/tasks/config.rake
# Capistrano defaults for running Puma under runit, supervised by monit.
require 'capistrano/runit'
require 'capistrano/helpers/puma/template_paths'

include Capistrano::DSL::BasePaths
include Capistrano::DSL::RunitPaths
include Capistrano::Helpers::Base
include Capistrano::Helpers::Runit

namespace :load do
  task :defaults do
    # Puma Configuration
    set :puma_runit_service_name, 'puma'
    set :puma_workers, 2 # Must use a minimum of 1 worker (cluster mode, else restart/stop fails in the state file?)
    set :puma_min_threads, 8
    set :puma_max_threads, 8
    set :puma_bin, 'bundle exec puma'
    set :puma_control, 'bundle exec pumactl'
    # Control files
    set :puma_socket_file, proc { "#{File.join(fetch(:sockets_path), 'puma.sock')}" }
    set :puma_socket_url, proc { "unix://#{fetch(:puma_socket_file)}" }
    set :puma_pid_file, proc { File.join(fetch(:pids_path), 'puma.pid') }
    set :puma_state_file, proc { File.join(fetch(:sockets_path), 'puma.state') }
    set :puma_control_file, proc { "#{File.join(fetch(:sockets_path), 'pumactl.sock')}" }
    set :puma_control_url, proc { "unix://#{fetch(:puma_control_file)}" }
    # This must be set to false if phased restarts should be used
    set :puma_use_preload_app, false
    # NOTE(review): key reads "pruma", not "puma" — looks like a typo, but any
    # template that fetches :pruma_prune_bundler depends on this exact name;
    # confirm all consumers before renaming.
    set :pruma_prune_bundler, false
    set :puma_before_fork, nil
    set :puma_on_worker_boot, nil
    set :puma_on_restart, nil
    set :puma_activate_control_app, true
    set :puma_on_restart_active, true
    # Logging to path
    set :puma_log_path, proc { runit_var_log_service_single_service_path(fetch(:puma_runit_service_name)) }
    # Configuration files
    set :puma_config_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'puma-config.rb.erb')
    # The remote location of puma's config file. Used by runit when starting puma
    set :puma_remote_config_folder, proc { shared_path.join('config') }
    set :puma_config_file, proc { File.join(fetch(:puma_remote_config_folder), 'puma.rb') }
    # runit defaults
    set :puma_restart_interval, proc { fetch(:runit_restart_interval) }
    set :puma_restart_count, proc { fetch(:runit_restart_count) }
    set :puma_autorestart_clear_interval, proc { fetch(:runit_autorestart_clear_interval) }
    # runit paths (templates for the run/finish/control scripts and log runner)
    set :puma_runit_run_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'runit', 'run.erb')
    set :puma_runit_finish_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'runit', 'finish.erb') # rubocop:disable Metrics/LineLength
    set :puma_runit_control_q_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'runit', 'control', 'q.erb') # rubocop:disable Metrics/LineLength
    set :puma_runit_log_run_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'runit', 'log', 'run.erb') # rubocop:disable Metrics/LineLength
    # monit configuration (start/stop via runit's sv, with resource thresholds)
    set :puma_monit_service_name, proc { "#{user_app_env_underscore}_puma" }
    set :puma_monit_start_command, proc { "/bin/bash -c '[ ! -h #{runit_service_path(fetch(:puma_runit_service_name))}/run ] || /usr/bin/sv start #{runit_service_path(fetch(:puma_runit_service_name))}'" } # rubocop:disable Metrics/LineLength
    set :puma_monit_stop_command, proc { "/usr/bin/sv -w 12 force-stop #{runit_service_path(fetch(:puma_runit_service_name))}" } # rubocop:disable Metrics/LineLength
    set :puma_monit_memory_alert_threshold, '150.0 MB for 2 cycles'
    set :puma_monit_memory_restart_threshold, '175.0 MB for 3 cycles'
    set :puma_monit_cpu_alert_threshold, '90% for 2 cycles'
    set :puma_monit_cpu_restart_threshold, '95% for 5 cycles'
    set :puma_monit_config_template, File.join(Capistrano::Helpers::Puma::TemplatePaths.template_base_path, 'monit', 'puma.conf.erb') # rubocop:disable Metrics/LineLength
  end
end
|
/* jshint indent: 2 */
module.exports = function (sequelize, DataTypes) {
return sequelize.define('user', {
id: {
type: DataTypes.INTEGER(10).UNSIGNED,
allowNull: false,
primaryKey: true,
autoIncrement: true,
field: 'id',
},
fbId: {
type: DataTypes.STRING(255),
allowNull: false,
field: 'fbId',
},
name: {
type: DataTypes.STRING(45),
allowNull: true,
field: 'name',
},
authToken: {
type: DataTypes.STRING(255),
allowNull: false,
field: 'authToken',
},
thumbnailUrl: {
type: DataTypes.STRING(255),
allowNull: true,
field: 'thumbnailUrl',
},
createdAt: {
type: DataTypes.DATE,
allowNull: false,
defaultValue: sequelize.literal('CURRENT_TIMESTAMP'),
field: 'createdAt',
},
refreshAt: {
type: DataTypes.DATE,
allowNull: true,
field: 'refreshAt',
},
}, {
tableName: 'user',
timestamps: false,
});
};
|
<reponame>Clunt/shqz
Game.Map.XXCD = function() {};

// Phaser state for the XXCD map: loads the tilemap + artwork, builds the
// collision layer and wires up P2 physics.
Game.Map.XXCD.prototype = {
  preload: function() {
    this.game.load.tilemap('MAP_XXCD', 'client/data/map/xxcd/xxcd.json', null, Phaser.Tilemap.TILED_JSON);
    this.game.load.image('MAP_XXCD', 'client/data/map/xxcd/xxcd.jpg');
    this.game.load.image('MAP_XXCD_M', 'client/data/map/xxcd/xxcdm.png');
    this.game.load.image('MAP_COLLISION', 'client/data/map/collision.png');
  },
  create: function() {
    // Background artwork first, foreground mask sprite last (drawn above player).
    this.game.add.tileSprite(0, 0, 1408, 1024, 'MAP_XXCD');
    // FIX: use this.game consistently instead of relying on a global `game`.
    this.game.custom.map = this.map = this.game.add.tilemap('MAP_XXCD');
    this.map.addTilesetImage('MAP_COLLISION');
    this.map.collision = this.map.createLayer('Collision');
    this.map.collision.visible = false;
    this.map.setCollision(1);
    this.map.collision.resizeWorld();
    this.game.physics.startSystem(Phaser.Physics.P2JS);
    this.game.physics.p2.convertTilemap(this.map, this.map.collision);
    this.game.physics.p2.restitution = 0;
    this.game.world.setBounds(0, 0, 1408, 1024);
    this.game.physics.p2.setBoundsToWorld(true, true, true, true, false);
    Game.Map.create(this.game);
    this.game.add.tileSprite(0, 0, 1408, 1024, 'MAP_XXCD_M');
  },
  update: function() {
    Game.player.update();
  },
  render: function() {
    // FIX: debug helpers previously used the global `game`; use this.game.
    this.game.debug.cameraInfo(this.game.camera, 10, 20);
    this.game.debug.spriteCoords(Game.player.sprite, 10, Game.height - 45);
    this.game.debug.body(Game.player.sprite);
  }
};
<reponame>PongsakDev/hacktoberfest2021<gh_stars>1-10
//Difference with bubble sort, Here at any iteration of outerloop,
//the array to the left of the element will be sorted
// InsertionSort sorts the slice in place and returns it. Unlike bubble sort,
// after each pass of the outer loop the elements left of index i are sorted.
// The slice is also printed after every outer pass (trace output).
func InsertionSort(numbers []int) []int {
	for i := 0; i < len(numbers); i++ {
		// Swap numbers[i] leftwards past every larger element at or
		// before index i, keeping the left portion sorted.
		for j := 0; j < i+1; j++ {
			if numbers[j] > numbers[i] {
				numbers[i], numbers[j] = numbers[j], numbers[i]
			}
		}
		fmt.Println(numbers) // trace: state after this pass
	}
	return numbers
}
// main demonstrates InsertionSort on a fixed sample slice and prints the
// final sorted result.
func main() {
	sample := []int{212, 12, 3001, 14, 501, 7800, 9932, 33, 45, 91, 99, 37, 102, 102, 104, 106, 109, 106}
	fmt.Println(InsertionSort(sample))
}
// Output:
// [212 12 3001 14 501 7800 9932 33 45 91 99 37 102 102 104 106 109 106]i=0
// [12 212 3001 14 501 7800 9932 33 45 91 99 37 102 102 104 106 109 106]i=1
// [12 212 3001 14 501 7800 9932 33 45 91 99 37 102 102 104 106 109 106]i=2
// [12 14 212 3001 501 7800 9932 33 45 91 99 37 102 102 104 106 109 106]i=3
// [12 14 212 501 3001 7800 9932 33 45 91 99 37 102 102 104 106 109 106]i=4
// [12 14 212 501 3001 7800 9932 33 45 91 99 37 102 102 104 106 109 106]...
// [12 14 212 501 3001 7800 9932 33 45 91 99 37 102 102 104 106 109 106]...
// [12 14 33 212 501 3001 7800 9932 45 91 99 37 102 102 104 106 109 106]...
// [12 14 33 45 212 501 3001 7800 9932 91 99 37 102 102 104 106 109 106]...
// [12 14 33 45 91 212 501 3001 7800 9932 99 37 102 102 104 106 109 106] ...and so
// [12 14 33 45 91 99 212 501 3001 7800 9932 37 102 102 104 106 109 106]
// [12 14 33 37 45 91 99 212 501 3001 7800 9932 102 102 104 106 109 106]
// [12 14 33 37 45 91 99 102 212 501 3001 7800 9932 102 104 106 109 106]
// [12 14 33 37 45 91 99 102 102 212 501 3001 7800 9932 104 106 109 106] ...on
// [12 14 33 37 45 91 99 102 102 104 212 501 3001 7800 9932 106 109 106]
// [12 14 33 37 45 91 99 102 102 104 106 212 501 3001 7800 9932 109 106]
// [12 14 33 37 45 91 99 102 102 104 106 109 212 501 3001 7800 9932 106]
// [12 14 33 37 45 91 99 102 102 104 106 106 109 212 501 3001 7800 9932]
// [12 14 33 37 45 91 99 102 102 104 106 106 109 212 501 3001 7800 9932] |
<gh_stars>0
//
// MBECurrentRecordingSetProvider.h
// Echoes
//
// Created by <NAME> on 04.06.2015.
// Copyright (c) 2015 SO MANY APPS. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "MBERequest.h"
#import "MBERecord.h"

// Registry mapping requests to their recorded responses for the current
// recording session, with a persistence hook (+save).
@interface MBECurrentRecordingSetProvider : NSObject

// Request -> record storage. NOTE(review): declared as an instance property
// while the public API below is all class methods — presumably accessed
// through +sharedInstance; confirm.
@property NSMutableDictionary *recordings;

// Associate a record with the given request.
+ (void)addRecord:(MBERecord *)record forRequest:(MBERequest *)request;
// Return the record previously stored for the request, if any.
+ (MBERecord *)recordForRequest:(MBERequest *)request;
// Whether a record exists for the given request.
+ (BOOL)isRecordForRequest:(MBERequest *)request;
// Persist the current recording set.
+ (void)save;
// Shared singleton accessor.
+ (instancetype)sharedInstance;

@end
|
def optimize_array(arr, target):
    """Find the first pair of values in ``arr`` whose sum equals ``target``.

    Scans ``arr`` in order; for each element ``x`` looks for a positive
    complement ``target - x`` elsewhere in the list. Returns the pair as a
    two-element list ``[x, target - x]``, or ``None`` when no pair exists.

    Args:
        arr: list of numbers to search (may be empty).
        target: desired pair sum.

    Returns:
        ``[x, target - x]`` for the first matching ``x``, else ``None``.
    """
    for i, x in enumerate(arr):
        complement = target - x
        # Only positive complements are accepted (original contract), and the
        # complement must occur at a *different* index — the original
        # implementation could wrongly pair an element with itself when
        # x == target / 2 appeared only once.
        if complement > 0 and complement in arr[:i] + arr[i + 1:]:
            return [x, complement]
    # No matching pair found.
    return None
# Demo: look for a pair summing to 15 in the sample array.
array = [4,7,1,1,9,7]
target = 15
res = optimize_array(array, target)
print(res)  # None — no two elements of this sample array sum to 15
# List all Azure Firewalls for a given resource group.
# NOTE(review): `az resource show` against the azureFirewalls collection URI is
# unusual for a list operation — `az network firewall list -g "$RESOURCE_GROUP"`
# is the dedicated command; confirm before switching.
RESOURCE_GROUP="myresourcegroup"
# FIX: quote the expansion and fail fast when SUBSCRIPTION_ID is unset, instead
# of silently building a malformed resource id.
az resource show \
  --id "/subscriptions/${SUBSCRIPTION_ID:?SUBSCRIPTION_ID must be set}/resourceGroups/${RESOURCE_GROUP}/providers/Microsoft.Network/azureFirewalls" \
  --api-version 2018-11-01
#!/bin/bash
# One-shot root password initialisation for a container image: sets the root
# password from $ROOT_PASS, or generates a random one, and records a marker
# file so subsequent runs are a no-op.

if [ -f /.root_pw_set ]; then
  echo "Root password already set!"
  exit 0
fi

# Use ROOT_PASS when provided, otherwise generate a random 12-char password.
PASS=${ROOT_PASS:-$(pwgen -s 12 1)}
# FIX: the original unquoted `[ ${ROOT_PASS} ]` broke on empty/whitespace
# values; test explicitly for a non-empty string.
_word=$( [ -n "${ROOT_PASS:-}" ] && echo "preset" || echo "random" )

echo "=> Setting a ${_word} password to the root user"
echo "root:$PASS" | chpasswd
echo "=> Done!"

# Marker so the script never overwrites an already-set password.
touch /.root_pw_set

echo "========================================================================"
echo "You can now connect to this CentOS container via SSH using:"
echo ""
echo "    ssh -p <port> root@<host>"
echo "and enter the root password '$PASS' when prompted"
echo ""
echo "Please remember to change the above password as soon as possible!"
echo "========================================================================"
|
#ifndef COMMUNICATION_H
#define COMMUNICATION_H

#include <QtCore>
#include <QtWidgets>
#include <QtSerialPort/QSerialPort>
#include <QtSerialPort/QSerialPortInfo>
#include <vector>
#include <thread>
#include "kfly_comm/kfly_comm.hpp"

// Qt <-> KFly serial bridge: owns the serial port, frames outgoing messages
// through the kfly_comm codec and re-emits decoded datagrams as Qt signals.
class communication : public QObject
{
    Q_OBJECT

private:
    QSerialPort _serialport;                 // underlying serial device
    kfly_comm::codec _kfly_comm;             // KFly protocol encoder/decoder
    std::mutex _serialmutex;                 // NOTE(review): presumably guards the
                                             // transmit buffer / port access — confirm
    std::vector<uint8_t> _transmitt_buffer;  // bytes queued for transmission
    QTimer _transmit_timer;                  // drives periodic transmit_buffer() flushes

public:
    explicit communication(QObject *parent = 0);
    ~communication();

    // Open / close the serial connection.
    bool openPort(const QString& portname, int baudrate);
    void closePort();

    // Queue a raw, already-encoded message for transmission.
    void send(const std::vector<uint8_t>& message);

    // Manage periodic datagram subscriptions on the KFly side.
    void subscribe(kfly_comm::commands cmd, unsigned int dt_ms);
    void unsubscribe(kfly_comm::commands cmd);
    void unsubscribe_all();

    /* Functions registered to the KFly interface. */
    void regPing(kfly_comm::datagrams::Ping);
    void regSystemStrings(kfly_comm::datagrams::SystemStrings msg);
    void regSystemStatus(kfly_comm::datagrams::SystemStatus msg);
    void regRCInputSettings(kfly_comm::datagrams::RCInputSettings msg);
    void regRCValues(kfly_comm::datagrams::RCValues msg);
    void regRCOutputSettings(kfly_comm::datagrams::RCOutputSettings msg);
    void regControlSignals(kfly_comm::datagrams::ControlSignals msg);
    void regChannelMix(kfly_comm::datagrams::ChannelMix msg);
    void regArmSettings(kfly_comm::datagrams::ArmSettings msg);
    void regRateControllerData(kfly_comm::datagrams::RateControllerData msg);
    void regAttitudeControllerData(kfly_comm::datagrams::AttitudeControllerData msg);
    void regControllerLimits(kfly_comm::datagrams::ControllerLimits msg);
    void regIMUCalibration(kfly_comm::datagrams::IMUCalibration msg);
    void regRawIMUData(kfly_comm::datagrams::RawIMUData msg);
    void regIMUData(kfly_comm::datagrams::IMUData msg);
    void regControlFilterSettings(kfly_comm::datagrams::ControlFilterSettings msg);

private slots:
    // Decode incoming bytes from the serial port.
    void parseSerialData();
    // React to serial-port error notifications.
    void handleSerialError(QSerialPort::SerialPortError error);
    // Flush the queued transmit buffer to the port.
    void transmit_buffer();

signals:
    void sigConnectionError(void);
    void sigPing(void);
    void sigSystemStrings(kfly_comm::datagrams::SystemStrings msg);
    void sigSystemStatus(kfly_comm::datagrams::SystemStatus msg);
    void sigRCInputSettings(kfly_comm::datagrams::RCInputSettings msg);
    void sigRCValues(kfly_comm::datagrams::RCValues msg);
    void sigRCOutputSettings(kfly_comm::datagrams::RCOutputSettings msg);
    void sigControlSignals(kfly_comm::datagrams::ControlSignals msg);
    void sigChannelMix(kfly_comm::datagrams::ChannelMix msg);
    void sigArmSettings(kfly_comm::datagrams::ArmSettings msg);
    void sigRateControllerData(kfly_comm::datagrams::RateControllerData msg);
    void sigAttitudeControllerData(kfly_comm::datagrams::AttitudeControllerData msg);
    void sigControllerLimits(kfly_comm::datagrams::ControllerLimits msg);
    void sigIMUCalibration(kfly_comm::datagrams::IMUCalibration msg);
    void sigRawIMUData(kfly_comm::datagrams::RawIMUData msg);
    void sigIMUData(kfly_comm::datagrams::IMUData msg);
    void sigControlFilterSettings(kfly_comm::datagrams::ControlFilterSettings msg);
};

#endif // COMMUNICATION_H
|
#!/bin/sh
###############################################################################
### FUNCTIONS ###
###############################################################################
# Creates a validator for a given node
# Take 1 arg the name of the node e.g poadnode0
# Creates a validator for a given node.
# $1 - name of the node, e.g. poadnode0
createValidator() {
  # FIX: printf instead of echo — `echo "...\n"` is non-portable under
  # #!/bin/sh (dash interprets \n, bash prints it literally).
  printf 'Creating validator for node %s\n\n' "$1"
  # Create the validator (runs entirely inside the node's container).
  docker exec -e MONIKER="$1" "$1" /bin/sh -c 'poacli tx poa create-validator $(poacli keys show validator --bech val -a --keyring-backend test) $(poad tendermint show-validator) $(echo $MONIKER) identity website security@contact details -y --trust-node --from validator --chain-id cash --keyring-backend test'
  sleep 5
}
# Votes for a perspecitve canidate
# Take 2 args the name of the node voting and the candidate node e.g poadnode0 poadnode1
# Votes for a prospective candidate.
# $1 - name of the voting node, $2 - candidate node, e.g. poadnode0 poadnode1
voteForValidator() {
  # FIX: plain assignment instead of `eval` — the docker output is data, not
  # shell code to evaluate; also quote all expansions.
  CANDIDATE=$(docker exec "$2" /bin/sh -c "poacli keys show validator --bech val -a --keyring-backend test")
  printf 'Voter %s is voting for candidate %s\n' "$1" "$2"
  docker exec -e CANDIDATE="$CANDIDATE" "$1" /bin/sh -c 'poacli tx poa vote-validator $(echo $CANDIDATE) -y --trust-node --from validator --chain-id cash --keyring-backend test'
  sleep 5
}
# Kicks for a perspecitve canidate
# Take 2 args the name of the node voting and the candidate node e.g poadnode0 poadnode1
# Votes to kick a candidate out of the validator set.
# $1 - name of the voting node, $2 - candidate node, e.g. poadnode0 poadnode1
kickValidator() {
  # FIX: plain assignment instead of `eval`; quote all expansions.
  CANDIDATE=$(docker exec "$2" /bin/sh -c "poacli keys show validator --bech val -a --keyring-backend test")
  printf 'Votee %s is voting to kick candidate %s\n' "$1" "$2"
  docker exec -e CANDIDATE="$CANDIDATE" "$1" /bin/sh -c 'poacli tx poa kick-validator $(echo $CANDIDATE) -y --trust-node --from validator --chain-id cash --keyring-backend test'
  sleep 5
}
###############################################################################
### STEP 1                                                                  ###
###############################################################################
# Import the exported key for the first node
docker exec poadnode0 /bin/sh -c "echo -e 'password1234\n' | poacli keys import validator validator --keyring-backend test"
## Create the validator
# NOTE(review): comment says "create" but this casts a vote — confirm whether
# createValidator was intended here.
voteForValidator poadnode0 poadnode0
###############################################################################
### STEP 2                                                                  ###
###############################################################################
# Create the keys for each node
for var in poadnode1 poadnode2 poadnode3
do
  printf 'Creating key for node %s\n\n' "$var"
  docker exec "$var" /bin/sh -c "poacli keys add validator --keyring-backend test"
done
## Send tokens to each validator
for node in poadnode1 poadnode2 poadnode3
do
  # FIX: plain assignment instead of `eval`; quote all expansions.
  ADDRESS=$(docker exec "$node" /bin/sh -c "poacli keys show validator -a --keyring-backend test")
  printf 'Sending tokens to %s\n\n' "$ADDRESS"
  docker exec -e ADDRESS="$ADDRESS" poadnode0 /bin/sh -c 'poacli tx send $(poacli keys show validator -a --keyring-backend test) $(echo $ADDRESS) 100000stake -y --trust-node --from validator --chain-id cash --keyring-backend test'
  sleep 5
done
###############################################################################
### STEP 3                                                                  ###
###############################################################################
# Create validator for validator set
for var in poadnode1 poadnode2 poadnode3
do
  createValidator "$var"
done
###############################################################################
### STEP 4                                                                  ###
###############################################################################
# Adding new validators to the set
# Vote for validator1 to join the set
voteForValidator poadnode0 poadnode1
# poadnode1 votes for poadnode0 to prove the node is in the consensus
voteForValidator poadnode1 poadnode0
# poadnode1 votes for poadnode1 to stay relevant in the consensus
voteForValidator poadnode1 poadnode1
# poadnode1 and poanode0 votes for poadnode2 to join the consensus
voteForValidator poadnode0 poadnode2
voteForValidator poadnode1 poadnode2
# poadnode2 votes for poadnode2 to stay relevant in the consensus
voteForValidator poadnode2 poadnode2
# poadnode2 votes for poadnode1 to prove the node is in the consensus
voteForValidator poadnode2 poadnode1
# poadnode2 votes for poadnode0 to prove the node is in the consensus
voteForValidator poadnode2 poadnode0
# kick poadnode2 out of the consensus
kickValidator poadnode0 poadnode2
kickValidator poadnode1 poadnode2
printf 'POA Consensus started with 2 nodes :thumbs_up:\n\n'
## Verify valdiators are in the set by checking the proposer address of the block
#curl 0.0.0.0:26657/block?height?803 | jq '.result.block.header.proposer_address'
## Verify valdiators are in the set by checking the validator set
#curl -X GET "localhost/validators?height=50&page=1&per_page=30" -H "accept: application/json"
|
#!/bin/bash
# Deploy side-chain contracts unless DEPLOY is explicitly set to something
# other than "true". Any extra CLI arguments are forwarded to the deploy script.
if [[ "${DEPLOY:-true}" == "true" ]]; then
  # FIX: quote "$@" so arguments with spaces survive, and test the command
  # directly instead of inspecting $? afterwards.
  if ! node ./output/deployment/deploySideChainNetwork.js "$@"; then
    # Diagnostics go to stderr.
    echo "Error while deploying side chain contracts to $ETHEREUM_NETWORK, exiting and skipping artifact management" >&2
    exit 1
  fi
else
  echo "Skipping deploy, set DEPLOY=true to do it"
fi
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See License.txt in the project root.
package com.microsoft.alm.plugin.external.commands;
import com.microsoft.alm.common.utils.ArgumentHelper;
import com.microsoft.alm.plugin.context.ServerContext;
import com.microsoft.alm.plugin.external.ToolRunner;
import com.microsoft.alm.plugin.external.models.ExtendedItemInfo;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* This command calls Info which returns local and server information about an item in the workspace.
* <p/>
* info [/recursive] [/version:<value>] <itemSpec>...
*/
public class InfoCommand extends Command<List<ExtendedItemInfo>> {
    // Item specs (local or server paths) to query; never null or empty.
    private final List<String> itemPaths;
    // Optional working directory for the tf invocation; may be null.
    private final String workingFolder;

    /**
     * Convenience constructor with no explicit working folder.
     */
    public InfoCommand(final ServerContext context, final List<String> itemPaths) {
        this(context, null, itemPaths);
    }

    /**
     * @param context       server context passed to the base command
     * @param workingFolder directory to run the command in; may be null
     * @param itemPaths     item specs to query; must be non-null and non-empty
     */
    public InfoCommand(final ServerContext context, final String workingFolder, final List<String> itemPaths) {
        super("info", context);
        ArgumentHelper.checkNotNullOrEmpty(itemPaths, "itemPaths");
        this.itemPaths = itemPaths;
        this.workingFolder = workingFolder;
    }

    /**
     * Builds the "info" command line: base arguments from the superclass plus
     * each item path as a positional argument.
     */
    @Override
    public ToolRunner.ArgumentBuilder getArgumentBuilder() {
        ToolRunner.ArgumentBuilder builder = super.getArgumentBuilder()
                .setWorkingDirectory(workingFolder);
        for (final String file : itemPaths) {
            builder.add(file);
        }
        return builder;
    }

    /**
     * Example of output
     * Local information:
     * Local path: D:\tmp\TFVC_1\build.xml
     * Server path: $/TFVC_1/build.xml
     * Changeset: 18
     * Change: none
     * Type: file
     * Server information:
     * Server path: $/TFVC_1/build.xml
     * Changeset: 18
     * Deletion ID: 0
     * Lock: none
     * Lock owner:
     * Last modified: Nov 18, 2016 11:10:20 AM
     * Type: file
     * File type: windows-1252
     * Size: 1385
     */
    @Override
    public List<ExtendedItemInfo> parseOutput(final String stdout, final String stderr) {
        super.throwIfError(stderr);
        final List<ExtendedItemInfo> itemInfos = new ArrayList<>(itemPaths.size());
        final Map<String, String> propertyMap = new HashMap<String, String>(15);
        final String[] output = getLines(stdout);

        // Keys parsed inside the "Server information" section get a "server "
        // prefix so they do not collide with same-named local keys.
        String prefix = "";
        for (final String line : output) {
            if (StringUtils.startsWithIgnoreCase(line, "local information:")) {
                // switch to local mode
                prefix = "";
                // A new "Local information:" header starts the next item:
                // flush the properties collected for the previous one.
                if (!propertyMap.isEmpty()) {
                    itemInfos.add(getItemInfo(propertyMap));
                }
                propertyMap.clear();
            } else if (StringUtils.startsWithIgnoreCase(line, "server information:")) {
                // switch to server mode
                prefix = "server ";
            } else if (StringUtils.isNotBlank(line)) {
                // add property
                // Split on the FIRST colon only; values such as Windows paths
                // ("D:\tmp\...") legitimately contain colons.
                final int colonPos = line.indexOf(":");
                if (colonPos > 0) {
                    // NOTE(review): toLowerCase() is locale-sensitive (e.g.
                    // Turkish 'I'); toLowerCase(Locale.ROOT) would be safer for
                    // tool output — confirm before changing.
                    final String key = prefix + line.substring(0, colonPos).trim().toLowerCase();
                    final String value = colonPos + 1 < line.length() ? line.substring(colonPos + 1).trim() : StringUtils.EMPTY;
                    propertyMap.put(key, value);
                }
            }
        }
        // Flush the last item (no trailing header triggers it inside the loop).
        if (!propertyMap.isEmpty()) {
            itemInfos.add(getItemInfo(propertyMap));
        }
        return itemInfos;
    }

    // Builds an ExtendedItemInfo from the parsed key/value pairs; keys that
    // were absent in the output yield null fields.
    private ExtendedItemInfo getItemInfo(Map<String, String> propertyMap) {
        return new ExtendedItemInfo(
                propertyMap.get("server path"),
                propertyMap.get("local path"),
                propertyMap.get("server changeset"),
                propertyMap.get("changeset"),
                propertyMap.get("change"),
                propertyMap.get("type"),
                propertyMap.get("server lock"),
                propertyMap.get("server lock owner"),
                propertyMap.get("server last modified"),
                propertyMap.get("server file type")
        );
    }
}
|
<gh_stars>0
/******************************************************************************
*
* Project: CPL - Common Portability Library
* Author: <NAME>, <EMAIL>
* Purpose: Progress function implementations.
*
******************************************************************************
* Copyright (c) 2013, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "cpl_progress.h"
#include "cpl_conv.h"
#include <cmath>
#include <algorithm>
CPL_CVSID("$Id$");
/************************************************************************/
/* GDALDummyProgress() */
/************************************************************************/
/**
* \brief Stub progress function.
*
* This is a stub (does nothing) implementation of the GDALProgressFunc()
* semantics. It is primarily useful for passing to functions that take
* a GDALProgressFunc() argument but for which the application does not want
* to use one of the other progress functions that actually do something.
*/
int CPL_STDCALL GDALDummyProgress( double /* dfComplete */ ,
                                   const char * /* pszMessage */ ,
                                   void * /* pData */ )
{
    // Intentionally a no-op: always tell the caller to continue.
    return TRUE;
}
/************************************************************************/
/* GDALScaledProgress() */
/************************************************************************/
// State for GDALScaledProgress(): the wrapped callback plus the [dfMin, dfMax]
// sub-range of overall progress it maps onto.
typedef struct {
    GDALProgressFunc pfnProgress;  // underlying progress callback
    void *pData;                   // callback data forwarded to pfnProgress
    double dfMin;                  // overall progress at sub-operation 0.0
    double dfMax;                  // overall progress at sub-operation 1.0
} GDALScaledProgressInfo;
/**
* \brief Scaled progress transformer.
*
* This is the progress function that should be passed along with the
* callback data returned by GDALCreateScaledProgress().
*/
int CPL_STDCALL GDALScaledProgress( double dfComplete, const char *pszMessage,
                                    void *pData )

{
    GDALScaledProgressInfo *psScaled =
        static_cast<GDALScaledProgressInfo *>( pData );

    // GDALCreateScaledProgress() hands back NULL when wrapping
    // GDALDummyProgress; treat that as "keep going" without a call.
    if( psScaled == NULL )
        return TRUE;

    // Map the sub-operation's [0,1] completion into [dfMin, dfMax].
    const double dfOverall =
        psScaled->dfMin + dfComplete * (psScaled->dfMax - psScaled->dfMin);

    return psScaled->pfnProgress( dfOverall, pszMessage, psScaled->pData );
}
/************************************************************************/
/* GDALCreateScaledProgress() */
/************************************************************************/
/**
* \brief Create scaled progress transformer.
*
* Sometimes when an operations wants to report progress it actually
* invokes several subprocesses which also take GDALProgressFunc()s,
* and it is desirable to map the progress of each sub operation into
* a portion of 0.0 to 1.0 progress of the overall process. The scaled
* progress function can be used for this.
*
* For each subsection a scaled progress function is created and
* instead of passing the overall progress func down to the sub functions,
* the GDALScaledProgress() function is passed instead.
*
* @param dfMin the value to which 0.0 in the sub operation is mapped.
* @param dfMax the value to which 1.0 is the sub operation is mapped.
* @param pfnProgress the overall progress function.
* @param pData the overall progress function callback data.
*
* @return pointer to pass as pProgressArg to sub functions. Should be freed
* with GDALDestroyScaledProgress().
*
* Example:
*
* \code
* int MyOperation( ..., GDALProgressFunc pfnProgress, void *pProgressData );
*
* {
* void *pScaledProgress;
*
* pScaledProgress = GDALCreateScaledProgress( 0.0, 0.5, pfnProgress,
* pProgressData );
* GDALDoLongSlowOperation( ..., GDALScaledProgress, pScaledProgress );
* GDALDestroyScaledProgress( pScaledProgress );
*
* pScaledProgress = GDALCreateScaledProgress( 0.5, 1.0, pfnProgress,
* pProgressData );
* GDALDoAnotherOperation( ..., GDALScaledProgress, pScaledProgress );
* GDALDestroyScaledProgress( pScaledProgress );
*
* return ...;
* }
* \endcode
*/
void * CPL_STDCALL GDALCreateScaledProgress( double dfMin, double dfMax,
                                             GDALProgressFunc pfnProgress,
                                             void * pData )
{
    // A missing or dummy callback needs no state: GDALScaledProgress()
    // recognises the NULL handle and short-circuits.
    if( pfnProgress == NULL || pfnProgress == GDALDummyProgress )
        return NULL;

    GDALScaledProgressInfo *psState =
        static_cast<GDALScaledProgressInfo *>(
            CPLCalloc( sizeof(GDALScaledProgressInfo), 1 ) );

    // Guard against a degenerate (near-zero-width) range.
    if( std::abs(dfMin - dfMax) < 0.0000001 )
        dfMax = dfMin + 0.01;

    psState->pfnProgress = pfnProgress;
    psState->pData = pData;
    psState->dfMin = dfMin;
    psState->dfMax = dfMax;

    return static_cast<void *>( psState );
}
/************************************************************************/
/* GDALDestroyScaledProgress() */
/************************************************************************/
/**
* \brief Cleanup scaled progress handle.
*
* This function cleans up the data associated with a scaled progress function
* as returned by GADLCreateScaledProgress().
*
* @param pData scaled progress handle returned by GDALCreateScaledProgress().
*/
void CPL_STDCALL GDALDestroyScaledProgress( void * pData )
{
    // Handle was allocated with CPLCalloc(); CPLFree(NULL) is safe.
    CPLFree( pData );
}
/************************************************************************/
/* GDALTermProgress() */
/************************************************************************/
/**
* \brief Simple progress report to terminal.
*
* This progress reporter prints simple progress report to the
* terminal window. The progress report generally looks something like
* this:
\verbatim
0...10...20...30...40...50...60...70...80...90...100 - done.
\endverbatim
* Every 2.5% of progress another number or period is emitted. Note that
* GDALTermProgress() uses internal static data to keep track of the last
* percentage reported and will get confused if two terminal based progress
* reportings are active at the same time.
*
* The GDALTermProgress() function maintains an internal memory of the
* last percentage complete reported in a static variable, and this makes
* it unsuitable to have multiple GDALTermProgress()'s active either in a
* single thread or across multiple threads.
*
* @param dfComplete completion ratio from 0.0 to 1.0.
* @param pszMessage optional message.
* @param pProgressArg ignored callback data argument.
*
* @return Always returns TRUE indicating the process should continue.
*/
int CPL_STDCALL GDALTermProgress( double dfComplete,
                                  const char * /* pszMessage */,
                                  void * /* pProgressArg */ )
{
    // Map completion [0,1] onto 40 ticks (2.5% each), clamped to the range.
    int nThisTick = std::min(40, std::max(0,
            static_cast<int>(dfComplete * 40.0) ));

    // Have we started a new progress run?
    // (static: shared across calls — documented above as not reentrant)
    static int nLastTick = -1;
    if( nThisTick < nLastTick && nLastTick >= 39 )
        nLastTick = -1;

    // Nothing new to report yet.
    if( nThisTick <= nLastTick )
        return TRUE;

    // Emit every tick between the last reported one and the current one:
    // a decade number on every 4th tick (10, 20, ...), a '.' otherwise.
    while( nThisTick > nLastTick )
    {
        ++nLastTick;
        if( nLastTick % 4 == 0 )
            fprintf( stdout, "%d", (nLastTick / 4) * 10 );
        else
            fprintf( stdout, "." );
    }

    if( nThisTick == 40 )
        fprintf( stdout, " - done.\n" );
    else
        fflush( stdout );  // keep partial progress visible on line-buffered terminals

    return TRUE;
}
|
#!/bin/sh
# Launch the notepad service on a fixed port.
export PORT=5000
# exec replaces this shell with the JVM so signals reach Java directly.
exec java -server -cp target/notepad-0.0.1-SNAPSHOT.jar:"target/dependency/*" notepad.Launcher
|
# Persists submitted scores and returns the current top-five leaderboard.
class ScoreBoardsController < ApplicationController
  # def index
  #   @scores = ScoreBoard.all
  #   render json: @scores
  # end

  # Creates a score entry from :name/:score params, then responds with the
  # five highest scores as JSON.
  # NOTE(review): params are used directly without validation or strong
  # parameters — confirm upstream sanitisation.
  def create
    @score = ScoreBoard.create(name: params[:name], score: params[:score])
    @scores = ScoreBoard.all.order(score: :desc).limit(5)
    render json: @scores
  end
end
|
#include <iostream>
using namespace std;

// Read (n, k) pairs until EOF or the (-1, -1) sentinel; for each pair apply
// k Collatz steps (n/2 when even, 3n+1 when odd) and print the result.
int main() {
    ios::sync_with_stdio(false);
    // FIX: use long long for n — the intermediate 3*n+1 overflows a 32-bit
    // int for large inputs. k stays int (it is only a loop bound).
    long long n;
    int k;
    while (cin >> n >> k) {
        if (n == -1 && k == -1)
            break;
        for (int i = 0; i < k; i++) {
            if (n % 2 == 0)
                n /= 2;
            else
                n = n * 3 + 1;
        }
        cout << n << endl;
    }
    return 0;
}
"This is an example of HTML code
This is a paragraph." |
#! /bin/sh
# Run the kubeless function handler with V8 heap/CPU profiling enabled,
# then serve the collected logs.
# NOTE(review): `&&` means servelogs.js only starts AFTER kubeless.js exits
# successfully — if both should run concurrently this needs `&`; confirm.
node --heap-prof --cpu-prof kubeless.js &&
node servelogs.js
import React from "react";
// Color picker field: lets the user choose a color, confirm it, and manage
// the list of selected colors stored in form.values[name] via the Formik-style
// push/remove helpers. Clicking a swatch in the list removes it.
export const CustomColorInput: React.FC<InputColorProps> = (props) => {
  const { name, form, push, remove } = props;
  // Color currently chosen in the <input type="color"> but not yet added.
  const [selectedColor, setSelectedColor] = React.useState("");

  // Track the picker's current value.
  const handleColorChange = (e: any) => {
    const val = e.target.value;
    setSelectedColor(val);
  };

  // Append the pending color to the form list (skipping duplicates), then
  // clear the pending selection.
  const handleAddSelectedColor = () => {
    if (!form.values[name].includes(selectedColor)) {
      push(selectedColor);
      setSelectedColor("");
    }
  };

  return (
    <div className="d-flex">
      <div className="input-group product-form-field">
        <div className="d-flex">
          {form.touched[name] && form.errors[name] ? (
            <span className="label-input label-error">{form.errors[name]}</span>
          ) : (
            <label className="label-input" htmlFor={name}>
              Available Colors
            </label>
          )}
          {selectedColor && (
            <>
              <div className="color-item" style={{ background: selectedColor }} />
              <h4
                className="text-link"
                onClick={handleAddSelectedColor}
                style={{ textDecoration: "underline" }}
                role="presentation">
                <i className="fa fa-check" />
                Add Selected Color
              </h4>
            </>
          )}
        </div>
        <input name={name} type="color" onChange={handleColorChange} id={name} />
      </div>
      <div className="product-form-field">
        <span className="d-block padding-s">Selected Color(s)</span>
        <div className="color-chooser">
          {form.values[name]?.map((color, index) => (
            <div
              key={color}
              onClick={() => remove(index)}
              className="color-item color-item-deletable"
              title={`Remove ${color}`}
              style={{ backgroundColor: color }}
              role="presentation"
            />
          ))}
        </div>
      </div>
    </div>
  );
};
// Props for CustomColorInput. `form` mirrors the subset of a Formik-style
// bag the component reads; `push`/`remove` mirror FieldArray helpers.
type InputColorProps = {
  // Field name; keys into form.values / form.touched / form.errors.
  name: string;
  form: {
    values: Record<string, string[]>;
    touched: Record<string, any>;
    errors: Record<string, any>;
  };
  // FIX: parameter renamed from `path` — the helper receives the color value
  // being appended, not a path (type-level rename, no caller impact).
  push: (value: string) => void;
  // Removes the color at the given index from the array.
  remove: (index: number) => void;
};
|
//
// Created by matthew on 23/11/2020.
//
#include <iostream>
#include "../../../include/parser/old/Parser.h"
// Construct a parser over the given token stream (copied into the member).
Parser::Parser(const Tokeniser &tokeniser)
        : tokeniser(tokeniser) {

}
// Top-level parse loop: consumes tokens until EndOfFile, dispatching each
// top-level construct to the matching parse* helper. Returns nullptr on the
// first error (after logging); otherwise returns the populated tree.
std::unique_ptr<ParseTree> Parser::parse(std::unique_ptr<ParseTree> &&tree) {
    // Register this source file so later nodes can reference it by index.
    fileIndex = tree->addFile(tokeniser.sourceFileName());
    while (tokeniser.nextToken() != Token::EndOfFile) {
        switch (tokeniser.currentTokenType()) {
            case Token::InvalidToken:
                return nullptr;
            case Token::Infix:
                // If the source defines an infix function or type, jump to the next token and see if
                // it is a function or a type. Otherwise return an error.
                switch (tokeniser.nextToken()) {
                    case Token::FuncDecl:
                        if (!(tree = parseInfixFunction(std::move(tree)))) {
                            return nullptr;
                        }
                        break;
                    case Token::TypeDecl:
                        if (!(tree = parseInfixType(std::move(tree)))) {
                            return nullptr;
                        }
                        break;
                    default:
                        logError("Invalid use of 'infix' specifier.");
                        return nullptr;
                }
                break;
            case Token::FuncDecl:
                if (!(tree = parsePrefixFunction(std::move(tree)))) {
                    return nullptr;
                }
                break;
            case Token::ValueDecl:
                if (!(tree = parseValue(std::move(tree)))) {
                    return nullptr;
                }
                break;
            case Token::TypeDecl:
                if (!(tree = parsePrefixType(std::move(tree)))) {
                    return nullptr;
                }
                break;
            case Token::OpenParenthesis:
            case Token::Identifier:
                // TODO: Catch empty list and unit
                if (!(tree = parseDefinition(std::move(tree)))) {
                    return nullptr;
                }
                break;
            case Token::EndOfFile:
            case Token::EndDecl:
                // Stray declaration terminators are harmless; just skip them.
                break;
            default:
                logError("Invalid top level token '" + tokeniser.currentToken() + "'.");
                return nullptr;
        }
    }
    return std::move(tree);
}
// Parses a prefix function declaration (the 'func' keyword has already been
// consumed by the caller). Adds the declaration to the tree on success and
// returns the tree; returns nullptr on any error.
std::unique_ptr<ParseTree> Parser::parsePrefixFunction(std::unique_ptr<ParseTree> tree) {
    // Syntax:
    //      func name: a -> b;
    // The next token should be the name.
    std::string functionName;
    if (tokeniser.nextToken() == Token::Identifier) {
        functionName = tokeniser.currentToken();
    } else {
        logError("'func' keyword should be followed by function identifier.");
        return nullptr;
    }
    // Next check if the function already exists. If it does, this is a redefinition and thus invalid
    if (tree->core().builtinFunctionExists(functionName)) {
        logError("Redefinition of core function '" + functionName + "'.");
        return nullptr;
    }
    if (tree->functionExists(functionName)) {
        logError("Redefinition of function '" + functionName + "'.");
        return nullptr;
    }
    // The next token should be a colon (ExprTypeSpecifier) to specify the type of the function
    if (tokeniser.nextToken() != Token::ExprTypeSpecifier) {
        logError("Function name should be followed by ':' and then function type (e.g. 'func " +
                 functionName + ": a -> b;').");
        return nullptr;
    }
    // Consume the ':' token
    tokeniser.nextToken();
    std::unique_ptr<TypeInstanceASTNode> funcType = parseTypeInstance(tree);
    if (!funcType) {
        return nullptr;
    }
    // A function must take at least one argument, i.e. have an arrow type.
    if (funcType->typeUsage() != TypeUsage::Function) {
        logError("Function must have function type (a -> b)");
        return nullptr;
    }
    // parseTypeInstance leaves the terminating token current; it must be ';'.
    if (tokeniser.currentTokenType() != Token::EndDecl) {
        logError("Function declaration should end with ';'.");
        return nullptr;
    }
    // Record the declaration; the implementation arrives later via parseDefinition.
    tree->addFunctionDeclaration(std::make_unique<PrefixFunctionDeclASTNode>(
            tokeniser.lineNumber(), fileIndex, functionName, std::move(funcType))
    );
    return std::move(tree);
}
// Parses an infix function declaration ('infix' and 'func' have already been
// consumed by the caller). Adds the declaration to the tree on success and
// returns the tree; returns nullptr on any error.
std::unique_ptr<ParseTree> Parser::parseInfixFunction(std::unique_ptr<ParseTree> tree) {
    // Syntax:
    //      infix func name: a -> b;
    // The next token should be the name.
    std::string functionName;
    if (tokeniser.nextToken() == Token::Identifier) {
        functionName = tokeniser.currentToken();
    } else {
        // NOTE(review): message says 'func' though this is the infix path —
        // consider "'infix func' keyword..." for a clearer diagnostic.
        logError("'func' keyword should be followed by function identifier.");
        return nullptr;
    }
    // Next check if the function already exists. If it does, this is a redefinition and thus invalid
    if (tree->core().builtinFunctionExists(functionName)) {
        logError("Redefinition of core function '" + functionName + "'.");
        return nullptr;
    }
    if (tree->functionExists(functionName)) {
        logError("Redefinition of function '" + functionName + "'.");
        return nullptr;
    }
    // The next token should be a colon (ExprTypeSpecifier) to specify the type of the function
    if (tokeniser.nextToken() != Token::ExprTypeSpecifier) {
        logError("Function name should be followed by ':' and then function type (e.g. 'infix func " +
                 functionName + ": a -> b -> c;').");
        return nullptr;
    }
    // Consume the ':' token
    tokeniser.nextToken();
    std::unique_ptr<TypeInstanceASTNode> funcType = parseTypeInstance(tree);
    if (!funcType) {
        return nullptr;
    }
    // An infix function needs two arguments, so its type must have the shape
    // a -> (b -> c): an arrow type whose right side is itself an arrow type.
    if (funcType->typeUsage() != TypeUsage::Function) {
        logError("Infix function must have function-to-function type (a -> b -> c)");
        return nullptr;
    }
    if (dynamic_cast<FunctionTypeInstanceASTNode *>(funcType.get())->right()->typeUsage() != TypeUsage::Function) {
        logError("Infix function must have function-to-function type (a -> b -> c)");
        return nullptr;
    }
    if (tokeniser.currentTokenType() != Token::EndDecl) {
        logError("Infix function declaration should end with ';'.");
        return nullptr;
    }
    // TODO: Customisable precedence and associativity?
    tree->addFunctionDeclaration(std::make_unique<InfixFunctionDeclASTNode>(
            tokeniser.lineNumber(), fileIndex, functionName, std::move(funcType), Associativity::Left
    ));
    return std::move(tree);
}
// Parses a value declaration (the 'value' keyword has already been consumed).
// Adds the declaration to the tree on success and returns the tree; returns
// nullptr on any error.
std::unique_ptr<ParseTree> Parser::parseValue(std::unique_ptr<ParseTree> tree) {
    // Syntax:
    //      value name: a;
    // The next token should be a name
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("'value' keyword should be followed by value identifier.");
        return nullptr;
    }
    std::string valueName = tokeniser.currentToken();
    // Values share a namespace with functions: reject clashes with both
    // builtins and user-defined functions.
    if (tree->core().builtinFunctionExists(valueName)) {
        logError("Redefinition of core function '" + valueName + "'.");
        return nullptr;
    }
    if (tree->functionExists(valueName)) {
        logError("Redefinition of function '" + valueName + "'.");
        return nullptr;
    }
    // The next token should be a colon
    if (tokeniser.nextToken() != Token::ExprTypeSpecifier) {
        logError("Value name should be followed by ':' and then value type (e.g. 'value " +
                 valueName + ": a;').");
        return nullptr;
    }
    // Consume the ':' token
    tokeniser.nextToken();
    std::unique_ptr<TypeInstanceASTNode> valueType = parseTypeInstance(tree);
    if (!valueType) {
        return nullptr;
    }
    // NOTE(review): unlike the function declaration paths, this does not check
    // that the declaration ends with ';' — confirm whether that is intentional.
    tree->addFunctionDeclaration(std::make_unique<ValueFunctionDeclASTNode>(
            tokeniser.lineNumber(), fileIndex, valueName, std::move(valueType)
    ));
    return std::move(tree);
}
// Parses a prefix type declaration (the 'type' keyword has already been
// consumed by the caller):
//      type Name [a0 [a1 ...]] [::= Cons0 [| Cons1 [| ...]]];
// Adds the declaration (and any data constructors) to the tree and returns
// the tree on success; returns nullptr on any error.
std::unique_ptr<ParseTree> Parser::parsePrefixType(std::unique_ptr<ParseTree> tree) {
    // The next token should be the type name
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("'type' keyword should be followed by type name.");
        return nullptr;
    }
    std::string typeName = tokeniser.currentToken();
    if (tree->core().builtinTypeExists(typeName)) {
        logError("Redefinition of core type '" + typeName + "'.");
        // BUG FIX: previously fell through after logging, so a core type could
        // be shadowed. Abort here, matching the sibling parseInfixType path.
        return nullptr;
    }
    if (tree->typeExists(typeName)) {
        logError("Redefinition of type '" + typeName + "'.");
        return nullptr;
    }
    // Next look for 0 or more type variables
    std::unordered_set<std::string> typeParameters;
    while (tokeniser.nextToken() != Token::TypeConstructorSpecifier) {
        switch (tokeniser.currentTokenType()) {
            case Token::Identifier:
                break;
            case Token::EndDecl:
                // If there are no constructors, then we don't expect to see a '::=' token.
                // Construct the new prefix type and pass it to the tree
                tree->addTypeDeclaration(std::make_unique<PrefixTypeDeclASTNode>(
                        tokeniser.lineNumber(), fileIndex, typeName,
                        std::vector<std::string>(typeParameters.begin(), typeParameters.end()))
                );
                return std::move(tree);
            default:
                logError("Unexpected token '" + tokeniser.currentToken() +
                         "' in type constructor declaration.\n"
                         "Type name should be followed by type variables and optionally "
                         "type constructor list (e.g. type " + typeName + " [a [b [c ...]]] [::= ...]).");
                return nullptr;
        }
        // Reject duplicate type variables (e.g. 'type T a a').
        if (typeParameters.contains(tokeniser.currentToken())) {
            logError("Duplicate type variable '" + tokeniser.currentToken() + "' in type constructor.");
            return nullptr;
        }
        // Type variables may not shadow existing type names.
        if (tree->core().builtinTypeExists(tokeniser.currentToken()) || tree->typeExists(tokeniser.currentToken())) {
            logError("Type variable '" + tokeniser.currentToken() +
                     "' is already a type name.\n"
                     "Consider using uppercase names for types and lowercase for type variables.");
            return nullptr;
        }
        typeParameters.insert(tokeniser.currentToken());
    }
    // Add the type declaration to the parse tree - this allows for recursive types
    const std::unique_ptr<TypeDeclASTNode> &typeNode = tree->addTypeDeclaration(std::make_unique<PrefixTypeDeclASTNode>(
            tokeniser.lineNumber(), fileIndex, typeName,
            std::vector<std::string>(typeParameters.begin(), typeParameters.end())
    ));
    // Skip over '::=' token
    tokeniser.nextToken();
    // Now we are looking for constructors of which there should be one or more
    return parseDataConstructors(std::move(tree), typeNode, typeParameters);
}
// Parses an infix type declaration ('infix' and 'type' have already been
// consumed by the caller):
//      infix type a Name b [::= Cons0 [| Cons1 [| ...]]];
// Adds the declaration (and any data constructors) to the tree and returns
// the tree on success; returns nullptr on any error.
std::unique_ptr<ParseTree> Parser::parseInfixType(std::unique_ptr<ParseTree> tree) {
    // Syntax:
    //      infix type a Name b [::= Cons0 [| Cons1 [| ...]]];
    // The next token should be the lhs type variable
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("'infix type' should be followed by a type variable.");
        return nullptr;
    }
    // Type variables may not shadow existing type names.
    if (tree->core().builtinTypeExists(tokeniser.currentToken()) || tree->typeExists(tokeniser.currentToken())) {
        logError("Type variable '" + tokeniser.currentToken() +
                 "' is already a type name.\n "
                 "Consider using uppercase names for types and lowercase for type variables.");
        return nullptr;
    }
    std::string lhs = tokeniser.currentToken();
    // The next token should be the type name
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("'infix type " + lhs + "' should be followed by a type name.");
        return nullptr;
    }
    std::string typeName = tokeniser.currentToken();
    // The next token should be the rhs type variable
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("'infix type " + lhs + " " + typeName + "' should be followed by a type variable.");
        return nullptr;
    }
    if (tree->core().builtinTypeExists(tokeniser.currentToken()) || tree->typeExists(tokeniser.currentToken())) {
        logError("Type variable '" + tokeniser.currentToken() +
                 "' is already a type name.\n "
                 "Consider using uppercase names for types and lowercase for type variables.");
        return nullptr;
    }
    std::string rhs = tokeniser.currentToken();
    // The two type variables must be distinct.
    if (lhs == rhs) {
        logError("Left hand type variable cannot have the same name as right hand type variable.");
        return nullptr;
    }
    if (tree->core().builtinTypeExists(typeName)) {
        logError("Redefinition of core type '" + typeName + "'.");
        return nullptr;
    }
    if (tree->typeExists(typeName)) {
        logError("Redefinition of type '" + typeName + "'.");
        return nullptr;
    }
    // Add the type declaration to the parse tree - this allows for recursive types
    const std::unique_ptr<TypeDeclASTNode> &typeNode =
            tree->addTypeDeclaration(std::make_unique<InfixTypeDeclASTNode>(
                    tokeniser.lineNumber(), fileIndex, typeName, lhs, rhs
            ));
    switch (tokeniser.nextToken()) {
        case Token::EndDecl:
            // Declaration without constructors: 'infix type a Name b;'
            return std::move(tree);
        case Token::TypeConstructorSpecifier:
            break;
        default:
            logError("Expected ';' or '::=' after type declaration.");
            return nullptr;
    }
    // Skip over '::=' token
    tokeniser.nextToken();
    // Now we are looking for constructors of which there should be one or more
    return parseDataConstructors(std::move(tree), typeNode, {lhs, rhs});
}
// Parses one or more data constructors following '::=' in a type declaration,
// attaching each to 'type'. Constructors are either prefix ('Cons [a [b ...]]')
// or infix ('infix l Cons r'), separated by '|' and terminated by ';'.
// 'typeParameters' is the set of type variables legal inside constructor types.
// Returns the tree on success, nullptr on any error.
std::unique_ptr<ParseTree> Parser::parseDataConstructors(std::unique_ptr<ParseTree> tree,
                                                         const std::unique_ptr<TypeDeclASTNode> &type,
                                                         const std::unordered_set<std::string> &typeParameters) {
    do {
        switch (tokeniser.currentTokenType()) {
            case Token::Infix: {
                // If the constructor is infix we expect 'infix l Cons r'.
                // Skip the token
                tokeniser.nextToken();
                // Next we expect a type instance. This will either be a polymorphic type from the type constructor,
                // a preexisting type, or a nested type which we can then outsource to the type parser.
                std::unique_ptr<TypeInstanceASTNode> lhs = parseConstructorTypeInstance(tree, typeParameters);
                if (!lhs) {
                    return nullptr;
                }
                // The current token should now be a closing parenthesis, or the type we have just put into
                // lhs, so skip over and the next should be the constructor name.
                if (tokeniser.currentTokenType() != Token::Identifier) {
                    logError("Expected constructor name, but found '" + tokeniser.currentToken() + "'.");
                    return nullptr;
                }
                std::string constructorName = tokeniser.currentToken();
                // Constructor names must be unique across the whole tree.
                if (tree->constructorExists(constructorName)) {
                    logError("Re-used constructor name '" + constructorName + "' in type '" + type->typeName() + "'.");
                    return nullptr;
                }
                // Skip to the next token
                tokeniser.nextToken();
                // Finally, we get the rhs with the same procedure as above
                std::unique_ptr<TypeInstanceASTNode> rhs = parseConstructorTypeInstance(tree, typeParameters);
                if (!rhs) {
                    return nullptr;
                }
                // Check that the token is either the '|' or ';' token
                switch (tokeniser.currentTokenType()) {
                    case Token::TypeUnionSplitter:
                        // Skip over the '|'
                        tokeniser.nextToken();
                        // Intentional fallthrough: after skipping '|' we just
                        // break out of the switch, same as the ';' case.
                    case Token::EndDecl:
                        break;
                    default:
                        logError("Unexpected token '" + tokeniser.currentToken() + "' after infix type declaration.");
                        return nullptr;
                }
                tree->addDataConstructor(type, std::make_unique<InfixDataConstructorASTNode>(
                        tokeniser.lineNumber(), fileIndex, type, constructorName, std::move(lhs), std::move(rhs)
                ));
                break;
            }
            case Token::Identifier: {
                // Prefix constructor: name followed by zero or more parameter types.
                std::string constructorName = tokeniser.currentToken();
                std::vector<std::unique_ptr<TypeInstanceASTNode>> params;
                tokeniser.nextToken();
                while (tokeniser.currentTokenType() != Token::TypeUnionSplitter &&
                       tokeniser.currentTokenType() != Token::EndDecl) {
                    std::unique_ptr<TypeInstanceASTNode> param = parseConstructorTypeInstance(tree, typeParameters);
                    if (!param) {
                        return nullptr;
                    }
                    params.push_back(std::move(param));
                    if (tokeniser.currentTokenType() == Token::TypeUnionSplitter ||
                        tokeniser.currentTokenType() == Token::EndDecl) {
                        break;
                    }
                    tokeniser.nextToken();
                }
                // NOTE(review): unlike the infix branch, this does not check
                // constructorExists before adding — confirm if intentional.
                tree->addDataConstructor(type, std::make_unique<PrefixDataConstructorASTNode>(
                        tokeniser.lineNumber(), fileIndex, type, constructorName, std::move(params)
                ));
                if (tokeniser.currentTokenType() == Token::EndDecl) {
                    return std::move(tree);
                }
                // Skip over '|' token
                tokeniser.nextToken();
                // Falls through to the EndDecl case below, which only breaks
                // the switch — harmless, but an explicit break would be clearer.
            }
            case Token::EndDecl:
                break;
            default:
                logError("Unexpected token '" + tokeniser.currentToken() +
                         "' in data constructor.\n"
                         "Constructors should be either 'Cons [a [b ...]]' or 'infix l Cons r'.");
                return nullptr;
        }
    } while (tokeniser.currentTokenType() != Token::EndDecl);
    return std::move(tree);
}
// Parses a definition (implementation) for a previously declared value,
// prefix function or infix function, dispatching on the shape of the input.
// Adds the implementation to the tree and returns it; nullptr on any error.
std::unique_ptr<ParseTree> Parser::parseDefinition(std::unique_ptr<ParseTree> tree) {
    // Definitions come in three forms: values, prefix functions and infix functions.
    // Prefix function syntax:
    //      f [a [b ...]] => expr;
    // Infix function syntax:
    //      a f b => expr;
    // Value function syntax:
    //      f = expr;
    // (note that a, b, ... are patterns)
    // First, we need to work out which of the above three this definition is.
    std::string functionName;
    FunctionUsage usage;
    // Create a map of binders to avoid duplication of names and for parsing the body
    BinderMap usedBinders;
    // We know that the token is either an identifier or bracket because that is the only
    // time "parse" calls this function.
    if (tokeniser.currentTokenType() == Token::Identifier) {
        if (tree->functionExists(tokeniser.currentToken())) {
            // The definition starts with a known function name, so it is
            // either a value or prefix definition (infix starts with a pattern).
            functionName = tokeniser.currentToken();
            const std::unique_ptr<FunctionDeclASTNode> &decl = tree->getFuncByName(functionName)->decl();
            usage = decl->funcUsage();
            if (usage == FunctionUsage::Infix) {
                logError(
                        "Infix function definitions should not start with function name. (e.g. a " + functionName +
                        " b => expr;).");
                return nullptr;
            } else if (usage == FunctionUsage::Value) {
                // Value definition: 'f = expr;' — no parameters allowed.
                if (tokeniser.nextToken() != Token::ValueBodySpecifier) {
                    logError("Value definition should be followed by '=' and have no parameters.");
                    return nullptr;
                }
                tokeniser.nextToken();
                // Empty binder map: a value body cannot reference parameters.
                std::unique_ptr<ExpressionASTNode> body = parseExpression(tree, {}, 0);
                if (!body) {
                    return nullptr;
                }
                tree->addFunctionImplementation(functionName, std::make_unique<PrefixFunctionImplASTNode>(
                        tokeniser.lineNumber(), fileIndex, std::move(body)
                ));
                // If the next token is not a ';', there was an error
                // NOTE(review): this path returns nullptr without logging.
                if (tokeniser.currentTokenType() != Token::EndDecl) {
                    return nullptr;
                }
                return std::move(tree);
            }
            // Otherwise, this is a prefix function definition
            std::vector<std::unique_ptr<PatternASTNode>> patterns;
            // Skip over name
            tokeniser.nextToken();
            // Collect argument patterns up to the '=>' token.
            while (tokeniser.currentTokenType() != Token::FuncBodySpecifier) {
                std::unique_ptr<PatternASTNode> pattern = parsePattern(tree, usedBinders);
                if (!pattern) {
                    return nullptr;
                }
                patterns.push_back(std::move(pattern));
            }
            // If too many arguments are applied to the function
            size_t maxArgs = decl->maxArgs();
            if (maxArgs < patterns.size()) {
                logError("Too many arguments for function '" + functionName + "' (expected <= " +
                         std::to_string(maxArgs) + ").");
                return nullptr;
            }
            // Skip over '=>'
            tokeniser.nextToken();
            std::unique_ptr<ExpressionASTNode> body = parseExpression(tree, usedBinders, 0);
            if (!body) {
                return nullptr;
            }
            tree->addFunctionImplementation(functionName, std::make_unique<PrefixFunctionImplASTNode>(
                    tokeniser.lineNumber(), fileIndex, std::move(body), std::move(patterns)
            ));
            // If the next token is not a ';', there was an error
            if (tokeniser.currentTokenType() != Token::EndDecl) {
                return nullptr;
            }
            return std::move(tree);
        }
    }
    // If we have fallen through to this point, the function is either infix or erroneous.
    // Parse the left hand argument pattern first.
    std::unique_ptr<PatternASTNode> lhs = parsePattern(tree, usedBinders);
    if (!lhs) {
        return nullptr;
    }
    // The next token should now be an identifier
    switch (tokeniser.currentTokenType()) {
        case Token::ValueBodySpecifier:
            // If we mistakenly parsed a missing value name as a pattern
            if (lhs->patternUsage() == PatternUsage::Variable) {
                logError(
                        "Unrecognised value name '" + dynamic_cast<VariablePatternASTNode *>(lhs.get())->name() + "'.");
            } else {
                logError("Cannot assign expression to data constructor.");
            }
            return nullptr;
        case Token::FuncBodySpecifier:
            // If we mistakenly parsed a missing function name as a pattern
            if (lhs->patternUsage() == PatternUsage::Variable) {
                logError("Unrecognised function name '" + dynamic_cast<VariablePatternASTNode *>(lhs.get())->name() +
                         "'.");
            } else {
                logError("Cannot assign expression to data constructor.");
            }
            return nullptr;
        case Token::Identifier:
            break;
        default:
            logError("Expected infix function identifier after argument.");
            return nullptr;
    }
    // Next, we check the function exists
    if (!tree->functionExists(tokeniser.currentToken())) {
        if (lhs->patternUsage() == PatternUsage::Variable) {
            logError("Unrecognised function name '" + dynamic_cast<VariablePatternASTNode *>(lhs.get())->name() +
                     "'.");
        } else {
            logError("Unrecognised infix function name '" + tokeniser.currentToken() + "'.");
        }
        return nullptr;
    }
    functionName = tokeniser.currentToken();
    // Skip to next token and check for func body error
    if (tokeniser.nextToken() == Token::FuncBodySpecifier) {
        logError("Expected right hand side argument after infix function name before '=>' (e.g. a " + functionName +
                 " b => expr;).");
        return nullptr;
    }
    // Parse the right hand argument pattern.
    std::unique_ptr<PatternASTNode> rhs = parsePattern(tree, usedBinders);
    if (!rhs) {
        return nullptr;
    }
    if (tokeniser.currentTokenType() != Token::FuncBodySpecifier) {
        logError("Unexpected token '" + tokeniser.currentToken() + "'. Expected '=>'.");
        return nullptr;
    }
    // Skip over '=>'
    tokeniser.nextToken();
    std::unique_ptr<ExpressionASTNode> body = parseExpression(tree, usedBinders, 0);
    if (!body) {
        return nullptr;
    }
    tree->addFunctionImplementation(functionName, std::make_unique<InfixFunctionImplASTNode>(
            tokeniser.lineNumber(), fileIndex, std::move(body), std::move(lhs), std::move(rhs)
    ));
    // If the next token is not a ';', there was an error
    if (tokeniser.currentTokenType() != Token::EndDecl) {
        return nullptr;
    }
    return std::move(tree);
}
// Convenience overload: parse a type instance without validating identifiers
// against any set of polymorphic type variables.
std::unique_ptr<TypeInstanceASTNode> Parser::parseTypeInstance(const std::unique_ptr<ParseTree> &tree) {
    // Checking is disabled, so the (empty) poly-type set is never consulted.
    std::unordered_set<std::string> noPolyTypes;
    return parseTypeInstance(tree, false, noPolyTypes);
}
std::unique_ptr<TypeInstanceASTNode>
Parser::parseTypeInstance(const std::unique_ptr<ParseTree> &tree, const std::unordered_set<std::string> &polyTypes) {
return parseTypeInstance(tree, true, polyTypes);
}
// Core type-instance parser. Parses a (possibly nested, possibly infix) type
// expression starting at the current token, e.g. 'Maybe a', 'a -> b', '(a, b)'.
// If checkPolyTypes is true, unknown identifiers must appear in polyTypes or
// an error is raised; otherwise unknown identifiers become polymorphic vars.
// On return, the terminating token (')', ';' or '|') is the current token.
// Returns nullptr on any error.
std::unique_ptr<TypeInstanceASTNode> Parser::parseTypeInstance(const std::unique_ptr<ParseTree> &tree,
                                                               bool checkPolyTypes,
                                                               const std::unordered_set<std::string> &polyTypes) {
    // 'lhs' accumulates the left side as we scan left-to-right.
    std::unique_ptr<TypeInstanceASTNode> lhs;
    switch (tokeniser.currentTokenType()) {
        case Token::Identifier:
            // An identifier is a primitive, a known (prefix) type, or a
            // polymorphic type variable — checked in that order.
            if (tree->core().primitiveExists(tokeniser.currentToken())) {
                lhs = std::make_unique<PrimitiveTypeInstanceASTNode>(
                        tree->core().getPrimitiveType(tokeniser.currentToken())
                );
            } else if (tree->core().builtinTypeExists(tokeniser.currentToken()) ||
                       tree->typeExists(tokeniser.currentToken())) {
                lhs = std::make_unique<PrefixTypeInstanceASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
            } else {
                if (checkPolyTypes && !polyTypes.contains(tokeniser.currentToken())) {
                    logError("Unrecognised type '" + tokeniser.currentToken() + "'.");
                    return nullptr;
                }
                lhs = std::make_unique<PolymorphicTypeInstanceASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
            }
            break;
        case Token::OpenParenthesis:
            // Nested type: recurse, then require the matching ')'.
            tokeniser.nextToken();
            lhs = parseTypeInstance(tree);
            if (!lhs) {
                return nullptr;
            }
            if (tokeniser.currentTokenType() != Token::CloseParenthesis) {
                logError("Expected closing ')' after nested type.");
                return nullptr;
            }
            break;
        case Token::FuncType:
            logError("Function type is an infix type constructor (a -> b).");
            return nullptr;
        case Token::EndDecl:
            logError("Expected type before ';'.");
            return nullptr;
        case Token::InvalidToken:
            return nullptr;
        default:
            // TODO: Handle list and unit types
            logError("Invalid token '" + tokeniser.currentToken() + "' in type instance.");
            return nullptr;
    }
    // Skip to next token
    tokeniser.nextToken();
    // Keep extending lhs with parameters or infix constructors until a
    // terminator (')', ';' or '|') is reached.
    while (true) {
        switch (tokeniser.currentTokenType()) {
            case Token::Identifier:
                if (tree->core().builtinTypeExists(tokeniser.currentToken()) ||
                    tree->typeExists(tokeniser.currentToken())) {
                    // TODO: Fix this for primitive types
                    // If the identifier is not an infix type constructor, bind the new type
                    if (tree->getTypeByName(tokeniser.currentToken())->typeUsage() != TypeUsage::Infix) {
                        switch (lhs->typeUsage()) {
                            case TypeUsage::Prefix:
                                // Apply the named type as a parameter of lhs.
                                dynamic_cast<PrefixTypeInstanceASTNode *>(lhs.get())->bindParameter(
                                        std::make_unique<PrefixTypeInstanceASTNode>(
                                                tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                                        ));
                                break;
                            case TypeUsage::Infix:
                            case TypeUsage::Function:
                                logError("Cannot bind type to infix type in left position.");
                                return nullptr;
                            case TypeUsage::Polymorphic:
                                // TODO: This can be allowed if var is a type constructor!
                                logError("Cannot bind type to polymorphic type variable.");
                                return nullptr;
                            case TypeUsage::Primitive:
                                logError("Cannot bind type to primitive type.");
                                return nullptr;
                        }
                    } else {
                        // If the token is an infix type constructor, create an infix type
                        std::unique_ptr<InfixTypeInstanceASTNode> infixType = std::make_unique<InfixTypeInstanceASTNode>(
                                tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                        );
                        infixType->bindLeft(std::move(lhs));
                        // Consume the infix type name token
                        tokeniser.nextToken();
                        // Get the right hand side by recursively parsing the type
                        std::unique_ptr<TypeInstanceASTNode> rhs = parseTypeInstance(tree);
                        // If this failed, return null
                        if (!rhs) {
                            return nullptr;
                        }
                        // otherwise, bind the rhs and return the infix type
                        infixType->bindRight(std::move(rhs));
                        return infixType;
                    }
                } else {
                    // Unknown identifier: treat as a polymorphic type variable
                    // parameter, subject to the same lhs-usage restrictions.
                    switch (lhs->typeUsage()) {
                        case TypeUsage::Prefix:
                            if (checkPolyTypes && !polyTypes.contains(tokeniser.currentToken())) {
                                logError("Unrecognised type '" + tokeniser.currentToken() + "'.");
                                return nullptr;
                            }
                            dynamic_cast<PrefixTypeInstanceASTNode *>(lhs.get())->bindParameter(
                                    std::make_unique<PolymorphicTypeInstanceASTNode>(
                                            tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                                    ));
                            break;
                        case TypeUsage::Infix:
                        case TypeUsage::Function:
                            logError("Cannot bind polymorphic type variable to infix type in left position.");
                            return nullptr;
                        case TypeUsage::Polymorphic:
                            logError("Cannot bind polymorphic type variable to polymorphic type variable.");
                            return nullptr;
                        case TypeUsage::Primitive:
                            logError("Cannot bind polymorphic type variable to primitive type.");
                            return nullptr;
                    }
                }
                break;
            case Token::FuncType: {
                // Here, we are handling an infix function type, but it is very similar to other infix types
                std::unique_ptr<FunctionTypeInstanceASTNode> funcType = std::make_unique<FunctionTypeInstanceASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
                funcType->bindLeft(std::move(lhs));
                // Consume the '->' token
                tokeniser.nextToken();
                // Get the rhs recursively
                std::unique_ptr<TypeInstanceASTNode> rhs = parseTypeInstance(tree);
                // Propagate failure
                if (!rhs) {
                    return nullptr;
                }
                // Bind the rhs and return the function type
                funcType->bindRight(std::move(rhs));
                return funcType;
            }
            case Token::CloseParenthesis:
            case Token::EndDecl:
            case Token::TypeUnionSplitter:
                // Terminator reached: the caller inspects the current token.
                return std::move(lhs);
            case Token::InvalidToken:
                return nullptr;
            default:
                logError("Invalid token '" + tokeniser.currentToken() + "' in type instance.");
                return nullptr;
        }
        // Skip the current token
        tokeniser.nextToken();
    }
}
// Parses a single type instance appearing as a data constructor parameter.
// Unlike the general type parser this accepts only: a parenthesised nested
// type, a primitive, a named type, or a type variable from polyTypes.
// Leaves the token after the type instance current. Returns nullptr on error.
std::unique_ptr<TypeInstanceASTNode> Parser::parseConstructorTypeInstance(const std::unique_ptr<ParseTree> &tree,
                                                                          const std::unordered_set<std::string> &polyTypes) {
    std::unique_ptr<TypeInstanceASTNode> type;
    switch (tokeniser.currentTokenType()) {
        case Token::OpenParenthesis:
            // Nested type: delegate to the full type parser (which also
            // consumes the closing parenthesis handling).
            type = parseTypeInstance(tree, polyTypes);
            if (!type) {
                return nullptr;
            }
            break;
        case Token::Identifier:
            // Checked in order: primitive, user/known type, then type variable.
            if (tree->core().primitiveExists(tokeniser.currentToken())) {
                type = std::make_unique<PrimitiveTypeInstanceASTNode>(
                        tree->core().getPrimitiveType(tokeniser.currentToken())
                );
                tokeniser.nextToken();
                break;
            }
            if (tree->typeExists(tokeniser.currentToken())) {
                type = std::make_unique<PrefixTypeInstanceASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
                tokeniser.nextToken();
                break;
            }
            if (polyTypes.contains(tokeniser.currentToken())) {
                type = std::make_unique<PolymorphicTypeInstanceASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
                tokeniser.nextToken();
                break;
            }
            logError("Unrecognised type '" + tokeniser.currentToken() +
                     "'. Parameters for data constructors must name types or reference type variables.");
            return nullptr;
        default:
            logError("Invalid token '" + tokeniser.currentToken() + "' in data constructor.");
            return nullptr;
    }
    return std::move(type);
}
// Parses a single argument pattern: either a bare identifier (a constructor
// with no arguments, or a variable binder) or a parenthesised constructor
// application '(Cons p0 p1 ...)'. New variable binders are recorded in
// usedBinders, which also rejects duplicate names. Returns nullptr on error.
std::unique_ptr<PatternASTNode>
Parser::parsePattern(const std::unique_ptr<ParseTree> &tree, BinderMap &usedBinders) {
    switch (tokeniser.currentTokenType()) {
        case Token::OpenParenthesis:
            // Skip over parenthesis
            tokeniser.nextToken();
            // TODO: Handle infix constructors
            if (tokeniser.currentTokenType() != Token::Identifier) {
                logError("Unexpected token '" + tokeniser.currentToken() + "'. Expected identifier in nested pattern.");
                return nullptr;
            }
            if (tree->constructorExists(tokeniser.currentToken())) {
                // Constructor application: recursively parse its sub-patterns
                // until the closing parenthesis.
                std::string constructorName = tokeniser.currentToken();
                std::vector<std::unique_ptr<PatternASTNode>> subPatterns;
                tokeniser.nextToken();
                while (tokeniser.currentTokenType() != Token::CloseParenthesis) {
                    std::unique_ptr<PatternASTNode> pattern = parsePattern(tree, usedBinders);
                    if (!pattern) {
                        return nullptr;
                    }
                    subPatterns.push_back(std::move(pattern));
                }
                // Arity must match the constructor's declaration exactly.
                size_t expectedArgs = tree->getConstructorByName(constructorName)->args();
                if (subPatterns.size() != expectedArgs) {
                    logError("Incorrect number of arguments for constructor '" + constructorName + "' (expected " +
                             std::to_string(expectedArgs) + ").");
                    return nullptr;
                }
                // Skip over close parenthesis
                tokeniser.nextToken();
                return std::make_unique<ConstructorPatternASTNode>(
                        tokeniser.lineNumber(), fileIndex, constructorName, std::move(subPatterns)
                );
            } else {
                // Parenthesised variable binder: '(x)'. Anything else inside
                // the parentheses is an unknown constructor.
                if (usedBinders.contains(tokeniser.currentToken())) {
                    logError("Duplicate variable name '" + tokeniser.currentToken() + "' used in pattern expression.");
                    return nullptr;
                }
                std::unique_ptr<VariablePatternASTNode> binder = std::make_unique<VariablePatternASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
                // Check that the next token is a close parenthesis
                if (tokeniser.nextToken() != Token::CloseParenthesis) {
                    logError("Unrecognised data constructor '" + binder->name() + "'.");
                    return nullptr;
                }
                // Skip over the close parenthesis
                tokeniser.nextToken();
                // Record the binder; the map stores a non-owning pointer.
                usedBinders[binder->name()] = binder.get();
                return std::move(binder);
            }
        case Token::Identifier:
            if (tree->constructorExists(tokeniser.currentToken())) {
                // Bare nullary constructor pattern.
                std::string constructor = tokeniser.currentToken();
                // Skip over token
                tokeniser.nextToken();
                return std::make_unique<ConstructorPatternASTNode>(tokeniser.lineNumber(), fileIndex, constructor);
            } else {
                // Bare variable binder.
                if (usedBinders.contains(tokeniser.currentToken())) {
                    logError("Duplicate variable name '" + tokeniser.currentToken() + "' used in pattern expression.");
                    return nullptr;
                }
                std::unique_ptr<VariablePatternASTNode> binder = std::make_unique<VariablePatternASTNode>(
                        tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
                );
                // Skip to next token
                tokeniser.nextToken();
                usedBinders[binder->name()] = binder.get();
                return std::move(binder);
            }
        default:
            logError("Unexpected token '" + tokeniser.currentToken() + "' in pattern.");
            return nullptr;
    }
}
std::unique_ptr<ExpressionASTNode>
Parser::parseExpression(const std::unique_ptr<ParseTree> &tree, const BinderMap &binders,
int currentPrecedence) {
// We want to build up an expression tree going left to right
std::unique_ptr<ExpressionASTNode> expr;
while (true) {
switch (tokeniser.currentTokenType()) {
// Handle nested expressions
case Token::OpenParenthesis: {
// Skip over '('
tokeniser.nextToken();
// Parse sub expression with precedence 0 (reset)
std::unique_ptr<ExpressionASTNode> nested = parseExpression(tree, binders, 0);
if (!nested) {
return nullptr;
}
if (tokeniser.currentTokenType() != Token::CloseParenthesis) {
logError("Unmatched '(' detected.");
return nullptr;
}
tokeniser.nextToken();
if (!expr) {
expr = std::move(nested);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(nested)
);
}
break;
}
// Handle unravelling from the recursion
case Token::CloseParenthesis:
case Token::InSpecifier:
case Token::EndDecl:
return std::move(expr);
case Token::Identifier:
// First, check if this is in the binders
// Next, check if it is a known function, first in Core then in the tree
// Then, check if it is a type constructor
// Finally, fail
if (binders.contains(tokeniser.currentToken())) {
std::unique_ptr<VariableASTNode> varExpr = std::make_unique<VariableASTNode>(
tokeniser.lineNumber(), fileIndex, binders.at(tokeniser.currentToken())
);
if (!expr) {
expr = std::move(varExpr);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(varExpr)
);
}
// Skip to next
tokeniser.nextToken();
} else if (tree->core().builtinFunctionExists(tokeniser.currentToken())
|| tree->functionExists(tokeniser.currentToken())) {
std::unique_ptr<ExpressionASTNode> func;
const std::unique_ptr<FunctionDeclASTNode> &decl = tree->getFuncByName(
tokeniser.currentToken()
)->decl();
if (tree->core().builtinFunctionExists(tokeniser.currentToken())) {
func = std::make_unique<BuiltinFunctionASTNode>(
tokeniser.lineNumber(), fileIndex,
tree->core().getBuiltinFunction(tokeniser.currentToken())
);
} else {
func = std::make_unique<FunctionASTNode>(
tokeniser.lineNumber(), fileIndex, tokeniser.currentToken(), decl->maxArgs() == 0
);
}
switch (decl->funcUsage()) {
case FunctionUsage::Prefix:
case FunctionUsage::Value: {
if (!expr) {
expr = std::move(func);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(func)
);
}
// Skip to next
tokeniser.nextToken();
break;
}
case FunctionUsage::Infix: {
if (!expr) {
logError("Expected expression before infix function '" + tokeniser.currentToken() +
"'.");
return nullptr;
}
InfixFunctionDeclASTNode *infixDecl = dynamic_cast<InfixFunctionDeclASTNode *>(decl.get());
// Check that this operator has sufficiently high precedence. If it doesn't,
// return the current expression.
if (infixDecl->operatorPrecedence() * 2 < currentPrecedence) {
return std::move(expr);
}
// Skip over infix operator
tokeniser.nextToken();
// Recursively parse the right hand side of the operator. Here, we set the precedence
// as 2 * the operator precedence + associativity. This way, we can preserve
// precedence of operators.
std::unique_ptr<ExpressionASTNode> rhs = parseExpression(
tree, binders,
infixDecl->operatorPrecedence() * 2 + (int) infixDecl->associativity()
);
// Propagate error
if (!rhs) {
return nullptr;
}
// Construct the application if this was a sufficiently high precedence operator
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(func), std::move(expr)
);
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(rhs)
);
break;
}
}
} else if (tree->constructorExists(tokeniser.currentToken())) {
std::unique_ptr<ConstructorASTNode> cons = std::make_unique<ConstructorASTNode>(
tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
);
switch (tree->getConstructorByName(tokeniser.currentToken())->usage()) {
case TypeUsage::Prefix:
if (expr) {
// If there is already an expression, this is an application,
// so this constructor is applied to a function directly (i.e.
// not called with any further values)
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(cons)
);
} else {
// Otherwise, just set the expression to this value
expr = std::move(cons);
}
// Skip to next
tokeniser.nextToken();
break;
case TypeUsage::Infix: {
if (!expr) {
logError("Expected expression before infix constructor '" + tokeniser.currentToken() +
"'.");
return nullptr;
}
// Skip over the constructor name
tokeniser.nextToken();
// Get right hand argument
std::unique_ptr<ExpressionASTNode> rhs = parseExpression(tree, binders, currentPrecedence);
if (!rhs) {
return nullptr;
}
// Apply the left argument
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(cons), std::move(expr)
);
// Apply the right argument
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(rhs)
);
break;
}
default:
return nullptr;
}
} else {
logError("Unrecognised symbol '" + tokeniser.currentToken() + "'.");
return nullptr;
}
break;
case Token::SpecialInfixOperator: {
// These should always be in the parse tree in the core
if (!tree->core().builtinFunctionExists(tokeniser.currentToken())) {
logError("DEVELOPER: Missing builtin '" + tokeniser.currentToken() + "'.");
return nullptr;
}
// This should follow an expression
if (!expr) {
logError("Expected expression before infix operator '" + tokeniser.currentToken() +
"'.");
return nullptr;
}
const std::unique_ptr<BuiltinFunction> &decl = tree->core().getBuiltinFunction(
tokeniser.currentToken());
// If this operator has lower precedence than the previous then just drop out with the expression
if (decl->operatorPrecedence() * 2 < currentPrecedence) {
return std::move(expr);
}
// Otherwise, make the function node and skip over the token
std::unique_ptr<BuiltinFunctionASTNode> func = std::make_unique<BuiltinFunctionASTNode>(
tokeniser.lineNumber(), fileIndex, decl
);
tokeniser.nextToken();
// Now parse the right hand side of the operator with this precedence plus associativity as
// the current precedence
std::unique_ptr<ExpressionASTNode> rhs = parseExpression(
tree, binders, decl->operatorPrecedence() * 2 + (int) decl->associativity()
);
// Propagate null
if (!rhs) {
return nullptr;
}
// Build expression calls
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(func), std::move(expr)
);
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(rhs)
);
break;
}
case Token::SpecialPrefixOperator: {
// These should also always be in the parse tree in the core
if (!tree->core().builtinFunctionExists(tokeniser.currentToken())) {
logError("DEVELOPER: Missing builtin '" + tokeniser.currentToken() + "'.");
return nullptr;
}
std::unique_ptr<BuiltinFunctionASTNode> func = std::make_unique<BuiltinFunctionASTNode>(
tokeniser.lineNumber(), fileIndex, tree->core().getBuiltinFunction(tokeniser.currentToken())
);
if (!expr) {
expr = std::move(func);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(func)
);
}
// Skip to next
tokeniser.nextToken();
break;
}
case Token::LetSpecifier: {
if (expr) {
logError("Cannot directly apply 'let' expression to function.");
return nullptr;
}
std::unique_ptr<LetBindingASTNode> letBinding = parseLetBinding(tree, binders);
if (!letBinding) {
return nullptr;
}
expr = std::move(letBinding);
break;
}
case Token::LambdaSpecifier: {
if (expr) {
logError("Cannot directly apply lambda ($) expression to function.");
return nullptr;
}
std::unique_ptr<LambdaExpressionASTNode> lambda = parseLambda(tree, binders);
if (!lambda) {
return nullptr;
}
expr = std::move(lambda);
break;
}
case Token::IntegralLiteral: {
std::unique_ptr<IntegralConstructorASTNode> literal = std::make_unique<IntegralConstructorASTNode>(
tokeniser.lineNumber(), fileIndex, std::stoll(tokeniser.currentToken())
);
if (!expr) {
expr = std::move(literal);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(literal)
);
}
tokeniser.nextToken();
break;
}
case Token::DecimalLiteral: {
std::unique_ptr<DecimalConstructorASTNode> literal = std::make_unique<DecimalConstructorASTNode>(
tokeniser.lineNumber(), fileIndex, std::stod(tokeniser.currentToken())
);
if (!expr) {
expr = std::move(literal);
} else {
expr = std::make_unique<ApplicationASTNode>(
tokeniser.lineNumber(), fileIndex, std::move(expr), std::move(literal)
);
}
tokeniser.nextToken();
break;
}
case Token::StringLiteral:
case Token::OpenList:
case Token::EmptyList:
case Token::Unit:
// TODO: Implement these!
logError("NOT YET IMPLEMENTED!");
return nullptr;
default:
logError("Unexpected token '" + tokeniser.currentToken() + "' in expression.");
return nullptr;
}
}
}
std::unique_ptr<LetBindingASTNode>
Parser::parseLetBinding(const std::unique_ptr<ParseTree> &tree, const BinderMap &binders) {
    // Parses: 'let' <identifier> '=' <expr> 'in' <expr>
    // The 'let' token itself has already been consumed by the caller.
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("Expected identifier after 'let'.");
        return nullptr;
    }
    // TODO: Parse arbitrary pattern
    // std::string var = tokeniser.currentToken();
    // For now the bound pattern is always a single variable.
    std::unique_ptr<PatternASTNode> varPattern = std::make_unique<VariablePatternASTNode>(
            tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
    );
    if (tokeniser.nextToken() != Token::ValueBodySpecifier) {
        logError("Expected '=' after let variable name.");
        return nullptr;
    }
    tokeniser.nextToken();
    // The bound value is parsed with the *outer* binders only, so the
    // binding is not visible in its own body (no recursive let here).
    // Precedence restarts at 0 for the sub-expression.
    std::unique_ptr<ExpressionASTNode> body = parseExpression(tree, binders, 0);
    if (!body) {
        return nullptr;
    }
    if (tokeniser.currentTokenType() != Token::InSpecifier) {
        logError("Expected 'in' after let binding body.");
        return nullptr;
    }
    // Skip over 'in'
    tokeniser.nextToken();
    // The usage expression *does* see the let-bound variable, so extend the
    // binder map with everything the pattern binds before parsing it.
    BinderMap usageBinders = binders;
    varPattern->addAllBinders(usageBinders);
    std::unique_ptr<ExpressionASTNode> usage = parseExpression(tree, usageBinders, 0);
    if (!usage) {
        return nullptr;
    }
    return std::make_unique<LetBindingASTNode>(
            tokeniser.lineNumber(), fileIndex, std::move(varPattern), std::move(body), std::move(usage)
    );
}
std::unique_ptr<LambdaExpressionASTNode>
Parser::parseLambda(const std::unique_ptr<ParseTree> &tree, const BinderMap &binders) {
    // Parses: '$' <identifier> '=>' <expr>
    // The '$' token itself has already been consumed by the caller.
    if (tokeniser.nextToken() != Token::Identifier) {
        logError("Expected identifier after '$' (anonymous function).");
        return nullptr;
    }
    // TODO: Allow composite patterns (needs checking)
    // std::string var = tokeniser.currentToken();
    // For now the parameter pattern is always a single variable.
    std::unique_ptr<PatternASTNode> varPattern = std::make_unique<VariablePatternASTNode>(
            tokeniser.lineNumber(), fileIndex, tokeniser.currentToken()
    );
    if (tokeniser.nextToken() != Token::FuncBodySpecifier) {
        logError("Expected '=>' after anonymous function variable name.");
        return nullptr;
    }
    tokeniser.nextToken();
    // The lambda body may reference the parameter, so parse it with the
    // parameter's binders added; precedence restarts at 0 inside the body.
    BinderMap bodyBinders = binders;
    varPattern->addAllBinders(bodyBinders);
    std::unique_ptr<ExpressionASTNode> body = parseExpression(tree, bodyBinders, 0);
    if (!body) {
        return nullptr;
    }
    return std::make_unique<LambdaExpressionASTNode>(
            tokeniser.lineNumber(), fileIndex, std::move(varPattern), std::move(body)
    );
}
void Parser::logError(const std::string &message) {
    // All parser diagnostics share one format, written to stderr:
    //   [Parse Error] <file> (line <n>): <message>
    std::cerr << "[Parse Error] "
              << tokeniser.sourceFileName()
              << " (line " << tokeniser.lineNumber() << "): "
              << message
              << std::endl;
}
|
package event
import (
	"testing"
	"time"
)
// TestListener is a minimal listener that publishes every event it handles
// onto a channel so tests can observe dispatches.
type TestListener struct {
	evchan chan Event
}

// HandleEventTypeOne forwards the event to the observation channel from a
// fresh goroutine, so the dispatcher is never blocked by the unbuffered chan.
func (l *TestListener) HandleEventTypeOne(ev Event) {
	go func(forwarded Event) {
		l.evchan <- forwarded
	}(ev)
}
// TestEventBus verifies that a dispatched event reaches a registered listener
// with its payload and context intact.
func TestEventBus(t *testing.T) {
	evchan := make(chan Event)
	bus := NewEventBus()
	bus.AddListener("test", &TestListener{evchan})

	ev := Event{"EventTypeOne", "payload", "Y"}
	bus.Dispatch(&ev)

	// Bug fix: the listener forwards the event from a separate goroutine, so
	// a select with a `default` arm raced that goroutine and could fail even
	// though dispatch worked. Wait with a timeout instead.
	select {
	case recv := <-evchan:
		if recv.Payload.(string) != "payload" {
			t.Error("event payload is incorrect")
		}
		if recv.Context.(string) != "Y" {
			t.Error("event context is incorrect")
		}
	case <-time.After(time.Second):
		t.Fatalf("Failed to dispatch event")
	}
}
// TestEventBusNoDispatch verifies that a listener only receives events whose
// type it handles: the EventTypeTwo dispatch must be ignored while the
// EventTypeOne dispatch is delivered.
func TestEventBusNoDispatch(t *testing.T) {
	evchan := make(chan Event)
	bus := NewEventBus()
	bus.AddListener("test", &TestListener{evchan})

	dispatch := func(eventType string) {
		ev := Event{eventType, "payload", "Y"}
		bus.Dispatch(&ev)
	}
	go dispatch("EventTypeTwo")
	go dispatch("EventTypeOne")

	if recv := <-evchan; recv.Type != "EventTypeOne" {
		t.Fatalf("handler received unexpected event")
	}
}
|
// Number-guessing game front-end: wires DOM events to AJAX calls and keeps
// the per-user UI (hints, buttons, validation messages) in sync.
CHALLENGE = function () {
  return {
    // Configure CSRF for all AJAX calls and attach the sign-up / guess handlers.
    init: function () {
      $.ajaxSetup({
        headers: {
          'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content')
        }
      })
      $('.js-user-name').on('blur', function (event) {
        event.preventDefault()
        let user = $(this).closest('.js-user')
        CHALLENGE.clearValidationErrors()
        CHALLENGE.signUp(user.data('key'), $(this).val())
      })
      $('.js-guess').on('click', function (event) {
        event.preventDefault()
        let user = $(this).closest('.js-user')
        let guessedNumber = user.find('input[name="guessedNumber"]').val()
        CHALLENGE.clearValidationErrors()
        CHALLENGE.guessNumber(user.data('key'), guessedNumber)
      })
    },
    // Register (or rename) the player identified by `key`.
    signUp: function (key, userName) {
      $.ajax({
        type: 'POST',
        url: route.challenge.sign_up,
        data: { key, userName },
        dataType: 'json',
      }).done(function (data) {
        CHALLENGE.handleEnablingButtonsAndInput(data.playingIsDisabled)
      }).fail(function (error) {
        let encoded = error.responseJSON
        // Bug fix: inside a jqXHR callback `this` is the ajax settings object,
        // not CHALLENGE, so `this.showValidationErrors` threw a TypeError.
        CHALLENGE.showValidationErrors(encoded)
      })
    },
    // Submit a guess for the player identified by `key` and show the result.
    guessNumber: function (key, guessedNumber) {
      $.ajax({
        type: 'POST',
        url: route.challenge.guess_number,
        data: { key, guessedNumber },
        dataType: 'json',
      }).done(function (data) {
        if (data.guessIsCorrect) {
          CHALLENGE.showGuessSuccess()
        } else {
          CHALLENGE.showNumberDiffersInfo(data.key, data.numberCompare)
        }
      }).fail(function (error) {
        // Consistency: reuse the shared handler instead of duplicating the
        // exception-type check inline.
        CHALLENGE.showValidationErrors(error.responseJSON)
      })
    },
    // Hide all hints and pop the success modal.
    showGuessSuccess: function () {
      this.hideAllNumberDiffersInfo()
      $('#numberFoundModal').modal('show')
    },
    // Show the higher/lower hint for one user's form.
    // numberCompare: -1 => guess was too high (show "lower"), 1 => too low.
    showNumberDiffersInfo: function (key, numberCompare) {
      let form = $('form[data-key=\'' + key + '\']')
      if (numberCompare === -1) {
        form.find('.js-higher:not(.d-none)').addClass('d-none')
        form.find('.js-lower').removeClass('d-none')
      }
      if (numberCompare === 1) {
        form.find('.js-lower:not(.d-none)').addClass('d-none')
        form.find('.js-higher').removeClass('d-none')
      }
    },
    hideAllNumberDiffersInfo: function () {
      $('.js-higher:not(.d-none)').addClass('d-none')
      $('.js-lower:not(.d-none)').addClass('d-none')
    },
    // Enable/disable the play controls depending on the server-side flag.
    handleEnablingButtonsAndInput (playingIsDisabled) {
      if (playingIsDisabled) {
        $('.js-button').addClass('disabled')
      } else {
        $('.js-button').removeClass('disabled')
      }
      $('.js-input').prop('disabled', playingIsDisabled)
    },
    // Display a server-side validation failure (only assertion errors carry
    // a user-presentable message).
    showValidationErrors: function (encoded) {
      if (encoded.exception === 'Webmozart\\Assert\\InvalidArgumentException') {
        $('.js-error').text(encoded.message)
      }
    },
    clearValidationErrors: function () {
      $('.js-error').empty()
    },
  }
}()
CHALLENGE.init()
|
package com.mblinn.mbfpp.oo.iterator
import org.junit.runner.RunWith
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.FunSpec
import org.scalatest.junit.JUnitRunner
import com.mblinn.mbfpp.oo.iterator.TheLambdaBarAndGrille._
import com.mblinn.mbfpp.oo.iterator.TheLambdaBarAndGrille.Person
@RunWith(classOf[JUnitRunner])
class LambdaBarAndGrille extends FunSpec with ShouldMatchers {
  describe("generateGreetings") {
    // Three people go in but only two greetings are expected, so Jill
    // (zip 19098) is presumably filtered out by generateGreetings —
    // confirm against TheLambdaBarAndGrille's zip-code logic.
    it("should generate a list of greetings for people in the appropriate zip codes") {
      val expectedMessages = Vector("Hello, Mike, and welcome to the Lambda Bar And Grille!", "Hello, John, and welcome to the Lambda Bar And Grille!")
      val people = Vector(
        Person("Mike", Address(19123)),
        Person("John", Address(19103)),
        Person("Jill", Address(19098))
      )
      val computedMessages = generateGreetings(people)
      expectedMessages should equal(computedMessages)
    }
  }
}
/*
* Copyright 2018 WebAssembly Community Group participants
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef wasm_ir_function_h
#define wasm_ir_function_h
#include "wasm.h"
#include "ir/utils.h"
namespace wasm {
namespace FunctionUtils {
// Checks if two functions are equal in all functional aspects,
// everything but their name (which can't be the same, in the same
// module!) - same params, vars, body, result, etc.
inline bool equal(Function* left, Function* right) {
  // Arity of params and vars must agree before locals can be compared pairwise.
  if (left->getNumParams() != right->getNumParams() ||
      left->getNumVars() != right->getNumVars()) {
    return false;
  }
  for (Index i = 0; i < left->getNumLocals(); i++) {
    if (left->getLocalType(i) != right->getLocalType(i)) {
      return false;
    }
  }
  // Finally compare result type, function type, and the bodies themselves.
  return left->result == right->result &&
         left->type == right->type &&
         ExpressionAnalyzer::equal(left->body, right->body);
}
} // namespace FunctionUtils
} // namespace wasm
#endif // wasm_ir_function_h
|
<reponame>yann-soubeyrand/kubernetes-ingress
// Copyright 2019 HAProxy Technologies LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
// ServicePort describes a port of a service.
type ServicePort struct {
	Name        string
	Protocol    string
	ServicePort int64
	TargetPort  int64
	Status      Status
}

type ServicePorts []*ServicePort

// EndpointIP is a single endpoint address together with its HAProxy mapping.
type EndpointIP struct {
	IP          string
	Name        string
	HAProxyName string
	Disabled    bool
	Status      Status
}

// EndpointIPs maps an endpoint key to its EndpointIP entry.
type EndpointIPs map[string]*EndpointIP

// Endpoints is useful data from k8s structures about Endpoints.
type Endpoints struct {
	Namespace   string
	Service     StringW
	BackendName string
	Ports       *ServicePorts
	Addresses   *EndpointIPs
	Status      Status
}

// Service is useful data from k8s structures about a service.
type Service struct {
	Namespace   string
	Name        string
	ClusterIP   string
	ExternalIP  string
	Ports       []ServicePort
	Annotations MapStringW
	Selector    MapStringW
	Status      Status
}

// Namespace is useful data from k8s structures about a namespace.
type Namespace struct {
	// NOTE(review): zero-width field; purpose unclear from this file
	// (possibly a marker to discourage positional initialization) — confirm.
	_         [0]int
	Name      string
	Relevant  bool
	Ingresses map[string]*Ingress
	Endpoints map[string]*Endpoints
	Services  map[string]*Service
	Secret    map[string]*Secret
	Status    Status
}

// IngressPath is useful data from k8s structures about an ingress path.
type IngressPath struct {
	ServiceName       string
	ServicePortInt    int64
	ServicePortString string
	Path              string
	PathIndex         int
	Status            Status
}

// IngressRule is useful data from k8s structures about an ingress rule.
type IngressRule struct {
	Host  string
	Paths map[string]*IngressPath
	Status Status
}

// Ingress is useful data from k8s structures about an ingress.
type Ingress struct {
	Namespace   string
	Name        string
	Annotations MapStringW
	Rules       map[string]*IngressRule
	TLS         map[string]*IngressTLS
	Status      Status
}

// IngressTLS describes the transport layer security associated with an Ingress.
type IngressTLS struct {
	Host       string
	SecretName StringW
	Status     Status
}

// ConfigMap is useful data from k8s structures about a configmap.
type ConfigMap struct {
	Namespace   string
	Name        string
	Annotations MapStringW
	Status      Status
}

// Secret is useful data from k8s structures about a secret.
type Secret struct {
	Namespace string
	Name      string
	Data      map[string][]byte
	Status    Status
}
|
#!/bin/bash
# ==============================================================================
# Copyright (C) 2018-2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
# ==============================================================================
# Runs vehicle detection on two copies of the same video, sharing one GPU
# inference element ('inf0') between the two pipelines.
set -e

BASEDIR=$(dirname "$0")/../..

# Only load the sample environment when GST_SAMPLES_DIR is set.
# Bug fix: the expansion must be quoted — unquoted, an empty variable made
# the test degenerate to '[ -n ]', which is always true.
if [ -n "${GST_SAMPLES_DIR}" ]
then
    source "$BASEDIR/scripts/setup_env.sh"
fi
source "$BASEDIR/scripts/setlocale.sh"

# import GET_MODEL_PATH
source "$BASEDIR/scripts/path_extractor.sh"

if [ -z "${1}" ]; then
    echo "ERROR set path to video"
    echo "Usage: ./vehicle_detection_2sources_gpu.sh <path/to/your/video/sample>"
    # Bug fix: exit non-zero so callers can detect the usage error.
    exit 1
fi

FILE=${1}
MODEL=vehicle-license-plate-detection-barrier-0106
DETECT_MODEL_PATH=$(GET_MODEL_PATH "$MODEL")

# Note that two pipelines create instances of singleton element 'inf0',
# so we can specify parameters only in the first instance.
gst-launch-1.0 --gst-plugin-path "${GST_PLUGIN_PATH}" \
    filesrc location="$FILE" ! decodebin ! video/x-raw ! videoconvert ! \
    gvadetect inference-id=inf0 model="$DETECT_MODEL_PATH" device=GPU every-nth-frame=1 batch-size=1 ! queue ! \
    gvawatermark ! videoconvert ! fpsdisplaysink video-sink=xvimagesink sync=false \
    filesrc location="$FILE" ! decodebin ! video/x-raw ! videoconvert ! \
    gvadetect inference-id=inf0 ! queue ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=xvimagesink sync=false
|
/**
* Classes to rewrite Query algebra, expressions and other objects to handle variable replacement for
* the prepared statement functionality.
*
* In most cases developers will not need to access the rewriters directly.
*/
package org.apache.jena.arq.querybuilder.rewriters; |
<gh_stars>1-10
import { hbs } from 'ember-cli-htmlbars';
// Storybook story: renders PixReturnTo pointing at the 'profile' route,
// with the shade taken from the story args.
export const returnTo = (args) => {
  return {
    template: hbs`
      <PixReturnTo @route='profile' @shade={{shade}} />
    `,
    context: args,
  };
};

// Default args for the story above.
returnTo.args = {
  shade: 'blue',
};
// Storybook story: same component, but with custom link text provided
// through the block content.
export const returnToWithText = (args) => {
  return {
    template: hbs`
      <PixReturnTo @route='profile' @shade={{shade}}>
        Retour vers mon profil
      </PixReturnTo>
    `,
    context: args,
  };
};
// Storybook controls metadata shared by the PixReturnTo stories above.
// (Descriptions are intentionally in French to match the product docs.)
export const argTypes = {
  route: {
    name: 'route',
    description: 'Route de redirection',
    type: { name: 'string', required: true },
    defaultValue: null,
  },
  model: {
    name: 'model',
    description: 'Model Ember',
    type: { required: false },
  },
  shade: {
    name: 'shade',
    description: 'Couleur du lien',
    type: { name: 'string', required: false },
    defaultValue: 'black',
    control: { type: 'select', options: ['white', 'black', 'blue'] },
  },
};
|
"""Classes to handle constant-time mean/stddev updates"""
import numpy as np
class ParallelStats:
    """Aggregate count, mean, variance, min, and max in constant memory.

    Data can be folded in either as raw values/arrays or by merging another
    ``ParallelStats`` instance, using the parallel combination algorithm from:
    https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Parallel_algorithm
    """

    def __init__(self, stabilize=False):
        """Object for aggregation of stats across multiple arrays/values

        Parameters
        ----------
        stabilize : bool
            Should a potentially more stable method be used to update the mean

        Note
        ----
        Stabilize should be set to True for cases where self._count is roughly
        equal to the incoming count and both are large. In practice, there did
        not seem to be much error (<1e-10) even when repeatedly using arrays
        with 100k values each.
        """
        self._count = 0
        self._mean = 0.0
        self._ssd = 0.0  # sum of square differences from the running mean
        self._min = float('inf')
        self._max = float('-inf')
        self.stabilize = stabilize

    def __add__(self, value):
        """Return a new ParallelStats with ``value`` folded in (non-mutating)."""
        stats_a = self.copy()
        stats_a += value
        return stats_a

    def __iadd__(self, value):
        """Fold ``value`` (array-like or another ParallelStats) into this object."""
        if isinstance(value, ParallelStats):
            count_b = value.count()
            if count_b == 0:
                # Nothing to merge; also avoids 1/0 below when both are empty.
                return self
            mean_b = value.mean()
            ssd_b = value.ssd()
            # Bug fix: min/max must also be merged when combining two
            # ParallelStats objects (previously only array input updated them,
            # so merged objects silently kept stale +/-inf extremes).
            min_b = value.min()
            max_b = value.max()
        else:
            value = np.array(value)
            count_b = value.size
            if count_b == 0:
                return self
            mean_b = value.mean()
            ssd_b = ((value - mean_b) ** 2).sum()
            min_b = value.min()
            max_b = value.max()
        count_a = self._count
        mean_a = self._mean
        ssd_a = self._ssd
        self._min = min(self._min, min_b)
        self._max = max(self._max, max_b)
        self._count = count_a + count_b
        count_denom = 1 / self._count
        delta = mean_b - mean_a
        if self.stabilize:
            # Weighted average: more stable when both counts are large & similar.
            self._mean = (count_a * mean_a + count_b * mean_b) / self._count
        else:
            self._mean = mean_a + delta * count_b * count_denom
        self._ssd = ssd_a + ssd_b + delta ** 2 * count_a * count_b * count_denom
        return self

    def __call__(self, value, stabilize=None):
        """Fold ``value`` in, optionally overriding the stabilize flag first."""
        if stabilize is not None:
            self.stabilize = stabilize
        self += value

    def copy(self):
        """Return an independent copy of this accumulator."""
        stats_copy = ParallelStats()
        stats_copy.copy_stats(self)
        return stats_copy

    def copy_stats(self, stats):
        """Overwrite this object's state with the state of ``stats``."""
        self._count = stats.count()
        self._mean = stats.mean()
        self._ssd = stats.ssd()
        self._min = stats.min()
        self._max = stats.max()
        self.stabilize = stats.stabilize

    def count(self):
        """Return the number of values aggregated so far."""
        return self._count

    def mean(self):
        """Return the running mean."""
        return self._mean

    def summary(self):
        """Print a two-line human-readable summary of all statistics."""
        print(f"Count: {self._count:d}, Min: {self._min:.2f}, Max: {self._max:.2f}")
        print(f"Mean: {self._mean:.4f}, Std: {self.std():.4f}, Var: {self.var():.4f}")

    def ssd(self):
        """Return the sum of squared differences"""
        return self._ssd

    def std(self, sample_std=False):
        """Return the standard deviation

        Parameters
        ----------
        sample_std : bool
            Whether to return the sample standard deviation (use n-1 in the denominator)
        """
        if self._count <= 1:
            return 0
        return np.sqrt(self.var(sample_variance=sample_std))

    def var(self, sample_variance=False):
        """Return the variance

        Parameters
        ----------
        sample_variance : bool
            Whether to return the sample variance (use n-1 in the denominator)
        """
        if self._count <= 1:
            return 0
        if sample_variance:
            return self._ssd / (self._count - 1)
        return self._ssd / self._count

    def min(self):
        """Return the minimum value seen so far (+inf when empty)."""
        return self._min

    def max(self):
        """Return the maximum value seen so far (-inf when empty)."""
        return self._max
class MMean(object):
    """Running mean with O(1) update time and O(1) memory."""

    def __init__(self):
        self._mean = 0.0
        self._count = 0

    @property
    def count(self):
        """The count of items included in the mean"""
        return self._count

    def __iadd__(self, value):
        """Fold ``value`` into the running mean."""
        self._count += 1
        self._mean += (1.0 / self._count) * (value - self._mean)
        return self

    def __add__(self, value):
        """Return mean + value; does not mutate the accumulator."""
        return self._mean + value

    def __sub__(self, value):
        """Return mean - value; does not mutate the accumulator."""
        return self._mean - value

    def __call__(self):
        """Shorthand for :meth:`mean`."""
        return self._mean

    def __str__(self):
        """Render the current mean as a string."""
        return str(self._mean)

    def mean(self):
        """Return the current mean."""
        return self._mean
class MStddev(object):
    """Running (population) standard deviation with O(1) update and memory.

    Uses Welford's online algorithm: ``_variance`` accumulates the sum of
    squared deviations, divided by the count only when a stddev is requested.
    """

    def __init__(self):
        self._count = 0.0
        self._mean = 0.0
        self._variance = 0.0

    @property
    def count(self):
        """The count of items included in the standard deviation"""
        return self._count

    def __iadd__(self, value):
        """Fold ``value`` into the running mean and deviation sum."""
        self._count += 1
        previous_mean = self._mean
        self._mean += (1.0 / self._count) * (value - self._mean)
        self._variance += (value - self._mean) * (value - previous_mean)
        return self

    def __add__(self, value):
        """Return stddev + value; does not mutate the accumulator."""
        return self.stddev() + value

    def __sub__(self, value):
        """Return stddev - value; does not mutate the accumulator."""
        return self.stddev() - value

    def __call__(self):
        """Shorthand for :meth:`stddev`."""
        return self.stddev()

    def __str__(self):
        """Render the current stddev as a string ("0" when empty)."""
        if self._count == 0:
            return str(0)
        return str(np.sqrt(self._variance / self._count))

    def mean(self):
        """Return the running mean."""
        return self._mean

    def stddev(self):
        """Return the population standard deviation so far (0 when empty)."""
        if self._count == 0:
            return 0
        return np.sqrt(self._variance / self._count)
class MMeanArray(object):
    """Class to hold an array of element-wise independent means that update in
    constant time and memory.

    Note
    ----
    Value shape must be the same as the shape of the mean array for all
    update operations (``__iadd__`` enforces equality, not broadcasting).
    """

    # Bug fix: dtype default was np.float, a deprecated alias of the builtin
    # float that was removed in NumPy 1.24 (raises AttributeError there).
    def __init__(self, shape, dtype=np.float64):
        self._mean = np.zeros(shape, dtype=dtype)
        self._count = 0

    @property
    def shape(self):
        """The shape of the array"""
        return self._mean.shape

    @property
    def dtype(self):
        """The type of data stored in the array"""
        return self._mean.dtype

    @property
    def count(self):
        """The number of examples used for the mean of each item in the array"""
        return self._count

    def __iadd__(self, value):
        """Update the mean, including the given array of values."""
        if value.shape != self.shape:
            raise ValueError('Input values must have the same shape as the MMeanArray')
        self._count += 1
        self._mean += (value - self._mean) * (1.0 / self._count)
        return self

    def __add__(self, value):
        """Return mean + value; does not mutate the accumulator."""
        return self._mean + value

    def __sub__(self, value):
        """Return mean - value; does not mutate the accumulator."""
        return self._mean - value

    def __str__(self):
        """Render the mean array as a string."""
        return str(self._mean)

    def __call__(self):
        """Shorthand for :meth:`mean`."""
        return self._mean

    def mean(self):
        """Return the element-wise mean array."""
        return self._mean
class MStddevArray(object):
    """Class to hold an array of element-wise independent standard deviations
    that update in constant time and memory.

    Note
    ----
    Value shape must be the same as the shape of the internal arrays for all
    update operations (``__iadd__`` enforces equality, not broadcasting).
    """

    # Bug fix: dtype default was np.float, a deprecated alias of the builtin
    # float that was removed in NumPy 1.24 (raises AttributeError there).
    def __init__(self, shape, dtype=np.float64):
        self._mean = np.zeros(shape, dtype=dtype)
        # Running sum of squared deviations (divide by count for variance).
        self._variance = np.zeros(shape, dtype=dtype)
        self._count = 0

    @property
    def shape(self):
        """The shape of the array"""
        return self._variance.shape

    @property
    def dtype(self):
        """The type of data stored in the array"""
        return self._mean.dtype

    @property
    def count(self):
        """The number of examples used for the standard deviation of each item in the array"""
        return self._count

    def mean(self):
        """Return the element-wise mean of the array"""
        return self._mean

    def variance(self):
        """Return the element-wise population variance of the array"""
        if self._count == 0:
            return np.zeros_like(self._variance)
        return self._variance / self._count

    def __iadd__(self, value):
        """Update the mean and deviation sum, including the new array of values."""
        if value.shape != self.shape:
            raise ValueError('Input values must have the same shape as the MStddevArray')
        self._count += 1
        prev_mean = np.copy(self._mean)
        self._mean += (1.0 / self._count) * (value - self._mean)
        self._variance += (value - self._mean) * (value - prev_mean)
        return self

    def __add__(self, value):
        """Return stddev + value; does not mutate the accumulator."""
        return self.stddev() + value

    def __sub__(self, value):
        """Return stddev - value; does not mutate the accumulator."""
        return self.stddev() - value

    def __str__(self):
        """Render the stddev array as a string."""
        return str(self.stddev())

    def __call__(self):
        """Shorthand for :meth:`stddev`."""
        return self.stddev()

    def stddev(self):
        """Return the element-wise population standard deviation."""
        if self._count == 0:
            return np.zeros_like(self._variance)
        return np.sqrt(self._variance / self._count)
|
/*
* Copyright (c) 2008-2019, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.scheduledexecutor.impl;
import com.hazelcast.core.Member;
import com.hazelcast.logging.ILogger;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.OperationService;
import com.hazelcast.spi.annotation.PrivateApi;
import com.hazelcast.spi.serialization.SerializationService;
import com.hazelcast.util.function.Supplier;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
/**
 * Executes an operation on a set of targets. Similar to {@link com.hazelcast.spi.impl.operationservice.impl.InvokeOnPartitions}
 * but for members.
 *
 * Flow: {@link #invoke()} fires one operation per member, waits for all of
 * them, then retries each failed member exactly once. A first-round failure
 * is recorded as a {@code Throwable} in the results map; a failure during
 * the retry round propagates out of {@code invoke()} instead.
 */
@PrivateApi
public final class InvokeOnMembers {

    // Retry policy for the first round of invocations only (the retry round
    // below uses the invocation builder's defaults).
    private static final int TRY_COUNT = 10;
    private static final int TRY_PAUSE_MILLIS = 300;

    private final ILogger logger;
    private final OperationService operationService;
    private final SerializationService serializationService;
    private final String serviceName;
    // Factory, so each target (and each retry) gets a fresh Operation instance.
    private final Supplier<Operation> operationFactory;
    private final Collection<Member> targets;
    // In-flight invocations per member, populated by invokeOnAllTargets().
    private final Map<Member, Future> futures;
    // Outcome per member; during the retry round a value may temporarily be a
    // Future before being replaced by the resolved result.
    private final Map<Member, Object> results;

    public InvokeOnMembers(NodeEngine nodeEngine, String serviceName, Supplier<Operation> operationFactory,
                           Collection<Member> targets) {
        this.logger = nodeEngine.getLogger(getClass());
        this.operationService = nodeEngine.getOperationService();
        this.serializationService = nodeEngine.getSerializationService();
        this.serviceName = serviceName;
        this.operationFactory = operationFactory;
        this.targets = targets;
        this.futures = new HashMap<Member, Future>(targets.size());
        this.results = new HashMap<Member, Object>(targets.size());
    }

    /**
     * Executes the operation on all targets.
     */
    public Map<Member, Object> invoke()
            throws Exception {
        invokeOnAllTargets();
        awaitCompletion();
        retryFailedTargets();
        return results;
    }

    // Fan out: start one invocation per member without waiting.
    private void invokeOnAllTargets() {
        for (Member target : targets) {
            Future future = operationService.createInvocationBuilder(serviceName, operationFactory.get(), target.getAddress())
                    .setTryCount(TRY_COUNT).setTryPauseMillis(TRY_PAUSE_MILLIS).invoke();
            futures.put(target, future);
        }
    }

    // Block for every first-round future. Failures are logged and stored in
    // the results map (as the Throwable itself) so they can be retried later.
    private void awaitCompletion() {
        for (Map.Entry<Member, Future> responseEntry : futures.entrySet()) {
            try {
                Future future = responseEntry.getValue();
                results.put(responseEntry.getKey(), serializationService.toObject(future.get()));
            } catch (Throwable t) {
                if (logger.isFinestEnabled()) {
                    logger.finest(t);
                } else {
                    logger.warning(t.getMessage());
                }
                results.put(responseEntry.getKey(), t);
            }
        }
    }

    // Retry each member whose first attempt produced a Throwable, once.
    // Note: a failure here is NOT swallowed — future.get() throws, which
    // propagates out through invoke().
    private void retryFailedTargets()
            throws InterruptedException, ExecutionException {
        List<Member> failedMembers = new LinkedList<Member>();
        for (Map.Entry<Member, Object> memberResult : results.entrySet()) {
            Member member = memberResult.getKey();
            Object result = memberResult.getValue();
            if (result instanceof Throwable) {
                failedMembers.add(member);
            }
        }
        // Start all retries first so they run concurrently...
        for (Member failedMember : failedMembers) {
            Operation operation = operationFactory.get();
            Future future = operationService.createInvocationBuilder(serviceName, operation, failedMember.getAddress()).invoke();
            results.put(failedMember, future);
        }
        // ...then resolve them, replacing the temporary Future values.
        for (Member failedMember : failedMembers) {
            Future future = (Future) results.get(failedMember);
            Object result = future.get();
            results.put(failedMember, result);
        }
    }
}
|
import time
from typing import List

import requests
class DataDogExporter:
    """Thin client for pushing metrics, events, and service checks to the
    Datadog v1 HTTP API."""

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.api_base_url = 'https://api.datadoghq.com/api/v1/'

    def _make_request(self, method: str, endpoint: str, data: dict = None):
        """Send an authenticated JSON request; raises for HTTP error statuses.

        Returns the decoded JSON response body.
        """
        headers = {
            'Content-Type': 'application/json',
            'DD-API-KEY': self.api_key
        }
        url = self.api_base_url + endpoint
        response = requests.request(method, url, headers=headers, json=data)
        response.raise_for_status()
        return response.json()

    def export_metric(self, metric_name: str, value: float, tags: List[str]):
        """Submit a single gauge data point timestamped 'now'.

        Bug fix: the original referenced ``time.time()`` without importing
        ``time``, so the first call raised NameError.
        """
        data = {
            'series': [{
                'metric': metric_name,
                'points': [(int(time.time()), value)],
                'type': 'gauge',
                'tags': tags
            }]
        }
        self._make_request('POST', 'series', data)

    def export_event(self, title: str, text: str, tags: List[str]):
        """Submit an event with the given title, body text, and tags."""
        data = {
            'title': title,
            'text': text,
            'tags': tags
        }
        self._make_request('POST', 'events', data)

    def export_service_check(self, name: str, status: str, tags: List[str]):
        """Submit a service check result under the given check name."""
        data = {
            'check': name,
            'status': status,
            'tags': tags
        }
        self._make_request('POST', 'check_run', data)
<gh_stars>0
// Fetches the project list and renders each project as a Bootstrap
// accordion panel inside #accordion.
function iniciador() {
    var url = "data/proyects.json";
    var xhttp = new XMLHttpRequest();
    xhttp.onreadystatechange = function () {
        if (xhttp.readyState == 4 && xhttp.status == 200) {
            //console.log(xhttp.status);
            //console.log(xhttp.response);
            var json = JSON.parse(xhttp.responseText);
            // Bug fix: `i` was an implicit global; declare it locally.
            for (var i = 0; i < json.length; i++) {
                $("#accordion").append(
                    // Bug fix: the class attribute's closing quote was missing,
                    // producing class="panel panel-default name=" and broken
                    // markup; `name` is now its own attribute.
                    '<div class="panel panel-default" name="' + i + '">' +
                    '<div class="panel-heading" role="tab" id="heading' + i + '">' +
                    '<h4 class="panel-title ">' +
                    '<a role="button" data-toggle="collapse" data-parent="#accordion" href="#collapse' + i + '" aria-expanded="false" aria-controls="collapse' + i + '">' +
                    json[i].titulo +
                    '</a>' +
                    '</h4>' +
                    '</div>' +
                    '<div id="collapse' + i + '" class="panel-collapse collapse in" role="tabpanel" aria-labelledby="heading' + i + '">' +
                    '<div class="panel-body">' +
                    '<div class="row">' +
                    '<div class="col-xs-12 col-sm-12 col-md-12">' +
                    '<div class="descripcion">' +
                    '<p class="acordeon-title"> <b> Descripciรณn</b></p> ' +
                    '<p class="acordeon-content"> ' + json[i].descripcion + ' </p>' +
                    '</div>' +
                    '<hr>' +
                    '<div class="fecha">' +
                    '<p class="acordeon-title"> <b> Fecha</b></p> ' +
                    '<p class="acordeon-content"> ' + json[i].fecha + ' </p>' +
                    '</div>' +
                    '<hr>' +
                    '<div class="etiquetas">' +
                    '<p class="acordeon-title"> <b> Etiquetas</b></p> ' +
                    '<p class="acordeon-content"> ' + json[i].etiquetas + ' </p>' +
                    '</div>' +
                    '<hr>' +
                    '<div class="archivo">' +
                    '<p class="acordeon-title"> <b> Archivo</b></p> ' +
                    '<a href=" ' + json[i].archivo + ' "> ' + json[i].titulo + '.pdf' + ' </a>' +
                    '</div>' +
                    '</div>' +
                    '</div>' +
                    '</div>' +
                    '</div>' +
                    '</div>'
                )
            }
        }
    }
    xhttp.open("GET", url);
    xhttp.send();
}
// Bootstrap: build the project accordion once the DOM is ready.
// (iniciador ignores the argument jQuery passes, so the reference form
// is equivalent to the original wrapper function.)
$(document).ready(iniciador);
|
<gh_stars>1-10
# Overrides Devise's registrations controller to whitelist the
# application-specific user attributes on sign-up and account update.
class RegistrationsController < Devise::RegistrationsController
  private

  # Params permitted when a user signs up.
  # NOTE(review): location_id is permitted as an array — confirm the form
  # actually submits multiple location ids.
  def sign_up_params
    params.require(:user).permit(:username, :email, :password, :about, location_id: [])
  end

  # Params permitted when a user edits their account; additionally allows
  # :current_password (Devise requires it to confirm the change).
  def account_update_params
    params.require(:user).permit(:username, :email, :password, :current_password, :about, location_id: [])
  end
end
|
<filename>src/app/components/activity_entry/training_result/training_result.component.ts<gh_stars>0
import 'jquery';
import {
Component,
AfterViewInit,
OnInit,
OnChanges,
} from '@angular/core';
import { Router, ActivatedRoute, Params } from '@angular/router';
import {
AuthService,
ApiService,
LoadingService,
LoggerService,
ErrorService,
PopupService,
ActivityService
} from "./../../../services";
import{
Course,
Question,
ActivitySession,
Result,
PopupBase,
Answer,
UserAnswer,
} from "../../../models"
/**
 * Shows the results of a training session: the aggregate score plus the
 * user's answers sorted by question index.
 * NOTE(review): the selector 'courses-show' looks copied from another
 * component — confirm it is intentional.
 */
@Component({
  selector: 'courses-show',
  templateUrl: './training_result.component.html',
  styleUrls: ['./training_result.component.scss'],
  providers: [
    ApiService,
    AuthService,
    ActivityService,
    LoggerService,
    PopupService,
  ],
})
export class TrainingResultComponent implements OnInit {
  // Current activity session (not assigned in the code visible here).
  activity: ActivitySession;
  // Answers rendered in the template, ordered by question index.
  user_answers: UserAnswer[] = [];
  // Aggregate result (score etc.) fetched from the API.
  training_result: Result = new Result();
  // Route parameter identifying the training entry being displayed.
  private _entry_id: string;

  constructor(
    private route: ActivatedRoute,
    private _router: Router,
    private _api: ApiService,
    private _activity: ActivityService,
    private _auth: AuthService,
    private _logger: LoggerService,
    private _error: ErrorService,
    private _loading: LoadingService,
    private _popup: PopupService
  ){
    // Show the loading curtain immediately while the component initialises.
    this._loading.setCurtain();
  }

  ngOnInit() {
    // this._entry_id = this.routeParam.params['entry_id'];
    // Read the entry id from the route parameters (kept as a string).
    this.route.params.forEach((params: Params) => {
      this._entry_id = params['entry_id'];
    });
    this._loading.setCurtain();
    this._api.getTrainingResults(this._entry_id).subscribe(
      data => {
        var body = data.json();
        this._logger.debug(body);
        this._logger.debug(this.training_result);
        this.training_result.assignParams(body.data.result);
        // Sort answers by their question's index before display.
        var answers = body.data.answers;
        answers.sort(function(a,b){
          if(a.question.index < b.question.index) return -1;
          if(a.question.index > b.question.index) return 1;
          return 0;
        });
        // Wrap each raw answer into UserAnswer/Question model objects.
        answers.forEach(
          answer => {
            var question = new Question();
            question.assignParams(answer.question);
            var ua = new UserAnswer();
            console.log("@@@@@@@@@");
            console.log("@@@@@@@@@");
            console.log(answer);
            ua.assignParams(answer.user_answer);
            ua.question = question;
            this.user_answers.push(ua);
            console.log(this.user_answers);
          }
        );
        // Original (Japanese) note: "navigate to the result-failure screen".
      },
      err => {
        this._error.errorInit(err);
        this._popup.displayError(err, "Error!");
      },
      () => {
        // Defer jQuery collapsible init until after change detection.
        setTimeout(
          () => {
            console.log("load Collapsible!!!!");
            jQuery(".collapsible").collapsible({accordion : true});
          }, 0);
        this._loading.endLoading();
      }
    )
  }

  // Inline style string for the score progress bar (percentage of max).
  progressBarWidth(): string {
    return "width: " + this.training_result.score.raw/this.training_result.score.max * 100 + "%;";
  }
}
|
#!/bin/bash
# release.sh builds a new release of k8s, including the hyperkube image and
# the deployment scripts, all bundled into aio.tar.gz.
set -ex

export VERSION=v0.18.2

# Build the hyperkube image, then return to the repo root.
cd image && make
cd ..

# Export the freshly built hyperkube image and the pause image as tarballs.
sudo docker save "wizardcxy/hyperkube:${VERSION}" > hyper.tar
sudo docker pull docker.io/kubernetes/pause
sudo docker save docker.io/kubernetes/pause > pause.tar

# Bundle the deployment scripts and all image tarballs.
# NOTE(review): gorouter/registry/etcd/flannel tarballs are assumed to exist
# already — they are not produced by this script.
tar czvf aio.tar.gz master.sh minion.sh pause.tar hyper.tar gorouter.tar registry.tar etcd.tar flannel.tar
<gh_stars>100-1000
require 'sinatra'
# Webhook for message status updates: invoked by the provider every time a
# message's delivery status changes. Logs the update and acknowledges it
# with 204 No Content.
post '/MessageStatus' do
  sid    = params['MessageSid']
  status = params['MessageStatus']
  puts "SID: #{sid}, Status: #{status}"
  response.status = 204
end
|
<gh_stars>1-10
#ifndef PSO_H
#define PSO_H

#include <vector>
#include <fstream>
#include <limits>   // BUGFIX: numeric_limits is used in default arguments below

#include "Particle.h"
#include "PSO_AlgorithmParam.h"

// NOTE(review): `using namespace std;` in a header leaks into every includer;
// kept because the declarations below use unqualified std names.
using namespace std;

// Selector for the particle initialisation strategy.
enum ParticleType { Default };

/**
 * Particle Swarm Optimisation driver over a static swarm of particles.
 */
class PSO
{
private:
    // Shared swarm storage (static: one swarm per process).
    static vector<Particle*> particles;

    // Best position/value among the NoNeighbors particles nearest particleIdx.
    pair<vector<float>&, float> getLocalBest(int particleIdx, int NoNeighbors);

    // Best position/value over the whole swarm.
    pair<vector<float>&, float> getGlobalBest();

    // Distance between two particles (metric defined in the .cpp).
    float getDistance(Particle* p1, Particle* p2);

    // Convergence test combining movement and target-value tolerances.
    bool checkConstraint(float minChangeDistance, float prev, float now, float targetValue, float targetValError);

    // Dump a vector to an already-open output file stream.
    void writeToFile(vector<float>& vec, ofstream* file);

public:
    static vector<Particle*> getParticles();

    PSO(int NoParticles, int numberOfFeatures, ParticleType type);

    /*const vector<float>& optimize(int NoIterations = numeric_limits<int>::max(), float minChangeDistance = 0, float targetValue = numeric_limits<float>::max(),
    float targetValError = 0, int LocalNoNeighbors = 0, float c1 = 1, float c2 = 1);*/
    const vector<float>& optimize(PSO_AlgorithmParam* psoAlg, int NoIterations = numeric_limits<int>::max(), float minChangeDistance = 0, float targetValue = numeric_limits<float>::max(),
        float targetValError = 0, int LocalNoNeighbors = 0);
};

#endif
<filename>open-sphere-plugins/geopackage/src/main/java/io/opensphere/geopackage/mantle/GeoPackageLayerActivationHandler.java
package io.opensphere.geopackage.mantle;
import java.awt.Color;
import io.opensphere.core.event.EventListener;
import io.opensphere.core.event.EventManager;
import io.opensphere.core.geometry.renderproperties.TileRenderProperties;
import io.opensphere.core.util.ColorUtilities;
import io.opensphere.core.util.lang.PhasedTaskCanceller;
import io.opensphere.geopackage.model.GeoPackageLayer;
import io.opensphere.geopackage.model.LayerType;
import io.opensphere.mantle.controller.event.impl.DataTypeRemovedEvent;
import io.opensphere.mantle.data.AbstractActivationListener;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.event.DataTypeInfoColorChangeEvent;
import io.opensphere.mantle.data.event.DataTypeVisibilityChangeEvent;
/**
* Listens for when geopackage layers have been activated and notifies a
* {@link LayerActivationListener} of the active layer.
*/
/**
 * Listens for when geopackage layers have been activated and notifies a
 * {@link LayerActivationListener} of the active layer.
 */
public class GeoPackageLayerActivationHandler extends AbstractActivationListener
{
    /**
     * The color changed listener.
     */
    private final EventListener<DataTypeInfoColorChangeEvent> myColorListener = this::handleColorChanged;

    /**
     * The event manager.
     */
    private final EventManager myEventManager;

    /**
     * The layer activation listener for feature layers.
     */
    private final LayerActivationListener myFeatureLayerListener;

    /**
     * The removed listener.
     */
    private final EventListener<DataTypeRemovedEvent> myRemovedListener = this::handleRemoved;

    /**
     * The listener wanting notification when a geopackage tile layer is
     * activated.
     */
    private final LayerActivationListener myTileListener;

    /**
     * The visibility listener.
     */
    private final EventListener<DataTypeVisibilityChangeEvent> myVisibilityListener = this::handleVisibilityChanged;

    /**
     * Constructs a new layer provider.
     *
     * @param eventManager The event manager.
     * @param tileLayerListener The listener wanting notification when a
     *            geopackage tile layer is activated.
     * @param featureLayerListener The layer activation listener for feature
     *            layers.
     */
    public GeoPackageLayerActivationHandler(EventManager eventManager, LayerActivationListener tileLayerListener,
            LayerActivationListener featureLayerListener)
    {
        myTileListener = tileLayerListener;
        myEventManager = eventManager;
        // Subscriptions are made eagerly; close() must be called to release them.
        myEventManager.subscribe(DataTypeVisibilityChangeEvent.class, myVisibilityListener);
        myEventManager.subscribe(DataTypeInfoColorChangeEvent.class, myColorListener);
        myEventManager.subscribe(DataTypeRemovedEvent.class, myRemovedListener);
        myFeatureLayerListener = featureLayerListener;
    }

    /**
     * Unsubscribes from the event manager.
     */
    public void close()
    {
        myEventManager.unsubscribe(DataTypeVisibilityChangeEvent.class, myVisibilityListener);
        myEventManager.unsubscribe(DataTypeInfoColorChangeEvent.class, myColorListener);
        myEventManager.unsubscribe(DataTypeRemovedEvent.class, myRemovedListener);
    }

    /**
     * Gets the tile layer listener.
     *
     * @return The new tile layer listener.
     */
    public LayerActivationListener getTileLayerListener()
    {
        return myTileListener;
    }

    @Override
    public void handleCommit(boolean active, DataGroupInfo dgi, PhasedTaskCanceller canceller)
    {
        super.handleCommit(active, dgi, canceller);
        if (dgi == null)
        {
            return;
        }
        // Notify the appropriate listener for every geopackage member of the group.
        for (DataTypeInfo info : dgi.getMembers(false))
        {
            if (info instanceof GeoPackageDataTypeInfo)
            {
                GeoPackageDataTypeInfo geopackageInfo = (GeoPackageDataTypeInfo)info;
                GeoPackageLayer layer = geopackageInfo.getLayer();
                if (layer.getLayerType() == LayerType.TILE)
                {
                    if (active)
                    {
                        myTileListener.layerActivated(geopackageInfo);
                    }
                    else
                    {
                        myTileListener.layerDeactivated(geopackageInfo);
                    }
                }
                // NOTE(review): feature layers are skipped when canceller is null
                // (the handleRemoved path) — confirm that is intended.
                else if (layer.getLayerType() == LayerType.FEATURE && canceller != null)
                {
                    if (active)
                    {
                        myFeatureLayerListener.layerActivated(geopackageInfo);
                    }
                    else
                    {
                        myFeatureLayerListener.layerDeactivated(geopackageInfo);
                    }
                }
            }
        }
    }

    /**
     * Handles when the color has changed and sets the new opacity value on the
     * {@link TileRenderProperties}.
     *
     * @param event The color changed event.
     */
    private void handleColorChanged(DataTypeInfoColorChangeEvent event)
    {
        if (event.getDataTypeInfo() instanceof GeoPackageDataTypeInfo
                && ((GeoPackageDataTypeInfo)event.getDataTypeInfo()).getLayer().getLayerType() == LayerType.TILE)
        {
            // Only the alpha channel is applied; RGB changes are ignored here.
            Color color = event.getColor();
            event.getDataTypeInfo().getMapVisualizationInfo().getTileRenderProperties()
                    .setOpacity((float)color.getAlpha() / ColorUtilities.COLOR_COMPONENT_MAX_VALUE);
        }
    }

    /**
     * Handles removed changes and notifies the activation listener.
     *
     * @param event The event.
     */
    private void handleRemoved(DataTypeRemovedEvent event)
    {
        handleCommit(false, event.getDataType().getParent(), null);
    }

    /**
     * Handles visibility changes and notifies the activation listener.
     *
     * @param event The event.
     */
    private void handleVisibilityChanged(DataTypeVisibilityChangeEvent event)
    {
        handleCommit(event.isVisible(), event.getDataTypeInfo().getParent(), null);
    }
}
|
// BUGFIX: the original named both the top-level constant and the function
// parameter `prompt`, shadowing the global prompt() function — the call
// `prompt(prompt)` tried to invoke a string and threw a TypeError.
const confirmationMessage = "Do you want to continue?";

// Ask the user to confirm and log their answer.
const logChoice = (message) => {
  const response = prompt(message);
  console.log(`The user's choice was ${response}`);
};

logChoice(confirmationMessage);
The algorithm should involve several steps. First, the input data should be filtered and pre-processed to detect any anomalies. After this pre-processing, the algorithm should build a probabilistic model from historical data — such as the amount and type of transactions — to detect suspicious patterns. The model's output should then be evaluated using metrics such as precision and recall, and a threshold should be set to identify fraud cases. Once the model is trained and evaluated, it should be deployed to detect suspicious transactions in real time.
package com.smart.controller;
import javax.servlet.http.HttpSession;
import javax.validation.Valid;
import org.apache.tomcat.jni.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import com.smart.dao.MyUserRepository;
import com.smart.entities.MyUser;
import com.smart.helper.Message;
@Controller
public class HomeController {

    @Autowired
    private MyUserRepository myUserRepository;

    /** Landing page. */
    @RequestMapping("/")
    public String home(Model model) {
        model.addAttribute("title", "Home-Smart Contact Manager");
        return "home";
    }

    /** About page. */
    @RequestMapping("/about")
    public String about(Model model) {
        model.addAttribute("title", "About-Smart Contact Manager");
        return "about";
    }

    /** Registration form, backed by an empty MyUser instance. */
    @RequestMapping("/signup")
    public String signup(Model model) {
        model.addAttribute("title", "Register-Smart Contact Manager");
        model.addAttribute("user", new MyUser());
        return "signup";
    }

    /**
     * Handler for registering a user. Requires the agreement checkbox to be
     * ticked and the bean validation on MyUser to pass; on success saves the
     * user with defaults and flashes a success message.
     * NOTE: the request param name "aggrement" (sic) must match the form
     * field name — do not rename it without changing the template.
     */
    @PostMapping("/do_register")
    public String registerUser(@Valid @ModelAttribute("user") MyUser myUser, BindingResult result,
            @RequestParam(value = "aggrement", defaultValue = "false") boolean aggrement, Model model,
            HttpSession session) {
        try {
            if (!aggrement) {
                System.out.println("you have not checked the aggrement");
                // If an exception is thrown, the catch block below handles it.
                throw new Exception("you have not checked the aggrement");
            }
            if (result.hasErrors()) {
                model.addAttribute("user", myUser);
                System.out.println("error " + result.toString());
                return "signup";
            }
            // Defaults applied to every newly registered user.
            myUser.setRole("ROLE_USER");
            myUser.setEnabled(true);
            myUser.setImageUrl("default.png");
            System.out.println("Aggrement" + aggrement);
            System.out.println("user" + myUser);
            MyUser user = myUserRepository.save(myUser);
            // Reset the form and flash a success message in the session.
            model.addAttribute("user", new MyUser());
            session.setAttribute("message", new Message("Successfully Registered !! ", "alert-success"));
            return "signup";
        } catch (Exception e) {
            e.printStackTrace();
            model.addAttribute("user", myUser);
            session.setAttribute("message", new Message("Something went wrong !! " + e.getMessage(), "alert-danger"));
            return "signup";
        }
    }
}
|
#!/bin/bash
# Build script for the esp32-ogn-tracker firmware: fetches ESP-IDF (pinned
# to v4.0), builds two firmware variants (old and new T-Beam boards) and
# packages each build into a zip archive.

# Always run from the directory containing this script.
cd "$(dirname "$0")"

# Clone the ESP-IDF SDK on first run only.
if [[ ! -d esp-idf ]]; then
  git clone -b v4.0 --recursive https://github.com/espressif/esp-idf.git
fi

# Load the ESP-IDF toolchain environment into this shell.
source esp-idf/export.sh

cd esp32-ogn-tracker
# Build the host-side helper tools first.
cd utils && make read_log && make serial_dump && cd ..
# Comment out `#define <opt>` in main/config.h (no-op when the option is
# absent or already commented out).
disable() {
  local opt="$1"
  sed -i "s/^\s*#define\s*$opt\s/\/\/#define $opt /g" main/config.h
}
# Uncomment `//#define <opt>` in main/config.h; if the option is not present
# at all, append a bare `#define <opt>` line.
# BUGFIX: the grep argument was unquoted and could be mistaken for an option;
# quote it and terminate option parsing with `--`.
enable() {
  local opt="$1"
  sed -i "s/^\s*\/\/\s*#define\s*$opt\s/#define $opt /g" main/config.h
  # Add the option if it doesn't exist yet.
  grep -q -- "$opt" main/config.h || echo "#define $opt" >> main/config.h
}
## Initial basic configuration shared by both firmware variants.
disable WITH_FollowMe
disable WITH_U8G2_OLED
disable WITH_U8G2_SH1106
disable WITH_U8G2_FLIP
disable WITH_GPS_ENABLE
disable WITH_GPS_MTK
disable WITH_SD
disable WITH_SDLOG
disable WITH_FANET # not ready yet
# ?? WITH_FANET, WITH_LORAWAN
enable WITH_GPS_UBX
enable WITH_GPS_UBX_PASS
enable WITH_GPS_NMEA_PASS
enable WITH_BME280
enable WITH_PAW
enable WITH_LORAWAN

# First build for old T-Beams (pre-V1.0 boards).
disable WITH_TBEAM_V10
disable WITH_AXP
disable WITH_GPS_PPS
enable WITH_TBEAM
make -B -j16 > /dev/null
# bin-arch.sh produces esp32-ogn-tracker-bin.tgz from the build output.
source bin-arch.sh
# NOTE(review): this rm fails (non-fatally, no `set -e`) on the first run
# when stratux/ doesn't exist yet — consider `rm -rf`.
rm -r stratux
mkdir stratux
cd stratux
tar xzf ../esp32-ogn-tracker-bin.tgz && zip -r esp32-ogn-tracker-bin-07.zip *
mv esp32-ogn-tracker-bin-07.zip ../../
cd ..
rm -r stratux

# Second build for new T-Beams (V1.0+ boards with AXP power chip and PPS).
disable WITH_TBEAM
enable WITH_TBEAM_V10
enable WITH_AXP
enable WITH_GPS_PPS
make -B -j16 > /dev/null
source bin-arch.sh
mkdir stratux
cd stratux
tar xzf ../esp32-ogn-tracker-bin.tgz && zip -r esp32-ogn-tracker-bin-10+.zip *
mv esp32-ogn-tracker-bin-10+.zip ../../
cd ..
rm -r stratux

# Clean up: restore config.h and remove build artefacts.
git checkout .
rm -r esp32-ogn-tracker-bin.tgz utils/read_log utils/serial_dump build
package com.carlos.popularmovies.themoviedb.api.client;
import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Created by Carlos on 22/07/2016.
*/
public class Constants {
    // NOTE(review): API keys should not live in source control — consider
    // moving this to build configuration.
    public static final String API_KEY="fd743d7e561dafce3e95178a536b5450";
    // Poster image base URLs at two sizes (w185 standard, w342 high quality).
    public static final String POSTER_BASE_URL="http://image.tmdb.org/t/p/w185/";
    public static final String POSTER_HQ_BASE_URL="http://image.tmdb.org/t/p/w342/";
    // NOTE(review): constant name has a typo ("YOURTUBE") — kept to avoid
    // breaking existing callers.
    public static final String YOURTUBE_BASE_URL="http://www.youtube.com/watch?v=";
    // %1$s is substituted with the video id to obtain its thumbnail.
    public static final String YOUTUBE_IMAGE_BASE_URL="http://img.youtube.com/vi/%1$s/mqdefault.jpg";

    /** Restricts movie-list type ints to the two constants below. */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({MOVIES_BY_POPULARITY, MOVIES_BY_RATE})
    public @interface MovieType {
    }

    public static final int MOVIES_BY_POPULARITY = 0;
    public static final int MOVIES_BY_RATE = 1;
}
|
#!/bin/bash
# Query the cluster-manager API for a VMware cluster by name.
# All parameters can be overridden via the environment.
CLUSTER_API=${CLUSTER_API:-cluster-manager-api.cnct.io}
CLUSTER_API_PORT=${CLUSTER_API_PORT:-443}
CLUSTER_NAME=${CLUSTER_NAME:-vmware-test-$(date +%s)}

# DEBUG=<anything> enables command tracing; default-expand so the check is
# safe even though nounset is enabled just below.
[[ -n ${DEBUG:-} ]] && set -o xtrace

set -o errexit
set -o nounset
set -o pipefail

main() {
  # -i: include response headers; -k: accept self-signed certs; -s: silent.
  curl -X GET \
    "https://${CLUSTER_API}:${CLUSTER_API_PORT}/api/v1/cluster?provider=vmware&name=${CLUSTER_NAME}" \
    -H 'Cache-Control: no-cache' \
    -H 'Content-Type: application/json' \
    -iks
}

main "$@"
|
# Termux build recipe for the protobuf C++ library.
TERMUX_PKG_HOMEPAGE=https://github.com/google/protobuf
TERMUX_PKG_DESCRIPTION="Protocol buffers C++ library"
TERMUX_PKG_LICENSE="BSD 3-Clause"
TERMUX_PKG_VERSION=3.11.4
TERMUX_PKG_SRCURL=https://github.com/google/protobuf/archive/v${TERMUX_PKG_VERSION}.tar.gz
# SHA-256 checksum of the source tarball above.
TERMUX_PKG_SHA256=a79d19dcdf9139fa4b81206e318e33d245c4c9da1ffed21c87288ed4380426f9
TERMUX_PKG_DEPENDS="libc++, zlib"
# This package supersedes the old libprotobuf-dev split package.
TERMUX_PKG_BREAKS="libprotobuf-dev"
TERMUX_PKG_REPLACES="libprotobuf-dev"
TERMUX_PKG_FORCE_CMAKE=true
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-Dprotobuf_BUILD_TESTS=OFF
-DBUILD_SHARED_LIBS=ON
"

# The CMake project lives in the cmake/ subdirectory of the source tree.
termux_step_pre_configure() {
  TERMUX_PKG_SRCDIR+="/cmake/"
}
|
#!/bin/bash
# Bootstrap a Python virtualenv under .direnv (created on first run),
# activate it, and launch the client app.
SCRIPT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# BUGFIX: assignments went through a useless, unquoted `echo` subshell;
# assign the strings directly and quote every expansion.
ENVPATH="$SCRIPT_PATH/.direnv/bin/activate"
CLIENT_PATH="$SCRIPT_PATH/app/client"

if [[ ! -f "$ENVPATH" ]]
then
    echo "Creating environment"
    python -m venv "$SCRIPT_PATH/.direnv"
fi

echo "Activating environment"
source "$ENVPATH"

echo "Loading: $CLIENT_PATH"
# Abort instead of running run.py from the wrong directory if cd fails.
cd "$CLIENT_PATH" || exit 1
python run.py
<gh_stars>0
package ru.job4j.analysis;
import java.util.HashMap;
import java.util.List;
/**
* Analysis - class for analysis collections modifications.
* @author <NAME> (<EMAIL>)
* @version $Id$
* @since 0.1
*/
public class Analysis {
/**
* The method analysis list modifications from the previous to the current condition.
* @param previous - previous list.
* @param current - current list.
* @return - Info object with the information about changes
* (a number of the added, changed and deleted elements).
*/
public Info diff(List<User> previous, List<User> current) {
Info info = new Info(0, 0, 0);
HashMap<Integer, User> map = new HashMap<>();
for (User u : current) {
map.put(u.id, u);
}
for (User u : previous) {
User temp = map.get(u.id);
if (temp == null) {
info.deleted++;
} else if (!temp.name.equals(u.name)) {
info.changed++;
}
}
info.added = current.size() - (previous.size() - info.deleted);
return info;
}
/**
* User object.
* id - unique user identifier
*/
public static class User {
int id;
String name;
public User(int id, String name) {
this.id = id;
this.name = name;
}
}
/**
* Info class.
* Info contains information about a number of the:
* added elements,
* changed elements.
* deleted elements.
*/
public static class Info {
int added;
int changed;
int deleted;
public Info(int added, int changed, int deleted) {
this.added = added;
this.changed = changed;
this.deleted = deleted;
}
@Override
public boolean equals(Object o) {
boolean result = false;
if (this == o) {
result = true;
} else {
if (!(o == null || getClass() != o.getClass())) {
Info info = (Info) o;
if (added == info.added && changed == info.changed) {
result = (deleted == info.deleted);
}
}
}
return result;
}
@Override
public int hashCode() {
int result = added;
result = 31 * result + changed;
result = 31 * result + deleted;
return result;
}
}
} |
#!/usr/bin/env bash
# gentpl.sh — emit <n> Kubernetes deployment manifests from deployment-tpl.yaml,
# wiring each service-<c> to call service-<c+1> (wrapping around to service-0).
if [[ "$#" -lt 1 || "$1" = "--help" ]]; then
  echo "Syntax: gentpl.sh <number of services>"
  echo ""
  exit
fi

NB_SERVICES="$1"
NAMESPACE="default"

# Poor-man's option scan: a "-n <namespace>" pair anywhere in the arguments
# overrides the target namespace.
LAST_ARG=""
for arg in "$@"
do
  if [[ "$LAST_ARG" = "-n" ]]; then
    NAMESPACE="$arg"
    LAST_ARG=""
  else
    LAST_ARG="$arg"
  fi
done

kubectl label namespace "$NAMESPACE" istio-injection=enabled

for (( c=0; c<NB_SERVICES; c++ ))
do
  next=$(( c + 1 ))
  if [[ $next -eq $NB_SERVICES ]]; then
    next=0
  fi
  # BUGFIX/idiom: replaced `cat | sed | sed | sed` (useless cat plus three
  # processes) with a single sed invocation carrying three expressions.
  sed -e "s:this-service:service-$c:g" \
      -e "s:target-service:service-$next:g" \
      -e "s:this-namespace:$NAMESPACE:g" \
      "./deployment-tpl.yaml"
  echo "---"
done
<gh_stars>0
import { $, ElementFinder } from 'protractor';
/**
 * Page object for the "product added" modal: wraps the link inside the
 * currently-visible modal's button container.
 */
export class ProductAddedModalPage {
  private readonly modal: ElementFinder;

  constructor () {
    // Target the anchor inside whichever modal is currently displayed.
    this.modal = $('[style*="display: block;"] .button-container > a');
  }

  /** Clicks the modal's action link. */
  public async open(): Promise<void> {
    await this.modal.click();
  }
}
|
require('mocha-sinon')()

const assert = require('assert')
const { resolve } = require('path')

const Connector = require('../src/connector')
const { testProjectPath } = require('../../../test/support/paths')

// Fixture adapters: one that implements the full adapter API ("test-adapter")
// and one that implements nothing ("noop-adapter").
const testAdapterPath = resolve(__dirname, 'fixtures', 'test-adapter')
const noopAdapterPath = resolve(__dirname, 'fixtures', 'noop-adapter')
const componentsPath = resolve(testProjectPath, 'src', 'components')
const testFilePath = resolve(componentsPath, 'form', 'form.test')
const testAdapterOptions = { basedir: componentsPath }
const TestAdapter = require(testAdapterPath)

// Builds a minimal state object that registers the given adapter module for
// the ".test" file extension.
const stateWithModule = module => ({
  config: {
    source: {
      components: componentsPath
    },
    adapters: {
      test: {
        module,
        options: testAdapterOptions
      }
    }
  }
})

const state = stateWithModule(testAdapterPath)
const stateNoop = stateWithModule(noopAdapterPath)

describe('Connector', () => {
  afterEach(function () {
    this.sinon.restore()
  })

  it('should throw error if the adapter cannot be resolved', async () => {
    const stateWithNonExistingAdapter = stateWithModule('doesnotexist')
    try {
      await Connector.setup(stateWithNonExistingAdapter)
    } catch (error) {
      assert(error)
    }
  })

  describe('#setup', () => {
    it('should call the adapters registerComponentFile function', async function () {
      this.sinon.stub(TestAdapter, 'registerComponentFile')
      await Connector.setup(state)

      assert(TestAdapter.registerComponentFile.calledOnce)
      assert(TestAdapter.registerComponentFile.calledWith(testAdapterOptions, testFilePath))
    })

    it('should be no op if there are no adapters', async function () {
      this.sinon.stub(TestAdapter, 'registerComponentFile')
      const state = { config: { source: { components: componentsPath }, adapters: { } } }
      await Connector.setup(state)

      assert(TestAdapter.registerComponentFile.notCalled)
    })
  })

  describe('#registerComponentFile', () => {
    it('should call the adapters registerComponentFile function', async function () {
      this.sinon.stub(TestAdapter, 'registerComponentFile')
      await Connector.registerComponentFile(state, testFilePath)

      assert(TestAdapter.registerComponentFile.calledOnce)
      assert(TestAdapter.registerComponentFile.calledWith(testAdapterOptions, testFilePath))
    })
  })

  describe('#render', () => {
    it('should call the adapter render function with the options, the template id and data', async function () {
      this.sinon.stub(TestAdapter, 'render').returns('')
      const templatePath = './src/templates/my-template.test'
      const data = { myData: 1 }
      await Connector.render(state, templatePath, data)

      assert(TestAdapter.render.calledOnce)
      assert(TestAdapter.render.calledWith(testAdapterOptions, templatePath, data))
    })

    it('should throw error if the adapter does not implement the render function', async () => {
      try {
        await Connector.render(stateNoop, './src/templates/my-template.test', {})
      } catch (error) {
        assert(error)
      }
    })

    it('should throw error if the adapter for the filetype is missing', async () => {
      try {
        await Connector.render(stateNoop, './src/templates/my-template.unknown', {})
      } catch (error) {
        assert(error)
      }
    })
  })

  describe('#filesForComponent', () => {
    it('should call the adapters filesForComponent function', async function () {
      this.sinon.stub(TestAdapter, 'filesForComponent')
      await Connector.filesForComponent(state, 'test', 'button')

      assert(TestAdapter.filesForComponent.calledOnce)
      assert(TestAdapter.filesForComponent.calledWith('button'))
    })

    it('should return an empty array if the adapter does not implement the filesForComponent function', async () => {
      const result = await Connector.filesForComponent(stateNoop, 'test', 'button')

      assert.equal(result.length, 0)
    })
  })

  describe('#filesForVariant', () => {
    it('should call the adapters filesForVariant function', async function () {
      this.sinon.stub(TestAdapter, 'filesForVariant')
      await Connector.filesForVariant(state, 'test', 'button', 'primary')

      assert(TestAdapter.filesForVariant.calledOnce)
      assert(TestAdapter.filesForVariant.calledWith('button', 'primary'))
    })

    it('should return an empty array if the adapter does not implement the filesForVariant function', async () => {
      const result = await Connector.filesForVariant(stateNoop, 'test', 'button', 'primary')

      assert.equal(result.length, 0)
    })
  })
})
|
import Cookies from 'js-cookie';
// Axios plugin: toggles the global loading flag around every request,
// forwards the CSRF token cookie, and redirects to /login on 401 responses.
export default function({ store, redirect, route, $axios }) {
  $axios.onRequest(config => {
    store.commit('common/loading', true);
    // Forward the CSRF token cookie as a request header when present.
    const token = Cookies.get('DEMOAPP-XSRF-TOKEN');
    if (token) {
      config.headers.common['DEMOAPP-XSRF-TOKEN'] = token;
    }
  });

  $axios.onResponse(response => {
    store.commit('common/loading', false);
  });

  $axios.onError(error => {
    store.commit('common/loading', false);
    store.commit('common/transaction', false);
    if (error.response) {
      if (error.response.status === 401) {
        // BUGFIX: was `error.response.data || error.response.data.message`,
        // which stored the whole payload object as the message and threw
        // when the payload was falsy. Prefer the server-provided message.
        const data = error.response.data;
        const message = (data && data.message) || data;
        store.commit('common/error', {
          message,
          status: error.response.status,
          code: 401
        });
        // BUGFIX: the original checked `route.path !== '/login'` twice;
        // a single check is equivalent.
        if (route.path !== '/login') {
          redirect('/login');
        }
      }
    }
  });
}
|
<reponame>TCC-Aquaponia/sistema-aguaponia
const { Broker, Investment } = require('../../src/models').models;

// Model tests for Broker: attribute persistence, presence validation on
// name, and the has-many association with Investment.
describe('Broker', () => {
  describe('attributes', () => {
    it('should have name', async () => {
      const broker = await Broker.create({ name: 'Foo' });

      expect(broker.get('name')).toEqual('Foo');
    });
  });

  describe('validations', () => {
    it('should validate name', async () => {
      // build() does not persist, so validate() can be called in isolation.
      const broker = Broker.build();

      await expect(broker.validate()).rejects.toThrow();
    });
  });

  describe('relations', () => {
    it('should have many investments', async () => {
      // Nested create: the include option persists the child Investments too.
      const broker = await Broker.create(
        {
          name: 'Foo',
          Investments: [{ name: 'A' }, { name: 'B' }],
        },
        { include: [Investment] }
      );

      expect(broker.get('Investments')).toHaveLength(2);
    });
  });
});
|
// import { ethers } from "ethers";
/**
 * Minimal typing for the provider object injected by MetaMask
 * (the shape matches an EIP-1193-style provider — confirm against the
 * MetaMask docs before relying on specific events).
 * NOTE(review): `extends Object` is a no-op for interfaces; kept as-is.
 */
export interface MetaMaskProvider extends Object {
  /** True for the MetaMask provider. */
  isMetaMask: boolean;
  /** Whether the provider currently reports a connection. */
  isConnected: () => boolean;
  /** JSON-RPC request entry point. */
  request: (request: {
    method: string;
    params?: any[] | undefined;
  }) => Promise<any>;
  /** Subscribe to provider events. */
  on: (event: string, callback: (param: any) => void) => void;
}
|
#include <iostream>
// Assume MeshId is a typedef or class representing the identifier of a 3D mesh
// Identifier of a 3D mesh (alias kept interface-compatible with the
// original typedef).
using MeshId = int;

// A placed instance of a mesh, carrying a static/dynamic flag.
class Instance {
private:
    bool static_flag;
    MeshId mesh;

public:
    Instance(MeshId id, bool isStatic) : static_flag(isStatic), mesh(id) {}

    // True when the instance is marked static.
    bool isStatic() const { return static_flag; }

    // Mark the instance as static (true) or dynamic (false).
    void setStatic(bool b) { static_flag = b; }

    // Identifier of the mesh this instance refers to.
    MeshId getMeshId() const { return mesh; }
};
int main() {
// Example usage
MeshId mesh1 = 1;
Instance instance1(mesh1, true);
std::cout << "Instance 1 is static: " << instance1.isStatic() << std::endl;
instance1.setStatic(false);
std::cout << "Instance 1 is static: " << instance1.isStatic() << std::endl;
MeshId mesh2 = 2;
Instance instance2(mesh2, false);
std::cout << "Instance 2 mesh ID: " << instance2.getMeshId() << std::endl;
return 0;
} |
<reponame>krrrr38/mackerel-client-scala
package com.krrrr38.mackerel4s
package api
import dispatch._
import com.krrrr38.mackerel4s.model.Types.{ ApiKey, Path }
/**
 * Shared plumbing for Mackerel API clients: builds the base dispatch
 * request with the auth and user-agent headers pre-applied.
 */
trait MackerelClientBase {
  // Client configuration: base URL and header key names.
  val setting: ClientSetting
  // API key sent on every request via the auth header.
  val apiKey: ApiKey
  val userAgent: String

  // Request template: auth + user-agent headers, JSON content type.
  val baseRequest: Req =
    Req(_
      .addHeader(setting.AUTH_HEADER_KEY, apiKey)
      .addHeader(setting.USER_AGENT_KEY, userAgent)
    )
      .setContentType("application/json", "UTF-8")

  // Builds a request for the given API path on top of the template.
  val client = (path: Path) =>
    baseRequest.setUrl(setting.BASE_URL + path)
}
|
<gh_stars>100-1000
package com.xyoye.common_component.weight.swipe_menu;
/**
 * Possible swipe states reported by the swipe-menu widget.
 *
 * Created by guanaj on 2017/6/6.
 *
 * Modified by xyoye on 2020/6/24.
 */
public enum SwipeState {
    SWIPE_LEFT,
    SWIPE_RIGHT,
    SWIPE_TOP,
    SWIPE_BOTTOM,
    // No swipe menu open / swipe dismissed.
    SWIPE_CLOSE,
}
|
package io.github.vampirestudios.obsidian.addon_modules;
import io.github.vampirestudios.obsidian.Obsidian;
import io.github.vampirestudios.obsidian.api.obsidian.AddonModule;
import io.github.vampirestudios.obsidian.configPack.ObsidianAddon;
import io.github.vampirestudios.obsidian.minecraft.obsidian.*;
import io.github.vampirestudios.obsidian.utils.ModIdAndAddonPath;
import io.github.vampirestudios.obsidian.utils.RegistryUtils;
import net.minecraft.item.Item;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import static io.github.vampirestudios.obsidian.configPack.ObsidianAddonLoader.*;
public class Tools implements AddonModule {
    /**
     * Reads a tool definition from the given JSON file and registers the
     * corresponding item (pickaxe/shovel/hoe/axe) plus the tool entry itself.
     */
    @Override
    public void init(ObsidianAddon addon, File file, ModIdAndAddonPath id) throws FileNotFoundException {
        io.github.vampirestudios.obsidian.api.obsidian.item.ToolItem tool = Obsidian.GSON.fromJson(new FileReader(file), io.github.vampirestudios.obsidian.api.obsidian.item.ToolItem.class);
        try {
            if (tool == null) return;
            CustomToolMaterial material = new CustomToolMaterial(tool.material);
            Item.Settings settings = new Item.Settings().group(tool.information.getItemGroup())
                    .maxCount(tool.information.max_count).rarity(tool.information.getRarity());
            switch (tool.toolType) {
                case "pickaxe" -> RegistryUtils.registerItem(new PickaxeItemImpl(tool, material, tool.attackDamage, tool.attackSpeed, settings),
                        tool.information.name.id);
                case "shovel" -> RegistryUtils.registerItem(new ShovelItemImpl(tool, material, tool.attackDamage, tool.attackSpeed, settings),
                        tool.information.name.id);
                case "hoe" -> RegistryUtils.registerItem(new HoeItemImpl(tool, material, tool.attackDamage, tool.attackSpeed, settings),
                        tool.information.name.id);
                case "axe" -> RegistryUtils.registerItem(new AxeItemImpl(tool, material, tool.attackDamage, tool.attackSpeed, settings),
                        tool.information.name.id);
                // NOTE(review): an unknown toolType registers no item but the
                // tool is still recorded below — confirm that is intended.
            }
            register(TOOLS, "tool", tool.information.name.id, tool);
        } catch (Exception e) {
            // BUGFIX: the old catch dereferenced tool.information.name.id
            // unconditionally; when the failure was caused by those fields
            // being missing, the error report itself threw a secondary NPE.
            String toolId = "unknown";
            if (tool.information != null && tool.information.name != null && tool.information.name.id != null) {
                toolId = tool.information.name.id.toString();
            }
            failedRegistering("tool", toolId, e);
        }
    }

    @Override
    public String getType() {
        return "items/tools";
    }
}
|
<reponame>yugasun/serveless-egg-ssr-boilerplate<gh_stars>1-10
'use strict';
// https://yuque.com/easy-team/egg-react
// Webpack configuration for the egg-react SSR build.
module.exports = {
  devtool: 'source-map',
  // One entry per server-side-rendered page bundle.
  entry: {
    blog: 'app/web/page/blog/index.js',
    blogx: 'app/web/page/blog/index.jsx',
    list: 'app/web/page/list/index.jsx',
    detail: 'app/web/page/detail/index.jsx',
    intro: 'app/web/page/intro/index.jsx',
    async: 'app/web/page/example/async/index.jsx',
    'example/stateless': 'app/web/page/example/stateless.js',
    'example/node': 'app/web/page/example/node.jsx',
    'example/data': 'app/web/page/example/data.jsx'
  },
  plugins: [
    { imagemini: false },
    {
      // Copy vendored libs so the React bundles can be included directly
      // via <script> tags (original comment was garbled/mis-encoded Chinese).
      copy: [{ from: 'app/web/asset/lib', to: 'lib' }]
    }
  ]
  // customize(webpackConfig) {
  //   // During Node (server-side) rendering the script libs cannot be externals.
  //   if (webpackConfig.target === 'web') {
  //     webpackConfig.externals.push({
  //       react: 'ReactCoreLib.React',
  //       'react-dom': 'ReactCoreLib.ReactDOM'
  //     });
  //   }
  //   return webpackConfig;
  // }
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.