text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Testbed for NSD.
# By Wouter Wijngaards, NLnet Labs, 2006.
# BSD License.
# this version prefers gmake if available.
# adds variable LDNS for the LDNS path to use.
#
# Builds and tests NSD on every host listed in testdata/host_file.$USER,
# appending results to REPORT_FILE and full output to LOG_FILE.

# global settings
CONFIGURE_FLAGS=""
REPORT_FILE=testdata/testbed.report
LOG_FILE=testdata/testbed.log
# Quoted: $USER (and the resulting path) must survive unusual characters.
HOST_FILE="testdata/host_file.$USER"
# The per-user host list is mandatory; abort early with a clear message.
if test ! -f "$HOST_FILE"; then
	echo "No such file: $HOST_FILE"
	exit 1
fi
# like ssh but echoes the command being run (so the log shows each step).
function echossh()
{
	echo "> ssh $*"
	# "$@" passes each argument through as its own word; the original
	# unquoted $* re-split arguments that contained spaces (every remote
	# command string in this script does).
	ssh "$@"
}
# Compile and run NSD on platforms
function dotest()
# parameters: <host> <dir>
# host is name of ssh host
# dir is directory of nsd trunk on host
{
echo "$1 begin on "`date` | tee -a $REPORT_FILE
DISABLE=""
if test $IP6 = no; then
DISABLE="--disable-ipv6"
fi
if test x$LDNS != x; then
DISABLE="--with-ldns=$LDNS $DISABLE"
fi
if test x$LIBEVENT != x; then
DISABLE="--with-libevent=$LIBEVENT $DISABLE"
fi
cat >makeconf.mak.$$ << EOF
#configure: configure.ac
# $AC_CMD
# touch configure
Makefile: Makefile.in #configure
./configure $CONFIGURE_FLAGS $DISABLE
touch Makefile
EOF
scp makeconf.mak.$$ $1:$2
# determine make to use
tempx=`ssh $1 "cd $2; which gmake"`
MAKE_CMD=`ssh $1 "cd $2; if test -f '$tempx'; then echo $tempx; else echo $MAKE_CMD; fi"`
if test $SVN = yes; then
echossh $1 "cd $2; svn up"
echossh $1 "cd $2; $MAKE_CMD -f makeconf.mak.$$ configure"
else
# svn and autoconf locally
echo "fake svn via svnexport, tar, autoconf, bison, flex."
svn export svn+ssh://open.nlnetlabs.nl/svn/nsd/trunk unbound_ttt
(cd unbound_ttt; $AC_CMD; rm -r autom4te* .c-mode-rc.el .cvsignore)
if test $FIXCONFIGURE = yes; then
echo fixing up configure length test.
(cd unbound_ttt; mv configure oldconf; sed -e 's?while (test "X"?lt_cv_sys_max_cmd_len=65500; echo skip || while (test "X"?' <oldconf >configure; chmod +x ./configure)
fi
du unbound_ttt
rsync -vrcpz --rsync-path=/home/wouter/bin/rsync unbound_ttt $1:unbound_ttt
# tar czf unbound_ttt.tgz unbound_ttt
rm -rf unbound_ttt
# ls -al unbound_ttt.tgz
# scp unbound_ttt.tgz $1:unbound_ttt.tar.gz
# rm unbound_ttt.tgz
# echossh $1 "gtar xzf unbound_ttt.tar.gz && rm unbound_ttt.tar.gz"
fi
echossh $1 "cd $2; $MAKE_CMD -f makeconf.mak.$$ Makefile"
echossh $1 "cd $2; $MAKE_CMD all tests"
echossh $1 "cd $2; $MAKE_CMD doc"
if test $RUN_TEST = yes; then
echossh $1 "cd $2; bash testcode/do-tests.sh"
echossh $1 "cd $2/testdata; sh ../testcode/mini_tpkg.sh -q report" | tee -a $REPORT_FILE
fi
echossh $1 "cd $2; rm -f makeconf.mak.$$"
rm -f makeconf.mak.$$
echo "$1 end on "`date` | tee -a $REPORT_FILE
}
echo "on "`date`" by $USER." > $REPORT_FILE
echo "on "`date`" by $USER." > $LOG_FILE
# read host names
# Host file columns (space separated): <host> <desc> <dir> <vars>.
# Lines whose first field starts with '#' are skipped.
declare -a hostname desc dir vars
IFS=' '
i=0
while read a b c d; do
if echo $a | grep "^#" >/dev/null; then
continue # skip it
fi
# append after arrays
hostname[$i]=$a
desc[$i]=$b
dir[$i]=$c
vars[$i]=$d
i=$(($i+1))
done <$HOST_FILE
echo "testing on $i hosts"
# do the test
for((i=0; i<${#hostname[*]}; i=$i+1)); do
if echo ${hostname[$i]} | grep "^#" >/dev/null; then
continue # skip it
fi
# echo "hostname=[${hostname[$i]}]"
# echo "desc=[${desc[$i]}]"
# echo "dir=[${dir[$i]}]"
# echo "vars=[${vars[$i]}]"
# Per-host defaults; the host file's vars column may override any of
# these via the eval below (e.g. "IP6=no LDNS=/usr/local").
AC_CMD="libtoolize -c --force; autoconf && autoheader"
MAKE_CMD="make"
SVN=yes
IP6=yes
FIXCONFIGURE=no
RUN_TEST=yes
LDNS=
LIBEVENT=
eval ${vars[$i]}
echo "*** ${hostname[$i]} ${desc[$i]} ***" | tee -a $LOG_FILE | tee -a $REPORT_FILE
dotest ${hostname[$i]} ${dir[$i]} 2>&1 | tee -a $LOG_FILE
done
echo "done"
|
<filename>src/ch13/ex133.java<gh_stars>0
package ch13;
import java.io.*;
import static java.lang.System.*;
/**
* Project: ch13
* Date: 2/26/2018
*
* @author <NAME>
*/
/**
 * Reads employee name/number pairs from standard input, writes each record
 * to a binary file via DataOutputStream, then reads the file back with
 * DataInputStream and prints a formatted listing.
 */
public final class ex133
{
    private final int EOF = -1; // End of file char code
    private final int LINE_FEED = 10; // Line feed char code
    private String filename,
    nameInput, // Holds the employee's name
    numInput; // Holds the employee's number
    private int keyEntered; // User's keystrokes
    // To disable no-arg constructor
    private ex133() {}
    /**
     * This method starts the process by asking for filename, it handles
     * the exceptions and the loop for input.
     */
    public void start()
    {
        try (BufferedReader console =
                new BufferedReader(new InputStreamReader(in)))
        {
            out.print("\nEnter output filename: ");
            filename = console.readLine();
            out.println("\n======== INPUT =========\n");
            try (DataOutputStream file =
                    new DataOutputStream(new FileOutputStream(filename)))
            {
                // Collect records until the user signals end-of-input
                // (keyEntered becomes EOF during name entry).
                do {
                    getNameInput(console);
                    getNumInput(console);
                    writeToFile(file);
                    out.println();
                }
                while (keyEntered != EOF);
            }
            // Stream is closed by try-with-resources before reading back.
            readFromFile();
        }
        catch (IOException e)
        {
            err.println("Error: " + e.getMessage());
            exit(1);
        }
    }
    /**
     * This method will record keystroke inputs, and only stage alphabetic
     * characters for file writing. (Since name doesn't have numeric characters)
     * @param console Buffered stream attach to "standard" input stream.
     * @throws IOException If an I/O error occurs.
     */
    private void getNameInput(BufferedReader console) throws IOException
    {
        nameInput = "";
        out.print("Enter employee name: ");
        // Read character-by-character until line feed (10) or end of stream.
        // NOTE(review): only '\n' terminates a field, so a Windows '\r'
        // would be kept in nameInput; writeToFile's trim() removes it at
        // the edges — confirm this is acceptable for the target platform.
        while ((keyEntered = console.read()) != LINE_FEED)
        {
            if (keyEntered == EOF)
                break;
            // Digits are filtered out; all other characters are kept.
            if (!Character.isDigit((char) keyEntered))
                nameInput = nameInput.concat(String.valueOf((char) keyEntered));
        }
    }
    /**
     * This method will record keystroke inputs, and only stage numeric
     * characters for file writing. If empty the default value is (0) zero.
     * @param console Buffered stream attach to "standard" input stream.
     * @throws IOException If an I/O error occurs.
     */
    private void getNumInput(BufferedReader console) throws IOException
    {
        // If name entry already hit end-of-stream, skip the prompt entirely.
        if (keyEntered == EOF)
        {
            numInput = "0";
            return;
        }
        numInput = "";
        out.print("Enter employee number: ");
        while ((keyEntered = console.read()) != LINE_FEED)
        {
            if (keyEntered == EOF)
                break;
            // Only digits are staged; anything else is silently dropped.
            if (Character.isDigit((char) keyEntered))
                numInput = numInput.concat(String.valueOf((char) keyEntered));
        }
        if (numInput.isEmpty())
            numInput = "0";
    }
    /**
     * This method write primitive Java data types to an output stream
     * in a portable way.
     * @param file The underlying output stream, to be saved for later use.
     * @throws IOException If an I/O error occurs.
     */
    private void writeToFile(DataOutputStream file) throws IOException
    {
        // Skip fully-empty records (e.g. the final EOF-only iteration).
        if (!numInput.equals("0") || !nameInput.isEmpty())
        {
            file.writeUTF(nameInput.trim());
            file.writeInt(Integer.valueOf(numInput));
            file.flush();
        }
    }
    /**
     * This method read primitive Java data types from an underlying input
     * stream in a machine-independent way.
     * @throws IOException If an I/O error occurs.
     */
    private void readFromFile() throws IOException
    {
        out.println("\n======== OUTPUT ========\n");
        try (DataInputStream file =
                new DataInputStream(new FileInputStream(filename)))
        {
            // EOFException is DataInputStream's normal end-of-file signal;
            // it terminates the read loop rather than indicating an error.
            while(true)
            {
                try
                {
                    out.printf("%-15s %8d\n", file.readUTF(), file.readInt());
                }
                catch (EOFException e)
                {
                    out.println();
                    break;
                }
            }
        }
    }
    public static void main(String[] args)
    {
        new ex133().start();
    }
}
#!/bin/sh
#
# Copyright 2016 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Launcher: sources setenv.sh, seeds the winery repository from its
# -init template when shipped, then starts the application jar.

DIRNAME=`dirname "$0"`
RUNHOME=`cd "$DIRNAME/"; pwd`
echo @RUNHOME@ "$RUNHOME"

# setenv.sh provides JAVA_BASE, Main_Class, APP_INFO, Main_JAR, httpPort.
if [ -f "$RUNHOME/setenv.sh" ]; then
    . "$RUNHOME/setenv.sh"
else
    # fixed grammar of the original "can not found" message
    echo "cannot find $RUNHOME/setenv.sh"
fi

echo ================== ENV_INFO =============================================
echo @RUNHOME@ "$RUNHOME"
echo @JAVA_BASE@ "$JAVA_BASE"
echo @Main_Class@ "$Main_Class"
echo @APP_INFO@ "$APP_INFO"
echo @Main_JAR@ "$Main_JAR"
echo ==========================================================================

echo start "$APP_INFO" ...

JAVA="$JAVA_HOME/bin/java"
JAVA_OPTS="-Xms50m -Xmx128m"
# Remote-debug agent on a fixed port; suspend=n so startup is not blocked.
port=8787
JAVA_OPTS="$JAVA_OPTS -Xdebug -Xnoagent -Djava.compiler=NONE -Xrunjdwp:transport=dt_socket,address=$port,server=y,suspend=n"
export CATALINA_BASE="$RUNHOME"
repo_dir=winery-repository
repo_dir_init=winery-repository-init

echo ================== RUN_INFO =============================================
echo @JAVA_HOME@ "$JAVA_HOME"
echo @JAVA@ "$JAVA"
echo @JAVA_OPTS@ "$JAVA_OPTS"
echo @httpPort@ "$httpPort"
echo @CATALINA_BASE@ "$CATALINA_BASE"
echo @repo_dir@ "$repo_dir"
echo @repo_dir_init@ "$repo_dir_init"
echo ==========================================================================

# Abort if the run directory is not reachable (previously ignored).
cd "$RUNHOME" || exit 1

if [ ! -d "$repo_dir" ]; then
    echo there is no "$repo_dir" in "$RUNHOME"
    mkdir "$repo_dir"
fi

# Seed/refresh the repository from the init template when it is present.
if [ -d "$repo_dir_init" ]; then
    echo found repository init directory "$repo_dir_init" in "$RUNHOME",copy it to "$repo_dir"
    \cp -rf "$repo_dir_init"/* "$repo_dir"
fi

echo @JAVA@ "$JAVA" $JAVA_OPTS -jar "$RUNHOME/$Main_JAR"
echo @JAVA_CMD@
# NB: $JAVA_OPTS must stay unquoted so each option becomes its own argv word.
"$JAVA" $JAVA_OPTS -jar "$RUNHOME/$Main_JAR" -httpPort "$httpPort"
|
#!/bin/bash
# Install and initialize the Azure CycleCloud CLI against a server, then
# write a ~/.cycle/pogo.ini section for the CycleCloud storage locker.
#
# Args: <fqdn> <admin_user> <password> <resource_group> <storage_key>
set -e

fqdn=$1
admin_user=$2
password=$3
resource_group=$4            # currently unused; kept for caller compatibility
cyclecloud_storage_key=$5

# Installing CycleCloud CLI
echo "Getting CLI binaries..."
# Quoted expansions throughout: the password and fqdn may contain
# characters the shell would otherwise split or glob.
wget --no-check-certificate "https://$fqdn/download/tools/cyclecloud-cli.zip"
unzip -o cyclecloud-cli.zip
pushd cyclecloud-cli-installer/
echo "Installing CLI..."
./install.sh -y
echo "Initializing CLI..."
# The cluster name is the first label of the FQDN.
name=$(echo "$fqdn" | cut -d'.' -f1)
echo "$name"
~/bin/cyclecloud initialize --force --batch \
    --name "$name" \
    --url="https://$fqdn" \
    --verify-ssl=false \
    --username="$admin_user" \
    --password="${password}"
~/bin/cyclecloud config list
popd
rm cyclecloud-cli.zip
rm -rf cyclecloud-cli-installer

# Setup POGO
# Extract the storage account name from the locker URL (strip scheme and path).
cyclecloud_storage_account=$(~/bin/cyclecloud locker list | sed -e 's|^[^/]*//||' -e 's|/.*$||')
pogo_config_file=$HOME/.cycle/pogo.ini
touch "$pogo_config_file"
# Only append the section once, so re-running the script is idempotent.
if ! grep -q "${cyclecloud_storage_account}-storage" "$pogo_config_file"; then
    echo "Creating ~/.cycle/pogo.ini"
    cat <<EOF >> "$pogo_config_file"
[pogo ${cyclecloud_storage_account}-storage]
type=az
matches=az://$cyclecloud_storage_account/cyclecloud
access_key=$cyclecloud_storage_key
EOF
    echo "pogo.ini file created"
fi
|
var express = require('express');
var router = express.Router();
var dbconnection = require("../database");
// DB "connection test" endpoint.
// NOTE(review): dbconnection.destroy() tears the connection down rather
// than testing it — later requests that query through dbconnection will
// fail. Confirm this teardown behavior is intended.
router.get("/data/test/connection",function(req, res, next){
dbconnection.destroy();
res.json({result:"complete"});
});
// Ad-hoc query endpoint.
// SECURITY(review): this executes a raw SQL string taken straight from the
// request body — full SQL injection by design. It must be removed or access
// restricted before production; parameterization cannot help when the whole
// statement is attacker-supplied.
router.post("/data/test/query", function(req, res, next){
    var query = req.body.query;
    dbconnection.query(query, function(err, rows, fields){
        if (!err) {
            res.json({result: true, rows: rows});
        } else {
            res.json({result: false, error: err});
        }
    });
}); // fixed: original statement was missing its terminating semicolon
/* GET home page: hospitals with an active session skip straight to main. */
router.get('/', function(req, res, next) {
    if (!req.session.hosp_info) {
        res.render('index.html');
    } else {
        res.redirect('/page/main');
    }
});
// Main page: only reachable with an authenticated hospital session.
router.get('/page/main', function(req, res, next) {
    if (req.session.hosp_info) {
        res.render('main.html');
    } else {
        res.redirect('/');
    }
});
// Diagnosis page for a single reservation; requires a hospital session.
router.get('/page/diag/:resv_id', function(req, res, next) {
    var reservationId = req.params.resv_id;
    if (req.session.hosp_info) {
        res.render('diagnosis.ejs', {resv_id: reservationId});
    } else {
        res.redirect('/');
    }
});
module.exports = router;
|
<reponame>theleoji/lumos
import React from "react"
import { graphql } from "gatsby"
import { MDXRenderer } from "gatsby-plugin-mdx"
import Layout from "../components/layout"
import SEO from "../components/seo"
/**
 * Renders a single MDX page. Gatsby injects the pageQuery result (defined
 * below in this file) as this.props.data; the page <h1> is suppressed on
 * the site root so the layout's own heading is not duplicated.
 */
class PageTemplate extends React.Component {
  render() {
    const page = this.props.data.mdx
    return (
      <Layout location={this.props.location}>
        <SEO
          title={page.frontmatter.title}
          description={page.frontmatter.description || page.excerpt}
        />
        {/* no per-page <h1> on the homepage */}
        {this.props.location.pathname === "/" ? null : (
          <h1>{page.frontmatter.title}</h1>
        )}
        <MDXRenderer>{page.body}</MDXRenderer>
      </Layout>
    )
  }
}
export default PageTemplate
// Page-level GraphQL query; Gatsby runs it at build time and passes the
// result to PageTemplate as the `data` prop. $slug is supplied when pages
// are created from MDX files.
export const pageQuery = graphql`
query PageBySlug($slug: String!) {
mdx(fields: { slug: { eq: $slug } }) {
id
excerpt(pruneLength: 160)
body
frontmatter {
title
description
}
}
}
`
|
<reponame>FXTD-ODYSSEY/TimmyPKM
// CodeMirror, copyright (c) by <NAME> and others
// Distributed under an MIT license: https://codemirror.net/LICENSE
! function (t) {
"object" == typeof exports && "object" == typeof module ? t(require("../../lib/codemirror")) : "function" == typeof define && define.amd ? define(["../../lib/codemirror"], t) : t(CodeMirror)
}(function (W) {
"use strict";
var I = "CodeMirror-hint-active";
// Completion controller (minified name `o`): tracks the editor instance,
// options, the popup widget, debounce/tick counters, and the cursor/line
// state captured at activation time; re-evaluates on every cursor move.
function o(t, i) {
    this.cm = t, this.options = i, this.widget = null, this.debounce = 0, this.tick = 0, this.startPos = this.cm.getCursor("start"), this.startLen = this.cm.getLine(this.startPos.line).length - this.cm.getSelection().length;
    var e = this;
    t.on("cursorActivity", this.activityFunc = function () {
        e.cursorActivity()
    })
}
W.showHint = function (t, i, e) {
if (!i) return t.showHint(e);
e && e.async && (i.async = !0);
var n = {
hint: i
};
if (e)
for (var o in e) n[o] = e[o];
return t.showHint(n)
}, W.defineExtension("showHint", function (t) {
t = function (t, i, e) {
var n = t.options.hintOptions,
o = {};
for (var s in l) o[s] = l[s];
if (n)
for (var s in n) void 0 !== n[s] && (o[s] = n[s]);
if (e)
for (var s in e) void 0 !== e[s] && (o[s] = e[s]);
o.hint.resolve && (o.hint = o.hint.resolve(t, i));
return o
}(this, this.getCursor("start"), t);
var i = this.listSelections();
if (!(1 < i.length)) {
if (this.somethingSelected()) {
if (!t.hint.supportsSelection) return;
for (var e = 0; e < i.length; e++)
if (i[e].head.line != i[e].anchor.line) return
}
this.state.completionActive && this.state.completionActive.close();
var n = this.state.completionActive = new o(this, t);
n.options.hint && (W.signal(this, "startCompletion", this), n.update(!0))
}
}), W.defineExtension("closeHint", function () {
this.state.completionActive && this.state.completionActive.close()
});
var s = window.requestAnimationFrame || function (t) {
return setTimeout(t, 1e3 / 60)
},
c = window.cancelAnimationFrame || clearTimeout;
// Normalize a completion entry to its insert text: entries may be plain
// strings or objects carrying a `text` property.
function B(t) {
    return "string" == typeof t ? t : t.text
}
// Walk up from node `i` to find the <li> hint row that is a direct child
// of hint-list element `t`; returns undefined when `i` is outside any row.
function K(t, i) {
    for (; i && i != t;) {
        if ("LI" === i.nodeName.toUpperCase() && i.parentNode == t) return i;
        i = i.parentNode
    }
}
function n(o, t) {
this.completion = o, this.data = t, this.picked = !1;
var e = this,
s = o.cm,
c = s.getInputField().ownerDocument,
r = c.defaultView || c.parentWindow,
l = this.hints = c.createElement("ul"),
i = o.cm.options.theme;
l.className = "CodeMirror-hints " + i, this.selectedHint = t.selectedHint || 0;
for (var n = t.list, h = 0; h < n.length; ++h) {
var a = l.appendChild(c.createElement("li")),
u = n[h],
f = "CodeMirror-hint" + (h != this.selectedHint ? "" : " " + I);
null != u.className && (f = u.className + " " + f), a.className = f, u.render ? u.render(a, t, u) : a.appendChild(c.createTextNode(u.displayText || B(u))), a.hintId = h
}
var p, d, m, g = o.options.container || c.body,
v = s.cursorCoords(o.options.alignWithWord ? t.from : null),
y = v.left,
w = v.bottom,
H = !0,
C = 0,
b = 0;
g !== c.body && (d = (p = -1 !== ["absolute", "relative", "fixed"].indexOf(r.getComputedStyle(g).position) ? g : g.offsetParent).getBoundingClientRect(), m = c.body.getBoundingClientRect(), C = d.left - m.left - p.scrollLeft, b = d.top - m.top - p.scrollTop), l.style.left = y - C + "px", l.style.top = w - b + "px";
var k = r.innerWidth || Math.max(c.body.offsetWidth, c.documentElement.offsetWidth),
A = r.innerHeight || Math.max(c.body.offsetHeight, c.documentElement.offsetHeight);
g.appendChild(l);
var x, T, S = l.getBoundingClientRect(),
M = S.bottom - A,
N = l.scrollHeight > l.clientHeight + 1,
F = s.getScrollInfo();
0 < M && (x = S.bottom - S.top, 0 < v.top - (v.bottom - S.top) - x ? (l.style.top = (w = v.top - x - b) + "px", H = !1) : A < x && (l.style.height = A - 5 + "px", l.style.top = (w = v.bottom - S.top - b) + "px", T = s.getCursor(), t.from.ch != T.ch && (v = s.cursorCoords(T), l.style.left = (y = v.left - C) + "px", S = l.getBoundingClientRect())));
var E, O = S.right - k;
if (0 < O && (S.right - S.left > k && (l.style.width = k - 5 + "px", O -= S.right - S.left - k), l.style.left = (y = v.left - O - C) + "px"), N)
for (var P = l.firstChild; P; P = P.nextSibling) P.style.paddingRight = s.display.nativeBarWidth + "px";
return s.addKeyMap(this.keyMap = function (t, n) {
var o = {
Up: function () {
n.moveFocus(-1)
},
Down: function () {
n.moveFocus(1)
},
PageUp: function () {
n.moveFocus(1 - n.menuSize(), !0)
},
PageDown: function () {
n.moveFocus(n.menuSize() - 1, !0)
},
Home: function () {
n.setFocus(0)
},
End: function () {
n.setFocus(n.length - 1)
},
Enter: n.pick,
Tab: n.pick,
Esc: n.close
};
/Mac/.test(navigator.platform) && (o["Ctrl-P"] = function () {
n.moveFocus(-1)
}, o["Ctrl-N"] = function () {
n.moveFocus(1)
});
var i = t.options.customKeys,
s = i ? {} : o;
function e(t, i) {
var e = "string" != typeof i ? function (t) {
return i(t, n)
} : o.hasOwnProperty(i) ? o[i] : i;
s[t] = e
}
if (i)
for (var c in i) i.hasOwnProperty(c) && e(c, i[c]);
var r = t.options.extraKeys;
if (r)
for (var c in r) r.hasOwnProperty(c) && e(c, r[c]);
return s
}(o, {
moveFocus: function (t, i) {
e.changeActive(e.selectedHint + t, i)
},
setFocus: function (t) {
e.changeActive(t)
},
menuSize: function () {
return e.screenAmount()
},
length: n.length,
close: function () {
o.close()
},
pick: function () {
e.pick()
},
data: t
})), o.options.closeOnUnfocus && (s.on("blur", this.onBlur = function () {
E = setTimeout(function () {
o.close()
}, 100)
}), s.on("focus", this.onFocus = function () {
clearTimeout(E)
})), s.on("scroll", this.onScroll = function () {
var t = s.getScrollInfo(),
i = s.getWrapperElement().getBoundingClientRect(),
e = w + F.top - t.top,
n = e - (r.pageYOffset || (c.documentElement || c.body).scrollTop);
if (H || (n += l.offsetHeight), n <= i.top || n >= i.bottom) return o.close();
l.style.top = e + "px", l.style.left = y + F.left - t.left + "px"
}), W.on(l, "dblclick", function (t) {
var i = K(l, t.target || t.srcElement);
i && null != i.hintId && (e.changeActive(i.hintId), e.pick())
}), W.on(l, "click", function (t) {
var i = K(l, t.target || t.srcElement);
i && null != i.hintId && (e.changeActive(i.hintId), o.options.completeOnSingleClick && e.pick())
}), W.on(l, "mousedown", function () {
setTimeout(function () {
s.focus()
}, 20)
}), this.scrollToActive(), W.signal(t, "select", n[this.selectedHint], l.childNodes[this.selectedHint]), !0
}
// Invoke a hint source uniformly regardless of its calling convention:
// legacy callback style (t.async), promise-returning, or plain synchronous.
function r(t, i, e, n) {
    var o;
    t.async ? t(i, n, e) : (o = t(i, e)) && o.then ? o.then(n) : n(o)
}
o.prototype = {
close: function () {
this.active() && (this.cm.state.completionActive = null, this.tick = null, this.cm.off("cursorActivity", this.activityFunc), this.widget && this.data && W.signal(this.data, "close"), this.widget && this.widget.close(), W.signal(this.cm, "endCompletion", this.cm))
},
active: function () {
return this.cm.state.completionActive == this
},
pick: function (t, i) {
var e = t.list[i],
n = this;
this.cm.operation(function () {
e.hint ? e.hint(n.cm, t, e) : n.cm.replaceRange(B(e), e.from || t.from, e.to || t.to, "complete"), W.signal(t, "pick", e), n.cm.scrollIntoView()
}), this.close()
},
cursorActivity: function () {
this.debounce && (c(this.debounce), this.debounce = 0);
var t = this.startPos;
this.data && (t = this.data.from);
var i, e = this.cm.getCursor(),
n = this.cm.getLine(e.line);
e.line != this.startPos.line || n.length - e.ch != this.startLen - this.startPos.ch || e.ch < t.ch || this.cm.somethingSelected() || !e.ch || this.options.closeCharacters.test(n.charAt(e.ch - 1)) ? this.close() : ((i = this).debounce = s(function () {
i.update()
}), this.widget && this.widget.disable())
},
update: function (i) {
var e, n;
null != this.tick && (n = ++(e = this).tick, r(this.options.hint, this.cm, this.options, function (t) {
e.tick == n && e.finishUpdate(t, i)
}))
},
finishUpdate: function (t, i) {
this.data && W.signal(this.data, "update");
var e = this.widget && this.widget.picked || i && this.options.completeSingle;
this.widget && this.widget.close(), (this.data = t) && t.list.length && (e && 1 == t.list.length ? this.pick(t, 0) : (this.widget = new n(this, t), W.signal(t, "shown")))
}
}, n.prototype = {
close: function () {
var t;
this.completion.widget == this && (this.completion.widget = null, this.hints.parentNode.removeChild(this.hints), this.completion.cm.removeKeyMap(this.keyMap), t = this.completion.cm, this.completion.options.closeOnUnfocus && (t.off("blur", this.onBlur), t.off("focus", this.onFocus)), t.off("scroll", this.onScroll))
},
disable: function () {
this.completion.cm.removeKeyMap(this.keyMap);
var t = this;
this.keyMap = {
Enter: function () {
t.picked = !0
}
}, this.completion.cm.addKeyMap(this.keyMap)
},
pick: function () {
this.completion.pick(this.data, this.selectedHint)
},
changeActive: function (t, i) {
var e;
t >= this.data.list.length ? t = i ? this.data.list.length - 1 : 0 : t < 0 && (t = i ? 0 : this.data.list.length - 1), this.selectedHint != t && ((e = this.hints.childNodes[this.selectedHint]) && (e.className = e.className.replace(" " + I, "")), (e = this.hints.childNodes[this.selectedHint = t]).className += " " + I, this.scrollToActive(), W.signal(this.data, "select", this.data.list[this.selectedHint], e))
},
scrollToActive: function () {
var t = this.completion.options.scrollMargin || 0,
i = this.hints.childNodes[Math.max(0, this.selectedHint - t)],
e = this.hints.childNodes[Math.min(this.data.list.length - 1, this.selectedHint + t)],
n = this.hints.firstChild;
i.offsetTop < this.hints.scrollTop ? this.hints.scrollTop = i.offsetTop - n.offsetTop : e.offsetTop + e.offsetHeight > this.hints.scrollTop + this.hints.clientHeight && (this.hints.scrollTop = e.offsetTop + e.offsetHeight - this.hints.clientHeight + n.offsetTop)
},
screenAmount: function () {
return Math.floor(this.hints.clientHeight / this.hints.firstChild.offsetHeight) || 1
}
}, W.registerHelper("hint", "auto", {
resolve: function (t, i) {
var e, c = t.getHelpers(i, "hint");
if (c.length) {
var n = function (t, n, o) {
var s = function (t, i) {
if (!t.somethingSelected()) return i;
for (var e = [], n = 0; n < i.length; n++) i[n].supportsSelection && e.push(i[n]);
return e
}(t, c);
! function i(e) {
if (e == s.length) return n(null);
r(s[e], t, o, function (t) {
t && 0 < t.list.length ? n(t) : i(e + 1)
})
}(0)
};
return n.async = !0, n.supportsSelection = !0, n
}
return (e = t.getHelper(t.getCursor(), "hintWords")) ? function (t) {
return W.hint.fromList(t, {
words: e
})
} : W.hint.anyword ? function (t, i) {
return W.hint.anyword(t, i)
} : function () {}
}
}), W.registerHelper("hint", "fromList", function (t, i) {
var e, n = t.getCursor(),
o = t.getTokenAt(n),
s = W.Pos(n.line, o.start),
c = n;
o.start < n.ch && /\w/.test(o.string.charAt(n.ch - o.start - 1)) ? e = o.string.substr(0, n.ch - o.start) : (e = "", s = n);
for (var r = [], l = 0; l < i.words.length; l++) {
var h = i.words[l];
h.slice(0, e.length) == e && r.push(h)
}
if (r.length) return {
list: r,
from: s,
to: c
}
}), W.commands.autocomplete = W.showHint;
var l = {
hint: W.hint.auto,
completeSingle: !0,
alignWithWord: !0,
closeCharacters: /[\s()\[\]{};:>,]/,
closeOnUnfocus: !0,
completeOnSingleClick: !0,
container: null,
customKeys: null,
extraKeys: null
};
W.defineOption("hintOptions", null)
}); |
package com.coltsoftware.liquidsledgehammer.subtransactions.strategies.description;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for IncludeExcludeDescriptionStrategy: a matcher that accepts
 * a description when it contains any included substring, unless an exclude
 * rule matches it as well.
 */
public final class IncludeExcludeDescriptionStrategyTests {
    private IncludeExcludeDescriptionStrategy strat;
    @Before
    public void setup() {
        // Fresh, empty strategy for every test.
        strat = new IncludeExcludeDescriptionStrategy();
    }
    @Test
    public void default_value() {
        // With no includes configured, nothing matches.
        assertFalse(strat.matches("a"));
    }
    @Test
    public void simple_includes() {
        strat.addInclude("abc");
        assertTrue(strat.matches("abc"));
    }
    @Test
    public void simple_includes_false() {
        strat.addInclude("abc");
        assertFalse(strat.matches("a"));
    }
    @Test
    public void simple_includes_two() {
        strat.addInclude("abc");
        strat.addInclude("def");
        assertTrue(strat.matches("abc"));
    }
    @Test
    public void simple_includes_two_substring() {
        // Substring containment suffices: "-abcd" contains "abc".
        strat.addInclude("abc");
        strat.addInclude("def");
        assertTrue(strat.matches("-abcd"));
    }
    @Test
    public void simple_includes_excludes_substring() {
        // An exclude rule overrides a matching include.
        strat.addInclude("abc");
        strat.addInclude("def");
        strat.addExclude("-abcd");
        assertFalse(strat.matches("-abcd"));
    }
}
|
function expand(refName: string): string | null {
const m = refName.match(/^npm:(.+)$/); // Using regex to match the format 'npm:<package-name>'
if (m === null) return null; // If the format doesn't match, return null
const pkg: string = m[1]; // Extract the package name from the matched group
return `https://www.npmjs.com/package/${pkg}`; // Construct and return the URL
} |
import * as application from "tns-core-modules/application"
import { DbResult, RESULT_TYPE } from "./dbresult";
import { Cursor } from "./cursor";
export class Sqlite {
private static self: Sqlite;
public readonly _dbname: string;
private _db: any;
private _isOpen: boolean;
private _isSqlite: boolean = true;
private _options: any;
private _resultType: RESULT_TYPE;
private _valuesType: RESULT_TYPE;
// Singleton-style constructor: the first constructed instance wins; any
// later `new Sqlite(...)` returns that first instance and IGNORES the new
// dbname/options. NOTE(review): confirm this "first writer wins" behavior
// is intended for callers opening a second database.
constructor(dbname: string, options?: {}) {
    if (Sqlite.self instanceof Sqlite) {
        return Sqlite.self;
    }
    this._dbname = dbname;
    this._isOpen = false;
    this._resultType = RESULT_TYPE.RESULTSASARRAY;
    this._valuesType = RESULT_TYPE.VALUESARENATIVE;
    this._options = options || {};
    // Check to see if it has a path, or if it is a relative dbname
    // dbname = "" - Temporary Database
    // dbname = ":memory:" = memory database
    if (dbname !== "" && dbname !== ":memory:") {
        dbname = Sqlite._getContext().getDatabasePath(dbname).getAbsolutePath();
        var path = dbname.substr(0, dbname.lastIndexOf('/') + 1);
        // Create "databases" folder if it is missing. This causes issues on Emulators if it is missing
        // So we create it if it is missing
        try {
            // NOTE(review): `var java = java || {}` declares a function-scoped
            // local that shadows the NativeScript global `java`; the hoisted
            // local is undefined at evaluation, so this is ALWAYS {} and the
            // File call below throws — the catch then logs and the mkdirs
            // step is silently skipped. Verify against a device run.
            var java = java || {};
            var javaFile = new java.io.File(path);
            if (!javaFile.exists()) {
                javaFile.mkdirs();
                javaFile.setReadable(true);
                javaFile.setWritable(true);
            }
        }
        catch (err) {
            // Returning null from a constructor is ignored by JavaScript;
            // the newly created instance is still handed back to the caller.
            return this._error(`Constructor Error: Unable to create folder\r\n${err}`);
        }
    }
    Sqlite.self = this;
    return this;
}
/** Log a Sqlite error to the console; returns null so callers can chain it. */
_error(message: string): null {
    const text = `Sqlite Error:\r\n${message}`;
    console.error(text);
    return null;
}
// Open (or create) the configured database.
// Resolves with this Sqlite instance; rejects with the underlying error.
openDatabase(): Promise<any> {
    return new Promise((resolve, reject) => {
        try {
            var flags = 0;
            // Callers may supply android-specific open flags via options.
            if (typeof this._options.androidFlags !== 'undefined') {
                flags = this._options.androidFlags;
            }
            this._db = this._openDatabase(this._dbname, flags);
        } catch (error) {
            this._error(`Unable to open database\r\n${error}`);
            return reject(error);
        }
        this._isOpen = true;
        return resolve(this);
    });
}
/**
 * Open the native Android SQLite database.
 * BUG FIX: the original `var android = android || {}` declared a local that
 * shadowed the NativeScript global `android`; the hoisted local was
 * undefined at evaluation, so the fallback `{}` was ALWAYS taken and every
 * call crashed with a TypeError on `.database`. Read the real global instead.
 * 0x10000000 is OR'd into the flags when opening an on-disk database
 * (kept byte-for-byte from the original call).
 */
private _openDatabase(dbname: string, flags: any) {
    const androidNs: any = (global as any).android;
    if (dbname === ":memory:") {
        return androidNs.database.sqlite.SQLiteDatabase.create(flags);
    } else {
        return androidNs.database.sqlite.SQLiteDatabase.openDatabase(dbname, null, flags | 0x10000000);
    }
};
/**
 * Choose the row-materialization function for a query, combining the
 * per-call `mode` bitmask with the instance-wide defaults.
 * BUG FIX: the original masks were written as `mode & A | B`, which by
 * operator precedence parses as `(mode & A) | B` — the second constant was
 * unconditionally OR'd in, so the "fall back to instance default" branch
 * could never fire and a bare RESULTSASARRAY mode wrongly selected string
 * values. The intended grouping is `mode & (A | B)`.
 */
private _getResultEngine(mode: RESULT_TYPE): Function {
    if (mode == null || mode === 0) return DbResult.getResults;

    var resultType = (mode & (RESULT_TYPE.RESULTSASARRAY | RESULT_TYPE.RESULTSASOBJECT));
    if (resultType === 0) {
        resultType = this._resultType;
    }
    var valueType = (mode & (RESULT_TYPE.VALUESARENATIVE | RESULT_TYPE.VALUESARESTRINGS));
    if (valueType === 0) {
        valueType = this._valuesType;
    }
    if (resultType === RESULT_TYPE.RESULTSASOBJECT) {
        return valueType === RESULT_TYPE.VALUESARESTRINGS
            ? DbResult.asRowObjectString
            : DbResult.asRowObjectNative;
    }
    return valueType === RESULT_TYPE.VALUESARESTRINGS
        ? DbResult.asRowArrayString
        : DbResult.asRowArrayNative;
};
// Get or set SQLite's PRAGMA user_version:
//  - version(cb)     → cb(currentVersionNumber, error)
//  - version(number) → sets the version
//  - anything else   → runs the PRAGMA query and discards the result
version(valueOrCallback: number | Function): void {
    if (typeof valueOrCallback === 'function') {
        this.get('PRAGMA user_version', null, function (data, error) {
            // data[0] holds the version; parse to int for the callback.
            valueOrCallback(data && parseInt(data[0], 10), error);
        }, RESULT_TYPE.RESULTSASARRAY);
    } else if (!isNaN(valueOrCallback + 0)) {
        // `+ 0` coerces to a number before the NaN check and interpolation.
        this.execSQL('PRAGMA user_version = ' + (valueOrCallback + 0).toString());
    } else {
        this.get('PRAGMA user_version', undefined, undefined, RESULT_TYPE.RESULTSASARRAY);
    }
};
// Report whether openDatabase() has completed and close() has not been called.
isOpen() {
    return this._isOpen;
};
/**
 * Convert bound parameters into the string array the Android SQLite API
 * expects. Accepts either an array of values or a single scalar; null and
 * undefined entries are preserved as null placeholders.
 * (Idiom cleanup: Array.isArray + map replace the hand-rolled
 * Object.prototype.toString check and index loop — behavior unchanged.)
 */
private _toStringArray(params) {
    const values = Array.isArray(params) ? params : [params];
    return values.map(v => (v == null ? null : v.toString()));
};
// Set the default row shape (array vs object) used when a query does not
// specify one, propagating the combination to DbResult; returns the
// (possibly unchanged) current default.
resultType(type: RESULT_TYPE): RESULT_TYPE {
    if (type === RESULT_TYPE.RESULTSASARRAY) {
        this._resultType = RESULT_TYPE.RESULTSASARRAY;
        DbResult.setResultValueTypeEngine(this._resultType, this._valuesType);
    } else if (type === RESULT_TYPE.RESULTSASOBJECT) {
        this._resultType = RESULT_TYPE.RESULTSASOBJECT;
        DbResult.setResultValueTypeEngine(this._resultType, this._valuesType);
    }
    return this._resultType;
};
/**
 * Set the default value representation (native vs string) used when a
 * query does not specify one; returns the current value-type default.
 * BUG FIX: the original returned this._resultType (a copy/paste from the
 * sibling resultType() method), so callers querying the value type were
 * handed the wrong field.
 */
valueType(type: RESULT_TYPE): RESULT_TYPE {
    if (type === RESULT_TYPE.VALUESARENATIVE) {
        this._valuesType = RESULT_TYPE.VALUESARENATIVE;
        DbResult.setResultValueTypeEngine(this._resultType, this._valuesType);
    } else if (type === RESULT_TYPE.VALUESARESTRINGS) {
        this._valuesType = RESULT_TYPE.VALUESARESTRINGS;
        DbResult.setResultValueTypeEngine(this._resultType, this._valuesType);
    }
    return this._valuesType;
};
/** Close the underlying database; rejects if it was never opened. */
close(): Promise<any> {
    return new Promise((resolve, reject) => {
        if (!this._isOpen) {
            const error = "Database is already closed";
            this._error(error);
            reject(error);
            return;
        }
        this._db.close();
        this._isOpen = false;
        resolve();
    });
};
/**
 * Run a query and return only the FIRST row (null when no rows match).
 * @param sql      SQL text with '?' placeholders for params.
 * @param params   optional bind values (array or scalar).
 * @param callback optional node-style callback(results, error).
 * @param mode     optional RESULT_TYPE mask overriding instance defaults.
 */
get(sql: string, params?: {}, callback?: Function, mode?: RESULT_TYPE): Promise<any> {
    var hasCallback = typeof callback === 'function';
    return new Promise((resolve, reject) => {
        if (!this._isOpen) {
            var message = "Database is not open";
            this._error(message);
            if (hasCallback) callback(null, message);
            return reject(message);
        }
        var cursor: Cursor;
        try {
            if (params !== undefined) {
                cursor = this._db.rawQuery(sql, this._toStringArray(params));
            } else {
                cursor = this._db.rawQuery(sql, null);
            }
        } catch (error) {
            this._error(error);
            // BUG FIX: callback was invoked unconditionally here, throwing a
            // TypeError whenever the caller supplied no callback.
            if (hasCallback) callback(null, error);
            return reject(error);
        }
        // No Records
        if (cursor.getCount() === 0) {
            cursor.close();
            if (hasCallback) {
                callback(null, null);
            }
            return resolve(null);
        }
        var results;
        var resultEngine = this._getResultEngine(mode);
        try {
            cursor.moveToFirst();
            results = resultEngine(cursor);
            cursor.close();
        } catch (error) {
            this._error(error);
            if (hasCallback) callback(null, error); // BUG FIX: now guarded too
            return reject(error);
        }
        if (hasCallback) {
            callback(results, null);
        }
        resolve(results);
    });
}
// Execute a statement that returns no result set.
// Resolves with: insert → last_insert_rowid(); update/delete → changes();
// anything else → null.
execSQL(sql: string, params?: {}): Promise<any> {
    return new Promise((resolve, reject) => {
        if (!this._isOpen) {
            var message = "Database is not open";
            this._error(message);
            return reject(message);
        }
        // Need to see if we have to run any status queries afterwords
        var flags = 0;
        // Peek at the leading verb (first 7 chars incl. the trailing space).
        var queryType = sql.trim().substr(0, 7).toLowerCase();
        if (queryType === 'insert ') {
            flags = 1;
        } else if (queryType === 'update ' || queryType === 'delete ') {
            flags = 2;
        }
        try {
            if (params !== undefined) {
                this._db.execSQL(sql, this._toStringArray(params));
            } else {
                this._db.execSQL(sql);
            }
        } catch (error) {
            this._error(error);
            return reject(error);
        }
        switch (flags) {
            case 0:
                return resolve(null);
                break;
            case 1:
                // Report the newly inserted row id to the caller.
                this.get('select last_insert_rowid()', null, function (data, error) {
                    if (error) {
                        return reject(error);
                    } else {
                        return resolve(data && data[0]);
                    }
                }, RESULT_TYPE.RESULTSASARRAY | RESULT_TYPE.VALUESARENATIVE);
                break;
            case 2:
                // Report the number of affected rows.
                this.get('select changes()', null, function (data, error) {
                    if (error) {
                        return reject(error);
                    } else {
                        return resolve(data && data[0]);
                    }
                }, RESULT_TYPE.RESULTSASARRAY | RESULT_TYPE.VALUESARENATIVE);
                break;
            default:
                return resolve();
        }
    });
}
/**
 * Run a query and return ALL rows as an array (empty array when no rows).
 * Accepts all(sql, cb) or all(sql, params, cb); also returns a Promise.
 * BUG FIX: the Promise executor was a plain `function`, so `this` inside it
 * was NOT the Sqlite instance and every this._db / this._error access
 * crashed; an arrow function keeps the lexical `this`. Callback invocations
 * are now guarded, so error paths reject with the actual error instead of
 * calling reject(null) through the `callback = reject` alias.
 */
all(sql: string, params?: {} | Function, callback?: Function): Promise<any> {
    // Allow all(sql, callback) — shift the optional params argument.
    if (typeof params === 'function') {
        callback = params;
        params = undefined;
    }
    return new Promise((resolve, reject) => {
        var hasCallback = typeof callback === 'function';
        if (!this._isOpen) {
            var message = "Database is not open";
            this._error(message);
            if (hasCallback) callback(null, message);
            return reject(message);
        }
        var cursor: Cursor, count: number;
        try {
            if (params !== undefined) {
                cursor = this._db.rawQuery(sql, this._toStringArray(params));
            } else {
                cursor = this._db.rawQuery(sql, null);
            }
            count = cursor.getCount();
        } catch (error) {
            this._error(error);
            if (hasCallback) callback(null, error);
            return reject(error);
        }
        // No Records
        if (count === 0) {
            cursor.close();
            if (hasCallback) {
                callback([], null);
            }
            return resolve([]);
        }
        cursor.moveToFirst();
        var results = [];
        try {
            for (var i = 0; i < count; i++) {
                results.push(DbResult.getResults(cursor));
                cursor.moveToNext();
            }
            cursor.close();
        } catch (error) {
            if (hasCallback) callback(null, error);
            return reject(error);
        }
        if (hasCallback) {
            callback(results, null);
        }
        return resolve(results);
    });
}
/**
 * Run a query and invoke `callback(row, null)` once per result row.
 * @param sql - query to run
 * @param params - optional bind parameters, or the callback if omitted
 * @param callback - REQUIRED per-row callback
 * @param complete - optional completion callback(count, null); also receives errors
 * Resolves with the number of rows processed.
 */
each(sql: string, params?: {} | Function, callback?: Function, complete?: Function): Promise<any> {
    if (typeof params === 'function') {
        complete = callback;
        callback = params;
        params = undefined;
    }
    // BUG FIX: the executor was a plain `function`, so `this` was not the
    // Sqlite instance and `this._error` / `this._db` failed at runtime.
    // An arrow function keeps the lexical `this`.
    return new Promise((resolve, reject) => {
        // A per-row callback is required for `each`.
        if (typeof callback !== 'function') {
            const message = "Sqlite 'Each' function requires a callback";
            this._error(message);
            return reject(message);
        }
        // Errors are reported to `complete` when provided, else to `callback`.
        const errorCB = complete || callback;
        let cursor: Cursor, count: number;
        try {
            if (params !== undefined) {
                cursor = this._db.rawQuery(sql, this._toStringArray(params));
            } else {
                cursor = this._db.rawQuery(sql, null);
            }
            count = cursor.getCount();
        } catch (error) {
            this._error(error);
            errorCB(null, error);
            return reject(error);
        }
        // No records: report a count of zero.
        if (count === 0) {
            cursor.close();
            if (complete) {
                complete(0, null);
            }
            return resolve(0);
        }
        cursor.moveToFirst();
        try {
            for (let i = 0; i < count; i++) {
                callback(DbResult.getResults(cursor), null);
                cursor.moveToNext();
            }
            cursor.close();
        } catch (error) {
            this._error(error);
            errorCB(null, error);
            return reject(error);
        }
        if (complete) {
            complete(count, null);
        }
        return resolve(count);
    });
}
// Truthy when `obj` is a Sqlite wrapper (checks the `_isSqlite` marker);
// passes falsy inputs straight through, mirroring `obj && obj._isSqlite`.
isSqlite(obj) {
    if (!obj) {
        return obj;
    }
    return obj._isSqlite;
}
// Resolve an android Context for the static helpers (exists/delete/copy),
// falling back to reflective Android runtime lookups when the NativeScript
// application module has no context yet.
static _getContext() {
if (application.android.context) {
return (application.android.context);
}
// NOTE(review): `var java = java || {}` declares a *local* `java`, so the
// right-hand side reads the hoisted (undefined) local and yields `{}`, not
// the NativeScript global -- presumably meant to silence the TS compiler.
// Confirm the global `java` binding is actually reachable on these lines.
var java = java || {};
// Fallback 1: ask AppGlobals for the initial Application via reflection.
var ctx = java.lang.Class.forName("android.app.AppGlobals").getMethod("getInitialApplication", null).invoke(null, null);
if (ctx) return ctx;
// Fallback 2: ask ActivityThread for the current Application.
ctx = java.lang.Class.forName("android.app.ActivityThread").getMethod("currentApplication", null).invoke(null, null);
return ctx;
}
// Check whether a database file with the given name exists in the app's
// databases directory.
static exists(dbname: string): boolean {
var java = java || {};
// Resolve the platform-specific absolute path for this database name.
var dbName = Sqlite._getContext().getDatabasePath(dbname).getAbsolutePath();
var dbFile = new java.io.File(dbName);
return <boolean>dbFile.exists();
}
// Delete the named database file. The companion "-journal" file is removed
// only when the main database file existed.
static deleteDatabase(dbname: string) {
var java = java || {};
var dbName = Sqlite._getContext().getDatabasePath(dbname).getAbsolutePath();
var dbFile = new java.io.File(dbName);
if (dbFile.exists()) {
dbFile.delete();
// Also remove the rollback journal SQLite may have left behind.
dbFile = new java.io.File(dbName + '-journal');
if (dbFile.exists()) {
dbFile.delete();
}
}
}
/**
 * Copy a database bundled in the app's assets ("app/<dbname>") into the
 * application's databases directory so it can be opened normally.
 * @returns true on success, false on any failure.
 */
static copyDatabase(dbname: string) {
    var java = java || {};
    // Open the bundled asset as the input stream.
    var myInput = Sqlite._getContext().getAssets().open("app/" + dbname);
    // Strip any leading folder from the name. BUG FIX: the original tested
    // `if (dbname.indexOf('/'))`, which is truthy when there is NO slash
    // (indexOf returns -1) and falsy for a leading slash (index 0).
    if (dbname.indexOf('/') !== -1) {
        dbname = dbname.substring(dbname.indexOf('/') + 1);
    }
    var dbName = Sqlite._getContext().getDatabasePath(dbname).getAbsolutePath();
    var path = dbName.substr(0, dbName.lastIndexOf('/') + 1);
    // Create the "databases" folder if it is missing. This causes issues on
    // Emulators if it is missing, so we create it here.
    try {
        var javaFile = new java.io.File(path);
        if (!javaFile.exists()) {
            javaFile.mkdirs();
            javaFile.setReadable(true);
            javaFile.setWritable(true);
        }
    } catch (error) {
        console.error(`Sqlite Error: @copyDatabase\r\nCreating DB Folder Error\r\n${error}`);
        // Don't leak the asset stream on the early-exit path.
        myInput.close();
        return false;
    }
    // BUG FIX: write to the resolved absolute path `dbName`; the original
    // passed the bare `dbname`, creating the copy relative to the process
    // working directory instead of the databases folder computed above.
    var myOutput = new java.io.FileOutputStream(dbName);
    var success = true;
    try {
        // Transfer bytes from the input file to the output file.
        var buffer = java.lang.reflect.Array.newInstance(java.lang.Byte.class.getField("TYPE").get(null), 1024);
        var length;
        while ((length = myInput.read(buffer)) > 0) {
            myOutput.write(buffer, 0, length);
        }
    } catch (err) {
        success = false;
    } finally {
        // Always release both streams, even when the copy failed part-way.
        myOutput.flush();
        myOutput.close();
        myInput.close();
    }
    return success;
}
}
|
// src/components/CollapsedNavigationContainer.js (rbrtsmith/flexmenu)
import { connect } from 'react-redux';
import CollapsedNavigation from './CollapsedNavigation';
import { setCollapsedNavHeight } from '../actions';

// Expose only the collapsed-nav slice of the store to the component.
const mapStateToProps = state => ({
  collapsedNavOpen: state.collapsedNavOpen,
  collapsedNavHeight: state.collapsedNavHeight
});

// Wrap the height-setter action creator in a dispatching prop.
const mapDispatchToProps = dispatch => ({
  setCollapsedNavHeight: payload => {
    dispatch(setCollapsedNavHeight(payload));
  }
});

export default connect(mapStateToProps, mapDispatchToProps)(CollapsedNavigation);
|
import React from 'react';
import axios from 'axios';
function App() {
const [movie, setMovie] = React.useState('');
const [results, setResults] = React.useState([]);
const onChange = (e) => {
setMovie(e.target.value);
};
const search = () => {
axios
.get(`http://www.omdbapi.com/?s=${movie}&apikey=YOUR_API_KEY`)
.then((response) => {
setResults(response.data.Search);
});
};
return (
<div>
<input type="text" onChange={onChange} value={movie} />
<button onClick={search}>Search</button>
{results.map((result) => (
<p>{result.Title}</p>
))}
</div>
);
}
export default App; |
import test from 'ava';
import { numericCompare } from '@collectable/core';
import { empty, lastValue } from '../../src';
import { SortedMap, fromNumericArray } from '../test-utils';
// Shared fixtures populated once before the tests run.
let map: SortedMap, values: number[], lastIndex;
test.before(() => {
// Insert out of order, then sort the reference array so values[lastIndex]
// is the expected maximum key.
values = [13, 21, 34, 55, 1, 2, 3, 5, 8];
map = fromNumericArray(values);
values.sort(numericCompare);
lastIndex = values.length - 1;
});
test('returns the last value in the sorted index', t => {
// NOTE(review): the expected value is the string `#<key>` -- presumably
// fromNumericArray stores each number n as the value `#${n}`; confirm in
// test-utils before relying on this shape elsewhere.
t.deepEqual(lastValue(map), `#${values[lastIndex]}`);
});
test('returns undefined if the collection is empty', t => {
t.is(lastValue(empty()), void 0);
});
|
Java is one of the most popular programming languages. It is the first choice of many companies, developers, and students, and it is widely used for developing desktop, web, and mobile applications. Although languages such as Python and JavaScript have been steadily catching up since 2017, Java remains near the top: according to the TIOBE index, it is currently among the most popular languages in the world, together with C, Python, and C++. Other major programming languages in the top 10 include JavaScript, C#, Ruby, and Go. Java is used by more than 13 million developers and has long been a primary language of Android development. It is also widely used in many large enterprises, making it one of the best choices for developers who want to stay up to date in the field. |
import random


def random_string(length, characters):
    """Return a string of ``length`` characters, each drawn independently
    and uniformly (with replacement) from ``characters``."""
    picks = [random.choice(characters) for _ in range(length)]
    return ''.join(picks)
import { Injectable } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http';
import { environment } from '../../../../environments/environment';
import { Observable } from 'rxjs';
import { CrudService } from '../../shared/services/crud.service';
@Injectable({
  providedIn: 'root'
})
/**
 * Thin wrapper around CrudService for the tax administration endpoints
 * (tax classes, tax rates, and the country/zone lookups used by the forms).
 * Consistency pass: every method now declares `Observable<any>`, builds its
 * URL with a template literal, and ends with a semicolon (the original mixed
 * concatenation/templates and omitted several semicolons and return types).
 */
export class TaxService {
  constructor(
    private crudService: CrudService
  ) {
  }

  // --- Tax class services ---

  /** List tax classes matching the given query params. */
  getTaxClass(params): Observable<any> {
    return this.crudService.get(`/v1/private/tax/class`, params);
  }
  /** Delete the tax class with the given id. */
  deleteTaxClass(id): Observable<any> {
    return this.crudService.delete(`/v1/private/tax/class/${id}`);
  }
  /** Check whether a tax-class code is unique. */
  getUniqueTax(code): Observable<any> {
    return this.crudService.get(`/v1/private/tax/class/unique?code=${code}`);
  }
  /** Create a new tax class. */
  addTaxClasses(param): Observable<any> {
    return this.crudService.post(`/v1/private/tax/class`, param);
  }
  /** Update an existing tax class. */
  updateTaxClasses(taxClassID, params): Observable<any> {
    return this.crudService.put(`/v1/private/tax/class/${taxClassID}`, params);
  }
  /** Fetch a single tax class by id. */
  getTaxClassesDetails(param): Observable<any> {
    return this.crudService.get(`/v1/private/tax/class/${param}`);
  }

  // --- Country & state lookups ---

  /** List all countries. */
  getCountry(): Observable<any> {
    return this.crudService.get(`/v1/country`);
  }
  /** List zones (states/provinces) for a country code. */
  getBillingZone(value): Observable<any> {
    return this.crudService.get(`/v1/zones?code=${value}`);
  }

  // --- Tax rate services ---

  /** List tax rates matching the given query params. */
  getTaxRate(params): Observable<any> {
    return this.crudService.get(`/v1/private/tax/rates`, params);
  }
  /** Check whether a tax-rate code is unique. */
  getUniqueRate(code): Observable<any> {
    return this.crudService.get(`/v1/private/tax/rate/unique?code=${code}`);
  }
  /** Create a new tax rate. */
  addTaxRate(param): Observable<any> {
    return this.crudService.post(`/v1/private/tax/rate`, param);
  }
  /** Delete the tax rate with the given id. */
  deleteTaxRate(id): Observable<any> {
    return this.crudService.delete(`/v1/private/tax/rate/${id}`);
  }
  /** Fetch a single tax rate by id, localized to `lan`. */
  getTaxRateDetails(param, lan): Observable<any> {
    return this.crudService.get(`/v1/private/tax/rate/${param}/?lang=${lan}`);
  }
  /** Update an existing tax rate. */
  updateTaxRate(taxrateID, params): Observable<any> {
    return this.crudService.put(`/v1/private/tax/rate/${taxrateID}`, params);
  }
}
|
#!/bin/sh
# Print row counts for every table in the valid_dbp database (sanity check
# after a load). Prompts for the MySQL root password (-p with no value).
#
# Options are set here rather than in the shebang: "#!/bin/sh -v -e" is
# ignored when the script is invoked as "sh script.sh".
set -ve
mysql -uroot -p <<END_SQL1
use valid_dbp;
select count(*) as bucket_listing from bucket_listing;
select count(*) as bucket_verse_summary from bucket_verse_summary;
select count(*) as bibles from bibles;
select count(*) as bible_translations from bible_translations;
select count(*) as bible_filesets from bible_filesets;
select count(*) as bible_fileset_connections from bible_fileset_connections;
select count(*) as bible_fileset_copyrights from bible_fileset_copyrights;
select count(*) as bible_fileset_copyright_organizations from bible_fileset_copyright_organizations;
select count(*) as bible_fileset_tags from bible_fileset_tags;
select count(*) as access_group_filesets from access_group_filesets;
select count(*) as bible_books from bible_books;
select count(*) as bible_files from bible_files;
select count(*) as bible_file_tags from bible_file_tags;
select count(*) as bible_file_stream_bandwidths from bible_file_stream_bandwidths;
select count(*) as bible_file_stream_bytes from bible_file_stream_bytes;
select count(*) as bible_file_stream_ts from bible_file_stream_ts;
END_SQL1
# Activate your virtualenv
# Copy this to your shell
# Serve movie.sqlite with Datasette, reading titles/descriptions/licensing
# from metadata.json (default bind: http://127.0.0.1:8001).
datasette serve movie.sqlite --metadata metadata.json
import request from '@/utils/request'
// Fetch test cases matching `params` (sent as the query string).
export function getTestcase(params) {
return request({
url: '/testcase',
method: 'get',
params
})
}
// Create a test case.
// NOTE(review): creation uses PUT while updateTestcase below uses POST --
// the reverse of common REST conventions. Presumably this matches the
// backend's routes; confirm against the server before "fixing" it.
export function addTestcase(data) {
return request({
url: '/testcase',
method: 'put',
data
})
}
// Update an existing test case (see the PUT/POST note above addTestcase).
export function updateTestcase(data) {
return request({
url: '/testcase',
method: 'post',
data
})
}
|
import React, { Component } from 'react';
import { connect } from 'react-redux';
import PropTypes from 'prop-types';
import Book from '../presentational/Book';
import Actions from '../../actions/index';
import CategoryFilter from '../presentational/CategoryFilter';
import '../../styles/booklist.css';
// Container component: renders the category filter plus the list of books
// that match the current filter, wired to the redux store via connect().
class BooksList extends Component {
constructor(props) {
super(props);
this.handleRemoveBook = this.handleRemoveBook.bind(this);
this.handleFilterChange = this.handleFilterChange.bind(this);
}
// Dispatch removal of the given book (passed through to each <Book>).
handleRemoveBook(book) {
const { deleteBook } = this.props;
deleteBook(book);
}
// Dispatch a category-filter change from the <select> in CategoryFilter.
handleFilterChange(e) {
const { filterBook } = this.props;
filterBook(e.target.value);
}
render() {
const { books, filter } = this.props;
// 'All' disables filtering; any other value matches book.category exactly.
const filteredBooks = () => ((filter !== 'All') ? books.filter(book => book.category === filter) : books);
return (
<div className="booksList">
<div className="book-filter">
<CategoryFilter changeFilter={this.handleFilterChange} />
</div>
<div className="book-item">
{filteredBooks().map(book => (
<Book key={book.ID} book={book} onClick={this.handleRemoveBook} />
))}
</div>
</div>
);
}
}
// Map the books slice and the current category filter into props.
const mapStateToProps = state => ({
books: state.books,
filter: state.filters,
});
const mapDispatchToProps = dispatch => ({
deleteBook: book => {
dispatch(Actions.removeBook(book));
},
filterBook: category => {
dispatch(Actions.changeFilter(category));
},
});
export default connect(mapStateToProps, mapDispatchToProps)(BooksList);
BooksList.propTypes = {
books: PropTypes.arrayOf(PropTypes.shape({
ID: PropTypes.number,
title: PropTypes.string,
category: PropTypes.string,
})).isRequired,
deleteBook: PropTypes.func.isRequired,
filterBook: PropTypes.func.isRequired,
filter: PropTypes.string.isRequired,
};
|
// catboost/cuda: data provider builder header (repo metadata line removed)
#pragma once
#include "data_provider.h"
#include "binarizations_manager.h"
#include "cat_feature_perfect_hash_helper.h"
#include "binarized_features_meta_info.h"
#include "classification_target_helper.h"
#include <catboost/cuda/utils/helpers.h>
#include <catboost/libs/data/load_data.h>
#include <catboost/libs/data_types/pair.h>
#include <catboost/libs/helpers/cpu_random.h>
#include <catboost/libs/helpers/exception.h>
#include <catboost/libs/logging/logging.h>
#include <catboost/libs/options/loss_description.h>
#include <catboost/libs/pairs/util.h>
#include <catboost/libs/quantization/utils.h>
#include <library/threading/local_executor/fwd.h>
#include <util/random/shuffle.h>
#include <util/stream/file.h>
#include <util/system/atomic.h>
#include <util/system/atomic_ops.h>
#include <util/system/sem.h>
#include <util/system/spinlock.h>
namespace NCB {
struct TPathWithScheme;
}
namespace NCatboostCuda {
// Builds a TDataProvider from streamed pool data (implements NCB::IPoolBuilder).
// Feature values arriving row-block by row-block are buffered into per-feature
// byte blobs (see FeatureBlobs) and handed over in Finish().
class TDataProviderBuilder: public NCB::IPoolBuilder {
public:
TDataProviderBuilder(TBinarizedFeaturesManager& featureManager,
TDataProvider& dst,
bool isTest = false,
const int buildThreads = 1)
: FeaturesManager(featureManager)
, DataProvider(dst)
, IsTest(isTest)
, BuildThreads(buildThreads)
, CatFeaturesPerfectHashHelper(FeaturesManager)
{
}
// Mark feature ids as ignored; ignored features are dropped on every Add*.
template <class TContainer>
TDataProviderBuilder& AddIgnoredFeatures(const TContainer& container) {
for (const auto& f : container) {
IgnoreFeatures.insert(f);
}
return *this;
}
TDataProviderBuilder& SetTargetHelper(TSimpleSharedPtr<TClassificationTargetHelper> helper) {
TargetHelper = helper;
return *this;
}
TDataProviderBuilder& SetBinarizedFeaturesMetaInfo(const TBinarizedFloatFeaturesMetaInfo& binarizedFeaturesMetaInfo) {
BinarizedFeaturesMetaInfo = binarizedFeaturesMetaInfo;
return *this;
}
void Start(const TPoolMetaInfo& poolMetaInfo,
int docCount,
const TVector<int>& catFeatureIds) override;
TDataProviderBuilder& SetShuffleFlag(bool shuffle, ui64 seed = 0) {
ShuffleFlag = shuffle;
Seed = seed;
return *this;
}
void StartNextBlock(ui32 blockSize) override;
// Categorical values are stored as their string hash reinterpreted as float.
float GetCatFeatureValue(const TStringBuf& feature) override {
return ConvertCatFeatureHashToFloat(StringToIntHash(feature));
}
void AddCatFeature(ui32 localIdx,
ui32 featureId,
const TStringBuf& feature) override {
if (IgnoreFeatures.count(featureId) == 0) {
Y_ASSERT(FeatureTypes[featureId] == EFeatureValuesType::Categorical);
WriteFloatOrCatFeatureToBlobImpl(localIdx,
featureId,
ConvertCatFeatureHashToFloat(StringToIntHash(feature)));
}
}
void AddFloatFeature(ui32 localIdx, ui32 featureId, float feature) override {
if (IgnoreFeatures.count(featureId) == 0) {
switch (FeatureTypes[featureId]) {
case EFeatureValuesType::BinarizedFloat: {
// Quantize on the fly with this feature's borders and NaN mode.
ui8 binarizedFeature = NCB::Binarize<ui8>(
NanModes[featureId],
Borders[featureId],
feature
);
WriteBinarizedFeatureToBlobImpl(localIdx, featureId, binarizedFeature);
break;
}
case EFeatureValuesType::Float: {
WriteFloatOrCatFeatureToBlobImpl(localIdx, featureId, feature);
break;
}
default: {
CB_ENSURE(false, "Unsupported type " << FeatureTypes[featureId]);
}
}
}
}
void AddBinarizedFloatFeature(ui32 localIdx, ui32 featureId, ui8 binarizedFeature) override {
if (IgnoreFeatures.count(featureId) == 0) {
CB_ENSURE(FeatureTypes[featureId] == EFeatureValuesType::BinarizedFloat, "FeatureValueType doesn't match: expect BinarizedFloat, got " << FeatureTypes[featureId]);
WriteBinarizedFeatureToBlobImpl(localIdx, featureId, binarizedFeature);
}
}
// Writes a run of consecutive rows for one binarized feature; localIdx is
// advanced once per value in the pack.
void AddBinarizedFloatFeaturePack(ui32 localIdx, ui32 featureId, TConstArrayRef<ui8> binarizedFeaturePack) override {
if (IgnoreFeatures.count(featureId) == 0) {
CB_ENSURE(FeatureTypes[featureId] == EFeatureValuesType::BinarizedFloat, "FeatureValueType doesn't match: expect BinarizedFloat, got " << FeatureTypes[featureId]);
for (ui8 binarizedFeature : binarizedFeaturePack) {
WriteBinarizedFeatureToBlobImpl(localIdx, featureId, binarizedFeature);
++localIdx;
}
}
}
void AddAllFloatFeatures(ui32 localIdx, TConstArrayRef<float> features) override {
CB_ENSURE(features.size() == FeatureBlobs.size(),
"Error: number of features should be equal to factor count");
for (size_t featureId = 0; featureId < FeatureBlobs.size(); ++featureId) {
if (IgnoreFeatures.count(featureId) == 0) {
if (FeatureTypes[featureId] == EFeatureValuesType::Categorical) {
// Cat features arrive pre-hashed in this path; write raw value.
WriteFloatOrCatFeatureToBlobImpl(localIdx, featureId, features[featureId]);
} else {
AddFloatFeature(localIdx, featureId, features[featureId]);
}
}
}
}
void AddLabel(ui32 localIdx, const TStringBuf& value) final {
Labels[GetLineIdx(localIdx)] = value;
}
void AddTarget(ui32 localIdx, float value) override {
// Warn (at most once) about out-of-range targets instead of failing.
if (Y_UNLIKELY(!IsSafeTarget(value))) {
WriteUnsafeTargetWarningOnce(localIdx, value);
}
DataProvider.Targets[GetLineIdx(localIdx)] = value;
}
void AddWeight(ui32 localIdx, float value) override {
DataProvider.Weights[GetLineIdx(localIdx)] = value;
}
void AddQueryId(ui32 localIdx, TGroupId queryId) override {
DataProvider.QueryIds[GetLineIdx(localIdx)] = queryId;
}
void AddSubgroupId(ui32 localIdx, TSubgroupId groupId) override {
DataProvider.SubgroupIds[GetLineIdx(localIdx)] = groupId;
}
void AddBaseline(ui32 localIdx, ui32 baselineIdx, double value) override {
DataProvider.Baseline[baselineIdx][GetLineIdx(localIdx)] = (float)value;
}
void AddTimestamp(ui32 localIdx, ui64 timestamp) override {
DataProvider.Timestamp[GetLineIdx(localIdx)] = timestamp;
}
void SetFeatureIds(const TVector<TString>& featureIds) override {
FeatureNames = featureIds;
}
void SetPairs(const TVector<TPair>& pairs) override {
CB_ENSURE(!IsDone, "Error: can't set pairs after finish");
Pairs = pairs;
}
void SetGroupWeights(const TVector<float>& groupWeights) override {
CB_ENSURE(!IsDone, "Error: can't set group weights after finish");
CB_ENSURE(DataProvider.GetSampleCount() == groupWeights.size(),
"Group weights file should have as many weights as the objects in the dataset.");
DataProvider.Weights = groupWeights;
}
// NOTE(review): the first CB_ENSURE below requires Pairs to be empty AND the
// loss to be pairlogit, but the message only covers the non-empty-pairs case;
// a failure caused by a non-pairlogit loss prints a misleading message.
void GeneratePairs(const NCatboostOptions::TLossDescription& lossFunctionDescription) {
CB_ENSURE(Pairs.empty() && IsPairLogit(lossFunctionDescription.GetLossFunction()), "Cannot generate pairs, pairs are not empty");
CB_ENSURE(
!DataProvider.Targets.empty(),
"Pool labels are not provided. Cannot generate pairs."
);
Pairs = GeneratePairLogitPairs(
DataProvider.QueryIds,
DataProvider.Targets,
NCatboostOptions::GetMaxPairCount(lossFunctionDescription),
Seed);
DataProvider.FillQueryPairs(Pairs);
}
void SetFloatFeatures(const TVector<TFloatFeature>& floatFeatures) override {
Y_UNUSED(floatFeatures);
CB_ENSURE(false, "Not supported for regular pools");
}
void SetTarget(const TVector<float>& target) override {
CB_ENSURE(target.size() == DataProvider.Targets.size(), "Error: target size should be equal to line count");
DataProvider.Targets = target;
}
int GetDocCount() const override {
return DataProvider.Targets.size();
}
TConstArrayRef<TString> GetLabels() const override {
return MakeArrayRef(Labels.data(), Labels.size());
}
TConstArrayRef<float> GetWeight() const override {
return MakeArrayRef(DataProvider.Weights.data(), DataProvider.Weights.size());
}
TConstArrayRef<TGroupId> GetGroupIds() const override {
return MakeArrayRef(DataProvider.QueryIds.data(), DataProvider.QueryIds.size());
}
// TODO(nikitxskv): Temporary solution until MLTOOLS-140 is implemented.
void SetPoolPathAndFormat(
const NCB::TPathWithScheme& poolPath,
const NCB::TDsvFormatOptions& dsvPoolFormatOptions) {
DataProvider.PoolPath = poolPath;
DataProvider.DsvPoolFormatOptions = dsvPoolFormatOptions;
}
void Finish() override;
void RegisterFeaturesInFeatureManager(const TVector<TFeatureColumnPtr>& featureColumns) const {
for (ui32 featureId = 0; featureId < featureColumns.size(); ++featureId) {
if (!FeaturesManager.IsKnown(featureId)) {
if (FeatureTypes[featureId] == EFeatureValuesType::Categorical) {
FeaturesManager.RegisterDataProviderCatFeature(featureId);
} else {
FeaturesManager.RegisterDataProviderFloatFeature(featureId);
}
}
}
}
private:
// Binarized features take 1 byte per value in the blob; float/cat take 4.
ui32 GetBytesPerFeature(ui32 featureId) const {
return FeatureTypes.at(featureId) != EFeatureValuesType::BinarizedFloat ? 4 : 1;
}
void WriteBinarizedFeatureToBlobImpl(ui32 localIdx, ui32 featureId, ui8 feature);
// NOTE(review): "feautre" is a typo in the parameter name (declaration only,
// harmless to ABI/callers).
void WriteFloatOrCatFeatureToBlobImpl(ui32 localIdx, ui32 featureId, float feautre);
// Emit the unsafe-target warning at most once: AtomicCas flips the flag on
// the first caller only.
void WriteUnsafeTargetWarningOnce(ui32 localIdx, float value) {
if (Y_UNLIKELY(AtomicCas(&UnsafeTargetWarningWritten, true, false))) {
const auto rowIndex = Cursor + localIdx;
CATBOOST_WARNING_LOG
<< "Got unsafe target "
<< LabeledOutput(value)
<< " at " << LabeledOutput(rowIndex) << '\n';
}
}
private:
inline ui32 GetLineIdx(ui32 localIdx) {
return Cursor + localIdx;
}
inline TString GetFeatureName(ui32 featureId) {
return FeatureNames.size() ? FeatureNames[featureId] : "";
}
private:
TAtomic UnsafeTargetWarningWritten = false;
TBinarizedFeaturesManager& FeaturesManager;
TDataProvider& DataProvider;
bool IsTest;
ui32 BuildThreads;
TCatFeaturesPerfectHashHelper CatFeaturesPerfectHashHelper;
bool ShuffleFlag = false;
ui64 Seed = 0;
// Line index of the first row of the block currently being filled.
ui32 Cursor = 0;
bool IsDone = false;
TBinarizedFloatFeaturesMetaInfo BinarizedFeaturesMetaInfo;
TVector<TVector<ui8>> FeatureBlobs;
TVector<EFeatureValuesType> FeatureTypes;
TVector<TVector<float>> Borders;
TVector<ENanMode> NanModes;
TSet<ui32> IgnoreFeatures;
TVector<TString> FeatureNames;
TSimpleSharedPtr<TClassificationTargetHelper> TargetHelper;
TVector<TPair> Pairs;
TVector<TString> Labels;
};
// Read a pool from disk into `poolBuilder`. The pairs and group-weights
// paths are optional side files and may be uninitialized.
void ReadPool(
const ::NCB::TPathWithScheme& poolPath,
const ::NCB::TPathWithScheme& pairsFilePath, // can be uninited
const ::NCB::TPathWithScheme& groupWeightsFilePath, // can be uninited
const ::NCatboostOptions::TDsvPoolFormatParams& dsvPoolFormatParams,
const TVector<int>& ignoredFeatures,
bool verbose,
NCB::TTargetConverter* const targetConverter,
::NPar::TLocalExecutor* const localExecutor,
TDataProviderBuilder* const poolBuilder);
}
|
import org.junit.jupiter.api.function.Executable;
import static org.junit.jupiter.api.Assertions.fail;
public class ExceptionAssertions {
public static void assertThrows(Class<? extends Throwable> expectedException, Executable executable) {
try {
executable.execute();
fail("Expected " + expectedException.getName() + " to be thrown, but no exception was thrown");
} catch (Throwable actualException) {
if (!expectedException.isInstance(actualException)) {
fail("Expected " + expectedException.getName() + " to be thrown, but " + actualException.getClass().getName() + " was thrown instead");
}
}
}
public static void assertDoesNotThrow(Executable executable) {
try {
executable.execute();
} catch (Throwable actualException) {
fail("Expected no exception to be thrown, but " + actualException.getClass().getName() + " was thrown");
}
}
} |
package evilcraft.entities.tileentities.tickaction.bloodinfuser;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import evilcraft.api.entities.tileentitites.tickaction.ITickAction;
import evilcraft.entities.tileentities.TileBloodInfuser;
/**
* Abstract {@link ITickAction} that can infuse items with blood.
* @author rubensworks
*
*/
public abstract class BloodInfuserTickAction implements ITickAction<TileBloodInfuser> {
// Amount of blood (mB) consumed from the tank per tick while infusing.
protected final static int MB_PER_TICK = 100;
@Override
public boolean canTick(TileBloodInfuser tile, ItemStack itemStack, int slot, int tick) {
// Only allow ticking if production slot is empty or if the producing item is the same and
// there is at least one spot left in the stack.
if(!tile.getTank().isEmpty() && getInfuseStack(tile) != null && tile.canConsume(getInfuseStack(tile))) {
ItemStack production = tile.getInventory().getStackInSlot(tile.getProduceSlot());
if(production == null) {
return true;
} else if(production.getItem() == willProduceItem(tile)) {
// Same output item: keep ticking only while the stack can still grow.
if(production.stackSize < production.getMaxStackSize())
return true;
}
}
return false;
}
/**
* Get the stack in the infusion slot.
* @param tile The tile to check the slot for.
* @return The item in that slot.
*/
public ItemStack getInfuseStack(TileBloodInfuser tile) {
return tile.getInventory().getStackInSlot(tile.getConsumeSlot());
}
/**
* Get the item of the item that will be produced after infusion.
* @param tile The tile that performs the infusion.
* @return The item.
*/
public abstract Item willProduceItem(TileBloodInfuser tile);
/**
* Try to add the given item to the production slot.
* @param tile The tile where infusion happened.
* @param itemStack The item to try to put in the production slot.
* @return If the item could be added or joined in the production slot.
*/
public boolean addToProduceSlot(TileBloodInfuser tile, ItemStack itemStack) {
ItemStack produceStack = tile.getInventory().getStackInSlot(tile.getProduceSlot());
if(produceStack == null) {
// Slot empty: place the produced stack directly.
tile.getInventory().setInventorySlotContents(tile.getProduceSlot(), itemStack);
return true;
} else {
// Merge only when items match and the combined size fits in one stack.
if(produceStack.getItem() == itemStack.getItem()
&& produceStack.getMaxStackSize() >= produceStack.stackSize + itemStack.stackSize) {
produceStack.stackSize += itemStack.stackSize;
return true;
}
}
return false;
}
}
|
#!/usr/bin/env bash
# Render every bundled rougify syntax-highlighting theme to a compressed CSS
# file under $dest. Requires the `rougify` and `scss` CLIs on PATH.
set -eu

themes=(base16 base16.dark base16.light base16.monokai base16.monokai.dark base16.monokai.light base16.solarized base16.solarized.dark base16.solarized.light bw colorful github gruvbox gruvbox.dark gruvbox.light igorpro magritte molokai monokai monokai.sublime pastie thankful_eyes tulip)
dest=assets/css/rougify

rm -rf "${dest}" && mkdir -p "${dest}"

# Quote all expansions (the original had unquoted ${themes[@]}/${dest},
# SC2068/SC2086) and use portable `rm -f` (GNU-only `rm --force` breaks
# on macOS/BSD).
for theme in "${themes[@]}"; do
  rougify style "${theme}" > "${theme}.scss"
  scss --sourcemap=none --style compressed "${theme}.scss" "${dest}/${theme}.css"
  rm -f -- "${theme}.scss"
done
# pip3 download -r requirements.txt --no-deps
|
// todolist CLI entry point (repo metadata line removed)
package main
import (
"fmt"
"os"
"strings"
"github.com/fatih/color"
"github.com/gammons/todolist/todolist"
"github.com/skratchdot/open-golang/open"
)
const (
VERSION = "0.8"
)
// main dispatches the command-line invocation: the first argument selects the
// command; the full joined argument string is handed to the router for parsing.
func main() {
	args := os.Args
	if len(args) < 2 {
		usage()
		os.Exit(0)
	}
	joined := strings.Join(args[1:], " ")
	routeInput(args[1], joined)
}
// usage prints the full colorized help text for the todo CLI: one section per
// command family (add, list, complete, prioritize, archive, edit, expand,
// delete, notes, gc). It only writes to stdout and has no other side effects.
func usage() {
// Color printers: blue bold for section headers, cyan for sub-headers,
// yellow for example invocations, plain for explanations.
blue := color.New(color.FgBlue)
cyan := color.New(color.FgCyan)
yellow := color.New(color.FgYellow)
blueBold := blue.Add(color.Bold)
fmt.Printf("todo v%s, a simple, command line based, GTD-style todo manager\n", VERSION)
blueBold.Println("\nAdding todos")
fmt.Println("  the 'a' command adds todos.")
fmt.Println("  You can also optionally specify a due date.")
fmt.Println("  Specify a due date by putting 'due <date>' at the end, where <date> is in (tod|today|tom|tomorrow|mon|tue|wed|thu|fri|sat|sun)")
fmt.Println("\n  Examples for adding a todo:")
yellow.Println("\ttodo a Meeting with @bob about +importantPrject due today")
yellow.Println("\ttodo a +work +verify did @john fix the build\\?")
blueBold.Println("\nListing todos")
fmt.Println("  When listing todos, you can filter and group the output.\n")
fmt.Println("  todo l due (tod|today|tom|tomorrow|overdue|this week|next week|last week|mon|tue|wed|thu|fri|sat|sun|none)")
fmt.Println("  todo l overdue")
cyan.Println("  Filtering by date:\n")
yellow.Println("\ttodo l due tod")
fmt.Println("\tlists all todos due today\n")
yellow.Println("\ttodo l due tom")
fmt.Println("\tlists all todos due tomorrow\n")
yellow.Println("\ttodo l due mon")
fmt.Println("\tlists all todos due monday\n")
yellow.Println("\ttodo l overdue")
fmt.Println("\tlists all todos where the due date is in the past\n")
yellow.Println("\ttodo agenda")
fmt.Println("\tlists all todos where the due date is today or in the past\n")
fmt.Println("  todo l completed (tod|today|this week)")
cyan.Println("  Filtering by date:\n")
yellow.Println("\ttodo l completed (tod|today)")
fmt.Println("\tlists all todos that were completed today\n")
yellow.Println("\ttodo l completed this week")
fmt.Println("\tlists all todos that were completed this week\n")
cyan.Println("  Grouping:")
fmt.Println("  You can group todos by context or project.")
yellow.Println("\ttodo l by c")
fmt.Println("\tlists all todos grouped by context\n")
yellow.Println("\ttodo l by p")
fmt.Println("\tlists all todos grouped by project\n")
cyan.Println("  Grouping and filtering:")
fmt.Println("  Of course, you can combine grouping and filtering to get a nice formatted list.\n")
yellow.Println("\ttodo l due today by c")
fmt.Println("\tlists all todos due today grouped by context\n")
yellow.Println("\ttodo l +project due this week by c")
fmt.Println("\tlists all todos due today for +project, grouped by context\n")
yellow.Println("\ttodo l @frank due tom by p")
fmt.Println("\tlists all todos due tomorrow concerining @frank for +project, grouped by project\n")
blueBold.Println("\nCompleting and uncompleting ")
fmt.Println("Complete and Uncomplete a todo by its Id:\n")
yellow.Println("\ttodo c 33")
fmt.Println("\tCompletes a todo with id 33\n")
yellow.Println("\ttodo uc 33")
fmt.Println("\tUncompletes a todo with id 33\n")
blueBold.Println("\nPrioritizing")
fmt.Println("Todos have a priority flag, which will make them bold when listed.\n")
yellow.Println("\ttodo p 33")
fmt.Println("\tPrioritizes a todo with id 33\n")
yellow.Println("\ttodo up 33")
fmt.Println("\tUn-prioritizes a todo with id 33\n")
yellow.Println("\ttodo l p")
fmt.Println("\tlist all priority todos\n")
blueBold.Println("\nArchiving")
fmt.Println("You can archive todos once they are done, or if you might come back to them.")
fmt.Println("By default, todo will only show unarchived todos.\n")
yellow.Println("\ttodo ar 33")
fmt.Println("\tArchives a todo with id 33\n")
yellow.Println("\ttodo ac")
fmt.Println("\tArchives all completed todos\n")
yellow.Println("\ttodo l archived")
fmt.Println("\tlist all archived todos\n")
blueBold.Println("\nEditing due dates")
yellow.Println("\ttodo e 33 due mon")
fmt.Println("\tEdits the todo with 33 and sets the due date to this coming Monday\n")
yellow.Println("\ttodo e 33 due none")
fmt.Println("\tEdits the todo with 33 and removes the due date\n")
blueBold.Println("\nExpanding existing todos")
yellow.Println("\ttodo ex 39 +final: read physics due mon, do literature report due fri")
fmt.Println("\tRemoves the todo with id 39, and adds following two todos\n")
blueBold.Println("\nDeleting")
yellow.Println("\ttodo d 33")
fmt.Println("\tDeletes a todo with id 33\n")
blueBold.Println("\nManipulating notes")
yellow.Println("\ttodo ln")
fmt.Println("\tlists all todos with their notes")
yellow.Println("\ttodo an 12 check http://this.web.site")
fmt.Println("\tAdds notes \"check http://this.web.site\" to the todo with id 12\n")
yellow.Println("\ttodo n 12")
fmt.Println("\tLists notes of the todo with id 12\n")
yellow.Println("\ttodo dn 12 3")
fmt.Println("\tDeletes the 3rd note of the todo with id 12\n")
yellow.Println("\ttodo en 12 3 check http://that.web.site")
fmt.Println("\tEditing the 3rd note of the todo with id 12 to \"http://that.web.site\" \n")
blueBold.Println("\nGarbage Collection")
yellow.Println("\ttodo gc")
fmt.Println("\tDeletes all archived todos.\n")
fmt.Println("Todolist was lovingly crafted by <NAME> (https://twitter.com/gammons).")
fmt.Println("For full documentation, please visit http://todolist.site")
}
// routeInput maps the first CLI token to the corresponding app action.
// `input` is the full argument string (command included), which each handler
// re-parses for ids, filters, dates, etc. Unknown commands fall through and
// do nothing.
func routeInput(command string, input string) {
app := todolist.NewApp()
switch command {
case "l", "ln", "list", "agenda":
app.ListTodos(input)
case "a", "add":
app.AddTodo(input)
case "done":
app.AddDoneTodo(input)
case "d", "delete":
app.DeleteTodo(input)
case "c", "complete":
app.CompleteTodo(input)
case "uc", "uncomplete":
app.UncompleteTodo(input)
case "ar", "archive":
app.ArchiveTodo(input)
case "uar", "unarchive":
app.UnarchiveTodo(input)
case "ac":
app.ArchiveCompleted()
case "e", "edit":
app.EditTodo(input)
case "ex", "expand":
app.ExpandTodo(input)
case "an", "n", "dn", "en":
app.HandleNotes(input)
case "gc":
app.GarbageCollect()
case "p", "prioritize":
app.PrioritizeTodo(input)
case "up", "unprioritize":
app.UnprioritizeTodo(input)
case "init":
app.InitializeRepo()
case "web":
// Serve the web UI only if the todo repo loads; open the browser at it.
if err := app.Load(); err != nil {
os.Exit(1)
} else {
web := todolist.NewWebapp()
fmt.Println("Now serving todolist web.\nHead to http://localhost:7890 to see your todo list!")
open.Start("http://localhost:7890")
web.Run()
}
}
}
|
#shellcheck shell=sh

# Register and define the shellspec `line` modifier, which narrows the
# current subject to its N-th line (e.g. `The line 2 of ... should ...`).
shellspec_syntax 'shellspec_modifier_line'
shellspec_modifier_line() {
  # Validate arguments: at least one argument, and the first must be a number.
  shellspec_syntax_param count [ $# -ge 1 ] || return 0
  shellspec_syntax_param 1 is number "$1" || return 0
  if [ "${SHELLSPEC_SUBJECT+x}" ] && [ "${SHELLSPEC_SUBJECT:-}" ]; then
    # Build a callback for shellspec_lines. It appears to be invoked per line
    # as (line-text, line-number): when the line number matches $1, the
    # subject is replaced with that line and `return 1` stops the iteration;
    # otherwise the subject is unset so a never-matched index yields "unset".
    # NOTE(review): exact shellspec_lines calling convention inferred — confirm
    # against the framework's lines helper.
    SHELLSPEC_EVAL="
    shellspec_callback() { \
      [ \$2 -eq $1 ] && SHELLSPEC_SUBJECT=\$1 && return 1; \
      unset SHELLSPEC_SUBJECT ||:; \
    }
    "
    eval "$SHELLSPEC_EVAL"
    shellspec_lines shellspec_callback "$SHELLSPEC_SUBJECT"
  else
    # Subject is unset or empty: there is no line to extract.
    unset SHELLSPEC_SUBJECT ||:
  fi
  shift
  # Hand the (possibly narrowed) subject on to the next word in the DSL chain.
  eval shellspec_syntax_dispatch modifier ${1+'"$@"'}
}
|
#include <stdio.h>
/* Return the largest element of arr[0..n-1].
 * Precondition: n >= 1 (the first element seeds the scan). */
int getMax(int arr[], int n)
{
    int best = arr[0];
    for (int idx = 1; idx < n; ++idx) {
        if (arr[idx] > best)
            best = arr[idx];
    }
    return best;
}
/* Demo driver: finds and prints the maximum of a fixed sample array. */
int main(void)
{
    int arr[] = {2, 5, 3, 1, 6};
    int n = sizeof(arr) / sizeof(arr[0]);
    int max = getMax(arr, n);
    /* Newline added so the program's output is properly line-terminated;
     * explicit return for clarity (C99 main would implicitly return 0). */
    printf("Maximum value of the array is %d\n", max);
    return 0;
}
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# NOTE(review): this shebang is preceded by the license header above, so it is
# not on line 1 and has no effect; consider moving it to the top of the file.

# Abort on the first failed command so we never `mv` a missing download.
set -e

# Fetch the MovieLens-1M file prepared for TiSASRec and place it in data/
# (created if absent — the original `mv` failed when data/ did not exist).
wget https://paddlerec.bj.bcebos.com/datasets/ml-m1-tisasrec/ml-1m.txt
mkdir -p data
mv ml-1m.txt data
|
<filename>tests/Platform.test.cpp
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN
#include "khaos/Platform.h"
#include "doctest/doctest.h"
// Smoke test: prints the name and KHAOS version macro for every platform the
// PLATFORM_IS() macro reports as active. There are no assertions — the value
// of the test is that the platform-detection macros compile and that the
// detected platforms are visible on stdout.
// NOTE(review): relies on doctest.h (or Platform.h) transitively providing
// <iostream>; consider including it explicitly.
TEST_CASE("Testing the Platform")
{
#if PLATFORM_IS(Android)
    std::cout << "Platform : Android, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(CloudABI)
    std::cout << "Platform : CloudABI, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(iOSTarget)
    std::cout << "Platform : iOSTarget, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(iOSIphone)
    std::cout << "Platform : iOSIphone, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(iOSSimulator)
    std::cout << "Platform : iOSSimulator, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(MINGW)
    std::cout << "Platform : MINGW, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(MINGW32)
    std::cout << "Platform : MINGW32, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(MINGW64)
    std::cout << "Platform : MINGW64, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(UWP)
    std::cout << "Platform : UWP, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(WindowsDesktop)
    std::cout << "Platform : WindowsDesktop, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(WindowsPhone)
    std::cout << "Platform : WindowsPhone, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(WindowsServer)
    std::cout << "Platform : WindowsServer, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(WindowsStore)
    std::cout << "Platform : WindowsStore, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
#if PLATFORM_IS(WindowsSystem)
    std::cout << "Platform : WindowsSystem, Version : " << KHAOS_PLATFORM_VERSION << std::endl;
#endif
// The emulated MINGW variants report the MINGW version, not the platform one.
#if PLATFORM_IS(MINGWEmulated)
    std::cout << "Platform : MINGWEmulated, Version : " << KHAOS_MINGW_VERSION << std::endl;
#endif
#if PLATFORM_IS(MINGW32Emulated)
    std::cout << "Platform : MINGW32Emulated, Version : " << KHAOS_MINGW_VERSION << std::endl;
#endif
#if PLATFORM_IS(MINGW64Emulated)
    std::cout << "Platform : MINGW64Emulated, Version : " << KHAOS_MINGW_VERSION << std::endl;
#endif
}
|
<reponame>quintel/etengine
class Input
  # Stores and retrieves input min, max, and start values.
  #
  # Values are kept in Rails.cache, keyed per (area, year, input). A cache
  # miss triggers pre-calculation of values for ALL inputs of that area/year
  # in one pass (see #warm_values_for), since they share a GQL instance.
  class Cache
    # Public: Retrieves the hash containing all of the input attributes.
    #
    # If no values for the area and year are already cached, the entire input
    # collection values will be calculated and cached.
    #
    # scenario - A scenario with an area code and end year.
    # input    - The input whose values are to be retrieved.
    #
    # Returns a hash of the input min, max, etc.
    def read(scenario, input)
      cache_key = input_cache_key(scenario, input)

      # Read-through: on a miss, warm the whole collection and re-read.
      Rails.cache.read(cache_key) ||
        (warm_values_for(scenario) && Rails.cache.read(cache_key))
    end

    private

    # Internal: Sets the hash containing all of the input attributes.
    #
    # scenario - A scenario with an area code and end year.
    # input    - The input whose values are to be set.
    # values   - Values for the input in the form
    #            { min: Numeric, max: Numeric } etc
    #
    # Returns the values written.
    def set(scenario, input, values)
      Rails.cache.write(input_cache_key(scenario, input), values)
    end

    # Internal: Pre-calculates values for each input.
    #
    # scenario - A scenario with an area code and end year. All other
    #            attributes are ignored.
    #
    # Returns nothing.
    def warm_values_for(scenario)
      # Build a minimal scenario so the GQL reflects only area/year.
      attributes = scenario.attributes.slice('area_code', 'end_year')
      gql = Scenario.new(attributes).gql

      Input.all.each do |input|
        set(scenario, input, values_for(input, gql))
      end
    end

    # Internal: The values which will be cached for an input.
    #
    # input - The input whose values are to be cached.
    # gql   - GQL instance for calculating values.
    #
    # Returns a hash of the input values.
    def values_for(input, gql)
      values = {
        min: input.min_value_for(gql),
        max: input.max_value_for(gql),
        default: input.start_value_for(gql),
        label: input.label_value_for(gql),
        disabled: input.disabled_in_current_area?(gql),
        step: input.step_value
      }

      # TODO: Remove once adding a proper "permitted_values" attribute.
      values[:min] = values[:min].map(&:to_s) if input.unit == 'enum'

      values = Scaler.call(input, scaler_for(gql), values)

      # Any required numeric attribute that came back non-numeric disables
      # the input rather than caching a broken value set.
      required_numerics = values.values_at(*input.required_numeric_attributes)

      if required_numerics.any? { |value| ! value.is_a?(Numeric) }
        { disabled: true, error: 'Non-numeric GQL value' }
      else
        values
      end
    end

    # Internal: The scaler for the given GQL, or nil if the scenario is not
    # scaled.
    def scaler_for(gql)
      if gql.scenario && Area.derived?(gql.scenario.area_code)
        ScenarioScaling.from_atlas_scaling(gql.scenario.area[:scaling])
      end
    end

    # Internal: Given a scenario, returns the key used to store cached
    # minimum, maximum, and start values.
    #
    # scenario - The scenario containing an area code and end year.
    # input    - The input whose key you want.
    #
    def input_cache_key(scenario, input)
      area = scenario.area_code || :unknown
      year = scenario.end_year || :unknown
      key  = input.kind_of?(Input) ? input.key : input

      "#{ area }.#{ year }.inputs.#{ key }.values"
    end
  end # Cache
end
|
package nightmarethreatreis.com.github.mvp.model;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import org.hibernate.annotations.Type;
@Entity
public class Predstava {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "predstava_id")
private long id;
@Column(nullable = false)
private String naziv;
@Column(nullable = false)
private int trajanje;
@Column(nullable = false)
@Type(type = "text")
private String opis;
@ManyToMany
private List<Zanr> zanrovi = new ArrayList<>();
@OneToMany(mappedBy = "predstava")
private List<Uloga> uloge = new ArrayList<>();
@ManyToMany
private List<Reziser> reziseri = new ArrayList<>();
@OneToMany(mappedBy = "predstava")
private List<Izvodjenje> izvodjenja = new ArrayList<>();
public List<Uloga> getUloge() {
return uloge;
}
public void setUloge(List<Uloga> uloge) {
this.uloge = uloge;
}
public List<Reziser> getReziseri() {
return reziseri;
}
public void setReziseri(List<Reziser> reziseri) {
this.reziseri = reziseri;
}
public List<Izvodjenje> getIzvodjenja() {
return izvodjenja;
}
public void setIzvodjenja(List<Izvodjenje> izvodjenja) {
this.izvodjenja = izvodjenja;
}
public void setId(long id) {
this.id = id;
}
public long getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getNaziv() {
return naziv;
}
public void setNaziv(String naziv) {
this.naziv = naziv;
}
public int getTrajanje() {
return trajanje;
}
public void setTrajanje(int trajanje) {
this.trajanje = trajanje;
}
public String getOpis() {
return opis;
}
public void setOpis(String opis) {
this.opis = opis;
}
public List<Zanr> getZanrovi() {
return zanrovi;
}
public void setZanrovi(List<Zanr> zanrovi) {
this.zanrovi = zanrovi;
}
public String zanroviAsString() {
if(getZanrovi().size() > 0) {
return String.join(", ", getZanrovi().stream().map(Zanr::getNaziv).collect(Collectors.toList()));
}
else {
return "/";
}
}
public String reziseriAsString() {
if(getReziseri().size() > 0) {
return String.join(", ", getReziseri().stream().map(reziser -> reziser.getIme() + " " + reziser.getPrezime()).collect(Collectors.toList()));
}
else {
return "/";
}
}
public String ulogeAsString() {
if(getUloge().size() > 0) {
return String.join(", ", getUloge().stream().map(uloga -> {
String result = uloga.getNaziv();
if(uloga.getGlumci().size() > 0) {
result += "(";
result += String.join(", ", uloga.getGlumci().stream().map(glumac -> glumac.getIme() + " " + glumac.getPrezime()).collect(Collectors.toList()));
result += ")";
}
return result;
}).collect(Collectors.toList()));
}
else {
return "/";
}
}
}
|
package com.yahoo.ycsb.db;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.Statement;
import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.StringByteIterator;
import com.yahoo.ycsb.generator.IncrementingPrintableStringGenerator;
import com.yahoo.ycsb.generator.UniformDoubleGenerator;
import com.yahoo.ycsb.generator.UnixEpochTimestampGenerator;
import com.yahoo.ycsb.measurements.Measurements;
import com.yahoo.ycsb.workloads.WinnerWorkload;
import org.cassandraunit.CassandraCQLUnit;
import org.cassandraunit.dataset.cql.ClassPathCQLDataSet;
import org.junit.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
/**
 * Integration tests for WinnerCassandraCQLClient, run against an embedded
 * Cassandra instance provided by cassandra-unit's {@code CassandraCQLUnit}
 * class rule (schema loaded from samples.cql into keyspace "ycsb").
 */
public class TestWinnerCassandraCQLClient {
  // Change the default Cassandra timeout from 10s to 120s for slow CI machines
  private final static long timeout = 120000L;

  // Connection/schema constants used to configure the client under test.
  private final static String TABLE = "samples";
  private final static String METRIC = "sensormetric";
  private final static String HOST = "localhost";
  private final static String PORT = "9042";

  // Tag generation: a pool of TAGPOOL_SIZE distinct tags, each TAGLENGTH
  // characters long; every generated row carries TAGCOUNT tag fields.
  private final static int TAGPOOL_SIZE = 5;
  private final static int TAGCOUNT = 5;
  private final static int TAGLENGTH = 10;

  private WinnerCassandraCQLClient client;
  // NOTE(review): 'session' is assigned in setUp but unused by the visible tests.
  private Session session;
  private List<String> tagpool;

  // Generators for row keys (timestamp + value) and tag strings.
  UnixEpochTimestampGenerator tGen;
  UniformDoubleGenerator vGen;
  IncrementingPrintableStringGenerator tagGen;

  @ClassRule
  public static CassandraCQLUnit cassandraUnit = new CassandraCQLUnit(
      new ClassPathCQLDataSet("samples.cql", "ycsb"), null, timeout);

  /** Configures and initialises a fresh client, generators, and tag pool. */
  @Before
  public void setUp() throws Exception {
    session = cassandraUnit.getSession();

    Properties p = new Properties();
    p.setProperty("hosts", HOST);
    p.setProperty("port", PORT);
    p.setProperty("table", TABLE);
    p.setProperty("metric", METRIC);
    p.setProperty("fieldlength", String.valueOf(TAGLENGTH));
    p.setProperty("fieldcount", String.valueOf(TAGCOUNT));
    Measurements.setProperties(p);

    final WinnerWorkload workload = new WinnerWorkload();
    workload.init(p);

    client = new WinnerCassandraCQLClient();
    client.setProperties(p);
    client.init();

    tGen = new UnixEpochTimestampGenerator(1, TimeUnit.SECONDS, 1);
    vGen = new UniformDoubleGenerator(-10.0, 10.0);
    tagGen = new IncrementingPrintableStringGenerator(
        TAGLENGTH, IncrementingPrintableStringGenerator.printableBasicAlphaASCIISet());

    tagpool = new ArrayList<>();
    for (int i = 0; i < TAGPOOL_SIZE; i++) {
      tagpool.add(tagGen.nextValue());
    }
  }

  /** Releases client resources after each test. */
  @After
  public void tearDownClient() throws Exception {
    if (client != null) {
      client.cleanup();
    }
    client = null;
  }

  @After
  public void clearTable() throws Exception {
    // Clear the table so that each test starts fresh.
    final Statement truncate = QueryBuilder.truncate(TABLE);
    if (cassandraUnit != null) {
      cassandraUnit.getSession().execute(truncate);
    }
  }

  /** Inserts 1000 generated samples and expects every insert to succeed. */
  @Test
  public void testInsert() throws Exception {
    for (int i = 0; i < 1000; i++) {
      Status status = client.insert("samples", getNextKey(), generateTags());
      Assert.assertEquals(Status.OK, status);
    }
  }

  // Builds the next row key as "<metric>:<timestamp>:<value>".
  private String getNextKey() {
    long ts = tGen.nextValue();
    double val = vGen.nextValue();
    String key = "sensormetric:"+ts+":"+val;
    return key;
  }

  // Maps tag0..tagN to the pooled tag strings as ByteIterators.
  private HashMap<String, ByteIterator> generateTags() {
    HashMap<String, ByteIterator> tags = new HashMap<>();
    int i = 0;
    for (String tag : tagpool) {
      tags.put("tag"+i, new StringByteIterator(tag));
      i++;
    }
    return tags;
  }
}
|
module CanonicalVocabulary
  module Renewals
    # Builds one row of a renewal report by appending column values, one at a
    # time, onto an internal array exposed as +data_set+. The append_* methods
    # are expected to be called in the report's column order; +append_blank+
    # inserts an empty cell.
    class RenewalReportRowBuilder
      include RenewalBuilder

      attr_reader :data_set

      # family  - the application group this row describes
      # primary - the primary applicant (source of the household address)
      def initialize(family, primary)
        @data_set = []
        @family = family
        @primary = primary
      end

      # e_case_id has the form "prefix#number"; only the number is reported.
      def append_integrated_case_number
        @data_set << @family.e_case_id.split('#')[1]
      end

      # Appends first and last name as two separate columns.
      def append_name_of(member)
        @data_set << member.person.name_first
        @data_set << member.person.name_last
      end

      def append_notice_date(notice_date)
        @data_set << notice_date
      end

      # Six address columns taken from the primary applicant's first address.
      def append_household_address
        address = @primary.person.addresses[0]
        @data_set << address.address_line_1
        @data_set << address.address_line_2
        append_blank # Apt
        @data_set << address.location_city_name
        @data_set << address.location_state_code
        @data_set << address.location_postal_code
      end

      # APTC column is intentionally left empty.
      def append_aptc
        append_blank
      end

      def append_response_date(response_date)
        @data_set << response_date
      end

      # Three policy columns; all blank when there is no current policy.
      def append_policy(policy)
        if policy.current.blank?
          3.times{|i| append_blank }
        else
          @data_set << policy.current.plan_name
          @data_set << policy.current.future_plan_name
          @data_set << policy.current.quoted_premium
        end
      end

      # Post-APTC premium column is intentionally left empty.
      def append_post_aptc_premium
        append_blank
      end

      # Yearly income, a blank spacer column, and the IRS consent flag.
      def append_financials
        @data_set << @family.yearly_income("2014")
        append_blank
        @data_set << @family.irs_consent
      end

      def append_age_of(member)
        @data_set << member.age
      end

      # The residency/citizenship/tax/MEC helpers below come from RenewalBuilder.
      def append_residency_of(member)
        @data_set << residency(member)
      end

      def append_citizenship_of(member)
        @data_set << citizenship(member)
      end

      def append_tax_status_of(member)
        @data_set << tax_status(member)
      end

      def append_mec_of(member)
        @data_set << member_mec(member)
      end

      def append_app_group_size
        @data_set << @family.family_members.count
      end

      def append_yearwise_income_of(member)
        @data_set << member.income_by_year("2014")
      end

      # Appends an empty cell.
      def append_blank
        @data_set << nil
      end

      def append_incarcerated(member)
        @data_set << incarcerated?(member)
      end
    end
  end
end
|
<reponame>shirou/VSNowm<gh_stars>0
import { ExecException } from "child_process";
// Base error class
// Base error class for all custom errors in this module. Sets `name` to the
// concrete subclass name (via new.target) so logs and reports show e.g.
// "GitSyncerError" instead of the generic "Error".
export class BaseError extends Error {
  constructor(e?: string) {
    super(e);
    this.name = new.target.name;
  }
}

// Marker error types — no extra state, distinguished by class/name only.
// NOTE(review): semantics inferred from the names; "Uninitlized" looks like a
// typo for "Uninitialized" but renaming would break existing catch sites.
export class GitUninitlizedError extends BaseError {}
export class GitManualRequiredError extends BaseError {}

// Wraps a failed git invocation: keeps the captured stderr alongside the
// message of the originating child_process ExecException (if any).
export class GitSyncerError extends BaseError {
  constructor(public stderr: string, e?: ExecException) {
    super(e?.message);
  }
}
|
import pandas as pd
import pytest
import twigy
@pytest.fixture
def titanic_training_data():
    """Load the Titanic CSV fixture and return (X, y) as numpy arrays.

    X is every column except 'Survived'; y is the 'Survived' column
    (as a 2-D array, since it is selected with a list of columns).
    """
    titanic_data = pd.read_csv("/test/testdata/titanic_data.csv")
    X = titanic_data.drop(columns=['Survived'])
    y = titanic_data[['Survived']]
    return X.to_numpy(), y.to_numpy()
def test_result(titanic_training_data):
    """Pin the exact structure of a depth-5 tree built on the Titanic data.

    Checks total node count plus sample counts, split features, thresholds,
    child links, impurities and class counts for the root and two inner/leaf
    nodes — a regression test for the tree-building algorithm.
    """
    tree_clf = twigy.DecisionTreeClassifier(max_depth=5)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert len(tree_clf.tree.nodes) == 53
    # Root node.
    assert tree_clf.tree.nodes[0].n_samples == 891
    assert tree_clf.tree.nodes[0].split_feature == 5
    assert tree_clf.tree.nodes[0].threshold == 1.5
    assert tree_clf.tree.nodes[0].left_child_id == 1
    assert tree_clf.tree.nodes[0].right_child_id == 26
    assert tree_clf.tree.nodes[0].impurity == pytest.approx(0.473013)
    assert tree_clf.tree.nodes[0].value == [549, 342]
    # A pure leaf.
    assert tree_clf.tree.nodes[22].n_samples == 32
    assert tree_clf.tree.nodes[22].impurity == pytest.approx(0.0)
    assert tree_clf.tree.nodes[22].value == [32, 0]
    # A deep internal node.
    assert tree_clf.tree.nodes[49].n_samples == 21
    assert tree_clf.tree.nodes[49].split_feature == 2
    assert tree_clf.tree.nodes[49].threshold == 0.5
    assert tree_clf.tree.nodes[49].left_child_id == 50
    assert tree_clf.tree.nodes[49].right_child_id == 51
    assert tree_clf.tree.nodes[49].impurity == pytest.approx(0.17233559)
    assert tree_clf.tree.nodes[49].value == [19, 2]
def test_neg_max_depth(titanic_training_data):
    """A negative max_depth must yield a single (root-only) node."""
    tree_clf = twigy.DecisionTreeClassifier(max_depth=-5)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert len(tree_clf.tree.nodes) == 1
def test_max_depth(titanic_training_data):
    """max_depth=1 must produce exactly a root plus two children."""
    tree_clf = twigy.DecisionTreeClassifier(max_depth=1)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert len(tree_clf.tree.nodes) == 3
def test_max_features(titanic_training_data):
    """Pin the chosen split features when max_features=1.

    Renamed from ``test_max_depth``: the original name collided with the
    test defined just above, so Python's module namespace kept only this
    definition and pytest silently skipped the depth test. This function
    exercises ``max_features``, so it gets that name.
    """
    tree_clf = twigy.DecisionTreeClassifier(max_depth=5, max_features=1)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert tree_clf.tree.nodes[0].split_feature == 1
    assert tree_clf.tree.nodes[1].split_feature == 7
    assert tree_clf.tree.nodes[2].split_feature == 6
    assert tree_clf.tree.nodes[3].split_feature == 2
    assert tree_clf.tree.nodes[4].split_feature == 7
def test_high_min_impurity_split(titanic_training_data):
    """An unreachably high min_impurity_split prevents any split (root only)."""
    tree_clf = twigy.DecisionTreeClassifier(max_depth=5, min_impurity_split=100)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert len(tree_clf.tree.nodes) == 1
def test_normal_min_impurity_split(titanic_training_data):
    """A moderate min_impurity_split prunes some nodes (47 vs 53 unpruned)."""
    tree_clf = twigy.DecisionTreeClassifier(max_depth=5, min_impurity_split=0.1)
    tree_clf.build_tree(titanic_training_data[0], titanic_training_data[1])
    assert len(tree_clf.tree.nodes) == 47
|
// Recursively flattens an arbitrarily nested array into a single, flat array.
// The input array is never mutated; elements keep their left-to-right order.
function flatten(arr) {
  return arr.reduce(
    (acc, element) =>
      Array.isArray(element) ? acc.concat(flatten(element)) : acc.concat(element),
    []
  );
}
#!/bin/sh
# execute-backup-command.sh
#
# Runs a Cerberus CLI command against the given environment/region, then
# reports a success counter datapoint to SignalFx.
#
# Required env vars: ENVIRONMENT, COMMAND, SIGNALFX_TOKEN
# Optional env vars: REGION (defaults to us-west-2), ADDITIONAL_COMMAND_OPTIONS
set -e

echo "ENVIRONMENT=${ENVIRONMENT}"
echo "REGION=${REGION}"

# Default the region when the caller did not supply one. The expansion is
# quoted (the original unquoted test only worked by accident — SC2086).
if [ -z "${REGION}" ]; then
  REGION="us-west-2"
fi

# External IP of this host; used only in the informational banner below.
IP_ADDRESS=$(curl -s http://checkip.amazonaws.com/)

echo ""
echo "-----------------------------------------------------------------------------------------------------------------"
echo ""
echo "Executing command: '${COMMAND} ${ADDITIONAL_COMMAND_OPTIONS}' in environment: '${ENVIRONMENT}' and region '${REGION}
with the following external IP address: ${IP_ADDRESS}"
echo ""
echo "-----------------------------------------------------------------------------------------------------------------"
echo ""

# COMMAND and ADDITIONAL_COMMAND_OPTIONS are intentionally left unquoted so a
# multi-word command and its option list word-split into separate arguments.
# shellcheck disable=SC2086
cerberus -e "${ENVIRONMENT}" -r "${REGION}" --no-tty ${COMMAND} ${ADDITIONAL_COMMAND_OPTIONS}

# Only reached when the CLI call above succeeded (set -e): report success.
curl -s -X POST \
  https://ingest.signalfx.com/v2/datapoint \
  -H 'Cache-Control: no-cache' \
  -H 'Content-Type: application/json' \
  -H "X-SF-TOKEN: ${SIGNALFX_TOKEN}" \
  -d "{ \"counter\": [{
    \"metric\": \"cerberus.cli.success\",
    \"dimensions\": { \"command\": \"${COMMAND}\", \"env\": \"${ENVIRONMENT}\" },
    \"value\": 1
  }]}"
# Launcher for the report_relay service.
#
# production style:
#gunicorn report_relay.main:App --bind localhost:8080 --worker-class aiohttp.GunicornWebWorker
# dev server style (runs whatever server report_relay.main starts itself):
python3 -m report_relay.main
|
package main
import (
"fmt"
"io/ioutil"
"net/http"
"os"
)
// MathServer maintains server state for all handlers.
// NOTE(review): the log file and the request counters are declared but never
// written to by the visible handlers — confirm whether they are still needed.
type MathServer struct {
	addr     string   // listen address, e.g. ":8089"
	log      *os.File // optional log destination (nil by default)
	requests uint     // total requests served
	GET      uint     // number of GET requests
	POST     uint     // number of POST requests
}
// NewMathServer creates a MathServer bound to addr that will be used to
// maintain server state. Counters start at zero and no log file is attached.
func NewMathServer(addr string) *MathServer {
	return &MathServer{
		addr:     addr,
		log:      nil,
		requests: 0,
		GET:      0,
		POST:     0,
	}
}
// AddHandler takes the addition request and forwards it to the addition
// microservice at localhost:8090. A POST proxies the request body through;
// any other method is forwarded as a GET. The microservice's response body
// is relayed back to the caller with a 200 status.
func (m *MathServer) AddHandler(w http.ResponseWriter, r *http.Request) {
	if r.Method == "POST" {
		// Send a post to the addition microservice with the request.
		resp, err := http.Post("http://localhost:8090", "text/json", r.Body)
		if err != nil {
			// When err != nil, resp is nil — dereferencing resp.StatusCode
			// here (as the code previously did) would panic. Report the
			// upstream failure and stop.
			http.Error(w, "addition service unreachable", http.StatusBadGateway)
			return
		}
		defer resp.Body.Close()
		// If the status is OK, write header as http 200.
		w.WriteHeader(http.StatusOK)
		// Read the POST response's body and relay it.
		rBody, _ := ioutil.ReadAll(resp.Body)
		w.Write(rBody)
	} else {
		resp, err := http.Get("http://localhost:8090")
		if err != nil {
			http.Error(w, "addition service unreachable", http.StatusBadGateway)
			return
		}
		defer resp.Body.Close()
		w.WriteHeader(http.StatusOK)
		rBody, _ := ioutil.ReadAll(resp.Body)
		w.Write(rBody)
	}
}
// DivHandler takes a division request and forwards it to the division
// microservice. TODO: not implemented — currently returns an empty 200.
func (m *MathServer) DivHandler(w http.ResponseWriter, r *http.Request) {
}

// MultHandler takes a multiplication request and forwards it to the
// multiplication service. TODO: not implemented — currently returns an empty 200.
func (m *MathServer) MultHandler(w http.ResponseWriter, r *http.Request) {
}

// SubHandler takes a subtraction request and forwards it to the subtraction
// service. TODO: not implemented — currently returns an empty 200.
func (m *MathServer) SubHandler(w http.ResponseWriter, r *http.Request) {
}
// main wires the four math routes into a mux and serves them on :8089.
func main() {
	m := NewMathServer(":8089")
	mux := http.NewServeMux()
	mux.HandleFunc("/add", m.AddHandler)
	mux.HandleFunc("/div", m.DivHandler)
	mux.HandleFunc("/mult", m.MultHandler)
	mux.HandleFunc("/sub", m.SubHandler)
	fmt.Println("Starting math server on port 8089.")
	// ListenAndServe always returns a non-nil error when it stops; surface
	// it instead of silently discarding it (e.g. port already in use).
	if err := http.ListenAndServe(m.addr, mux); err != nil {
		fmt.Fprintln(os.Stderr, "math server stopped:", err)
		os.Exit(1)
	}
}
|
#!/bin/sh
# Tests for file descriptor exhaustion.
# Copyright (C) 2009-2015 Free Software Foundation, Inc.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
. "${srcdir=.}/tests/init.sh"; path_prepend_ ./src
print_ver_ sort

# Skip the test when running under valgrind.
# Probe: with only 6 fds, sort on empty input must still succeed; valgrind
# keeps extra descriptors open, which makes this probe fail.
( ulimit -n 6; sort 3<&- 4<&- 5<&- < /dev/null ) \
  || skip_ 'fd-limited sort failed; are you running under valgrind?'

# Create 31 one-line input files, and accumulate the expected output in 'in'.
for i in $(seq 31); do
  echo $i | tee -a in > __test.$i || framework_failure_
done

# glob before ulimit to avoid issues on bash 3.2 on OS X 10.6.8 at least
test_files=$(echo __test.*)

# Merge 31 files with only 6 fds available: forces sort to batch its merges
# instead of opening every input at once.
(
  ulimit -n 6
  sort -n -m $test_files 3<&- 4<&- 5<&- < /dev/null > out
) &&
compare in out ||
  { fail=1; echo 'file descriptor exhaustion not handled' 1>&2; }

# Same scenario, but with stdin ('-') as one of the merge inputs.
echo 32 | tee -a in > in1
(
  ulimit -n 6
  sort -n -m $test_files - 3<&- 4<&- 5<&- < in1 > out
) &&
compare in out || { fail=1; echo 'stdin not handled properly' 1>&2; }

Exit $fail
|
#!/bin/bash
# Grants /usr/bin/node the capability to bind privileged ports (<1024)
# without sudo, then starts the app.
# If this does not work, check where your installation of node is: `which node`
set -e  # do not start the app if setcap failed
sudo setcap cap_net_bind_service=ep /usr/bin/node
npm run start
|
// Prints every prime in the inclusive range [start, end], one per line.
// Fix: the original treated 1 (and would treat 0 / negatives) as prime
// because the trial-division loop never ran for i < 3; the scan now starts
// at max(start, 2). Also breaks on the first divisor and only tests up to
// sqrt(candidate) instead of every j < candidate.
const printPrime = (start, end) => {
  for (let candidate = Math.max(start, 2); candidate <= end; candidate++) {
    let isPrime = true
    for (let divisor = 2; divisor * divisor <= candidate; divisor++) {
      if (candidate % divisor === 0) {
        isPrime = false
        break
      }
    }
    if (isPrime) console.log(candidate)
  }
}
printPrime(1, 10)
<reponame>wojciech-kowalik/nestjs-shop
import { Injectable } from '@nestjs/common';
import { RegisterDto } from './dto/register.dto';
import { RegisterUserResponse } from '../shop/interfaces/user';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { User } from './user.entity';
import { hashPwd } from '../utils/hash-pwd';
/**
 * User registration and lookup, backed by the TypeORM User repository.
 */
@Injectable()
export class UserService {
  constructor(
    @InjectRepository(User)
    private userRepository: Repository<User>,
  ) {}

  /**
   * Maps a User entity to the public registration response, exposing only
   * id and email (never the password hash).
   */
  filter(user: User): RegisterUserResponse {
    const { id, email } = user;
    return {
      id,
      email,
    };
  }

  /**
   * Creates and persists a new user, storing only a hash of the password,
   * and returns the filtered (safe-to-expose) representation.
   */
  async register(newUser: RegisterDto): Promise<RegisterUserResponse> {
    const user = new User();
    user.email = newUser.email;
    user.pwdHash = hashPwd(newUser.password);
    await this.userRepository.save(user);
    return this.filter(user);
  }

  /**
   * Fetches a user by id; the returned promise rejects when no such user
   * exists (findOneOrFail).
   */
  async getUser(id: string): Promise<User> {
    return await this.userRepository.findOneOrFail(id);
  }
}
|
// Copyright (C) 2018 <NAME>.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.github.saurfang.sas.mapreduce
import java.io.IOException
import com.github.saurfang.sas.parso.ParsoWrapper
import org.apache.commons.io.input.CountingInputStream
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.compress.CompressionCodecFactory
import org.apache.hadoop.io.NullWritable
import org.apache.hadoop.mapreduce.{InputSplit, RecordReader, TaskAttemptContext}
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.log4j.LogManager
import org.apache.spark.sql.execution.datasources.CodecStreams
/**
* A [[RecordReader]] for [[SasInputFormat]].
* Each split is aligned to the closest preceding page boundary,
* calculated from the page size specified in the .sas7bdat meta info.
*/
class SasRecordReader(split: InputSplit,
                      context: TaskAttemptContext) extends RecordReader[NullWritable, Array[Object]] {

  @transient lazy val log = LogManager.getLogger(this.getClass.getName)

  // Process input parameters.
  private val fileSplit = split.asInstanceOf[FileSplit]
  private val filePath = fileSplit.getPath
  private val jobConf = context.getConfiguration

  // Sanity-Check: Ensure file is not compressed.
  private val codec = Option(new CompressionCodecFactory(jobConf).getCodec(filePath))
  private val isSplittable = codec.isEmpty

  // Initialize variables.
  private var recordCount: Long = 0
  private var currentRecordValue: Array[Object] = _

  // Initialize InputStream.
  private val fs = filePath.getFileSystem(jobConf)
  private val rawInputStream = fs.open(filePath)
  private val fileInputStream = codec.map(codec => codec.createInputStream(rawInputStream)).getOrElse(rawInputStream)
  private val countingInputStream = new CountingInputStream(fileInputStream)

  // Initialize Parso SasFileParser.
  private val sasFileReader = ParsoWrapper.createSasFileParser(countingInputStream)

  // Extract static SAS file metadata.
  private val headerLength: Long = sasFileReader.getSasFileProperties.getHeaderLength
  private val pageLength: Long = sasFileReader.getSasFileProperties.getPageLength
  private val pageCount: Long = sasFileReader.getSasFileProperties.getPageCount
  private val columnCount: Long = sasFileReader.getSasFileProperties.getColumnsCount
  private val rowCount: Long = sasFileReader.getSasFileProperties.getRowCount
  private val fileLength: Long = if (isSplittable) {
    fs.getFileStatus(filePath).getLen
  } else {
    headerLength + (pageLength * pageCount)
  }

  // Calculate initial split byte positions.
  private var splitStart: Long = if (isSplittable) {
    fileSplit.getStart
  } else {
    0
  }
  private var splitEnd: Long = if (isSplittable) {
    splitStart + fileSplit.getLength
  } else {
    splitStart + fileLength
  }

  // Log file information
  log.debug(s"Reading file of length $fileLength between $splitStart and $splitEnd. ($rowCount rows, $columnCount columns)")

  // Expand splitStart to closest preceding page end.
  if (splitStart > 0) {
    // Calculate how many extra bytes we need to include, so we start on a page boundary.
    val partialPageLength = (splitStart - headerLength) % pageLength
    // Move splitStart back to include these bytes.
    splitStart -= partialPageLength
    if (partialPageLength != 0) {
      log.debug(s"Expanded splitStart by $partialPageLength bytes to start on page boundary, splitStart is now: $splitStart.")
    }
  }

  // Shrink splitEnd to closest preceding page end. (Don't move last split, it should end on file end)
  if (splitEnd != fileLength) {
    // Calculate how many bytes we need to exclude, so we end on a page boundary.
    val partialPageLength = (splitEnd - headerLength) % pageLength
    // Move splitEnd back to exclude these bytes.
    splitEnd -= partialPageLength
    if (partialPageLength != 0) {
      log.debug(s"Shrunk splitEnd by $partialPageLength bytes to end on page boundary, splitEnd is now: $splitEnd.")
    }
  }

  // Seek input stream. (Don't seek if this is the first split, as it has already read past metadata)
  if (fileInputStream.getPos != splitStart && splitStart > 0) {
    val originalPos = fileInputStream.getPos
    // Shift fileInputStream to start of split.
    fileInputStream.seek(splitStart)
    log.debug(s"Shifted fileInputStream to $splitStart offset from $originalPos.")
    // Reset Byte Counter.
    countingInputStream.resetByteCount()
    // If we seek then we need to look at the current page.
    // this is safe because we seeked to a page boundary.
    sasFileReader.readNextPage()
  }

  // Define initialise so we can compile, as it is marked abstract.
  // All initialisation happens in the constructor above instead.
  override def initialize(split: InputSplit, context: TaskAttemptContext): Unit = {
  }

  // Keys are unused: every record is emitted under the null key.
  override def getCurrentKey: NullWritable = {
    NullWritable.get
  }

  // The record most recently produced by nextKeyValue().
  override def getCurrentValue: Array[Object] = {
    currentRecordValue
  }

  override def close() {
    log.debug(s"Read $getPos bytes and $recordCount records.")
    if (countingInputStream != null) {
      countingInputStream.close()
    }
    if (fileInputStream != null) {
      fileInputStream.close()
    }
  }

  override def getProgress: Float = {
    splitStart match {
      case x if x == splitEnd => 0.0F
      // Fix: getPos and the split length are both Long, so the original
      // `getPos / (splitEnd - splitStart)` was integer division and reported
      // 0 until the split finished. Convert to Float before dividing.
      case _ => Math.min(getPos.toFloat / (splitEnd - splitStart), 1.0F)
    }
  }

  override def nextKeyValue(): Boolean = {
    // Lazy evaluator to read next record.
    lazy val readNext = {
      // Clear the current stored record.
      currentRecordValue = new Array[Object](columnCount.toInt)
      // Read next record.
      val recordValue: Option[Array[Object]] = Option(sasFileReader.readNext())
      // Store the returned record.
      if (recordValue.isDefined) {
        // copyToArray handles partially corrupted records
        recordValue.get.copyToArray(currentRecordValue)
        recordCount += 1
        true
      }
      else {
        false
      }
    }
    // If there is more to read, read a row.
    if (getPos <= splitEnd - splitStart) {
      readNext
    }
    else {
      false
    }
  }

  // Get current byte position of the input stream, relative to the split
  // start (the counter is reset after seeking to the split).
  private def getPos: Long = {
    countingInputStream.getByteCount
  }
}
|
//
// WonderlandMacLib.h
// WonderlandMacLib
//
// Created by <NAME> on 8/11/2559 BE.
// Copyright © 2559 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
/// Public umbrella interface for the WonderlandMacLib framework.
/// NOTE(review): currently declares no methods or properties.
@interface WonderlandMacLib : NSObject
@end
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Enum
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Text
from sqlalchemy import Unicode
from sqlalchemy import UniqueConstraint
from sqlalchemy.orm import relationship, backref
from nailgun import consts
from nailgun.db import db
from nailgun.db.sqlalchemy.models.base import Base
from nailgun.db.sqlalchemy.models.fields import JSON
from nailgun.db.sqlalchemy.models.fields import LowercaseString
from nailgun.db.sqlalchemy.models.network import NetworkBondAssignment
from nailgun.db.sqlalchemy.models.network import NetworkNICAssignment
from nailgun.logger import logger
from nailgun.volumes.manager import VolumeManager
class NodeRoles(Base):
    """Association table linking a node to a role it currently has."""
    __tablename__ = 'node_roles'
    id = Column(Integer, primary_key=True)
    role = Column(Integer, ForeignKey('roles.id', ondelete="CASCADE"))
    node = Column(Integer, ForeignKey('nodes.id'))
class PendingNodeRoles(Base):
    """Association table linking a node to a role scheduled for deployment."""
    __tablename__ = 'pending_node_roles'
    id = Column(Integer, primary_key=True)
    role = Column(Integer, ForeignKey('roles.id', ondelete="CASCADE"))
    node = Column(Integer, ForeignKey('nodes.id'))
class Role(Base):
    """Deployable role defined per release (e.g. assigned to nodes)."""
    __tablename__ = 'roles'
    # A role name is unique within a single release.
    __table_args__ = (
        UniqueConstraint('name', 'release_id'),
    )
    id = Column(Integer, primary_key=True)
    release_id = Column(
        Integer,
        ForeignKey('releases.id', ondelete='CASCADE'),
        nullable=False
    )
    name = Column(String(50), nullable=False)
class Node(Base):
    """A server managed by nailgun.

    Holds hardware metadata reported by the agent, cluster membership,
    deployment status and current/pending role assignments.
    """
    __tablename__ = 'nodes'
    id = Column(Integer, primary_key=True)
    # Stable external identifier, generated at insert time.
    uuid = Column(String(36), nullable=False,
                  default=lambda: str(uuid.uuid4()), unique=True)
    cluster_id = Column(Integer, ForeignKey('clusters.id'))
    name = Column(Unicode(100))
    status = Column(
        Enum(*consts.NODE_STATUSES, name='node_status'),
        nullable=False,
        default=consts.NODE_STATUSES.discover
    )
    # Raw hardware info reported by the agent (interfaces, disks, ...).
    meta = Column(JSON, default={})
    mac = Column(LowercaseString(17), nullable=False, unique=True)
    ip = Column(String(15))
    fqdn = Column(String(255))
    manufacturer = Column(Unicode(50))
    platform_name = Column(String(150))
    kernel_params = Column(Text)
    progress = Column(Integer, default=0)
    os_platform = Column(String(150))
    pending_addition = Column(Boolean, default=False)
    pending_deletion = Column(Boolean, default=False)
    changes = relationship("ClusterChanges", backref="node")
    error_type = Column(Enum(*consts.NODE_ERRORS, name='node_error_type'))
    error_msg = Column(String(255))
    timestamp = Column(DateTime, nullable=False)
    online = Column(Boolean, default=True)
    # Roles currently deployed on the node.
    role_list = relationship(
        "Role",
        secondary=NodeRoles.__table__,
        backref=backref("nodes", cascade="all,delete")
    )
    # Roles scheduled for the next deployment.
    pending_role_list = relationship(
        "Role",
        secondary=PendingNodeRoles.__table__,
        backref=backref("pending_nodes", cascade="all,delete")
    )
    attributes = relationship("NodeAttributes",
                              backref=backref("node"),
                              uselist=False,
                              cascade="all,delete")
    nic_interfaces = relationship("NodeNICInterface", backref="node",
                                  cascade="delete",
                                  order_by="NodeNICInterface.name")
    bond_interfaces = relationship("NodeBondInterface", backref="node",
                                   cascade="delete",
                                   order_by="NodeBondInterface.name")
    # hash function from raw node agent request data - for caching purposes
    agent_checksum = Column(String(40), nullable=True)
    ip_addrs = relationship("IPAddr", viewonly=True)
    # User-supplied overrides of generated deployment/provisioning data.
    replaced_deployment_info = Column(JSON, default=[])
    replaced_provisioning_info = Column(JSON, default={})

    @property
    def interfaces(self):
        """All interfaces: NICs followed by bonds."""
        return self.nic_interfaces + self.bond_interfaces

    @property
    def uid(self):
        """String form of the primary key."""
        return str(self.id)

    @property
    def offline(self):
        return not self.online

    @property
    def network_data(self):
        # TODO(enchantner): move to object
        from nailgun.network.manager import NetworkManager
        return NetworkManager.get_node_networks(self)

    @property
    def volume_manager(self):
        """VolumeManager bound to this node."""
        return VolumeManager(self)

    @property
    def needs_reprovision(self):
        """True when provisioning failed and the node is not being deleted."""
        return self.status == 'error' and self.error_type == 'provision' and \
            not self.pending_deletion

    @property
    def needs_redeploy(self):
        # Redeploy if errored/provisioned or roles are pending, unless the
        # node is scheduled for deletion.
        return (
            self.status in ['error', 'provisioned'] or
            len(self.pending_roles)) and not self.pending_deletion

    @property
    def needs_redeletion(self):
        return self.status == 'error' and self.error_type == 'deletion'

    @property
    def human_readable_name(self):
        """Name for UI/log messages; falls back to the MAC address."""
        return self.name or self.mac

    @property
    def full_name(self):
        return u'%s (id=%s, mac=%s)' % (self.name, self.id, self.mac)

    @property
    def roles(self):
        """Names of currently deployed roles."""
        return [role.name for role in self.role_list]

    @roles.setter
    def roles(self, new_roles):
        # Roles only make sense in the context of a cluster's release;
        # refuse (with a warning) when the node is unassigned.
        if not self.cluster:
            logger.warning(
                u"Attempting to assign roles to node "
                u"'{0}' which isn't added to cluster".format(
                    self.name or self.id
                )
            )
            return
        if new_roles:
            # Names not defined for this release are silently dropped by
            # the DB filter below.
            self.role_list = db().query(Role).filter_by(
                release_id=self.cluster.release_id,
            ).filter(
                Role.name.in_(new_roles)
            ).all()
        else:
            self.role_list = []

    @property
    def pending_roles(self):
        """Names of roles scheduled for deployment."""
        return [role.name for role in self.pending_role_list]

    @property
    def all_roles(self):
        """Returns all roles, self.roles and self.pending_roles."""
        return set(self.pending_roles + self.roles)

    @pending_roles.setter
    def pending_roles(self, new_roles):
        if not self.cluster:
            logger.warning(
                u"Attempting to assign pending_roles to node "
                u"'{0}' which isn't added to cluster".format(
                    self.name or self.id
                )
            )
            return
        self.pending_role_list = db().query(Role).filter_by(
            release_id=self.cluster.release_id,
        ).filter(
            Role.name.in_(new_roles)
        ).all()

    @property
    def admin_interface(self):
        """Iterate over interfaces, if admin subnet include
        ip address of current interface then return this interface.

        :raises: errors.CanNotFindInterface
        """
        # TODO(enchantner): move to object
        from nailgun.network.manager import NetworkManager
        return NetworkManager.get_admin_interface(self)

    def _check_interface_has_required_params(self, iface):
        # An interface dict is usable only with both a name and a MAC.
        return bool(iface.get('name') and iface.get('mac'))

    def _clean_iface(self, iface):
        # cleaning up unnecessary fields - set to None if bad
        for param in ["max_speed", "current_speed"]:
            val = iface.get(param)
            if not (isinstance(val, int) and val >= 0):
                val = None
            iface[param] = val
        return iface

    def update_meta(self, data):
        # helper for basic checking meta before updation
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning(
                        "Invalid interface data: {0}. "
                        "Interfaces are not updated.".format(iface)
                    )
                    # On the first invalid interface keep the previously
                    # stored interfaces and stop processing.
                    data["interfaces"] = self.meta.get("interfaces")
                    self.meta = data
                    return
                result.append(self._clean_iface(iface))
        data["interfaces"] = result
        self.meta = data

    def create_meta(self, data):
        # helper for basic checking meta before creation
        result = []
        if "interfaces" in data:
            for iface in data["interfaces"]:
                if not self._check_interface_has_required_params(iface):
                    logger.warning(
                        "Invalid interface data: {0}. "
                        "Skipping interface.".format(iface)
                    )
                    # Unlike update_meta, invalid entries are skipped here.
                    continue
                result.append(self._clean_iface(iface))
        data["interfaces"] = result
        self.meta = data

    def reset_name_to_default(self):
        """Reset name to default

        TODO(el): move to node REST object which
        will be introduced in 5.0 release
        """
        # Default name embeds the last 5 characters of the MAC address.
        self.name = u'Untitled ({0})'.format(self.mac[-5:])
class NodeAttributes(Base):
    """Per-node configuration blobs (disk volumes, interface settings)."""
    __tablename__ = 'node_attributes'
    id = Column(Integer, primary_key=True)
    node_id = Column(Integer, ForeignKey('nodes.id'))
    volumes = Column(JSON, default=[])
    interfaces = Column(JSON, default={})
class NodeNICInterface(Base):
    """A physical NIC on a node, with its assigned network groups."""
    __tablename__ = 'node_nic_interfaces'
    id = Column(Integer, primary_key=True)
    node_id = Column(
        Integer,
        ForeignKey('nodes.id', ondelete="CASCADE"),
        nullable=False)
    name = Column(String(128), nullable=False)
    mac = Column(LowercaseString(17), nullable=False)
    max_speed = Column(Integer)
    current_speed = Column(Integer)
    assigned_networks_list = relationship(
        "NetworkGroup",
        secondary=NetworkNICAssignment.__table__,
        order_by="NetworkGroup.id")
    ip_addr = Column(String(25))
    netmask = Column(String(25))
    state = Column(String(25))
    # Set when the NIC is enslaved by a bond.
    parent_id = Column(Integer, ForeignKey('node_bond_interfaces.id'))

    @property
    def type(self):
        """Interface type discriminator: always 'ether' for NICs."""
        return consts.NETWORK_INTERFACE_TYPES.ether

    @property
    def assigned_networks(self):
        """Assigned networks serialized as [{'id': ..., 'name': ...}]."""
        return [
            {"id": n.id, "name": n.name}
            for n in self.assigned_networks_list
        ]

    @assigned_networks.setter
    def assigned_networks(self, value):
        self.assigned_networks_list = value
class NodeBondInterface(Base):
    """A bond (link aggregation) interface composed of slave NICs."""
    __tablename__ = 'node_bond_interfaces'
    id = Column(Integer, primary_key=True)
    node_id = Column(
        Integer,
        ForeignKey('nodes.id', ondelete="CASCADE"),
        nullable=False)
    name = Column(String(32), nullable=False)
    mac = Column(LowercaseString(17))
    assigned_networks_list = relationship(
        "NetworkGroup",
        secondary=NetworkBondAssignment.__table__,
        order_by="NetworkGroup.id")
    state = Column(String(25))
    flags = Column(JSON, default={})
    mode = Column(
        Enum(
            *consts.OVS_BOND_MODES,
            name='bond_mode'
        ),
        nullable=False,
        default=consts.OVS_BOND_MODES[0]
    )
    # NICs enslaved by this bond (via NodeNICInterface.parent_id).
    slaves = relationship("NodeNICInterface", backref="bond")

    @property
    def max_speed(self):
        # Speeds are not tracked for bonds.
        return None

    @property
    def current_speed(self):
        # Speeds are not tracked for bonds.
        return None

    @property
    def type(self):
        """Interface type discriminator: always 'bond'."""
        return consts.NETWORK_INTERFACE_TYPES.bond

    @property
    def assigned_networks(self):
        """Assigned networks serialized as [{'id': ..., 'name': ...}]."""
        return [
            {"id": n.id, "name": n.name}
            for n in self.assigned_networks_list
        ]

    @assigned_networks.setter
    def assigned_networks(self, value):
        self.assigned_networks_list = value
|
<gh_stars>1-10
package utils;
import java.awt.geom.Point2D;
import java.util.HashMap;
import java.util.Map;
public class GeodesicDistanceCalculator {

    /**
     * Computes the geodesic distance in meters between two points given in
     * decimal degrees, using Vincenty's inverse formula on the WGS-84
     * ellipsoid.
     *
     * @return distance in meters; 0.0 for co-incident points; {@code null}
     *         if the iteration failed to converge (can happen for nearly
     *         antipodal points)
     */
    public static Double vincentyDistance(double lat1, double lon1, double lat2, double lon2) {
        // WGS-84: semi-major axis (m), semi-minor axis (m), flattening.
        double a = 6378137, b = 6356752.3142, f = 1 / 298.257223563;
        double L = toRad((lon2 - lon1));
        // Reduced latitudes.
        double U1 = Math.atan((1 - f) * Math.tan(toRad(lat1)));
        double U2 = Math.atan((1 - f) * Math.tan(toRad(lat2)));
        double sinU1 = Math.sin(U1), cosU1 = Math.cos(U1);
        double sinU2 = Math.sin(U2), cosU2 = Math.cos(U2);
        double lambda = L, lambdaP, iterLimit = 100, cosSqAlpha, cosSigma, sigma, sinAlpha, cos2SigmaM, sinSigma, sinLambda, cosLambda;
        do {
            sinLambda = Math.sin(lambda);
            cosLambda = Math.cos(lambda);
            sinSigma = Math.sqrt((cosU2 * sinLambda) * (cosU2 * sinLambda) +
                    (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) * (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda));
            // co-incident points
            if (sinSigma == 0)
                return 0.0;
            cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda;
            sigma = Math.atan2(sinSigma, cosSigma);
            sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma;
            cosSqAlpha = 1 - sinAlpha * sinAlpha;
            if (cosSqAlpha == 0.0) {
                // Equatorial line: cos2SigmaM is undefined, use 0.
                cos2SigmaM = 0;
            } else {
                cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha;
            }
            double C = f / 16 * cosSqAlpha * (4 + f * (4 - 3 * cosSqAlpha));
            lambdaP = lambda;
            lambda = L + (1 - C) * f * sinAlpha *
                    (sigma + C * sinSigma * (cos2SigmaM + C * cosSigma * (-1 + 2 * cos2SigmaM * cos2SigmaM)));
        } while (Math.abs(lambda - lambdaP) > 1e-12 && --iterLimit > 0);
        // formula failed to converge
        if (iterLimit == 0)
            return null;
        double uSq = cosSqAlpha * (a * a - b * b) / (b * b);
        double A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)));
        double B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)));
        double deltaSigma = B * sinSigma * (cos2SigmaM + B / 4 * (cosSigma * (-1 + 2 * cos2SigmaM * cos2SigmaM) -
                B / 6 * cos2SigmaM * (-3 + 4 * sinSigma * sinSigma) * (-3 + 4 * cos2SigmaM * cos2SigmaM)));
        Double s = b * A * (sigma - deltaSigma);
        return s;
    }

    /** Converts degrees to radians. */
    public static double toRad(double val) {
        return val * Math.PI / 180;
    }

    /**
     * This method receives two points in 2D space. One representing an initial geodesic point in decimal
     * format and a second one representing a second point in common Cartesian form. The second point
     * denotes the distance from the first point, if the latter is considered to be point (0,0).
     * The cartesian point expresses distance in kilometers (i.e. <code>newCartesianPointX</code>=1
     * and <code>newCartesianPointY</code>=-1 denotes a point that resides 1 km towards the east and 1
     * km towards the south from the initial geodesic point).
     *
     * This method returns a Point2D point (in double format) which represents the new geodesic point.
     *
     * @param initialLongitude The initial longitude value in decimal format
     * @param initialLatitude The initial latitude value in decimal format
     * @param newCartesianPointX The change in the X axis
     * @param newCartesianPointY The change in the Y axis
     * @return The new geodesic point in decimal format.
     */
    public static Point2D.Double get(double initialLongitude, double initialLatitude, double newCartesianPointX, double newCartesianPointY) {
        if (degreeToKilometersMap == null) {
            initializeDegreeToKilometersMap();
        }
        double kilometersPerDegreeForSpecificLatitude = getKilometersPerDegreeForLatitude(initialLatitude);
        double geopointsLongtitude = newCartesianPointX / kilometersPerDegreeForSpecificLatitude + initialLongitude;
        double geopointsLatitude = newCartesianPointY / kilometersPerDegreeForSpecificLatitude + initialLatitude;
        return new Point2D.Double(geopointsLongtitude, geopointsLatitude);
    }

    // Lookup table: kilometers per degree of longitude-ish arc, keyed by
    // latitude rounded down to a multiple of 10 (0..90). Lazily initialized.
    private static Map<Integer, Double> degreeToKilometersMap;

    private static void initializeDegreeToKilometersMap() {
        degreeToKilometersMap = new HashMap<Integer, Double>();
        degreeToKilometersMap.put(0, 110.57);
        degreeToKilometersMap.put(10, 110.61);
        degreeToKilometersMap.put(20, 110.70);
        degreeToKilometersMap.put(30, 110.85);
        degreeToKilometersMap.put(40, 111.04);
        degreeToKilometersMap.put(50, 111.23);
        degreeToKilometersMap.put(60, 111.41);
        degreeToKilometersMap.put(70, 111.56);
        degreeToKilometersMap.put(80, 111.66);
        degreeToKilometersMap.put(90, 111.69);
    }

    private static double getKilometersPerDegreeForLatitude(double latitude) {
        // Guard against direct use before the map is built.
        if (degreeToKilometersMap == null) {
            initializeDegreeToKilometersMap();
        }
        // Km-per-degree is symmetric about the equator, so use the absolute
        // latitude; clamp to 90 so out-of-range input cannot hit a missing
        // key. The previous version threw NullPointerException (via
        // auto-unboxing of HashMap.get(...) == null) for latitudes below -10
        // or above 90, since only keys 0..90 exist.
        int bucket = (((int) Math.ceil(Math.min(90.0, Math.abs(latitude)))) / 10) * 10;
        return degreeToKilometersMap.get(bucket);
    }
}
package de.htwg.se.durak.model.gameElementsComponent
/**
 * Custom CardDeck for the game field.
 */
trait FieldInterface {
  /**
   * Fields CardDeck
   *
   * @return CardDeck of the field
   */
  val cardDeck: CardDeckInterface

  /**
   * Fields main deck
   *
   * Declared `lazy` so it is not evaluated during trait initialization:
   * a strict `val` here would read the abstract `cardDeck` before the
   * implementing class has initialized it, which yields a
   * NullPointerException when `cardDeck` is defined in the class body.
   *
   * @return List of cards in the field
   */
  lazy val deck: List[CardInterface] = cardDeck.deck

  /**
   * Fields deck size (lazy for the same initialization-order reason).
   *
   * @return Number of cards in the field
   */
  lazy val size: Int = deck.size

  /**
   * Add a card to the field
   *
   * @param card Card that should be added
   * @return new Field instance
   */
  def addCard(card: CardInterface): FieldInterface
}
|
#!/bin/bash
# Post-link script: download the beadarrayExampleData tarball from one of
# several mirrors, verify its md5sum, and install it into the conda R library.
FN="beadarrayExampleData_1.30.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.13/data/experiment/src/contrib/beadarrayExampleData_1.30.0.tar.gz"
  "https://bioarchive.galaxyproject.org/beadarrayExampleData_1.30.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-beadarrayexampledata/bioconductor-beadarrayexampledata_1.30.0_src_all.tar.gz"
)
MD5="78bd38e4588c20d686f5a93af7827809"

# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p "$STAGING"
TARBALL=$STAGING/$FN

SUCCESS=0
for URL in "${URLS[@]}"; do
  # Try the next mirror if this download fails.
  curl "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # md5sum -c expects "<hash>  <file>": two separator characters
    # (space + space/asterisk) between hash and filename.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done

if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi

# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
#!/bin/bash
# Run the given npm command in src/, then in examples/ (where it also
# targets react-dom-lazyload-component).
run_step() {
  # Enter the directory (abort the whole script on failure), report where
  # we are, then run npm with the remaining arguments.
  cd "$1" || exit 1
  echo "Now directory is $(pwd)"
  shift
  npm "$@"
}

echo "start npm $1"
run_step src "$1"
run_step ../examples "$1" react-dom-lazyload-component
|
#!/bin/sh
# Katacoda scenario setup: install unzip and Terraform, initialize Helm
# with the Datadog chart repository, and copy the step1 Terraform files
# into the user's workspace.
TERRAFORM_VERSION="0.12.29"
TERRAFORM_FILE="terraform_${TERRAFORM_VERSION}_linux_amd64.zip"
HELM_FILE=".helm-installer.sh"
SCENARIO_REPOSITORY="https://github.com/ksatirli/katacoda-scenarios.git"
SCENARIO_WORKSPACE="/tmp/scenario"
USER_WORKSPACE="/root/"

# install `unzip` (needed to unpack the Terraform release archive)
apt-get \
  install \
  --quiet \
  --yes \
  "unzip"

# fetch Terraform archive
curl \
  --remote-name \
  "https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/${TERRAFORM_FILE}"

# unzip Terraform archive and make it accessible in PATH
unzip \
  "${TERRAFORM_FILE}" \
  -d "/usr/local/bin/"

# clean up (quoted so the value cannot word-split or glob)
rm \
  --recursive \
  --force \
  "${TERRAFORM_FILE}"

## install Helm
#curl \
#  --fail \
#  --location \
#  --show-error \
#  --output "${HELM_FILE}" \
#  --silent \
#  "https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3" \
#&& \
#chmod 700 "${HELM_FILE}" \
#&& \
#./${HELM_FILE} \
#
## clean up Helm installer
#rm \
#  --recursive \
#  --force \
#  ${HELM_FILE}

# initialize Helm
helm \
  init

## add `stable` Helm Charts (this provides `stable/datadog:2.3.42`)
#helm \
#  repo \
#  add "stable" "https://kubernetes-charts.storage.googleapis.com/"

# add `datadog` Helm Charts (this provides `datadog/datadog:2.4.5`)
helm \
  repo \
  add "datadog" "https://helm.datadoghq.com/"

# update Helm Charts
helm \
  repo \
  update

# create user workspace
mkdir \
  -p "${USER_WORKSPACE}"

# clone course code into workspace and copy relevant files for `step1` to workspace
git \
  clone \
  --depth=1 \
  "${SCENARIO_REPOSITORY}" \
  "${SCENARIO_WORKSPACE}" \
&& \
cp \
  "${SCENARIO_WORKSPACE}"/k8s-terraform-datadog/step1/*.tf "${USER_WORKSPACE}/"
|
#!/bin/bash
# Run every test script in order; abort on the first failure.

# Terminate immediately with an error if any child command fails.
set -e

TEST_SCRIPTS=(
  local_pdiff_test
  fetch_worker_test
  queue_worker_test
  site_diff_test
  timer_worker_test
  workers_test
)

for test_name in "${TEST_SCRIPTS[@]}"; do
  ./tests/"${test_name}.py"
done
|
<reponame>RyelBanfield/react-calculator
import renderer from 'react-test-renderer';

// Snapshot test: render the static home markup to JSON and compare it
// against the stored snapshot.
it('renders correctly', () => {
  const home = renderer
    .create(<div><h1>Welcome to Math-Magicians</h1></div>)
    .toJSON();
  expect(home).toMatchSnapshot();
});
|
#!/bin/bash
# Run a single dieharder RNG test (-d 4) against generator 206 with a fixed
# seed (-S) so the run is reproducible.
# NOTE(review): assumes generator id 206 maps to the intended PRNG on this
# dieharder build — confirm with `dieharder -g -1`.
dieharder -d 4 -g 206 -S 3066549221
|
#! /bin/sh
# Run the race-enabled Go test suite: directly when already inside
# TeamCity's container, otherwise inside a golang:1.13 container.
starttest() {
  set -e
  GO111MODULE=on go test -race ./...
}

if [ -z "${TEAMCITY_VERSION}" ]; then
  # running locally, so start test in a container
  # TEAMCITY_VERSION=local will avoid recursive calls, when it would be running in container
  # $(pwd) is quoted so a checkout path with spaces cannot break the mount.
  docker run --rm --name ristretto-test -ti \
    -v "$(pwd)":/go/src/github.com/dgraph-io/ristretto \
    --workdir /go/src/github.com/dgraph-io/ristretto \
    --env TEAMCITY_VERSION=local \
    golang:1.13 \
    sh test.sh
else
  # running in teamcity, since teamcity itself run this in container, let's simply run this
  starttest
fi
|
'use strict'
const request = require('request-promise-native')
const API_URL = require('../../config').API_URL
exports.index = async (ctx) => {
const title = ctx.query.title
return {
title: title
}
} |
<gh_stars>1-10
package auth
import (
"encoding/json"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"strings"
"sync"
"syscall"
"testing"
"time"
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/aukbit/pluto/v6"
"github.com/aukbit/pluto/v6/auth"
"github.com/aukbit/pluto/v6/auth/jwt"
pba "github.com/aukbit/pluto/v6/auth/proto"
backend "github.com/aukbit/pluto/v6/examples/auth/backend/service"
frontend "github.com/aukbit/pluto/v6/examples/auth/frontend/service"
pbu "github.com/aukbit/pluto/v6/examples/user/proto"
"github.com/aukbit/pluto/v6/reply"
"github.com/aukbit/pluto/v6/server"
"github.com/aukbit/pluto/v6/server/router"
"github.com/paulormart/assert"
)
// Error wraps a bare message string; used only when decoding error payloads
// in these tests. NOTE(review): the embedded field is the unnamed `string`
// type, so it is not addressable by name.
type Error struct {
	string
}
// Base URLs of the mock user frontend and the auth frontend started by
// TestMain.
const (
	USER_URL = "http://localhost:8088"
	AUTH_URL = "http://localhost:8089"
)

// RSA key pair used by the auth backend to sign and verify JWTs.
var (
	privKeyPath = "./keys/auth.rsa"
	pubKeyPath  = "./keys/auth.rsa.pub"
)

// wg tracks the four background services so TestMain can wait for them.
var wg sync.WaitGroup
// TestMain boots the mock user backend/frontend and the auth
// backend/frontend as goroutines before running the tests (skipped under
// -short). The sleeps give each service time to start listening before the
// next one depends on it.
func TestMain(m *testing.M) {
	if !testing.Short() {
		wg.Add(4)
		go MockUserBackend()
		time.Sleep(time.Millisecond * 500)
		go MockUserFrontend()
		time.Sleep(time.Millisecond * 500)
		go RunAuthBackend()
		time.Sleep(time.Millisecond * 500)
		go RunAuthFrontend()
		time.Sleep(time.Millisecond * 1000)
	}
	result := m.Run()
	if !testing.Short() {
		wg.Wait()
	}
	os.Exit(result)
}
// TestExampleAuth exercises the full auth flow: obtain a JWT from the auth
// frontend using HTTP Basic credentials, then use it as a Bearer token to
// create a user through the protected user frontend.
func TestExampleAuth(t *testing.T) {
	// SIGINT shuts down the services started by TestMain when this test ends.
	defer syscall.Kill(syscall.Getpid(), syscall.SIGINT)
	r, err := http.NewRequest("POST", AUTH_URL+"/authenticate", strings.NewReader(`{}`))
	if err != nil {
		t.Fatal(err)
	}
	r.SetBasicAuth("<EMAIL>", "<PASSWORD>")
	// call handler
	response, err := http.DefaultClient.Do(r)
	if err != nil {
		t.Fatal(err)
	}
	actualBody, err := ioutil.ReadAll(response.Body)
	defer response.Body.Close()
	if err != nil {
		t.Fatal(err)
	}
	// The response body is expected to be a JSON-encoded token.
	token := &pba.Token{}
	err = json.Unmarshal(actualBody, token)
	if err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, response.Header.Get("Content-Type"), "application/json")
	assert.Equal(t, http.StatusOK, response.StatusCode)
	assert.Equal(t, true, len(token.Jwt) > 0)
	// Test access to private resources
	r, err = http.NewRequest("POST", USER_URL+"/user",
		strings.NewReader(`{"name":"Gopher", "email": "<EMAIL>", "password":"<PASSWORD>"}`))
	if err != nil {
		t.Fatal(err)
	}
	// set Bearer authorization header
	r.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token.Jwt))
	// call handler
	resp, err := http.DefaultClient.Do(r)
	if err != nil {
		t.Fatal(err)
	}
	defer resp.Body.Close()
	var v interface{}
	if err = json.NewDecoder(resp.Body).Decode(&v); err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, "application/json", resp.Header.Get("Content-Type"))
	assert.Equal(t, http.StatusCreated, resp.StatusCode)
	assert.Equal(t, "ok", v)
}
// Helper functions
// RunAuthBackend loads the RSA key pair and starts the example auth backend
// service; any failure is fatal to the test process.
func RunAuthBackend() {
	defer wg.Done()
	prv, err := jwt.LoadPrivateKey(privKeyPath)
	if err != nil {
		log.Fatal(err)
	}
	pub, err := jwt.LoadPublicKey(pubKeyPath)
	if err != nil {
		log.Fatal(err)
	}
	if err := backend.Run(pub, prv); err != nil {
		log.Fatal(err)
	}
}
// RunAuthFrontend starts the example auth frontend service; any failure is
// fatal to the test process.
func RunAuthFrontend() {
	defer wg.Done()
	if err := frontend.Run(); err != nil {
		log.Fatal(err)
	}
}
// type A func(s *grpc.Server, srv pbu.UserServiceServer)
// MockUserBackend runs a gRPC server on :65080 that serves the stub
// MockUser implementation of the user service.
func MockUserBackend() {
	defer wg.Done()
	// Define Pluto Server
	grpcSrv := server.New(
		server.Addr(":65080"),
		server.GRPCRegister(func(g *grpc.Server) {
			pbu.RegisterUserServiceServer(g, &MockUser{})
		}),
	)
	// Define Pluto Service
	s := pluto.New(
		pluto.Name("MockUserBackend"),
		pluto.Servers(grpcSrv),
		pluto.HealthAddr(":9094"),
	)
	// Run service
	if err := s.Run(); err != nil {
		log.Fatal(err)
	}
}
// MockUserFrontend runs an HTTP server on :8088 whose /user endpoint is
// protected by Bearer-token auth middleware backed by the auth service at
// 127.0.0.1:65081.
func MockUserFrontend() {
	defer wg.Done()
	// Define handlers
	mux := router.New()
	mux.POST("/user", PostHandler)
	// define http server
	srv := server.New(
		server.Name("user_api"),
		server.Addr(":8088"),
		server.Mux(mux),
		server.Middlewares(auth.MiddlewareBearerAuth()),
	)
	// define authentication client
	clt := auth.NewClientAuth("127.0.0.1:65081")
	// Define Pluto service
	s := pluto.New(
		pluto.Name("MockUserFrontend"),
		pluto.Servers(srv),
		pluto.Clients(clt),
		pluto.HealthAddr(":9095"),
	)
	// Run service
	if err := s.Run(); err != nil {
		log.Fatal(err)
	}
}
// User frontend views
// PostHandler stands in for the real user-creation view: it always replies
// 201 Created with the JSON body "ok".
func PostHandler(w http.ResponseWriter, r *http.Request) {
	// ...
	// create user with data sent on user backend
	// check examples/user/frontend/views
	// ...
	reply.Json(w, r, http.StatusCreated, "ok")
}
// User backend views
// MockUser is a stub pbu.UserServiceServer. All methods return empty
// messages; VerifyUser always reports valid credentials, which is what the
// auth flow in these tests relies on.
type MockUser struct{}

func (s *MockUser) ReadUser(ctx context.Context, nu *pbu.User) (*pbu.User, error) {
	// ...
	return &pbu.User{}, nil
}

func (s *MockUser) CreateUser(ctx context.Context, nu *pbu.NewUser) (*pbu.User, error) {
	// ...
	return &pbu.User{}, nil
}

func (s *MockUser) UpdateUser(ctx context.Context, nu *pbu.User) (*pbu.User, error) {
	// ...
	return &pbu.User{}, nil
}

func (s *MockUser) DeleteUser(ctx context.Context, nu *pbu.User) (*pbu.User, error) {
	// ...
	return &pbu.User{}, nil
}

func (s *MockUser) FilterUsers(ctx context.Context, nu *pbu.Filter) (*pbu.Users, error) {
	// ...
	return &pbu.Users{}, nil
}

// VerifyUser unconditionally validates the supplied credentials.
func (s *MockUser) VerifyUser(ctx context.Context, nu *pbu.Credentials) (*pbu.Verification, error) {
	// ...
	// verify user with data persisted
	// check examples/user/backend/views
	// ...
	return &pbu.Verification{IsValid: true}, nil
}

func (s *MockUser) StreamUsers(nu *pbu.Filter, stream pbu.UserService_StreamUsersServer) error {
	// ...
	return nil
}
|
/*
* The MIT License (MIT)
*
* Copyright (c) 2020-2021 TheRandomLabs
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.therandomlabs.randompatches;
import com.therandomlabs.autoconfigtoml.TOMLConfigSerializer;
import com.therandomlabs.randompatches.client.CauldronWaterTranslucencyHandler;
import com.therandomlabs.randompatches.client.RPContributorCapeHandler;
import me.sargunvohra.mcmods.autoconfig1u.AutoConfig;
import net.fabricmc.api.ModInitializer;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
 * The main class for RandomPatches.
 */
public final class RandomPatches implements ModInitializer {
    /**
     * The RandomPatches mod ID.
     */
    public static final String MOD_ID = "randompatches";

    /**
     * The RandomPatches logger. This should only be used by RandomPatches.
     */
    public static final Logger logger = LogManager.getLogger(MOD_ID);

    // Lazily created by reloadConfig(); not thread-safe by design (see
    // suppression below) — config access is expected on the game thread.
    @SuppressWarnings("PMD.NonThreadSafeSingleton")
    @Nullable
    private static TOMLConfigSerializer<RPConfig> serializer;

    /**
     * {@inheritDoc}
     */
    @Override
    public void onInitialize() {
        reloadConfig();
    }

    /**
     * Called after {@link net.minecraft.client.MinecraftClient} is initialized.
     */
    public static void postClientInit() {
        CauldronWaterTranslucencyHandler.enable();
        if (RandomPatches.config().client.contributorCapes) {
            RPContributorCapeHandler.downloadContributorList();
        }
    }

    /**
     * Returns the RandomPatches configuration.
     *
     * @return an {@link RPConfig} object.
     */
    // NullAway suppressed: reloadConfig() sets serializer through the
    // AutoConfig.register callback before getConfig() is reached.
    // NOTE(review): if registration ever failed to invoke the callback,
    // this would still NPE — confirm AutoConfig's contract.
    @SuppressWarnings("NullAway")
    public static RPConfig config() {
        if (serializer == null) {
            reloadConfig();
        }
        return serializer.getConfig();
    }

    /**
     * Reloads the RandomPatches configuration from disk.
     */
    public static void reloadConfig() {
        if (serializer == null) {
            // First call: register the config class and capture the serializer.
            AutoConfig.register(RPConfig.class, (definition, configClass) -> {
                serializer = new TOMLConfigSerializer<>(definition, configClass);
                return serializer;
            });
        } else {
            serializer.reloadFromDisk();
        }
    }
}
|
// NOTE - You must have a level with the name "start". This is used as the first level in the game.
// Game definition: top-level music/background are the defaults; each level
// may override them and supplies a message plus the choices (text +
// nextLevel) that link levels together.
var game = {
  music: "98_Lost_Mine.mp3",
  background_image: "bed.jpg",
  levels: {
    // Entry point — required by the engine (see NOTE above).
    start: {
      message: "You Awake in the Morning Feeling Sluggish",
      choices: [
        {
          text: "Get up and Walk About",
          nextLevel: "Hallway",
        },
        {
          text: "Go back to sleep",
          nextLevel: "Dreamland",
        },
      ]
    },
    Hallway: {
      background_image: "hallway.jpg",
      music: "Final-Fantasy-7-Boss-Battle.mp3",
      message: "You Hear Voices Amongst the Walls",
      choices: [
        {
          text: "Run Away and Sleep",
          nextLevel: "Dreamland",
        },
        {
          text: "Encounter the voice Behind the Madness",
          nextLevel: "Satan_Room",
        },
      ]
    },
    Satan_Room: {
      background_image: "j.jpg",
      music: "Final-Fantasy-7-Boss-Battle.mp3",
      message: "You Encounter Satan",
      choices: [
        {
          text: "Face Your Fears",
          nextLevel: "Succeed",
        },
        {
          text: "Give Up",
          nextLevel: "Pawn_Room",
        },
      ]
    },
    Pawn_Room: {
      background_image: "pawn.jpg",
      music: "Final-Fantasy-7-Boss-Battle.mp3",
      message: "You Become a Pawn of Satan",
      choices: [
        {
          text: "Relapse",
          nextLevel: "start",
        },
      ]
    },
    Succeed: {
      background_image: "sus.jpg",
      music: "Final-Fantasy-7-Boss-Battle.mp3",
      message: "Success",
      choices: [
        {
          text: "You Are Freed",
          nextLevel: "start",
        },
      ]
    },
    Dreamland: {
      background_image: "d.jpg",
      message: "As you Drift off you Begin to Dream",
      choices: [
        {
          text: "Relapse",
          nextLevel: "start",
        },
      ]
    },
  }
};
|
<reponame>diwang011/h<gh_stars>0
package com.example.entity;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import java.io.Serializable;
import java.util.Set;
@Entity
//@JsonIgnoreProperties("books")
/**
 * JPA entity for an author; the inverse side of the many-to-many
 * relationship with {@code Book} (owned by {@code Book.authors}).
 */
public class Author implements Serializable
{
    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue
    private Integer id;

    private String name;

    @ManyToMany(mappedBy = "authors")
    private Set<Book> books;

    public Author()
    {
        super();
    }

    public Author(String name)
    {
        super();
        this.name = name;
    }

    public Integer getId()
    {
        return id;
    }

    public void setId(Integer id)
    {
        this.id = id;
    }

    public String getName()
    {
        return name;
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public Set<Book> getBooks()
    {
        return books;
    }

    public void setBooks(Set<Book> books)
    {
        this.books = books;
    }

    // NOTE(review): including `books` here touches the lazy collection and,
    // if Book.toString prints its authors, can recurse — confirm Book's
    // toString before relying on this outside an open persistence context.
    @Override
    public String toString()
    {
        return String.format("Author [id=%s, name=%s, books=%s]", id, name, books);
    }
}
|
import {Subject} from '../../generated/model/subject';
import {UserControllerService} from '../../generated/api/userController.service';
import {Observable} from 'rxjs/Observable';
import {CollectionViewer, DataSource} from '@angular/cdk/collections';
import {AlertService} from '../services/alert.service';
import {Injectable} from '@angular/core';
import {BehaviorSubject} from 'rxjs/BehaviorSubject';
@Injectable()
/**
 * CDK DataSource feeding the user table. Emits the latest page of subjects
 * through an internal BehaviorSubject; call refresh() to reload from the
 * backend.
 */
export class UserDataSource extends DataSource<Subject> {
  private subject = new BehaviorSubject<Subject[]>([]); // start with null subject

  // Observable<User[]> content: = Observable.create();
  // TODO: implement a proper observable here that send array of userdata wheneveer filter, page, sort changes in the frontend grid...
  constructor(private adminControllerService: UserControllerService,
              private alertService: AlertService) {
    super();
  }

  // see: https://github.com/angular/material2/issues/5917
  /** Connect function called by the table to retrieve one stream containing the data to render. */
  connect(collectionViewer: CollectionViewer): Observable<Subject[]> {
    return this.subject.asObservable();
  }

  // Nothing to tear down; the BehaviorSubject lives as long as the source.
  disconnect(collectionViewer: CollectionViewer) {
  }

  /** Reload the current page from the backend; emits [] on error. */
  public refresh(): void {
    this.adminControllerService.findPageUsingGET().subscribe(
      result => {
        this.subject.next(result.content);
      },
      error => {
        this.alertService.handleError(error.error);
        this.subject.next([]);
      }
    );
  }
}
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# The heredoc delimiter is unquoted on purpose: ${PLACEHOLDER}, ${WIDTH},
# ${HEIGHT} and ${PREFIX} are expanded from the environment when the page
# is written.
# NOTE(review): assumes those four environment variables are set by the
# caller — confirm in the provisioning tooling that invokes this script.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Bienvenidos a la app de ${PREFIX}'s.
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
#!/bin/bash
# Waits for a response from a website and prints out the response time
if [ $# -eq 0 ]
then
  echo "No arguments supplied"
  echo "Usage: $0 website_url"
  exit 1
fi

SITE="$1"
echo "Pinging $SITE …"

# Time the request; fail loudly if curl cannot reach the site at all
# (previously a failed request silently printed an empty/zero time).
if ! TIME=$(curl -o /dev/null --silent --head --write-out '%{time_total}\n' "$SITE"); then
  echo "Error: no response from $SITE" >&2
  exit 1
fi

# printf instead of echo -e: portable, and safe if TIME ever starts with '-'.
printf '\nResponse time: %s seconds\n' "$TIME"
<reponame>zaineb125/NestJs<filename>src/premier/premier/premier.controller.ts<gh_stars>0
import { Controller, Delete, Get, Patch, Post } from '@nestjs/common';
@Controller('premier')
/**
 * Minimal demo controller for the /premier route: each HTTP verb returns a
 * fixed string naming the verb.
 */
export class PremierController {
  @Get()
  getPremier():string{
    return 'GET';
  }

  @Post()
  postPremier (){
    return 'POST';
  }

  @Delete()
  deletePremier(){
    return 'Delete';
  }

  @Patch()
  patchPremier(){
    return 'patch';
  }
}
|
/// Error for [`advertise_start`]
///
/// `Timeout` and `NoFreeConn` map from raw codes 1 and 2 respectively
/// (see `convert_raw_error`); anything else is preserved in `Raw`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
pub enum AdvertiseError {
    Timeout,
    NoFreeConn,
    Raw(RawError),
}
// Define the RawError type for demonstration purposes.
// Carries the numeric error code as reported by the lower layer.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
struct RawError {
    error_code: u32,
}
// Map a low-level RawError onto the corresponding AdvertiseError variant;
// codes without a dedicated variant are passed through as Raw.
fn convert_raw_error(raw_error: RawError) -> AdvertiseError {
    if raw_error.error_code == 1 {
        AdvertiseError::Timeout
    } else if raw_error.error_code == 2 {
        AdvertiseError::NoFreeConn
    } else {
        AdvertiseError::Raw(raw_error)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Covers the two mapped codes (1, 2) and the pass-through case.
    #[test]
    fn test_convert_raw_error() {
        let timeout_error = RawError { error_code: 1 };
        assert_eq!(convert_raw_error(timeout_error), AdvertiseError::Timeout);
        let no_free_conn_error = RawError { error_code: 2 };
        assert_eq!(convert_raw_error(no_free_conn_error), AdvertiseError::NoFreeConn);
        let custom_error = RawError { error_code: 3 };
        assert_eq!(convert_raw_error(custom_error), AdvertiseError::Raw(custom_error));
    }
}
const __ = require('./const')
const TimeString = require('./data/time.string')
// Lightweight logging facade: each channel is gated by a flag from the
// project's constants module and prefixed with a TimeString timestamp.
// NOTE(review): error() is gated by __.TEST while log()/warn() use
// __.DEBUG — assumed intentional, but worth confirming.
const Debugger = class {
  error (message) {
    if (__.TEST) {
      console.error(`${new TimeString().toString()} E -> ${message}`)
    }
  }

  log (message) {
    if (__.DEBUG) {
      console.log(`${new TimeString().toString()} ${message}`)
    }
  }

  warn (message) {
    if (__.DEBUG) {
      console.warn(`${new TimeString().toString()} W -> ${message}`)
    }
  }
}

// Export a single shared instance.
module.exports = new Debugger()
# Join model recording that a user attended (or is attending) a meeting.
class Attendance < ApplicationRecord
  belongs_to :meeting
  belongs_to :user
end
|
require 'simplecov' # has to be at top, will execute ../.simplecov
require 'spawning_logger'
require 'minitest/autorun'
require 'minitest/reporters'

# Pick the reporter that matches the environment: RubyMine / TeamCity set
# RM_INFO / TEAMCITY_VERSION and need their own IDE-friendly reporter;
# everywhere else use the human-readable spec reporter.
if ENV["RM_INFO"] || ENV["TEAMCITY_VERSION"]
  MiniTest::Reporters.use! MiniTest::Reporters::RubyMineReporter.new
else
  MiniTest::Reporters.use! MiniTest::Reporters::SpecReporter.new
end
|
package misrraimsp.fourthrest.service;
import lombok.RequiredArgsConstructor;
import misrraimsp.fourthrest.data.ExpenseRepository;
import misrraimsp.fourthrest.data.PersonRepository;
import misrraimsp.fourthrest.model.Person;
import misrraimsp.fourthrest.model.dto.PersonConverter;
import misrraimsp.fourthrest.model.dto.PersonDTO;
import misrraimsp.fourthrest.model.dto.TransferDTO;
import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.List;
import java.util.Stack;
import java.util.stream.Collectors;
@Service
@RequiredArgsConstructor
public class PersonServer {
private final PersonRepository personRepository;
private final ExpenseRepository expenseRepository;
public List<PersonDTO> findAll() {
BigDecimal duty = this.getDuty();
return personRepository
.findAll()
.stream()
.map(person -> PersonConverter.convertPersonToDto(person,duty))
.collect(Collectors.toList());
}
public PersonDTO persist(PersonDTO dto) {
Person saved = personRepository.save(PersonConverter.convertDtoToPerson(dto));
return PersonConverter.convertPersonToDto(saved, this.getDuty());
}
public List<TransferDTO> getTransfers() {
Stack<PersonDTO> debtors = new Stack<>();
Stack<PersonDTO> creditors = new Stack<>();
this.findAll().forEach(personDTO -> {
if (personDTO.getBalance().compareTo(BigDecimal.ZERO) < 0) debtors.push(personDTO);
if (personDTO.getBalance().compareTo(BigDecimal.ZERO) > 0) creditors.push(personDTO);
});
List<TransferDTO> transfers = new ArrayList<>();
while (!debtors.empty() && !creditors.empty()) {
PersonDTO debtor = debtors.peek();
PersonDTO creditor = creditors.peek();
TransferDTO transferDTO = new TransferDTO();
transferDTO.setIssuerFirstName(debtor.getFirstName());
transferDTO.setIssuerLastName(debtor.getLastName());
transferDTO.setRecipientFirstName(creditor.getFirstName());
transferDTO.setRecipientLastName(creditor.getLastName());
BigDecimal result = debtor.getBalance().add(creditor.getBalance());
if (result.compareTo(BigDecimal.ZERO) < 0) {
transferDTO.setAmount(creditor.getBalance());
debtor.setBalance(result);
creditors.pop();
} else if (result.compareTo(BigDecimal.ZERO) > 0) {
transferDTO.setAmount(debtor.getBalance().abs());
creditor.setBalance(result);
debtors.pop();
} else {
transferDTO.setAmount(creditor.getBalance());
debtors.pop();
creditors.pop();
}
transfers.add(transferDTO);
}
return transfers;
}
private BigDecimal getDuty() {
BigDecimal total = BigDecimal.valueOf(expenseRepository.getTotalExpense()).setScale(3, RoundingMode.HALF_UP);
BigDecimal count = BigDecimal.valueOf(personRepository.count()).setScale(3, RoundingMode.HALF_UP);
return total.divide(count, 3, RoundingMode.HALF_UP);
}
}
|
<gh_stars>0
from OO_fraction import *
# Exercise Fraction's named methods against their operator equivalents on a
# set of known-answer cases.  Output is identical to writing every case out
# longhand: method result, operator result, then a blank line between cases
# (but not after the final one).
CASES = [
    ("add", Fraction(1, 2), Fraction(1, 3)),                              # = 5/6
    ("add", Fraction(1, 3), Fraction(1, 6)),                              # = 1/2
    ("add", Fraction(8, 9), Fraction(1, 9)),                              # = 1
    ("add", Fraction(1, 200000000), Fraction(1, 300000000)),              # = 1/120000000
    ("add", Fraction(1073741789, 20), Fraction(1073741789, 30)),          # = 1073741789/12
    ("add", Fraction(4, 17), Fraction(17, 4)),                            # = 305/68
    ("multiply", Fraction(4, 17), Fraction(17, 4)),                       # = 1
    ("multiply", Fraction(3037141, 3247033), Fraction(3037547, 3246599)), # = 841/961
    ("subtract", Fraction(1, 6), Fraction(-4, -8)),                       # = -1/3
]

OPERATORS = {
    "add": lambda a, b: a + b,
    "multiply": lambda a, b: a * b,
    "subtract": lambda a, b: a - b,
}

for index, (method, x, y) in enumerate(CASES):
    # Named-method form, then the overloaded-operator form.
    print(getattr(x, method)(y))
    print(OPERATORS[method](x, y))
    if index < len(CASES) - 1:
        print()
|
import os
import urllib.request
import tarfile
from conans import ConanFile, CMake
class SoftwareProject(ConanFile):
    """Conan recipe: download, extract and CMake-configure the project."""
    name = "software_project"
    version = "1.0"
    settings = "os", "compiler", "build_type", "arch"
    _source_subfolder = "source"
    _build_subfolder = "build"
    _cmake = None

    def _download_source_code(self):
        """Fetch the source tarball listed in conandata for this version."""
        source_url = self.conan_data["sources"][self.version]
        download_path = os.path.join(self._source_subfolder, "master.tgz")
        urllib.request.urlretrieve(source_url, download_path)

    def _extract_source_code(self):
        """Unpack the tarball, stripping its common top-level directory.

        Fix: the original called ``extractall`` twice — once *before*
        rewriting member names — so the tree was unpacked a first time with
        its un-stripped paths, leaving a duplicate nested copy behind.
        """
        source_file = os.path.join(self._source_subfolder, "master.tgz")
        with tarfile.open(source_file, "r:gz") as tar:
            members = tar.getmembers()
            if members:
                # Strip e.g. "project-master/" so sources land directly in
                # _source_subfolder.
                common_prefix = os.path.commonprefix([member.name for member in members])
                for member in members:
                    member.name = member.name.replace(common_prefix, "", 1)
            tar.extractall(path=self._source_subfolder)

    def _configure_cmake(self):
        """Lazily create and configure the CMake helper (cached)."""
        if self._cmake:
            return self._cmake
        self._cmake = CMake(self)
        self._cmake.configure(build_folder=self._build_subfolder)
        return self._cmake
#!/bin/bash
# Reduce TolTEC data files matching an optional obsnum fragment.
#
# Usage: $0 [obsnum-fragment]
# With no argument, match every "*toltec*<digit>.nc" file; otherwise narrow
# the pattern with the given fragment.
if [[ ! $1 ]]; then
    echo "reduce the latest"
    name_pattern='*toltec*[0-9].nc'
else
    name_pattern="*toltec*[0-9]${1}*.nc"
fi
# Fix: quote the pattern so the shell cannot glob-expand it against the
# current directory before echo sees it.
echo "$name_pattern"
# Show what will be processed (one path per line).
find /data/data_toltec/ics/ -name "${name_pattern}" -print
# Fan the matched files out to reduce.sh, one parallel job per file.
find /data/data_toltec/ics/ -name "${name_pattern}" | parallel "$HOME/kids_bin/reduce.sh {} -r --output dummy_output"
# for i in $(find /data/data_toltec/ics/ -name "${name_pattern}"); do
# echo $HOME/kids_bin/reduce.sh $i -r --output dummy_output
# $HOME/kids_bin/reduce.sh $i -r --output dummy_output
# done
|
#!/bin/bash
# Convenience script for interactive-ish development. Starts up some background
# processes to constantly update and restart things.
#
# Specifically:
# - Recompile on source change
# - Restart servers on relevant changes
# - Re-run tests on source change
# - Rebuild haddock on source change
# - Rebuild tags file on source change
function watch_serve() {
  # Serve directory "$1" on port "$2" with sprinkles, restarting the server
  # whenever the sprinkles binary, the project config, templates, static
  # dirs, or this script change.
  BASEDIR="$(realpath .)"
  # Fix: the original ignored a failed cd and would loop forever serving
  # the wrong directory.
  cd "$1" || return 1
  for ((;;))
  do
    sleep 1
    pgrep -l sprinkles
    sprinkles "$2" &
    PID="$!"
    echo "$PID: $1 :$2"
    pgrep -l sprinkles
    # Block until something relevant changes; "-e attrib" fires when the
    # sprinkles binary is replaced by "stack install".
    inotifywait \
      -e attrib \
      "$(which sprinkles)" \
      project.yml \
      templates/** \
      $(find -name static) \
      "$BASEDIR"/run-devel.sh
    sleep 1
    kill "$PID"
  done
}
function watch_hasktags() {
  # Regenerate the ctags file whenever any Haskell source file is modified.
  for ((;;))
  do
    inotifywait \
      -e modify \
      src/**/*.hs app/*.hs test
    hasktags . -c
  done
}
# Build once so the watchers have binaries to run, start one watcher per
# example project (each on its own port) plus the tags watcher in the
# background, then keep rebuilding in the foreground on every file change.
stack install # --test --haddock
watch_serve examples/blogg 5100 &
watch_serve examples/countryInfo 5101 &
watch_serve examples/playground 5102 &
watch_hasktags &
stack install --file-watch --test # --haddock
|
<filename>server/integration-tests/integration-tests-mysql/src/test/scala/com/prisma/integration/deploychecks/DeleteRelationDeploySpec.scala<gh_stars>1000+
package com.prisma.integration.deploychecks
import com.prisma.integration.IntegrationBaseSpec
import org.scalatest.{FlatSpec, Matchers}
class DeleteRelationDeploySpec extends FlatSpec with Matchers with IntegrationBaseSpec {

  // Dropping a relation that has no rows referencing it must deploy cleanly.
  "Deleting a relation when there is no data" should "succeed" in {
    val schema =
      """|type A {
         | id: ID! @id
         | name: String! @unique
         | b: B! @relation(link: INLINE)
         |}
         |
         |type B {
         | id: ID! @id
         | name: String! @unique
         | a: A
         |}"""
    val (project, _) = setupProject(schema)

    // Replace both related types entirely; nothing references the relation.
    val schema2 =
      """|type C {
         | id: ID! @id
         | name: String! @unique
         |}
         |"""
    deployServer.deploySchemaThatMustSucceed(project, schema2, 3)
  }

  // With an existing A<->B node pair, removing the relation fields must
  // deploy with a data-loss warning rather than fail outright.
  "Deleting a relation when there is data" should "should warn" in {
    val schema =
      """|type A {
         | id: ID! @id
         | name: String! @unique
         | b: B! @relation(link: INLINE)
         |}
         |
         |type B {
         | id: ID! @id
         | name: String! @unique
         | a: A
         |}"""
    val (project, _) = setupProject(schema)

    // Create one connected pair so the relation actually holds data.
    apiServer.query("""mutation{createA(data:{name: "A", b :{create:{name: "B"}}}){name}}""", project)

    val schema2 =
      """|type A {
         | id: ID! @id
         | name: String! @unique
         |}
         |
         |type B {
         | id: ID! @id
         | name: String! @unique
         |}"""
    deployServer.deploySchemaThatMustWarn(project, schema2).toString should be(
      """{"data":{"deploy":{"migration":null,"errors":[],"warnings":[{"description":"You already have nodes for this relation. This change will result in data loss."}]}}}""")
  }
}
|
# Interactive guard: confirm before modifying the system with apt.
echo "This script will install packages required by PyRate. Continue?"
select yn in "Yes" "No"; do
    case $yn in
        Yes ) break;;
        No ) exit;;
    esac
done

# OS package requirements for Ubuntu 18.04
sudo apt-get update
# GDAL (raster I/O) runtime + headers, and OpenMPI runtime + headers.
sudo apt-get -y install \
    gdal-bin \
    libgdal-dev \
    openmpi-bin \
    libopenmpi-dev
|
<filename>7-assets/past-student-repos/LambdaSchool-master/m1/13b1/assignments/callbacks.js
// Create a higher order function and invoke the callback function to test your work. You have been provided an example of a problem and a solution to see how this works with our items array. Study both the problem and the solution to figure out the rest of the problems.
const items = ['Pencil', 'Notebook', 'yo-yo', 'Gum'];

// GIVEN THIS PROBLEM:
function firstItem(arr, cb) {
  // firstItem passes the first item of the given array to the callback function.
}

// SOLUTION:
// NOTE(review): this declaration shares its name with the stub above;
// function hoisting means this second definition is the one that runs.
function firstItem(arr, cb) {
  return cb(arr[0]);
}

// NOTES ON THE SOLUTION:
// firstItem is a higher order function.
// It expects a callback (referred to as `cb`) as its second argument.
// To test our solution, we can use the given `items` array and a variety of callbacks.
// Note how callbacks can be declared separately, or inlined.

// TEST 1 (inlined callback):
const test1 = firstItem(items, item => `I love my ${item}!`);
console.log(test1); // "I love my Pencil!"

// TEST 2 (declaring callback before hand):
function logExorbitantPrice(article) {
  return `this ${article} is worth a million dollars!`;
};
const test2 = firstItem(items, logExorbitantPrice);
console.log(test2); // "this Pencil is worth a million dollars!"

// Sample inputs shared by the exercises below.
let arr = [1, 2, 3, 4];
let x = 2;
let y = 3;
// Each helper computes one value and hands it straight to the callback,
// returning whatever the callback returns.

function getLength(arr, cb) {
  // getLength passes the length of the array into the callback.
  const length = arr.length;
  return cb(length);
}

function last(arr, cb) {
  // last passes the last item of the array into the callback.
  const finalItem = arr[arr.length - 1];
  return cb(finalItem);
}

function sumNums(x, y, cb) {
  // sumNums adds two numbers (x, y) and passes the result to the callback.
  const sum = x + y;
  return cb(sum);
}

function multiplyNums(x, y, cb) {
  // multiplyNums multiplies two numbers and passes the result to the callback.
  const product = x * y;
  return cb(product);
}
let item = "apple";
let list = ["apple", "orange", "banana"];

// contains checks if an item is present inside of the given array/list.
// Pass true to the callback if it is, otherwise pass false.
//
// Fix: the original passed a *function* to the callback instead of a
// boolean, and its inner loop returned after inspecting only the first
// element, so membership anywhere past index 0 was never detected.
function contains(item, list, cb) {
  return cb(list.indexOf(item) !== -1);
}
/* STRETCH PROBLEM */
let array = [1, 2, 3, 4, 2, 5, 5, 6, 8, 3];

// removeDuplicates removes all duplicate values from the given array.
// Pass the duplicate free array to the callback function.
// Do not mutate the original array.
//
// Fix: the original called itself unconditionally (infinite recursion),
// mutated the input with splice(), and only removed duplicates that
// happened to be adjacent.  A Set keeps the first occurrence of each
// value, preserving order, without touching the input.
function removeDuplicates(array, cb) {
  return cb([...new Set(array)]);
}
<reponame>Lambda-Anywhere-Fitness-BW/back-end<filename>src/src/main/java/com/justinbenz/anytimefitnessbe/controllers/OpenController.java<gh_stars>0
package com.justinbenz.anytimefitnessbe.controllers;
import com.justinbenz.anytimefitnessbe.models.*;
import com.justinbenz.anytimefitnessbe.services.ClientService;
import com.justinbenz.anytimefitnessbe.services.InstructorService;
import com.justinbenz.anytimefitnessbe.services.RoleService;
import com.justinbenz.anytimefitnessbe.services.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.*;
import org.springframework.security.oauth2.common.OAuth2AccessToken;
import org.springframework.security.oauth2.provider.token.TokenStore;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* The class allows access to endpoints that are open to all users regardless of authentication status.
* Its most important function is to allow a person to create their own username
*/
// NOTE(review): the literal "<PASSWORD>()" calls below are redactions left
// by a scrubbing tool in the checked-in source (they do not compile);
// presumably they read the password from newminuser — recover the original
// calls from version-control history before building.
@RestController
public class OpenController
{
    /**
     * A method in this controller adds a new user to the application so needs access to User Services to do this.
     */
    @Autowired
    private UserService userService;

    /**
     * A method in this controller adds a new user to the application with the role User so needs access to Role Services to do this.
     */
    @Autowired
    private RoleService roleService;

    // Persists the client-specific profile created alongside a new user.
    @Autowired
    private ClientService clientService;

    // Persists the instructor-specific profile created alongside a new user.
    @Autowired
    private InstructorService instructorService;

    // Lets /logout look up and revoke the caller's OAuth2 access token.
    @Autowired
    private TokenStore tokenStore;

    /**
     * This endpoint allows anyone to create an account with the default role of CLIENT. That role is hardcoded in this method.
     *
     * @param httpServletRequest the request that comes in for creating the new user
     * @param newminuser A special minimum set of data that is needed to create a new user
     * @return The access token and other relevant token data. Status of CREATED. The location header to look up the new user.
     * @throws URISyntaxException we create some URIs during this method. If anything goes wrong with that creation, an exception is thrown.
     */
    @PostMapping(value = "/register/client",
            consumes = {"application/json"},
            produces = {"application/json"})
    public ResponseEntity<?> addSelf(
            HttpServletRequest httpServletRequest,
            @Valid
            @RequestBody
            ClientMinimum newminuser)
            throws
            URISyntaxException
    {
        // Create the user
        User newuser = new User();
        newuser.setUsername(newminuser.getUsername());
        newuser.setPassword(<PASSWORD>());
        newuser.setEmail(newminuser.getEmail());
        newuser.setName(newminuser.getName());
        newuser.setBio(newminuser.getBio());
        newuser.setAviurl(newminuser.getAviurl());

        // add the default role of user
        Set<UserRoles> newRoles = new HashSet<>();
        // Debug output left in place — NOTE(review): consider removing or
        // replacing with a logger before production.
        System.out.println(newuser.getUserid());
        System.out.println("-----");
        System.out.println(roleService.findByName("CLIENT").getRoleid());
        UserRoles newUserRole = new UserRoles(newuser,
                roleService.findByName("CLIENT"));
        System.out.println(newUserRole.getUser().getUserid());
        newRoles.add(newUserRole);
        newuser.setRoles(newRoles);
        System.out.println(newuser);
        newuser = userService.save(newuser);

        // Create the client profile attached to the freshly saved user.
        Client client = new Client();
        client.setUser(newuser);
        client.setFitnesslevel(newminuser.getFitnesslevel());
        client.setLocation(newminuser.getLocation());
        clientService.save(client);

        // set the location header for the newly created resource
        // The location comes from a different controller!
        HttpHeaders responseHeaders = new HttpHeaders();
        URI newUserURI = ServletUriComponentsBuilder.fromUriString(httpServletRequest.getServerName() + ":" + httpServletRequest.getLocalPort() + "/users/user/{userId}")
                .buildAndExpand(newuser.getUserid())
                .toUri();
        responseHeaders.setLocation(newUserURI);

        // return the access token
        // To get the access token, surf to the endpoint /login just as if a client had done this.
        RestTemplate restTemplate = new RestTemplate();
        String requestURI = "http://localhost" + ":" + httpServletRequest.getLocalPort() + "/login";

        List<MediaType> acceptableMediaTypes = new ArrayList<>();
        acceptableMediaTypes.add(MediaType.APPLICATION_JSON);

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        headers.setAccept(acceptableMediaTypes);
        headers.setBasicAuth(System.getenv("OAUTHCLIENTID"),
                System.getenv("OAUTHCLIENTSECRET"));

        MultiValueMap<String, String> map = new LinkedMultiValueMap<>();
        map.add("grant_type",
                "password");
        map.add("scope",
                "read write trust");
        map.add("username",
                newminuser.getUsername());
        map.add("password",
                <PASSWORD>());

        HttpEntity<MultiValueMap<String, String>> request = new HttpEntity<>(map,
                headers);

        String theToken = restTemplate.postForObject(requestURI,
                request,
                String.class);

        return new ResponseEntity<>(theToken,
                responseHeaders,
                HttpStatus.CREATED);
    }

    /**
     * Same open-registration flow as the client endpoint, but assigns the
     * INSTRUCTOR role and creates an Instructor profile instead.
     * NOTE(review): this duplicates most of the client method above —
     * a shared private helper would remove the drift risk.
     */
    @PostMapping(value = "/register/instructor",
            consumes = {"application/json"},
            produces = {"application/json"})
    public ResponseEntity<?> addSelf(
            HttpServletRequest httpServletRequest,
            @Valid
            @RequestBody
            InstructorMinimum newminuser)
            throws
            URISyntaxException
    {
        // Create the user
        User newuser = new User();
        newuser.setUsername(newminuser.getUsername());
        newuser.setPassword(<PASSWORD>());
        newuser.setEmail(newminuser.getEmail());
        newuser.setName(newminuser.getName());
        newuser.setBio(newminuser.getBio());
        newuser.setAviurl(newminuser.getAviurl());

        // add the default role of user
        Set<UserRoles> newRoles = new HashSet<>();
        newRoles.add(new UserRoles(newuser,
                roleService.findByName("INSTRUCTOR")));
        newuser.setRoles(newRoles);
        newuser = userService.save(newuser);

        // Create the instructor profile attached to the freshly saved user.
        Instructor instructor = new Instructor();
        instructor.setUser(newuser);
        instructor.setYearsexp(newminuser.getYearsexp());
        instructor.setSpecialty(newminuser.getSpecialty());
        instructor.setCredentials(newminuser.getCredentials());
        instructorService.save(instructor);

        // set the location header for the newly created resource
        // The location comes from a different controller!
        HttpHeaders responseHeaders = new HttpHeaders();
        URI newUserURI = ServletUriComponentsBuilder.fromUriString(httpServletRequest.getServerName() + ":" + httpServletRequest.getLocalPort() + "/users/user/{userId}")
                .buildAndExpand(newuser.getUserid())
                .toUri();
        responseHeaders.setLocation(newUserURI);

        // return the access token
        // To get the access token, surf to the endpoint /login just as if a client had done this.
        RestTemplate restTemplate = new RestTemplate();
        String requestURI = "http://localhost" + ":" + httpServletRequest.getLocalPort() + "/login";

        List<MediaType> acceptableMediaTypes = new ArrayList<>();
        acceptableMediaTypes.add(MediaType.APPLICATION_JSON);

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED);
        headers.setAccept(acceptableMediaTypes);
        headers.setBasicAuth(System.getenv("OAUTHCLIENTID"),
                System.getenv("OAUTHCLIENTSECRET"));

        MultiValueMap<String, String> map = new LinkedMultiValueMap<>();
        map.add("grant_type",
                "password");
        map.add("scope",
                "read write trust");
        map.add("username",
                newminuser.getUsername());
        map.add("password",
                <PASSWORD>());

        HttpEntity<MultiValueMap<String, String>> request = new HttpEntity<>(map,
                headers);

        String theToken = restTemplate.postForObject(requestURI,
                request,
                String.class);

        return new ResponseEntity<>(theToken,
                responseHeaders,
                HttpStatus.CREATED);
    }

    /**
     * Revokes the caller's bearer token, if one was presented, so it can no
     * longer be used. Always answers 200 OK, even with no Authorization
     * header.
     */
    @GetMapping(value = "/logout")
    public ResponseEntity<?> logoutSelf(HttpServletRequest request)
    {
        String authHeader = request.getHeader("Authorization");
        if (authHeader != null)
        {
            // find the token
            String tokenValue = authHeader.replace("Bearer",
                    "")
                    .trim();
            // and remove it!
            OAuth2AccessToken accessToken = tokenStore.readAccessToken(tokenValue);
            tokenStore.removeAccessToken(accessToken);
        }
        return new ResponseEntity<>(HttpStatus.OK);
    }
}
|
<filename>lib/car/obj/src/cli_in_b.c<gh_stars>0
/* **** Notes
Append bytes input out of the key board.
Remarks:
Based on UTF-8
Return a number of bytes on one character, or meta keys started from (0xE0).
Return a negative value on meta keys started from (0x00).
Return (0x00) on failure.
*/
# define CAR
# include <stdio.h>
# include "../../../incl/config.h"
/* Read one keyboard input unit and append its bytes to *argp.
   Returns what cli_i_b() reported: the byte count of the character (or a
   meta-key encoding), or (0x00) on failure.
   NOTE(review): cli_i_b, concat_bb, embed, rl and AND are project helpers
   declared elsewhere; their semantics here are inferred from these call
   sites only — confirm against their definitions. */
signed(__cdecl cli_in_b(signed char(**argp))) {
    auto signed char *b;
    auto signed r;
    auto signed short flag;     /* unused in this function — confirm whether it can be removed */
    if(!argp) return(0x00);     /* guard: no output pointer, nothing to append to */
    b = (0x00);
    r = cli_i_b(&b);            /* read the raw bytes into a fresh buffer b */
    if(!r) {
        printf("%s \n","<< Error at fn. cli_i_b()");
        return(0x00);
    }
    if(!(concat_bb(argp,b))) {  /* append b onto the caller's buffer */
        printf("%s \n","<< Error at fn. concat_bb()");
        AND(r,0x00);            /* presumably masks r to zero so the failure is reported — confirm macro */
    }
    embed(0x00,b);
    rl(b);                      /* release the temporary buffer */
    b = (0x00);
    return(r);
}
|
<gh_stars>0
import { lighten } from 'polished';
import styled, { css } from 'styled-components';
// Card container: scrollable, rounded, with hover highlight; its buttons
// stay hidden until the card is hovered.
export const Container = styled.div`
  ${({ theme }) => css`
    padding: ${theme.layout.spacing(1)};
    background: ${theme.palette.primary.main};
    box-shadow: 0px 0px 1px 0px ${theme.palette.gray[700]};
    border-radius: ${theme.layout.borderRadius.small};
    text-align: center;
    position: relative;
    overflow-y: auto;
    overflow-x: hidden;
    scrollbar-width: thin;
    button {
      visibility: hidden;
    }
    transition: all 0.2s;
    &:hover {
      background-color: ${lighten(0.1, theme.palette.primary.main)};
      box-shadow: 0px 0px 3px 2px ${theme.palette.gray[200]};
      button {
        visibility: visible;
      }
    }
  `}
`;

// Large left-aligned experience figure in the secondary color.
export const Experience = styled.div`
  color: ${({ theme }) => theme.palette.secondary.main};
  font-size: 24px;
  font-weight: bold;
  text-align: left;
`;

// Uppercased title row, centered both ways.
export const Name = styled.div`
  font-size: 16px;
  text-transform: uppercase;
  font-weight: bold;
  margin-bottom: ${({ theme }) => theme.layout.spacing(1)};
  display: flex;
  justify-content: center;
  align-items: center;
`;

// Small-print rules text below the card body.
export const Rules = styled.div`
  font-size: 12px;
  margin-top: ${({ theme }) => theme.layout.spacing(1)};
  color: ${({ theme }) => theme.palette.secondary.main};
`;
|
import tensorflow as tf
import numpy as np
from bayes_opt import BayesianOptimization
def optimize(epochs=5, learning_rate=0.001):
    """Train a small MNIST classifier and return its test accuracy.

    Serves as the objective for BayesianOptimization, which probes the
    search space with *float* values, so ``epochs`` is coerced to an int
    before being handed to Keras.

    Args:
        epochs: number of training epochs (floats are rounded; min 1).
        learning_rate: Adam learning rate.

    Returns:
        Test-set accuracy as a float in [0, 1].
    """
    # Fix: bayes_opt passes continuous values; model.fit requires an int.
    epochs = max(1, int(round(epochs)))
    model = tf.keras.models.Sequential(
        [
            tf.keras.layers.Flatten(input_shape=(28, 28)),
            tf.keras.layers.Dense(128, activation="relu"),
            tf.keras.layers.Dense(10, activation="softmax"),
        ]
    )
    model.compile(
        # Fix: `lr=` is deprecated in current Keras; `learning_rate=` is
        # the supported keyword.
        optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    # Fix: train_images/... were free globals defined nowhere in this file
    # (NameError at first call); load the dataset explicitly instead.
    (train_images, train_labels), (test_images, test_labels) = tf.keras.datasets.mnist.load_data()
    model.fit(train_images, train_labels, epochs=epochs)
    _, accuracy = model.evaluate(test_images, test_labels)
    return accuracy
if __name__ == "__main__":
    # Actual best parameters found
    # NOTE(review): this value is unconditionally overwritten by bo.max
    # below, so it only documents a known-good point; it never acts as a
    # fallback.
    best_params = {"epochs": 5, "learning_rate": 0.001}
    # Hyperparameter tuning with bayesian optimization over the two knobs.
    bo = BayesianOptimization(optimize, {
        "epochs": (2, 10),
        "learning_rate": (0.0001, 0.01),
    })
    # 2 random probes, then 4 guided iterations.
    bo.maximize(init_points=2, n_iter=4)
    best_params = bo.max["params"]
    print(best_params)
<filename>src/main/java/net/anatolich/subscriptions/currency/infrastructure/rest/ExchangeRatesEndpoint.java
package net.anatolich.subscriptions.currency.infrastructure.rest;
import static java.util.Comparator.comparing;
import io.swagger.v3.oas.annotations.OpenAPIDefinition;
import io.swagger.v3.oas.annotations.info.Info;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import javax.validation.Valid;
import net.anatolich.subscriptions.currency.application.ExchangeRatesManagementService;
import net.anatolich.subscriptions.currency.domain.model.ExchangeRate;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestController;
@OpenAPIDefinition(
    info = @Info(title = "Exchange Rates", description = "Manage currency exchange rates.")
)
@RestController
@RequestMapping(path = "/v1/exchange-rates")
public class ExchangeRatesEndpoint {

    private final ExchangeRatesManagementService exchangeRates;

    public ExchangeRatesEndpoint(ExchangeRatesManagementService exchangeRates) {
        this.exchangeRates = exchangeRates;
    }

    /** Replaces the admin-maintained exchange rates with the given set. */
    @PutMapping
    @ResponseStatus(HttpStatus.OK)
    public void updateExchangeRates(@Valid @RequestBody UpdateExchangeRatesPayload payload) {
        final Collection<ExchangeRate> newRates = payload.toRates();
        exchangeRates.updateAdminExchangeRates(newRates);
    }

    /** Lists all exchange rates, sorted by source then target currency. */
    @GetMapping
    @ResponseStatus(HttpStatus.OK)
    public List<ExchangeRateViewPayload> getExchangeRates() {
        return exchangeRates.seeExchangeRates().stream()
            .map(ExchangeRateViewPayload::from)
            .sorted(comparing(ExchangeRateViewPayload::getFrom).thenComparing(ExchangeRateViewPayload::getTo))
            .collect(Collectors.toList());
    }
}
|
#! /bin/sh
#PBS -l nodes=1:ppn=1
#PBS -l walltime=40:00:00
#PBS -j oe

# When running under PBS, restore the login environment, move to the
# submission directory, and load the toolchain the binary was built with.
if [ -n "$PBS_JOBNAME" ]
then
    source "${PBS_O_HOME}/.bash_profile"
    cd "$PBS_O_WORKDIR"
    module load gcc/5.3.0
fi

prefix=../../gekko-output/run-8
# Fix: the original defined $prefix but then repeated the literal path in
# --prefix and the log redirection; use the variable so the run name only
# has to change in one place.
ecoevolity --seed 725392277 --prefix "$prefix" --relax-missing-sites --relax-constant-sites --relax-triallelic-sites ../../configs/gekko-conc377-rate100-varonly.yml 1>"${prefix}-gekko-conc377-rate100-varonly.out" 2>&1
|
# Draw a single random sample from a Normal(mean, sd) distribution.
normal_dist <- function(mean, sd) {
  rnorm(n = 1, mean = mean, sd = sd)
}
<gh_stars>1-10
/*
*
*/
package net.community.apps.tools.filesync;
import java.awt.BorderLayout;
import java.awt.Container;
import java.awt.Frame;
import java.awt.GridLayout;
import java.io.File;
import java.util.concurrent.Callable;
import javax.swing.JPanel;
import net.community.chest.ui.helpers.SettableComponent;
import net.community.chest.ui.helpers.button.WindowDisposeButton;
import net.community.chest.ui.helpers.dialog.ButtonsPanel;
import net.community.chest.ui.helpers.dialog.FormDialog;
import net.community.chest.ui.helpers.panel.input.FileInputTextPanel;
import org.w3c.dom.Element;
/**
* <P>Copyright GPLv2</P>
*
* @author <NAME>.
* @since Apr 2, 2009 2:18:47 PM
*/
// Modal form that lets the user pick a source and a destination directory
// for a FilePair; call() validates the choice when the OK button disposes
// the dialog.
public class FilePairLoader extends FormDialog implements SettableComponent<FilePair>, Callable<Boolean> {
    /**
     *
     */
    private static final long serialVersionUID = 7188717376027344238L;

    public FilePairLoader (Frame parent, FilePair p, Element elem)
    {
        super(parent, elem);
        if (p != null)
            setContent(p);
    }

    public FilePairLoader (Frame parent, Element elem)
    {
        this(parent, null, elem);
    }

    public FilePairLoader (Frame parent, FilePair p)
    {
        this(parent, p, null);
    }

    public FilePairLoader (Frame parent)
    {
        this(parent, null, null);
    }

    // Pair currently being edited; set via setContent, read via getContent.
    private FilePair _curPair /* =null */;
    public FilePair getContent ()
    {
        return _curPair;
    }

    // Copies the panel's selected directory into the pair's source or
    // destination slot; false when nothing valid is selected.
    private static final boolean updateFileContent (final FilePair p, final FileInputTextPanel t, final boolean srcPart)
    {
        final File f=(null == t) ? null : t.getSelectedFile();
        if ((null == f) || (!f.isDirectory()))
            return false;

        if (srcPart)
            p.setSrcFolder(f);
        else
            p.setDstFolder(f);
        return true;
    }

    private FilePairInputTextPanel _srcDir /* =null */, _dstDir /* =null */;
    /*
     * @see java.util.concurrent.Callable#call()
     * Returns TRUE when both directories were valid and stored; null
     * otherwise (no pair loaded, or a selection was missing/invalid).
     */
    @Override
    public Boolean call () throws Exception
    {
        final FilePair p=getContent();
        if (null == p)
            return null;

        final boolean okToExit=updateFileContent(p, _srcDir, true)
                && updateFileContent(p, _dstDir, false)
                ;
        if (!okToExit)
            return null;

        return Boolean.TRUE;
    }

    /*
     * @see net.community.chest.ui.helpers.dialog.FormDialog#getButtonsPanel()
     * Lazily creates the panel with a single OK button that runs call()
     * and disposes the dialog.
     */
    @Override
    public ButtonsPanel getButtonsPanel ()
    {
        ButtonsPanel bp=super.getButtonsPanel();
        if (null == bp)
        {
            bp = new ButtonsPanel();
            bp.add(new WindowDisposeButton(this, this, "OK"));
            setButtonsPanel(bp);
        }

        return bp;
    }

    /*
     * @see net.community.chest.ui.helpers.dialog.FormDialog#layoutComponent()
     */
    @Override
    public void layoutComponent () throws RuntimeException
    {
        super.layoutComponent();

        if (null == _srcDir)
            _srcDir = new FilePairInputTextPanel("Source");
        if (null == _dstDir)
            _dstDir = new FilePairInputTextPanel("Destination");

        // Stack the two directory pickers vertically in the dialog center.
        final Container viewPanel=new JPanel(new GridLayout(0, 1, 5, 5));
        viewPanel.add(_srcDir);
        viewPanel.add(_dstDir);

        final Container ctPane=getContentPane();
        ctPane.add(viewPanel, BorderLayout.CENTER);
    }

    /*
     * @see net.community.chest.ui.helpers.dialog.SettableDialog#setContent(java.lang.Object)
     * Mirrors the pair's folders into the two pickers (null clears them).
     */
    @Override
    public void setContent (FilePair value)
    {
        final File[] fa={
                (null == value) ? null : value.getSrcFolder(),
                (null == value) ? null : value.getDstFolder()
            };
        final FileInputTextPanel[] pa={ _srcDir, _dstDir };
        for (int i=0; i < fa.length; i++)
        {
            final FileInputTextPanel p=pa[i];
            final File f=fa[i];
            if (null == p)
                continue;
            if (f != null)
                p.setSelectedFile(f, false /* no fire of change events */);
            else
                p.setText("");
        }

        _curPair = value;
    }

    /*
     * @see net.community.chest.ui.helpers.dialog.SettableDialog#refreshContent(java.lang.Object)
     */
    @Override
    public void refreshContent (FilePair value)
    {
        setContent(value);
    }

    /*
     * @see net.community.chest.ui.helpers.dialog.SettableDialog#clearContent()
     */
    @Override
    public void clearContent ()
    {
        setContent(null);
    }
}
|
public class Test {
    /** Adds two fixed integers and reports their sum on stdout. */
    public static void main(String[] args) {
        final int first = 1;
        final int second = 2;
        final int sum = first + second;
        System.out.println("The sum is " + sum);
    }
}
#!/bin/bash
# Cloned from https://github.com/ngineered/nginx-php-fpm/blob/master/scripts/start.sh
# Container entrypoint: configures SSH/git, pulls the site, tweaks
# nginx/PHP from environment variables, then hands off to supervisord.

# Disable Strict Host checking for non interactive git clones
mkdir -p -m 0700 /root/.ssh
echo -e "Host *\n\tStrictHostKeyChecking no\n" >> /root/.ssh/config

# Install the deploy key if one was provided (base64-encoded in $SSH_KEY).
if [ -n "$SSH_KEY" ]; then
    # Fix: quote the expansion — an unquoted $SSH_KEY is word-split and
    # glob-expanded, which corrupts the key material.
    echo "$SSH_KEY" > /root/.ssh/id_rsa.base64
    base64 -d /root/.ssh/id_rsa.base64 > /root/.ssh/id_rsa
    chmod 600 /root/.ssh/id_rsa
fi

# Set custom webroot for Akeneo
if [ -n "$WEBROOT" ]; then
    webroot=$WEBROOT
    sed -i "s#root /var/www/html;#root ${webroot}/web;#g" /etc/nginx/sites-available/default.conf
else
    webroot=/var/www/pim
fi

# Setup git variables
if [ -n "$GIT_EMAIL" ]; then
    git config --global user.email "$GIT_EMAIL"
fi
if [ -n "$GIT_NAME" ]; then
    git config --global user.name "$GIT_NAME"
    git config --global push.default simple
fi

# Dont pull code down if the .git folder exists
if [ ! -d "/var/www/html/.git" ]; then
    # Pull down code from git for our site!
    if [ -n "$GIT_REPO" ]; then
        # Remove the test index file
        rm -Rf /var/www/html/*
        if [ -n "$GIT_BRANCH" ]; then
            if [ -z "$GIT_USERNAME" ] && [ -z "$GIT_PERSONAL_TOKEN" ]; then
                git clone -b "$GIT_BRANCH" "$GIT_REPO" /var/www/html/ || exit 1
            else
                git clone -b "${GIT_BRANCH}" "https://${GIT_USERNAME}:${GIT_PERSONAL_TOKEN}@${GIT_REPO}" /var/www/html || exit 1
            fi
        else
            if [ -z "$GIT_USERNAME" ] && [ -z "$GIT_PERSONAL_TOKEN" ]; then
                git clone "$GIT_REPO" /var/www/html/ || exit 1
            else
                git clone "https://${GIT_USERNAME}:${GIT_PERSONAL_TOKEN}@${GIT_REPO}" /var/www/html || exit 1
            fi
        fi
        #chown -Rf nginx.nginx /var/www/html
    fi
fi

# Try auto install for composer
# Fix: the original tested $WEBROOT (the raw env var, possibly empty) while
# $webroot above always holds the effective webroot, default included.
if [ -f "$webroot/composer.lock" ]; then
    composer install
fi

# Enable custom nginx config files if they exist
if [ -f /var/www/html/conf/nginx/nginx-site.conf ]; then
    cp /var/www/html/conf/nginx/nginx-site.conf /etc/nginx/sites-available/default.conf
fi
if [ -f /var/www/html/conf/nginx/nginx-site-ssl.conf ]; then
    cp /var/www/html/conf/nginx/nginx-site-ssl.conf /etc/nginx/sites-available/default-ssl.conf
fi

# Display PHP error's or not
# Fix: quote the echoed text — unquoted "php_flag[display_errors]" is a
# glob pattern and could expand against files in the current directory.
if [[ "$ERRORS" != "1" ]] ; then
    echo "php_flag[display_errors] = off" >> /etc/php7/php-fpm.conf
else
    echo "php_flag[display_errors] = on" >> /etc/php7/php-fpm.conf
fi

# Display Version Details or not
if [[ "$HIDE_NGINX_HEADERS" == "0" ]] ; then
    sed -i "s/server_tokens off;/server_tokens on;/g" /etc/nginx/nginx.conf
else
    sed -i "s/expose_php = On/expose_php = Off/g" /etc/php7/conf.d/php.ini
fi

# Increase the memory_limit
if [ -n "$PHP_MEM_LIMIT" ]; then
    sed -i "s/memory_limit = 128M/memory_limit = ${PHP_MEM_LIMIT}M/g" /etc/php7/conf.d/php.ini
fi

# Increase the post_max_size
if [ -n "$PHP_POST_MAX_SIZE" ]; then
    sed -i "s/post_max_size = 100M/post_max_size = ${PHP_POST_MAX_SIZE}M/g" /etc/php7/conf.d/php.ini
fi

# Increase the upload_max_filesize
if [ -n "$PHP_UPLOAD_MAX_FILESIZE" ]; then
    sed -i "s/upload_max_filesize = 100M/upload_max_filesize= ${PHP_UPLOAD_MAX_FILESIZE}M/g" /etc/php7/conf.d/php.ini
fi

# Run custom scripts
if [[ "$RUN_SCRIPTS" == "1" ]] ; then
    if [ -d "/var/www/html/scripts/" ]; then
        # make scripts executable incase they aren't
        chmod -Rf gu+x /var/www/html/scripts/*
        # run scripts in number order
        # Fix: glob instead of parsing `ls` output (same ordering, but
        # safe for names with spaces).
        for script in /var/www/html/scripts/*; do "$script"; done
    else
        echo "Can't find script directory"
    fi
fi

# Start supervisord and services
exec /usr/bin/supervisord -n -c /etc/supervisord.conf
<filename>app/components/modals/about.js
import Component from '@glimmer/component'
import { inject as service } from '@ember/service'
import config from 'corona/config/environment'
const {
APP: { buildDate }
} = config
export default class AboutComponent extends Component {
@service data
buildDate = buildDate
get sources() {
return this.data.sources
}
}
|
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbLineOfSightOptimizer_hxx
#define otbLineOfSightOptimizer_hxx
#include "otbLineOfSightOptimizer.h"
#include "vnl/vnl_inverse.h"
namespace otb
{
template <class TPrecision, class TLabel>
LineOfSightOptimizer<TPrecision,TLabel>
::LineOfSightOptimizer()
{
  // Start with no per-ray residues and a zero global residue.
  m_Residues.clear();
  m_GlobalResidue = 0;

  // 3x3 accumulator for the (I - v.v^T) terms and its 3-vector counterpart.
  m_InvCumul = vnl_matrix<PrecisionType>(3,3);
  m_SecCumul = vnl_vector<PrecisionType>(3);

  // Cached 3x3 identity used when building the orthogonal projectors.
  m_Identity = vnl_matrix<PrecisionType>(3,3);
  m_Identity.set_identity();
}
template <class TPrecision, class TLabel>
typename LineOfSightOptimizer<TPrecision,TLabel>::PointType
LineOfSightOptimizer<TPrecision,TLabel>
::Compute(PointSetPointerType pointA, PointSetPointerType pointB)
{
  // Least-squares intersection of the lines of sight (A_i -> B_i):
  // with unit direction v_i and origin s_i, the closest point x solves
  //   [ sum_i (I - v_i v_i^T) ] x = sum_i (I - v_i v_i^T) s_i
  // because (I - v v^T) projects orthogonally to a ray's direction.

  // First, empty the cumulators and residues
  m_InvCumul.fill(0);
  m_SecCumul.fill(0);
  m_Residues.clear();

  vnl_matrix<PrecisionType> idMinusViViT(3,3);  // projector (I - v_i v_i^T)
  vnl_matrix<PrecisionType> vi(3,1);            // unit direction of ray i
  vnl_vector<PrecisionType> si(3);              // origin of ray i

  PointType result;

  //check inputs
  // At least two rays are needed for the 3x3 system to be invertible.
  if (pointA->GetNumberOfPoints() != pointB->GetNumberOfPoints() ||
      pointA->GetNumberOfPoints() < 2)
    {
    itkExceptionMacro(<<"Points are missing in at least one of the input point sets.");
    return result;
    }

  // iterate over lines of sight
  PointSetConstIteratorType itPointA = pointA->GetPoints()->Begin();
  PointSetConstIteratorType itPointB = pointB->GetPoints()->Begin();

  while (itPointA != pointA->GetPoints()->End() &&
         itPointB != pointB->GetPoints()->End())
    {
    // Direction of the current line of sight, normalized to unit length.
    vi(0,0) = itPointB.Value()[0] - itPointA.Value()[0];
    vi(1,0) = itPointB.Value()[1] - itPointA.Value()[1];
    vi(2,0) = itPointB.Value()[2] - itPointA.Value()[2];

    PrecisionType norm_inv = 1. / std::sqrt(vi(0,0)*vi(0,0)+vi(1,0)*vi(1,0)+vi(2,0)*vi(2,0));
    vi(0,0) *= norm_inv;
    vi(1,0) *= norm_inv;
    vi(2,0) *= norm_inv;

    // Ray origin (the point on line A).
    si(0) = itPointA.Value()[0];
    si(1) = itPointA.Value()[1];
    si(2) = itPointA.Value()[2];

    // Accumulate the orthogonal projector and its projection of the origin.
    idMinusViViT = m_Identity - (vi * vi.transpose());

    m_InvCumul+=idMinusViViT;
    m_SecCumul+=(idMinusViViT * si);

    ++itPointA;
    ++itPointB;
    }

  // Solve the accumulated 3x3 linear system for the intersection point.
  vnl_vector<PrecisionType> intersection = vnl_inverse(m_InvCumul) * m_SecCumul;

  result[0] = intersection[0];
  result[1] = intersection[1];
  result[2] = intersection[2];

  // Compute residues
  m_GlobalResidue = 0;

  vnl_vector<PrecisionType> AB(3);
  vnl_vector<PrecisionType> AC(3);
  PrecisionType res2;

  itPointA = pointA->GetPoints()->Begin();
  itPointB = pointB->GetPoints()->Begin();

  while (itPointA != pointA->GetPoints()->End() &&
         itPointB != pointB->GetPoints()->End())
    {
    AB[0] = itPointB.Value()[0] - itPointA.Value()[0];
    AB[1] = itPointB.Value()[1] - itPointA.Value()[1];
    AB[2] = itPointB.Value()[2] - itPointA.Value()[2];

    AC[0] = intersection[0] - itPointA.Value()[0];
    AC[1] = intersection[1] - itPointA.Value()[1];
    AC[2] = intersection[2] - itPointA.Value()[2];

    // Squared distance from the intersection to line (A,B):
    // |AC|^2 - (AB.AC)^2 / |AB|^2, clamped at 0 against rounding errors.
    res2 = std::max(0.0,dot_product(AC,AC) - (dot_product(AB,AC) * dot_product(AB,AC)) / (dot_product(AB,AB)));

    m_Residues.push_back( std::sqrt( res2 ) );
    m_GlobalResidue += res2;

    ++itPointA;
    ++itPointB;
    }

  // Global residue aggregates all rays: sqrt of the summed squared distances.
  m_GlobalResidue = std::sqrt(m_GlobalResidue);

  return result;
}
}
#endif
|
<filename>java-server/src/main/java/com/wednesday/helper/Util.java
package com.wednesday.helper;
import com.google.gson.Gson;
import com.wednesday.model.User;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletResponse;
public class Util {

    // Shared Gson instance. Gson is documented as thread-safe, and eager
    // initialization removes the unsynchronized lazy-init race of the
    // previous version (two threads could each create an instance).
    private static final Gson gson = new Gson();

    /**
     * Flushes pending changes to the database and detaches all managed
     * entities from the given EntityManager.
     */
    public static void flushNClear(EntityManager em) {
        em.flush();
        em.clear();
    }

    /** Returns the shared Gson instance. */
    public static Gson fetchGson() {
        return gson;
    }

    /** Serializes the given object to its JSON representation. */
    public static String toJSON(Object o){
        Gson g = fetchGson();
        return g.toJson(o);
    }

    /**
     * Adds permissive CORS headers to the response: any origin, the common
     * HTTP methods, and the request headers used by the client.
     */
    public static void setCorsHeaders(HttpServletResponse res){
        res.setHeader("Access-Control-Allow-Origin","*");
        res.setHeader("Access-Control-Allow-Methods",
            "GET, POST, OPTIONS, PUT, PATCH, DELETE");
        res.setHeader("Access-Control-Allow-Headers",
            "Access-Control-Allow-Headers, Origin, Accept, X-Requested-With, " +
            "Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers, " +
            "X-Access-Token, XKey, Authorization");
    }
}
|
#!/bin/bash
# If there is a PID file from the e2e tests...
if [ -e /tmp/talk-e2e.pid ]
then
  # ...kill the running talk server. Quote the substitution so an empty
  # or malformed PID file cannot word-split into multiple arguments.
  kill "$(cat /tmp/talk-e2e.pid)"
fi
|
<filename>lib/rules/html-self-closing.js
/**
* @author <NAME>
* @copyright 2016 <NAME>. All rights reserved.
* See LICENSE file in root directory for full license.
*/
'use strict'
// ------------------------------------------------------------------------------
// Requirements
// ------------------------------------------------------------------------------
const utils = require('../utils')
// ------------------------------------------------------------------------------
// Helpers
// ------------------------------------------------------------------------------
/**
* These strings wil be displayed in error messages.
*/
const ELEMENT_TYPE = Object.freeze({
NORMAL: 'HTML elements',
VOID: 'HTML void elements',
COMPONENT: 'Vue.js custom components',
SVG: 'SVG elements',
MATH: 'MathML elements'
})
/**
* Normalize the given options.
* @param {Object|undefined} options The raw options object.
* @returns {Object} Normalized options.
*/
function parseOptions (options) {
  // Every category defaults to 'always', except void elements which
  // default to 'never' (i.e. `<br>` rather than `<br/>`).
  const html = (options && options.html) || {}
  return {
    [ELEMENT_TYPE.NORMAL]: html.normal || 'always',
    [ELEMENT_TYPE.VOID]: html.void || 'never',
    [ELEMENT_TYPE.COMPONENT]: html.component || 'always',
    [ELEMENT_TYPE.SVG]: (options && options.svg) || 'always',
    [ELEMENT_TYPE.MATH]: (options && options.math) || 'always'
  }
}
/**
* Get the elementType of the given element.
* @param {VElement} node The element node to get.
* @returns {string} The elementType of the element.
*/
function getElementType (node) {
  // Custom components are checked first: they take priority over the
  // plain HTML classification below.
  if (utils.isCustomComponent(node)) {
    return ELEMENT_TYPE.COMPONENT
  }
  if (utils.isHtmlElementNode(node)) {
    // Void elements (br, img, ...) get their own category because they
    // have a different default self-closing style.
    if (utils.isHtmlVoidElementName(node.name)) {
      return ELEMENT_TYPE.VOID
    }
    return ELEMENT_TYPE.NORMAL
  }
  if (utils.isSvgElementNode(node)) {
    return ELEMENT_TYPE.SVG
  }
  if (utils.isMathMLElementNode(node)) {
    return ELEMENT_TYPE.MATH
  }
  // Fallback label; only ever shown in report messages.
  return 'unknown elements'
}
/**
* Check whether the given element is empty or not.
* This ignores whitespaces, doesn't ignore comments.
* @param {VElement} node The element node to check.
* @param {SourceCode} sourceCode The source code object of the current context.
* @returns {boolean} `true` if the element is empty.
*/
function isEmpty (node, sourceCode) {
  // The element's content spans from the end of the start tag to the
  // start of the end tag (or to the end of the element when there is no
  // end tag, e.g. a self-closed element). Comments count as content.
  const contentStart = node.startTag.range[1]
  const contentEnd = node.endTag == null ? node.range[1] : node.endTag.range[0]
  return sourceCode.text.slice(contentStart, contentEnd).trim() === ''
}
// ------------------------------------------------------------------------------
// Rule Definition
// ------------------------------------------------------------------------------
module.exports = {
  meta: {
    docs: {
      description: 'enforce self-closing style',
      category: 'strongly-recommended',
      url: 'https://github.com/vuejs/eslint-plugin-vue/blob/v5.0.0-beta.3/docs/rules/html-self-closing.md'
    },
    // The rule can rewrite tags automatically, so it is marked fixable.
    fixable: 'code',
    // Options: at most one object of the shape
    //   { html: { normal, void, component }, svg, math }
    // where every value is 'always' | 'never' | 'any'.
    schema: {
      definitions: {
        optionValue: {
          enum: ['always', 'never', 'any']
        }
      },
      type: 'array',
      items: [{
        type: 'object',
        properties: {
          html: {
            type: 'object',
            properties: {
              normal: { $ref: '#/definitions/optionValue' },
              void: { $ref: '#/definitions/optionValue' },
              component: { $ref: '#/definitions/optionValue' }
            },
            additionalProperties: false
          },
          svg: { $ref: '#/definitions/optionValue' },
          math: { $ref: '#/definitions/optionValue' }
        },
        additionalProperties: false
      }],
      maxItems: 1
    }
  },
  create (context) {
    const sourceCode = context.getSourceCode()
    const options = parseOptions(context.options[0])
    // Set by the Program visitor below; when the template has a parse
    // error at EOF, reporting/fixing is skipped entirely.
    let hasInvalidEOF = false
    return utils.defineTemplateBodyVisitor(context, {
      'VElement' (node) {
        if (hasInvalidEOF) {
          return
        }
        const elementType = getElementType(node)
        const mode = options[elementType]
        // 'always': empty elements that are not self-closed must become <tag/>.
        if (mode === 'always' && !node.startTag.selfClosing && isEmpty(node, sourceCode)) {
          context.report({
            node,
            loc: node.loc,
            message: 'Require self-closing on {{elementType}} (<{{name}}>).',
            data: { elementType, name: node.rawName },
            fix: (fixer) => {
              const tokens = context.parserServices.getTemplateBodyTokenStore()
              const close = tokens.getLastToken(node.startTag)
              if (close.type !== 'HTMLTagClose') {
                return null
              }
              // Replace everything from the '>' of the start tag through
              // the end of the element (including any end tag) with '/>'.
              return fixer.replaceTextRange([close.range[0], node.range[1]], '/>')
            }
          })
        }
        // 'never': self-closed elements must be expanded.
        if (mode === 'never' && node.startTag.selfClosing) {
          context.report({
            node,
            loc: node.loc,
            message: 'Disallow self-closing on {{elementType}} (<{{name}}/>).',
            data: { elementType, name: node.rawName },
            fix: (fixer) => {
              const tokens = context.parserServices.getTemplateBodyTokenStore()
              const close = tokens.getLastToken(node.startTag)
              if (close.type !== 'HTMLSelfClosingTagClose') {
                return null
              }
              // Void elements may not have an end tag, so '>' alone is enough.
              if (elementType === ELEMENT_TYPE.VOID) {
                return fixer.replaceText(close, '>')
              }
              // If only `close` is targeted for replacement, it conflicts with `component-name-in-template-casing`,
              // so replace the entire element.
              // return fixer.replaceText(close, `></${node.rawName}>`)
              const elementPart = sourceCode.text.slice(node.range[0], close.range[0])
              return fixer.replaceText(node, elementPart + `></${node.rawName}>`)
            }
          })
        }
      }
    }, {
      Program (node) {
        hasInvalidEOF = utils.hasInvalidEOF(node)
      }
    })
  }
}
|
<gh_stars>1-10
import { getToken } from '@/utils/auth';

// Initial auth module state.
export const state = {
  // NOTE(review): getToken() presumably returns the stored token string
  // (or a falsy value), so isAuthenticated holds the token itself rather
  // than a boolean — confirm callers only use it for truthiness.
  isAuthenticated: getToken(),
  user: null,
  error: null,
  loading: false
};
|
<reponame>Xmaspiano/project01
package com.xmasworking.project01.service;
import com.xmasworking.project01.entity.UserInfo;
import com.xmasworking.project01.repository.UserInfoRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* Created by IntelliJ IDEA.
*
* @author XmasPiano
* @date 2018/8/31 - 下午4:20
* Created by IntelliJ IDEA.
*/
@Service
public class UserInfoServiceImpl implements UserInfoService {

    // Spring Data repository for UserInfo entities (field injection).
    // NOTE(review): constructor injection would let this be final and make
    // the class easier to unit-test — consider if the DI setup allows it.
    @Autowired
    UserInfoRepository userInfoRepository;

    /** Returns all persisted users. */
    @Override
    public List<UserInfo> findAll(){
        return userInfoRepository.findAll();
    }

    /** Finds the user with the given telephone number (derived query method). */
    @Override
    public UserInfo findUserInfoByTel(String tel){
        return userInfoRepository.findUserInfoByTel(tel);
    }

    /**
     * Finds a user by primary key.
     * NOTE(review): findOne(id) is the pre-Spring-Data-2.x API; presumably
     * returns null when no row exists — confirm against the repository version.
     */
    @Override
    public UserInfo findById(Long id){
        return userInfoRepository.findOne(id);
    }
}
|
<gh_stars>0
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.deskclock.data;
import android.content.Context;
import android.os.SystemClock;
import android.text.format.DateFormat;
import java.util.Calendar;
/**
* All time data is accessed via this model. This model exists so that time can be mocked for
* testing purposes.
*/
final class TimeModel {
private final Context mContext;
TimeModel(Context context) {
mContext = context;
}
/**
* @return the current time in milliseconds
*/
long currentTimeMillis() {
return System.currentTimeMillis();
}
/**
* @return milliseconds since boot, including time spent in sleep
*/
long elapsedRealtime() {
return SystemClock.elapsedRealtime();
}
/**
* @return {@code true} if 24 hour time format is selected; {@code false} otherwise
*/
boolean is24HourFormat() {
return DateFormat.is24HourFormat(mContext);
}
/**
* @return a new Calendar with the {@link #currentTimeMillis}
*/
Calendar getCalendar() {
final Calendar calendar = Calendar.getInstance();
calendar.setTimeInMillis(currentTimeMillis());
return calendar;
}
} |
<gh_stars>0
/* Copyright 2009-2015 <NAME>
*
* This file is part of the MOEA Framework.
*
* The MOEA Framework is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* The MOEA Framework is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with the MOEA Framework. If not, see <http://www.gnu.org/licenses/>.
*/
package org.moeaframework.core.spi;
import java.util.Iterator;
import java.util.Properties;
import java.util.ServiceConfigurationError;
import java.util.ServiceLoader;
import org.moeaframework.algorithm.StandardAlgorithms;
import org.moeaframework.core.Algorithm;
import org.moeaframework.core.Problem;
/**
* Factory for creating algorithm instances. See {@link AlgorithmProvider} for
* details on adding new providers.
* <p>
* This class is thread safe.
*/
public class AlgorithmFactory {

    /**
     * The static service loader for loading algorithm providers.
     */
    private static final ServiceLoader<AlgorithmProvider> PROVIDERS;

    /**
     * The default algorithm factory.
     */
    private static AlgorithmFactory instance;

    /**
     * Instantiates the static {@code PROVIDERS} and {@code instance} objects.
     */
    static {
        PROVIDERS = ServiceLoader.load(AlgorithmProvider.class);
        instance = new AlgorithmFactory();
    }

    /**
     * Returns the default algorithm factory.
     *
     * @return the default algorithm factory
     */
    public static synchronized AlgorithmFactory getInstance() {
        return instance;
    }

    /**
     * Sets the default algorithm factory.
     *
     * @param instance the default algorithm factory
     */
    public static synchronized void setInstance(AlgorithmFactory instance) {
        AlgorithmFactory.instance = instance;
    }

    /**
     * Constructs a new algorithm factory.
     */
    public AlgorithmFactory() {
        super();
    }

    /**
     * Searches through all discovered {@code AlgorithmProvider} instances,
     * returning an instance of the algorithm with the registered name. The
     * algorithm is initialized using implementation-specific properties. This
     * method must throw an {@link ProviderNotFoundException} if no suitable
     * algorithm is found.
     * <p>
     * NOTE(review): this method is synchronized per factory instance, but
     * {@code PROVIDERS} is static and shared; concurrent iteration from two
     * different factory instances would be unsafe — confirm only one factory
     * instance is used at a time.
     *
     * @param name the name identifying the algorithm
     * @param properties the implementation-specific properties
     * @param problem the problem to be solved
     * @return an instance of the algorithm with the registered name
     * @throws ProviderNotFoundException if no provider for the algorithm is
     *         available
     */
    public synchronized Algorithm getAlgorithm(String name,
            Properties properties, Problem problem) {
        Iterator<AlgorithmProvider> ps = PROVIDERS.iterator();

        // ensure standard algorithms can be found in case the system has not
        // setup correctly
        if (!ps.hasNext()) {
            Algorithm algorithm = instantiateAlgorithm(
                    new StandardAlgorithms(), name, properties, problem);

            if (algorithm != null) {
                return algorithm;
            }
        }

        // loop over all providers to find the algorithm implementation;
        // the first provider that returns non-null wins
        while (ps.hasNext()) {
            Algorithm algorithm = instantiateAlgorithm(ps.next(), name,
                    properties, problem);

            if (algorithm != null) {
                return algorithm;
            }
        }

        throw new ProviderNotFoundException(name);
    }

    /**
     * Attempts to instantiate the given algorithm using the given provider.
     *
     * @param provider the algorithm provider
     * @param name the name identifying the algorithm
     * @param properties the implementation-specific properties
     * @param problem the problem to be solved
     * @return an instance of the algorithm with the registered name; or
     *         {@code null} if the provider does not implement the algorithm
     */
    private Algorithm instantiateAlgorithm(AlgorithmProvider provider,
            String name, Properties properties, Problem problem) {
        try {
            return provider.getAlgorithm(name, properties, problem);
        } catch (ServiceConfigurationError e) {
            // A misconfigured provider should not stop the search through
            // the remaining providers; report it and continue.
            System.err.println(e.getMessage());
        }

        return null;
    }
}
|
#!/usr/bin/env bash
# Parameters:
# $1: Path to the new truststore
# $2: Truststore password
# $3: Public key to be imported
# $4: Alias of the certificate
function create_truststore {
  # Quote every argument so paths and passwords containing spaces survive.
  keytool -keystore "$1" -storepass "$2" -noprompt -alias "$4" -import -file "$3" -storetype PKCS12
}
# Parameters:
# $1: Path to the new keystore
# $2: Truststore password
# $3: Public key to be imported
# $4: Private key to be imported
# $5: Alias of the certificate
function create_keystore {
  # Quote every argument so paths and passwords containing spaces survive.
  RANDFILE=/tmp/.rnd openssl pkcs12 -export -in "$3" -inkey "$4" -name "$5" -password "pass:$2" -out "$1"
}
# $1 = trusted certs, $2 = TLS auth cert, $3 = TLS auth key, $4 = truststore path, $5 = keystore path, $6 = certs and key path
trusted_certs=$1
tls_auth_cert=$2
tls_auth_key=$3
truststore_path=$4
keystore_path=$5
certs_key_path=$6

if [ -n "$trusted_certs" ]; then
  echo "Preparing truststore"
  rm -f "$truststore_path"
  # Certificates arrive as a single ';'-separated list; split into an array.
  IFS=';' read -ra CERTS <<< "${trusted_certs}"
  for cert in "${CERTS[@]}"
  do
    create_truststore "$truststore_path" "$CERTS_STORE_PASSWORD" "$certs_key_path/$cert" "$cert"
  done
  echo "Preparing truststore is complete"
fi

if [ -n "$tls_auth_cert" ] && [ -n "$tls_auth_key" ]; then
  echo "Preparing keystore"
  rm -f "$keystore_path"
  create_keystore "$keystore_path" "$CERTS_STORE_PASSWORD" "$certs_key_path/$tls_auth_cert" "$certs_key_path/$tls_auth_key" "$tls_auth_cert"
  echo "Preparing keystore is complete"
fi

if [ -d /opt/strimzi/oauth-certs ]; then
  echo "Preparing truststore for OAuth"
  # Add each certificate to the trust store
  STORE=/tmp/strimzi/oauth.truststore.p12
  rm -f "$STORE"
  declare -i INDEX=0
  # NOTE(review): without `shopt -s globstar`, `**` behaves like `*`, so this
  # matches exactly one directory level — presumably the mounted secret
  # layout is <dir>/<file>; confirm before enabling globstar.
  for CRT in /opt/strimzi/oauth-certs/**/*; do
    ALIAS="oauth-${INDEX}"
    echo "Adding $CRT to truststore $STORE with alias $ALIAS"
    create_truststore "$STORE" "$CERTS_STORE_PASSWORD" "$CRT" "$ALIAS"
    INDEX+=1
  done
  echo "Preparing truststore for OAuth is complete"
fi
|
#!/bin/sh
# Pin specific autotools versions via the standard override variables so the
# generated build system is reproducible across machines.
export AUTOMAKE_SUFFIX=-1.16.3
export AUTOCONF_SUFFIX=-2.71
export LIBTOOL_SUFFIX=-2.4.6

export ACLOCAL="aclocal${AUTOMAKE_SUFFIX}"
export AUTOMAKE="automake${AUTOMAKE_SUFFIX}"
export AUTOCONF="autoconf${AUTOCONF_SUFFIX}"
export AUTOHEADER="autoheader${AUTOCONF_SUFFIX}"
export AUTOM4TE="autom4te${AUTOCONF_SUFFIX}"
export AUTORECONF="autoreconf${AUTOCONF_SUFFIX}"
export AUTOSCAN="autoscan${AUTOCONF_SUFFIX}"
export AUTOUPDATE="autoupdate${AUTOCONF_SUFFIX}"
export IFNAMES="ifnames${AUTOCONF_SUFFIX}"
# (removed a duplicate `export AUTOM4TE=...` line that repeated the one above)
export LIBTOOLIZE="libtoolize${LIBTOOL_SUFFIX}"
export LIBTOOL="libtool${LIBTOOL_SUFFIX}"
export AUTOGEN=autogen

# Regenerate Makefile.am files from their autogen definitions.
(cd tests && $AUTOGEN ./Makefile.am.def)
(cd include && $AUTOGEN ./Makefile.am.def)
$AUTOGEN ./Makefile.am.def

# Bootstrap the autotools build system and the Autotest testsuite driver.
$LIBTOOLIZE -vcif
$ACLOCAL -I m4 --install -Wall --force
$AUTOMAKE -vcaf
$AUTOCONF -I m4 -f -Wall
$AUTOM4TE --language=Autotest -I tests tests/testsuite.at -o tests/testsuite
|
#!/bin/bash
# Compress the page and upload it to the ESP8266's web-based file editor.
echo "compress index.htm"
gzip -c -9 index.org > index.htm.gz || exit 1
echo "Uploading compressed index.htm to ESP8266"
# Abort (and skip "Done") if the upload fails, so failures are visible.
curl -F "file=@index.htm.gz" logger.local/edit || exit 1
echo "Done"
|
#!/bin/bash -x
# Run the tranche generator on the test input and compare each produced
# artifact against its checked-in expectation.
../exec/tranche test2.trc.c >test2stdout.dat

# Accumulate failures so the script's exit status reflects the diffs
# (previously the status was just that of the final `rm`).
status=0
diff -q test2.c test2.c.expected || status=1
diff -q test2.h test2.h.expected || status=1
diff -q test2stdout.dat test2stdout.dat.expected || status=1

# Clean up generated files regardless of the outcome.
rm test2.c test2.h test2stdout.dat
exit $status
|
-- Customers who placed more than 3 orders within the last 7 days.
SELECT c.name AS customer_name
FROM customers c
INNER JOIN orders o
ON c.customer_id = o.customer_id
-- MySQL date arithmetic: orders dated within the past week (inclusive).
WHERE o.date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
GROUP BY c.name
-- Strictly more than 3 orders per customer name.
HAVING COUNT(*) > 3
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.