text
stringlengths 1
1.05M
|
|---|
package com.createchance.imageeditor.shaders;
import android.opengl.GLES20;
import android.util.Log;
/**
* ${DESC}
*
* @author createchance
* @date 2018/11/17
*/
/**
 * GL fragment-shader wrapper for the denoise effect.
 *
 * <p>Loads {@code DenoiseFragmentShader.glsl} and exposes setters for its
 * uniforms: input texture, resolution, exponent and strength.
 *
 * @author createchance
 * @date 2018/11/17
 */
public class DenoiseFragmentShader extends AbstractShader {
    private static final String TAG = "DenoiseFragmentShader";

    // Shader asset name and uniform names as declared in the GLSL source.
    private static final String FRAGMENT_SHADER = "DenoiseFragmentShader.glsl";
    private static final String U_INPUT_TEXTURE = "u_InputTexture";
    private static final String U_RESOLUTION = "u_Resolution";
    private static final String U_EXPONENT = "u_Exponent";
    private static final String U_STRENGTH = "u_Strength";

    // Uniform locations, resolved in initLocation().
    private int mUInputTexture, mUResolution, mUExponent, mUStrength;

    public DenoiseFragmentShader() {
        initShader(FRAGMENT_SHADER, GLES20.GL_FRAGMENT_SHADER);
    }

    @Override
    public void initLocation(int programId) {
        mUInputTexture = GLES20.glGetUniformLocation(programId, U_INPUT_TEXTURE);
        mUResolution = GLES20.glGetUniformLocation(programId, U_RESOLUTION);
        // Fix: these lookups were commented out while setUStrength() still wrote
        // through the field, so it wrote to location 0 — an arbitrary uniform.
        // glGetUniformLocation returns -1 for uniforms absent from the compiled
        // program and glUniform* silently ignores location -1, so resolving them
        // here is safe even if the GLSL omits one of them.
        mUExponent = GLES20.glGetUniformLocation(programId, U_EXPONENT);
        mUStrength = GLES20.glGetUniformLocation(programId, U_STRENGTH);
    }

    /**
     * Binds the given texture to the given texture unit and points the
     * sampler uniform at that unit.
     *
     * @param textureTarget texture unit (GLES20.GL_TEXTUREi)
     * @param textureId     GL texture object id
     */
    public void setUInputTexture(int textureTarget, int textureId) {
        // bind texture
        GLES20.glActiveTexture(textureTarget);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // Sampler uniforms take the unit index, not the GL_TEXTUREi enum.
        GLES20.glUniform1i(mUInputTexture, textureTarget - GLES20.GL_TEXTURE0);
    }

    /** Uploads the render-target resolution (width, height) in pixels. */
    public void setUResolution(float width, float height) {
        float[] vec = new float[]{width, height};
        Log.d(TAG, "setUResolution: " + width + ", height: " + height);
        GLES20.glUniform2fv(mUResolution, 1, vec, 0);
    }

    /** Uploads the denoise exponent uniform. */
    public void setUExponent(float exponent) {
        GLES20.glUniform1f(mUExponent, exponent);
    }

    /** Uploads the denoise strength uniform. */
    public void setUStrength(float strength) {
        GLES20.glUniform1f(mUStrength, strength);
    }
}
|
#! /bin/sh
# $Id: genconfig.sh,v 1.79 2015/06/09 15:33:50 nanard Exp $
# miniupnp daemon
# http://miniupnp.free.fr or http://miniupnp.tuxfamily.org/
# (c) 2006-2015 Thomas Bernard
# This software is subject to the conditions detailed in the
# LICENCE file provided within the distribution
# Parse command line flags. Each flag only sets a shell variable here; the
# variable is tested later when the corresponding config.h macro is emitted.
for argv; do
case "$argv" in
	--ipv6) IPV6=1 ;;
	--igd2) IGD2=1 ;;
	--strict) STRICT=1 ;;
	--leasefile) LEASEFILE=1 ;;
	--vendorcfg) VENDORCFG=1 ;;
	--pcp-peer) PCP_PEER=1 ;;
	--portinuse) PORTINUSE=1 ;;
	--help|-h)
		echo "Usage : $0 [options]"
		echo " --ipv6 enable IPv6"
		echo " --igd2 build an IGDv2 instead of an IGDv1"
		echo " --strict be more strict regarding compliance with UPnP specifications"
		echo " --leasefile enable lease file"
		echo " --vendorcfg enable configuration of manufacturer info"
		echo " --pcp-peer enable PCP PEER operation"
		echo " --portinuse enable port in use check"
		exit 1
		;;
	*)
		# unknown option is a hard error
		echo "Option not recognized : $argv"
		echo "use -h option to display help"
		exit 1
		;;
esac
done
# Tool shortcuts and output file names. config.h is generated into a temp
# file first and only moved into place at the very end, so a failed run
# never leaves a half-written config.h behind.
RM="rm -f"
MV="mv"
CONFIGFILE="config.h.tmp"
CONFIGFILE_FINAL="config.h"
CONFIGMACRO="CONFIG_H_INCLUDED"
# version reported in XML descriptions
#UPNP_VERSION=20070827
UPNP_VERSION=`date +"%Y%m%d"`
# Facility to syslog
LOG_MINIUPNPD="LOG_DAEMON"
# detecting the OS name and version
OS_NAME=`uname -s`
OS_VERSION=`uname -r`
# pfSense special case
if [ -f /etc/platform ]; then
	if [ `cat /etc/platform` = "pfSense" ]; then
		OS_NAME=pfSense
		OS_VERSION=`cat /etc/version`
	fi
fi
# OpenWRT special case
if [ -f ./os.openwrt ]; then
	OS_NAME=OpenWRT
	OS_VERSION=$(cat ./os.openwrt)
fi
# AstLinux special case
if [ -f ./os.astlinux ]; then
	OS_NAME=AstLinux
	OS_VERSION=$(cat ./os.astlinux)
fi
# Tomato USB special case
if [ -f ../shared/tomato_version ]; then
	OS_NAME=Tomato
	OS_VERSION="Tomato $(cat ../shared/tomato_version)"
fi
# Start the generated file: comment header, include guard, common defines.
${RM} ${CONFIGFILE}
echo "/* MiniUPnP Project" >> ${CONFIGFILE}
echo " * http://miniupnp.free.fr/ or http://miniupnp.tuxfamily.org/" >> ${CONFIGFILE}
echo " * (c) 2006-2014 Thomas Bernard" >> ${CONFIGFILE}
echo " * generated by $0 on `date`" >> ${CONFIGFILE}
echo " * using command line options $* */" >> ${CONFIGFILE}
echo "#ifndef $CONFIGMACRO" >> ${CONFIGFILE}
echo "#define $CONFIGMACRO" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "#include <inttypes.h>" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "#define MINIUPNPD_VERSION \"`cat VERSION`\"" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "#define UPNP_VERSION \"$UPNP_VERSION\"" >> ${CONFIGFILE}
# OS Specific stuff
# Per-OS configuration: choose the firewall backend (FW), the OS_URL that is
# reported in the XML device description, and OS-specific config.h macros.
case $OS_NAME in
	OpenBSD)
		MAJORVER=`echo $OS_VERSION | cut -d. -f1`
		MINORVER=`echo $OS_VERSION | cut -d. -f2`
		#echo "OpenBSD majorversion=$MAJORVER minorversion=$MINORVER"
		# rtableid was introduced in OpenBSD 4.0
		if [ $MAJORVER -ge 4 ]; then
			echo "#define PFRULE_HAS_RTABLEID" >> ${CONFIGFILE}
		fi
		# from the 3.8 version, packets and bytes counters are double : in/out
		if [ \( $MAJORVER -ge 4 \) -o \( $MAJORVER -eq 3 -a $MINORVER -ge 8 \) ]; then
			echo "#define PFRULE_INOUT_COUNTS" >> ${CONFIGFILE}
		fi
		# from the 4.7 version, new pf
		if [ \( $MAJORVER -ge 5 \) -o \( $MAJORVER -eq 4 -a $MINORVER -ge 7 \) ]; then
			echo "#define PF_NEWSTYLE" >> ${CONFIGFILE}
		fi
		# onrdomain was introduced in OpenBSD 5.0
		if [ $MAJORVER -ge 5 ]; then
			echo "#define PFRULE_HAS_ONRDOMAIN" >> ${CONFIGFILE}
		fi
		FW=pf
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		OS_URL=http://www.openbsd.org/
		V6SOCKETS_ARE_V6ONLY=`sysctl -n net.inet6.ip6.v6only`
		;;
	FreeBSD)
		VER=`grep '#define __FreeBSD_version' /usr/include/sys/param.h | awk '{print $3}'`
		if [ $VER -ge 700049 ]; then
			echo "#define PFRULE_INOUT_COUNTS" >> ${CONFIGFILE}
		fi
		# new way to see which one to use PF or IPF.
		# see http://miniupnp.tuxfamily.org/forum/viewtopic.php?p=957
		if [ -f /etc/rc.subr ] && [ -f /etc/rc.conf ] ; then
			# source file with handy subroutines like checkyesno
			. /etc/rc.subr
			# source config file so we can probe vars
			. /etc/rc.conf
			if checkyesno ipfilter_enable; then
				echo "Using ipf"
				FW=ipf
			elif checkyesno pf_enable; then
				echo "Using pf"
				FW=pf
			elif checkyesno firewall_enable; then
				# fixed typo: message previously said "ifpw"
				echo "Using ipfw"
				FW=ipfw
			fi
		fi
		# quote $FW: with FW unset the unquoted test only worked by accident
		if [ -z "$FW" ] ; then
			echo "Could not detect usage of ipf, pf, ipfw. Compiling for pf by default"
			FW=pf
		fi
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		OS_URL=http://www.freebsd.org/
		V6SOCKETS_ARE_V6ONLY=`sysctl -n net.inet6.ip6.v6only`
		;;
	pfSense)
		# we need to detect if PFRULE_INOUT_COUNTS macro is needed
		FW=pf
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		OS_URL=http://www.pfsense.com/
		V6SOCKETS_ARE_V6ONLY=`sysctl -n net.inet6.ip6.v6only`
		;;
	NetBSD)
		if [ -f /etc/rc.subr ] && [ -f /etc/rc.conf ] ; then
			# source file with handy subroutines like checkyesno
			. /etc/rc.subr
			# source config file so we can probe vars
			. /etc/rc.conf
			if checkyesno pf; then
				FW=pf
			elif checkyesno ipfilter; then
				FW=ipf
			fi
		fi
		if [ -z "$FW" ] ; then
			echo "Could not detect ipf nor pf, defaulting to pf."
			FW=pf
		fi
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		OS_URL=http://www.netbsd.org/
		;;
	DragonFly)
		if [ -f /etc/rc.subr ] && [ -f /etc/rc.conf ] ; then
			# source file with handy subroutines like checkyesno
			. /etc/rc.subr
			# source config file so we can probe vars
			. /etc/rc.conf
			if checkyesno pf; then
				FW=pf
			elif checkyesno ipfilter; then
				FW=ipf
			fi
		fi
		if [ -z "$FW" ] ; then
			echo "Could not detect ipf nor pf, defaulting to pf."
			FW=pf
		fi
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		OS_URL=http://www.dragonflybsd.org/
		;;
	SunOS)
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		FW=ipf
		echo "#define LOG_PERROR 0" >> ${CONFIGFILE}
		echo "#define SOLARIS_KSTATS 1" >> ${CONFIGFILE}
		# solaris 10 does not define u_int64_t ?
		# but it does define uint64_t
		echo "typedef uint64_t u_int64_t;" >> ${CONFIGFILE}
		OS_URL=http://www.sun.com/solaris/
		;;
	Linux)
		OS_URL=http://www.kernel.org/
		KERNVERA=`echo $OS_VERSION | awk -F. '{print $1}'`
		KERNVERB=`echo $OS_VERSION | awk -F. '{print $2}'`
		KERNVERC=`echo $OS_VERSION | awk -F. '{print $3}'`
		KERNVERD=`echo $OS_VERSION | awk -F. '{print $4}'`
		#echo "$KERNVERA.$KERNVERB.$KERNVERC.$KERNVERD"
		# Debian GNU/Linux special case
		if [ -f /etc/debian_version ]; then
			OS_NAME=Debian
			OS_VERSION=`cat /etc/debian_version`
			OS_URL=http://www.debian.org/
		fi
		# same thing for Gentoo linux
		if [ -f /etc/gentoo-release ]; then
			OS_NAME=Gentoo
			OS_VERSION=`cat /etc/gentoo-release`
			OS_URL=http://www.gentoo.org/
		fi
		# use lsb_release (Linux Standard Base) when available
		LSB_RELEASE=`which lsb_release`
		if [ 0 -eq $? ]; then
			OS_NAME=`${LSB_RELEASE} -i -s`
			OS_VERSION=`${LSB_RELEASE} -r -s`
			case $OS_NAME in
				Debian)
					OS_URL=http://www.debian.org/
					OS_VERSION=`${LSB_RELEASE} -c -s`
					;;
				Ubuntu)
					OS_URL=http://www.ubuntu.com/
					OS_VERSION=`${LSB_RELEASE} -c -s`
					;;
				Gentoo)
					OS_URL=http://www.gentoo.org/
					;;
				arch)
					OS_URL=http://www.archlinux.org/
					OS_VERSION=`uname -r`
					;;
			esac
		fi
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		FW=netfilter
		V6SOCKETS_ARE_V6ONLY=`/sbin/sysctl -n net.ipv6.bindv6only`
		;;
	OpenWRT)
		OS_URL=http://www.openwrt.org/
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		FW=netfilter
		;;
	AstLinux)
		OS_URL=http://www.astlinux.org/
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		FW=netfilter
		;;
	Tomato)
		OS_NAME=UPnP
		OS_URL=http://tomatousb.org/
		echo "" >> ${CONFIGFILE}
		echo "#ifdef LINUX26" >> ${CONFIGFILE}
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		echo "#endif" >> ${CONFIGFILE}
		echo "#ifdef TCONFIG_IPV6" >> ${CONFIGFILE}
		echo "#define ENABLE_IPV6" >> ${CONFIGFILE}
		echo "#endif" >> ${CONFIGFILE}
		FW=netfilter
		;;
	Darwin)
		MAJORVER=`echo $OS_VERSION | cut -d. -f1`
		echo "#define USE_IFACEWATCHER 1" >> ${CONFIGFILE}
		# OS X switched to pf since 10.7 Lion (Darwin 11.0)
		if [ $MAJORVER -ge 11 ] ; then
			FW=pf
			echo "#define PFRULE_INOUT_COUNTS" >> ${CONFIGFILE}
		else
			FW=ipfw
		fi
		OS_URL=http://developer.apple.com/macosx
		;;
	*)
		echo "Unknown OS : $OS_NAME"
		echo "Please contact the author at http://miniupnp.free.fr/ or http://miniupnp.tuxfamily.org/."
		exit 1
		;;
esac
# Map the detected firewall backend onto the USE_* macro written to config.h.
case $FW in
	pf)        FW_MACRO=USE_PF ;;
	ipf)       FW_MACRO=USE_IPF ;;
	ipfw)      FW_MACRO=USE_IPFW ;;
	netfilter) FW_MACRO=USE_NETFILTER ;;
	*)
		echo "Unknown Firewall/packet filtering software [$FW]"
		echo "Please contact the author at http://miniupnp.free.fr/ or http://miniupnp.tuxfamily.org/."
		exit 1
		;;
esac
echo "#define ${FW_MACRO} 1" >> ${CONFIGFILE}
# set V6SOCKETS_ARE_V6ONLY to 0 if it was not set above
if [ -z "$V6SOCKETS_ARE_V6ONLY" ] ; then
	V6SOCKETS_ARE_V6ONLY=0
fi
echo "Configuring compilation for [$OS_NAME] [$OS_VERSION] with [$FW] firewall software."
echo "Please edit config.h for more compilation options."
# define SUPPORT_REMOTEHOST if the FW related code really supports setting
# a RemoteHost
if [ \( "$FW" = "netfilter" \) -o \( "$FW" = "pf" \) -o \( "$FW" = "ipfw" \) ] ; then
	echo "#define SUPPORT_REMOTEHOST" >> ${CONFIGFILE}
fi
# Emit OS identification macros used in the XML device description.
echo "" >> ${CONFIGFILE}
echo "#define OS_NAME \"$OS_NAME\"" >> ${CONFIGFILE}
echo "#define OS_VERSION \"$OS_NAME/$OS_VERSION\"" >> ${CONFIGFILE}
echo "#define OS_URL \"${OS_URL}\"" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* syslog facility to be used by miniupnpd */" >> ${CONFIGFILE}
echo "#define LOG_MINIUPNPD ${LOG_MINIUPNPD}" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Uncomment the following line to allow miniupnpd to be" >> ${CONFIGFILE}
echo " * controlled by miniupnpdctl */" >> ${CONFIGFILE}
echo "/*#define USE_MINIUPNPDCTL*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Comment the following line to disable NAT-PMP operations */" >> ${CONFIGFILE}
echo "#define ENABLE_NATPMP" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Comment the following line to disable PCP operations */" >> ${CONFIGFILE}
echo "#define ENABLE_PCP" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
# PCP PEER support is toggled by the --pcp-peer command line flag.
echo "#ifdef ENABLE_PCP" >> ${CONFIGFILE}
if [ -n "$PCP_PEER" ]; then
	echo "/* Comment the following line to disable PCP PEER operation */" >> ${CONFIGFILE}
	echo "#define PCP_PEER" >> ${CONFIGFILE}
else
	echo "/* Uncomment the following line to enable PCP PEER operation */" >> ${CONFIGFILE}
	echo "/*#define PCP_PEER*/" >> ${CONFIGFILE}
fi
echo "#ifdef PCP_PEER" >> ${CONFIGFILE}
echo "/*#define PCP_FLOWP*/" >> ${CONFIGFILE}
echo "#endif /*PCP_PEER*/" >> ${CONFIGFILE}
echo "/*#define PCP_SADSCP*/" >> ${CONFIGFILE}
echo "#endif /*ENABLE_PCP*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Uncomment the following line to enable generation of" >> ${CONFIGFILE}
echo " * filter rules with pf */" >> ${CONFIGFILE}
echo "/*#define PF_ENABLE_FILTER_RULES*/">> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Uncomment the following line to enable caching of results of" >> ${CONFIGFILE}
echo " * the getifstats() function */" >> ${CONFIGFILE}
echo "/*#define ENABLE_GETIFSTATS_CACHING*/" >> ${CONFIGFILE}
echo "/* The cache duration is indicated in seconds */" >> ${CONFIGFILE}
echo "#define GETIFSTATS_CACHING_DURATION 2" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Uncomment the following line to enable multiple external ip support */" >> ${CONFIGFILE}
echo "/* note : That is EXPERIMENTAL, do not use that unless you know perfectly what you are doing */" >> ${CONFIGFILE}
echo "/* Dynamic external ip adresses are not supported when this option is enabled." >> ${CONFIGFILE}
echo " * Also note that you would need to configure your .conf file accordingly. */" >> ${CONFIGFILE}
echo "/*#define MULTIPLE_EXTERNAL_IP*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Comment the following line to use home made daemonize() func instead" >> ${CONFIGFILE}
echo " * of BSD daemon() */" >> ${CONFIGFILE}
echo "#define USE_DAEMON" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
# --leasefile command line flag
echo "/* Uncomment the following line to enable lease file support */" >> ${CONFIGFILE}
if [ -n "$LEASEFILE" ] ; then
	echo "#define ENABLE_LEASEFILE" >> ${CONFIGFILE}
else
	echo "/*#define ENABLE_LEASEFILE*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
# --portinuse command line flag
echo "/* Uncomment the following line to enable port in use check */" >> ${CONFIGFILE}
if [ -n "$PORTINUSE" ]; then
	echo "#define CHECK_PORTINUSE" >> ${CONFIGFILE}
else
	echo "/*#define CHECK_PORTINUSE*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
echo "/* Define one or none of the two following macros in order to make some" >> ${CONFIGFILE}
echo " * clients happy. It will change the XML Root Description of the IGD." >> ${CONFIGFILE}
echo " * Enabling the Layer3Forwarding Service seems to be the more compatible" >> ${CONFIGFILE}
echo " * option. */" >> ${CONFIGFILE}
echo "/*#define HAS_DUMMY_SERVICE*/" >> ${CONFIGFILE}
echo "#define ENABLE_L3F_SERVICE" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
# --ipv6 command line flag
echo "/* Enable IP v6 support */" >> ${CONFIGFILE}
if [ -n "$IPV6" ]; then
	echo "#define ENABLE_IPV6" >> ${CONFIGFILE}
else
	echo "/*#define ENABLE_IPV6*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
# V6SOCKETS_ARE_V6ONLY was probed via sysctl in the OS case above
echo "/* Define V6SOCKETS_ARE_V6ONLY if AF_INET6 sockets are restricted" >> ${CONFIGFILE}
echo " * to IPv6 communications only. */" >> ${CONFIGFILE}
if [ $V6SOCKETS_ARE_V6ONLY -eq 1 ] ; then
	echo "#define V6SOCKETS_ARE_V6ONLY" >> ${CONFIGFILE}
else
	echo "/*#define V6SOCKETS_ARE_V6ONLY*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
# --igd2 command line flag
echo "/* Enable the support of IGD v2 specification." >> ${CONFIGFILE}
echo " * This is not fully tested yet and can cause incompatibilities with some" >> ${CONFIGFILE}
echo " * control points, so enable with care. */" >> ${CONFIGFILE}
if [ -n "$IGD2" ]; then
	echo "#define IGD_V2" >> ${CONFIGFILE}
else
	echo "/*#define IGD_V2*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
echo "#ifdef IGD_V2" >> ${CONFIGFILE}
echo "/* Enable DeviceProtection service (IGDv2) */" >> ${CONFIGFILE}
echo "#define ENABLE_DP_SERVICE" >> ${CONFIGFILE}
echo "/*#define ENABLE_HTTPS*/" >> ${CONFIGFILE}
echo "/*#define HTTPS_CERTFILE \"/path/to/certificate.pem\"*/" >> ${CONFIGFILE}
echo "/*#define HTTPS_KEYFILE \"/path/to/private.key\"*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Enable WANIPv6FirewallControl service (IGDv2). needs IPv6 */" >> ${CONFIGFILE}
echo "#ifdef ENABLE_IPV6" >> ${CONFIGFILE}
echo "#define ENABLE_6FC_SERVICE" >> ${CONFIGFILE}
echo "#endif /* ENABLE_IPV6 */" >> ${CONFIGFILE}
echo "#endif /* IGD_V2 */" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* UPnP Events support. Working well enough to be enabled by default." >> ${CONFIGFILE}
echo " * It can be disabled to save a few bytes. */" >> ${CONFIGFILE}
echo "#define ENABLE_EVENTS" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* include interface name in pf and ipf rules */" >> ${CONFIGFILE}
echo "#define USE_IFNAME_IN_RULES" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Experimental NFQUEUE support. */" >> ${CONFIGFILE}
echo "/*#define ENABLE_NFQUEUE*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
# --strict command line flag toggles the three STRICT-related macros below
echo "/* Enable to make MiniUPnPd more strict about UPnP conformance" >> ${CONFIGFILE}
echo " * and the messages it receives from control points */" >> ${CONFIGFILE}
if [ -n "$STRICT" ] ; then
	echo "#define UPNP_STRICT" >> ${CONFIGFILE}
else
	echo "/*#define UPNP_STRICT*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
echo "/* If SSDP_RESPOND_SAME_VERSION is defined, the M-SEARCH response" >> ${CONFIGFILE}
echo " * include the same device version as was contained in the search" >> ${CONFIGFILE}
echo " * request. It conforms to UPnP DA v1.1 */" >> ${CONFIGFILE}
echo "#define SSDP_RESPOND_SAME_VERSION" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
echo "/* Add the optional Date: header in all HTTP responses */" >> ${CONFIGFILE}
if [ -n "$STRICT" ] ; then
	echo "#define ENABLE_HTTP_DATE" >> ${CONFIGFILE}
else
	echo "/*#define ENABLE_HTTP_DATE*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
echo "/* Wait a little before answering M-SEARCH request */" >> ${CONFIGFILE}
if [ -n "$STRICT" ] ; then
	echo "#define DELAY_MSEARCH_RESPONSE" >> ${CONFIGFILE}
else
	echo "/*#define DELAY_MSEARCH_RESPONSE*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
echo "/* disable reading and parsing of config file (miniupnpd.conf) */" >> ${CONFIGFILE}
echo "/*#define DISABLE_CONFIG_FILE*/" >> ${CONFIGFILE}
echo "" >> ${CONFIGFILE}
# --vendorcfg command line flag
echo "/* Uncomment the following line to configure all manufacturer infos through miniupnpd.conf */" >> ${CONFIGFILE}
if [ -n "$VENDORCFG" ] ; then
	echo "#define ENABLE_MANUFACTURER_INFO_CONFIGURATION" >> ${CONFIGFILE}
else
	echo "/*#define ENABLE_MANUFACTURER_INFO_CONFIGURATION*/" >> ${CONFIGFILE}
fi
echo "" >> ${CONFIGFILE}
cat >> ${CONFIGFILE} <<EOF
#if defined(ENABLE_6FC_SERVICE) || (defined(ENABLE_PCP) && defined(ENABLE_IPV6))
#define ENABLE_UPNPPINHOLE
#endif
EOF
cat >> ${CONFIGFILE} <<EOF
/* Uncomment the following line if your device does not have a proper clock
 * BOOTID.UPNP.ORG can be set with command line */
#define USE_TIME_AS_BOOTID
EOF
# NOTE(review): TOMATO is defined unconditionally here, not only for the
# Tomato OS case — this looks like a Tomato-fork patch; confirm it is
# intended for all platforms.
echo "#define TOMATO 1" >> ${CONFIGFILE}
echo "#endif /* ${CONFIGMACRO} */" >> ${CONFIGFILE}
# Generation finished successfully: atomically move the temp file into place.
${MV} ${CONFIGFILE} ${CONFIGFILE_FINAL}
exit 0
|
<reponame>rovedit/Fort-Candle<gh_stars>0
#pragma once
#include <typed-geometry/types/objects/aabb.hh>
#include <typed-geometry/types/objects/box.hh>
#include <typed-geometry/types/objects/capsule.hh>
#include <typed-geometry/types/objects/cylinder.hh>
#include <typed-geometry/types/objects/ellipse.hh>
#include <typed-geometry/types/objects/halfspace.hh>
#include <typed-geometry/types/objects/hemisphere.hh>
#include <typed-geometry/types/objects/inf_cone.hh>
#include <typed-geometry/types/objects/inf_cylinder.hh>
#include <typed-geometry/types/objects/plane.hh>
#include <typed-geometry/types/objects/pyramid.hh>
#include <typed-geometry/types/objects/sphere.hh>
// tg::boundary_of(obj) converts an object to its boundary (identity for objects that are already boundaries)
namespace tg
{
// NOTE(review): the functions below also use circle, disk, segment, line,
// ray, cone, array and the helpers normalize/perpendicular/any_normal/
// angle_between/apex_of, whose headers are not among the includes visible
// here — presumably pulled in transitively; confirm.

// ---------------------------------------------------------------------------
// boundary_of: solid object -> object tagged as boundary (same parameters).
// ---------------------------------------------------------------------------

template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr aabb_boundary<D, ScalarT> boundary_of(aabb<D, ScalarT, TraitsT> const& v)
{
    return {v.min, v.max};
}
template <int ObjectD, class ScalarT, int DomainD, class TraitsT>
[[nodiscard]] constexpr box_boundary<ObjectD, ScalarT, DomainD> boundary_of(box<ObjectD, ScalarT, DomainD, TraitsT> const& v)
{
    return {v.center, v.half_extents};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr capsule_boundary<D, ScalarT> boundary_of(capsule<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr cylinder_boundary<D, ScalarT> boundary_of(cylinder<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <int ObjectD, class ScalarT, int DomainD, class TraitsT>
[[nodiscard]] constexpr ellipse_boundary<ObjectD, ScalarT, DomainD> boundary_of(ellipse<ObjectD, ScalarT, DomainD, TraitsT> const& v)
{
    return {v.center, v.semi_axes};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr hemisphere_boundary<D, ScalarT> boundary_of(hemisphere<D, ScalarT, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr inf_cone_boundary<D, ScalarT> boundary_of(inf_cone<D, ScalarT, TraitsT> const& v)
{
    return {v.apex, v.opening_dir, v.opening_angle};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr inf_cylinder_boundary<D, ScalarT> boundary_of(inf_cylinder<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <class BaseT, class TraitsT>
[[nodiscard]] constexpr pyramid_boundary<BaseT> boundary_of(pyramid<BaseT, TraitsT> const& v)
{
    return {v.base, v.height};
}
// full-dimensional sphere (ObjectD == DomainD): boundary has no normal
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere_boundary<D, ScalarT> boundary_of(sphere<D, ScalarT, D, TraitsT> const& v)
{
    return {v.center, v.radius};
}
// lower-dimensional spheres embedded in a higher domain carry a normal
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere_boundary<1, ScalarT, 2> boundary_of(sphere<1, ScalarT, 2, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere_boundary<2, ScalarT, 3> boundary_of(sphere<2, ScalarT, 3, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
// the boundary of a halfspace is its dividing plane
template <int D, class ScalarT>
[[nodiscard]] constexpr plane<D, ScalarT> boundary_of(halfspace<D, ScalarT> const& v)
{
    return {v.normal, v.dis};
}
// === no caps versions ===
// boundary_no_caps_of: boundary surface WITHOUT the flat cap faces.
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr cylinder_boundary_no_caps<D, ScalarT> boundary_no_caps_of(cylinder<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr hemisphere_boundary_no_caps<D, ScalarT> boundary_no_caps_of(hemisphere<D, ScalarT, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
template <class BaseT, class TraitsT>
[[nodiscard]] constexpr pyramid_boundary_no_caps<BaseT> boundary_no_caps_of(pyramid<BaseT, TraitsT> const& v)
{
    return {v.base, v.height};
}
// identity overloads: objects that are already cap-less boundaries pass through
template <int D, class ScalarT>
[[nodiscard]] constexpr cylinder_boundary_no_caps<D, ScalarT> boundary_of(cylinder_boundary_no_caps<D, ScalarT> v)
{
    return v;
}
template <int D, class ScalarT>
[[nodiscard]] constexpr hemisphere_boundary_no_caps<D, ScalarT> boundary_of(hemisphere_boundary_no_caps<D, ScalarT> v)
{
    return v;
}
template <class BaseT>
[[nodiscard]] constexpr pyramid_boundary_no_caps<BaseT> boundary_of(pyramid_boundary_no_caps<BaseT> v)
{
    return v;
}
// === only caps versions ===
// caps_of: just the flat cap face(s); return type depends on whether the
// input is a cap-less boundary (then only the cap's outline is returned).
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr auto caps_of(hemisphere<3, ScalarT, TraitsT> const& v)
{
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return circle<3, ScalarT>(v.center, v.radius, v.normal);
    else
        return disk<3, ScalarT>(v.center, v.radius, v.normal);
}
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr auto caps_of(hemisphere<2, ScalarT, TraitsT> const& v)
{
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return sphere_boundary<1, ScalarT, 2>(v.center, v.radius, v.normal);
    else
    {
        // 2D cap is the diameter segment perpendicular to the normal
        auto half = perpendicular(v.normal) * v.radius;
        return segment<2, ScalarT>(v.center - half, v.center + half);
    }
}
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr pos<1, ScalarT> caps_of(hemisphere<1, ScalarT, TraitsT> const& v)
{
    static_assert(!std::is_same_v<TraitsT, boundary_no_caps_tag> && "1D hemisphere_boundary_no_caps does not have any caps");
    return v.center;
}
template <class BaseT, class TraitsT>
[[nodiscard]] constexpr auto caps_of(pyramid<BaseT, TraitsT> const& v)
{
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return boundary_of(v.base);
    else
        return v.base;
}
// a (finite) cylinder has two caps: disks at both ends of the axis,
// with outward-facing normals
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr auto caps_of(cylinder<D, ScalarT, TraitsT> const& v)
{
    const auto normal = normalize(v.axis.pos1 - v.axis.pos0);
    if constexpr (std::is_same_v<TraitsT, boundary_no_caps_tag>)
        return array<sphere<D - 1, ScalarT, D, boundary_tag>, 2>{{{v.axis.pos0, v.radius, -normal}, {v.axis.pos1, v.radius, normal}}};
    else
        return array<sphere<D - 1, ScalarT, D>, 2>{{{v.axis.pos0, v.radius, -normal}, {v.axis.pos1, v.radius, normal}}};
}
// === solid version ===
// solid_of: strip the traits tag, yielding the default (solid) object.
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr aabb<D, ScalarT> solid_of(aabb<D, ScalarT, TraitsT> const& v)
{
    return {v.min, v.max};
}
template <int ObjectD, class ScalarT, int DomainD, class TraitsT>
[[nodiscard]] constexpr box<ObjectD, ScalarT, DomainD> solid_of(box<ObjectD, ScalarT, DomainD, TraitsT> const& v)
{
    return {v.center, v.half_extents};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr capsule<D, ScalarT> solid_of(capsule<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr cylinder<D, ScalarT> solid_of(cylinder<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <int ObjectD, class ScalarT, int DomainD, class TraitsT>
[[nodiscard]] constexpr ellipse<ObjectD, ScalarT, DomainD> solid_of(ellipse<ObjectD, ScalarT, DomainD, TraitsT> const& v)
{
    return {v.center, v.semi_axes};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr hemisphere<D, ScalarT> solid_of(hemisphere<D, ScalarT, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr inf_cone<D, ScalarT> solid_of(inf_cone<D, ScalarT, TraitsT> const& v)
{
    return {v.apex, v.opening_dir, v.opening_angle};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr inf_cylinder<D, ScalarT> solid_of(inf_cylinder<D, ScalarT, TraitsT> const& v)
{
    return {v.axis, v.radius};
}
template <class BaseT, class TraitsT>
[[nodiscard]] constexpr pyramid<BaseT> solid_of(pyramid<BaseT, TraitsT> const& v)
{
    return {v.base, v.height};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere<D, ScalarT> solid_of(sphere<D, ScalarT, D, TraitsT> const& v)
{
    return {v.center, v.radius};
}
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere<1, ScalarT, 2> solid_of(sphere<1, ScalarT, 2, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
template <class ScalarT, class TraitsT>
[[nodiscard]] constexpr sphere<2, ScalarT, 3> solid_of(sphere<2, ScalarT, 3, TraitsT> const& v)
{
    return {v.center, v.radius, v.normal};
}
// === infinite versions ===
// inf_of: extend a finite object to its infinite counterpart.
template <int D, class ScalarT>
[[nodiscard]] constexpr line<D, ScalarT> inf_of(segment<D, ScalarT> const& v)
{
    return {v.pos0, normalize(v.pos1 - v.pos0)};
}
template <int D, class ScalarT>
[[nodiscard]] constexpr line<D, ScalarT> inf_of(ray<D, ScalarT> const& v)
{
    return {v.origin, v.dir};
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr auto inf_of(cone<D, ScalarT, TraitsT> const& v)
{
    const auto apex = apex_of(v);
    // cone opens opposite to the base normal; the opening angle is twice the
    // angle between the axis and a line from the apex to the base rim
    const auto openingDir = -v.base.normal;
    const auto openingAngle = ScalarT(2) * angle_between(normalize(v.base.center + any_normal(v.base.normal) * v.base.radius - apex), openingDir);
    if constexpr (std::is_same_v<TraitsT, default_object_tag>)
        return inf_cone<D, ScalarT, default_object_tag>(apex, openingDir, openingAngle);
    else
        return inf_cone<D, ScalarT, boundary_tag>(apex, openingDir, openingAngle);
}
template <int D, class ScalarT, class TraitsT>
[[nodiscard]] constexpr auto inf_of(cylinder<D, ScalarT, TraitsT> const& v)
{
    const auto axis = inf_of(v.axis);
    if constexpr (std::is_same_v<TraitsT, default_object_tag>)
        return inf_cylinder<D, ScalarT, default_object_tag>(axis, v.radius);
    else
        return inf_cylinder<D, ScalarT, boundary_tag>(axis, v.radius);
}
}
|
#!/bin/bash
# SLURM batch job: run the logistic-PCA clustering R program inside the
# pca_env conda environment.
#SBATCH --job-name=pca_1
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=1
# wall-clock limit: 7 days
#SBATCH --time=168:00:00
#SBATCH --mem=250gb
#SBATCH --partition=benson,tmp_anvil,batch
module load anaconda
# conda activate statsmodels
conda activate pca_env
Rscript logistic_pca_clustering_program_newport.R
conda deactivate
|
# some code from https://github.com/ming71/toolbox/blob/master/rotation/order_points.py
import os
import math
import cv2
import numpy as np
# this function is confined to rectangle
# clockwise, write by ming71
def order_points(pts):
    """Order 4 rectangle corners clockwise as (tl, tr, br, bl).

    Confined to rectangles (uses the largest tl-distance to pick br).

    Args:
        pts: (4, 2) array of x, y corner coordinates.

    Returns:
        (4, 2) float32 array in top-left, top-right, bottom-right,
        bottom-left order.
    """
    # Fix: `dist` was referenced but never imported at module scope
    # (only order_points_quadrangle imported it locally) -> NameError.
    from scipy.spatial import distance as dist
    # sort the points based on their x-coordinates
    xSorted = pts[np.argsort(pts[:, 0]), :]
    # grab the left-most and right-most points from the sorted
    # x-coordinate points
    leftMost = xSorted[:2, :]
    rightMost = xSorted[2:, :]
    # sort the left-most coordinates by y to get top-left / bottom-left
    leftMost = leftMost[np.argsort(leftMost[:, 1]), :]
    (tl, bl) = leftMost
    # by the Pythagorean theorem, the right-most point farthest from the
    # top-left corner is the bottom-right point
    D = dist.cdist(tl[np.newaxis], rightMost, "euclidean")[0]
    (br, tr) = rightMost[np.argsort(D)[::-1], :]
    # return the coordinates in top-left, top-right,
    # bottom-right, and bottom-left order
    return np.array([tl, tr, br, bl], dtype="float32")
# this function can be used for polygon
def order_points_quadrangle(pts):
    """Order 4 quadrangle corners as (tl, tr, br, bl).

    Works for general (convex) quadrangles, not just rectangles: the two
    right-most points are disambiguated by the angle they form with the
    left edge (tl -> bl).

    Args:
        pts: (4, 2) array of x, y corner coordinates.

    Returns:
        (4, 2) float32 array in top-left, top-right, bottom-right,
        bottom-left order.
    """
    # sort the points based on their x-coordinates
    xSorted = pts[np.argsort(pts[:, 0]), :]
    # grab the left-most and right-most points from the sorted
    # x-coordinate points
    leftMost = xSorted[:2, :]
    rightMost = xSorted[2:, :]
    # sort the left-most coordinates by y to get top-left / bottom-left
    leftMost = leftMost[np.argsort(leftMost[:, 1]), :]
    (tl, bl) = leftMost
    # use the left edge tl->bl as the base vector and compare the angles the
    # two right-most points form with it: the larger angle is bottom-right.
    base = np.asarray(bl - tl, dtype=np.float64)

    def _angle_to_base(p):
        # Angle between base and tl->p. Replaces the hand-rolled cos_dist
        # helper: numpy dot/norm plus clipping avoids both the None return
        # on degenerate vectors and arccos domain errors from rounding.
        vec = np.asarray(p - tl, dtype=np.float64)
        denom = np.linalg.norm(base) * np.linalg.norm(vec)
        cosine = np.dot(base, vec) / denom if denom > 0 else 1.0
        return np.arccos(np.clip(cosine, -1.0, 1.0))

    angle = [_angle_to_base(rightMost[0]), _angle_to_base(rightMost[1])]
    (br, tr) = rightMost[np.argsort(angle), :]
    # return the coordinates in top-left, top-right,
    # bottom-right, and bottom-left order
    return np.array([tl, tr, br, bl], dtype="float32")
def cos_dist(a, b):
    """Cosine similarity between two equal-length vectors.

    Returns None when the lengths differ or either vector has zero norm
    (the similarity is undefined in both cases).
    """
    if len(a) != len(b):
        return None
    dot = sum(x * y for x, y in zip(a, b))
    norm_product = math.sqrt(sum(x * x for x in a) * sum(y * y for y in b))
    if norm_product == 0.0:
        return None
    return dot / norm_product
############ another method##############
def sort_corners(quads):
    """Sort each quad's corners into a canonical clockwise order.

    For every quad: sort corners clockwise by angle around the centroid,
    then rotate the sequence so it starts at a deterministic corner chosen
    by comparing the two diagonals' slopes and endpoint coordinates.

    Args:
        quads: (N, 8) array, each row is x0,y0,...,x3,y3.

    Returns:
        (N, 8) float32 array of reordered corners.
    """
    sorted_quads = np.zeros(quads.shape, dtype=np.float32)
    for i, corners in enumerate(quads):
        corners = corners.reshape(4, 2)
        # angle of each corner around the quad centroid
        centers = np.mean(corners, axis=0)
        corners = corners - centers
        cosine = corners[:, 0] / np.sqrt(corners[:, 0] ** 2 + corners[:, 1] ** 2)
        # clamp against rounding before arccos
        cosine = np.minimum(np.maximum(cosine, -1.0), 1.0)
        thetas = np.arccos(cosine) / np.pi * 180.0
        # points below the centroid (y > 0 in image coords) get 360-theta
        # so the angle runs a full turn
        indice = np.where(corners[:, 1] > 0)[0]
        thetas[indice] = 360.0 - thetas[indice]
        corners = corners + centers
        # descending angle == clockwise order
        corners = corners[thetas.argsort()[::-1], :]
        corners = corners.reshape(8)
        # diagonals 0-2 and 1-3; use slopes (max int for vertical) to pick
        # which diagonal's end-point becomes the first corner
        dx1, dy1 = (corners[4] - corners[0]), (corners[5] - corners[1])
        dx2, dy2 = (corners[6] - corners[2]), (corners[7] - corners[3])
        slope_1 = dy1 / dx1 if dx1 != 0 else np.iinfo(np.int32).max
        slope_2 = dy2 / dx2 if dx2 != 0 else np.iinfo(np.int32).max
        if slope_1 > slope_2:
            # start on the 0-2 diagonal: smaller x (tie: smaller y) first
            if corners[0] < corners[4]:
                first_idx = 0
            elif corners[0] == corners[4]:
                first_idx = 0 if corners[1] < corners[5] else 2
            else:
                first_idx = 2
        else:
            # start on the 1-3 diagonal: smaller x (tie: smaller y) first
            if corners[2] < corners[6]:
                first_idx = 1
            elif corners[2] == corners[6]:
                first_idx = 1 if corners[3] < corners[7] else 3
            else:
                first_idx = 3
        # rotate the clockwise sequence so it starts at first_idx
        for j in range(4):
            idx = (first_idx + j) % 4
            sorted_quads[i, j * 2] = corners[idx * 2]
            sorted_quads[i, j * 2 + 1] = corners[idx * 2 + 1]
    return sorted_quads
# counterclockwise, write by WenQian
def re_order(bboxes, with_label=False):
    """Reorder the 4 corners of each box starting from the leftmost corner.

    Per the author's note above, the intended output order is
    counterclockwise: (x1, y1) is the leftmost (topmost on x-ties) corner,
    (x3, y3) its diagonal opposite, and (x2, y2)/(x4, y4) the remaining two
    split by which side of the x1-x3 diagonal they fall on.
    When ``with_label`` is True the box's trailing label element is kept.
    Returns a float32 numpy array.
    """
    n=len(bboxes)
    targets=[]
    for i in range(n):
        box=bboxes[i]
        # find x1: the leftmost corner (the upper one when x values tie)
        x1=box[0]
        y1=box[1]
        x1_index=0
        for j in range(1,4):
            ### if x larger than x1 then continue
            if box[2*j]>x1:
                continue
            ### if x smaller than x1 then replace x1 as x
            elif box[2*j]<x1:
                x1=box[2*j]
                y1=box[2*j+1]
                x1_index=j
            ### if equal, prefer the upper point (smaller y)
            else:
                if box[2*j+1]<y1:
                    x1=box[2*j]
                    y1=box[2*j+1]
                    x1_index=j
                else:
                    continue
        # find the corner diagonal to x1: the candidate whose line through
        # x1 puts the remaining two corners on opposite sides
        for j in range(4):
            if j==x1_index:
                continue
            x_=box[2*j]
            y_=box[2*j+1]
            x_index=j
            # signed side of each remaining corner w.r.t. line x1 -> (x_, y_)
            val=[]
            for k in range(4):
                if k==x_index or k==x1_index:
                    continue
                else:
                    x=box[2*k]
                    y=box[2*k+1]
                    if x1==x_:
                        # vertical candidate line: compare x offsets instead
                        val.append(x-x1)
                    else:
                        val1=(y-y1)-(y_-y1)/(x_-x1)*(x-x1)
                        val.append(val1)
            # opposite signs => (x_, y_) is the diagonal corner x3
            if val[0]*val[1]<0:
                x3=x_
                y3=y_
                # assign the last two corners by their side of the diagonal
                for k in range(4):
                    if k==x_index or k==x1_index:
                        continue
                    x=box[2*k]
                    y=box[2*k+1]
                    if not x1==x3:
                        val=(y-y1)-(y3-y1)/(x3-x1)*(x-x1)
                        if val>=0:
                            x2=x
                            y2=y
                        if val<0:
                            x4=x
                            y4=y
                    else:
                        # vertical diagonal: split by x offset
                        val=x1-x
                        if val>=0:
                            x2=x
                            y2=y
                        if val<0:
                            x4=x
                            y4=y
                break
        try:
            if with_label:
                targets.append([x1, y1, x2, y2, x3, y3, x4, y4, box[-1]])
            else:
                targets.append([x1, y1, x2, y2, x3, y3, x4, y4])
        except:
            # NOTE(review): bare except — for degenerate boxes x2/x4 may
            # never be assigned (NameError); the raw box is kept as-is.
            print('**'*20)
            print(box)
            targets.append(box)
    return np.array(targets, np.float32)
# pts = np.array([[296, 245] ,[351 ,266], [208, 487],[263, 507]])
# npts = order_points_quadrangle(pts)
# Smoke test: run both corner-ordering implementations on the same quads
# (two rotations of an axis-aligned box, a rotated diamond, and a square).
if __name__ == '__main__':
    pts = np.array([
        [242.7452, 314.5097, 242.7452, 133.4903, 333.2548, 133.4903, 333.2548, 314.5097],
        [333.2548, 133.4903, 333.2548, 314.5097, 242.7452, 314.5097, 242.7452, 133.4903],
        [60, 0, 80, 20, 20, 80, 0, 60],
        [40, 0, 40, 40, 0, 40, 0, 0]
    ])
    npts = sort_corners(pts)
    print(pts)
    # print(npts)
    print(re_order([[242.7452, 314.5097, 242.7452, 133.4903, 333.2548, 133.4903, 333.2548, 314.5097],
                    [333.2548, 133.4903, 333.2548, 314.5097, 242.7452, 314.5097, 242.7452, 133.4903],
                    [60, 0, 80, 20, 20, 80, 0, 60],
                    [40, 0, 40, 40, 0, 40, 0, 0]]))
|
<reponame>SeekinG-K/fund-analysis-parent<gh_stars>0
/**************************************************************************/
/* */
/* Copyright (c) 2019 XunceTech Company */
/* 深圳迅策科技有限公司版权所有 */
/* */
/* PROPRIETARY RIGHTS of XunceTech Company are involved in the */
/* subject matter of this material. All manufacturing, reproduction, use, */
/* and sales rights pertaining to this subject matter are governed by the */
/* license agreement. The recipient of this software implicitly accepts */
/* the terms of the license. */
/* 本软件文档资料是深圳迅策科技有限公司的资产,任何人士阅读和 */
/* 使用本资料必须获得相应的书面授权,承担保密责任和接受相应的法律约束。 */
/* */
/**************************************************************************/
package com.scale.invest.api.model.env;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
 * @description: Bean information — pairs a Spring-managed bean's class with
 *               the service interface class it is exposed under.
 * @copyright: Copyright (c) 2019 XunceTech
 * @author: chasel
 * @version: 1.0
 * @date: 2019-07-24
 * @time: 11:01:02
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Builder
public class BeanInfo implements Serializable {
    private static final long serialVersionUID = 8496049375326969409L;

    /**
     * The concrete class of the bean instance.
     */
    private Class<?> beanClass;

    /**
     * The service (interface) class the bean is registered against.
     */
    private Class<?> serviceClass;
}
|
#!/bin/bash
#
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
APP_TITLE="NVIDIA Jetson Add-On Installer"
LOG_ZOO="[jetson-zoo] "
LOG_FILE=./jetson-zoo.log
UPDATES_URL="https://raw.githubusercontent.com/dusty-nv/jetson-zoo/master/jetson-zoo.sh"
#
# check for updates to this script
#
function check_updates()
{
	# Compare this script's checksum against the latest copy downloaded from
	# $UPDATES_URL.  Returns 1 when a newer version is available, 0 when
	# already current (or when the download fails).
	# NOTE: NEW_PATH/CUR_PATH are intentionally global — the update prompt
	# later in the script reuses them.
	CHECKSUM="sha256sum"
	NEW_PATH="jetson-zoo.new"
	OLD_PATH="jetson-zoo.old"
	CUR_PATH="$0"

	echo "$LOG_ZOO checking for updates..."
	echo "$LOG_ZOO current path: $CUR_PATH old path: $OLD_PATH new path: $NEW_PATH"

	# make a backup of the current script
	echo "$LOG_ZOO backing up $CUR_PATH to $OLD_PATH"
	cp "$CUR_PATH" "$OLD_PATH"

	# download the latest copy; on failure report "no update" rather than
	# comparing against the truncated/empty file wget -O leaves behind
	echo "$LOG_ZOO downloading latest script to $NEW_PATH"
	if ! wget --no-check-certificate "$UPDATES_URL" -O "$NEW_PATH"; then
		echo "$LOG_ZOO update check failed (could not download $UPDATES_URL)"
		return 0
	fi

	# get checksums ($CHECKSUM was previously defined but never used)
	CHECKSUM_OLD=$($CHECKSUM "$CUR_PATH" | awk '{print $1}')
	CHECKSUM_NEW=$($CHECKSUM "$NEW_PATH" | awk '{print $1}')

	echo "$LOG_ZOO old checksum: $CHECKSUM_OLD"
	echo "$LOG_ZOO new checksum: $CHECKSUM_NEW"

	# compare checksums
	if [ "$CHECKSUM_OLD" != "$CHECKSUM_NEW" ]; then
		echo "$LOG_ZOO updated version found"
		return 1
	fi

	echo "$LOG_ZOO already using the latest version"
	return 0
}
#
# check if a particular deb package is installed with dpkg-query
# arg $1 -> package name
# arg $2 -> variable name to output status to (e.g. HAS_PACKAGE=1)
#
#
# check if a particular deb package is installed with dpkg-query
# arg $1 -> package name
# arg $2 -> variable name to output status to (e.g. HAS_PACKAGE=INSTALLED)
#
function find_deb_package()
{
	local PKG_NAME="$1"
	# dpkg-query prints the package Status line; grep keeps only a fully
	# installed state.  stderr is silenced for packages dpkg has never seen.
	local HAS_PKG=$(dpkg-query -W --showformat='${Status}\n' "$PKG_NAME" 2>/dev/null | grep "install ok installed")

	if [ "$HAS_PKG" == "" ]; then
		echo "$LOG_ZOO Checking for '$PKG_NAME' deb package...not installed"
	else
		echo "$LOG_ZOO Checking for '$PKG_NAME' deb package...installed"
		# write INSTALLED into the caller-named variable
		eval "$2=INSTALLED"
	fi
}
#
# install a debian package if it isn't already installed
# arg $1 -> package name
# arg $2 -> variable name to output status to (e.g. FOUND_PACKAGE=INSTALLED)
#
function install_deb_package()
{
	# Install a debian package if it isn't already installed.
	# arg $1 -> package name
	# arg $2 -> NAME of the variable to receive the status (set to INSTALLED)
	local PKG_NAME="$1"
	local OUT_VAR="$2"

	# check to see if the package is already installed
	# (find_deb_package sets the variable named by OUT_VAR on success)
	find_deb_package "$PKG_NAME" "$OUT_VAR"

	# BUGFIX: the original tested '[ -z $2 ]', i.e. whether the variable
	# *name* was empty — always false, so the install branch never ran.
	# Use bash indirect expansion to test the variable's value instead.
	if [ -z "${!OUT_VAR}" ]; then
		echo "$LOG_ZOO Missing '$PKG_NAME' deb package...installing '$PKG_NAME' package."
		sudo apt-get --force-yes --yes install "$PKG_NAME"
	else
		return 0
	fi

	# verify that the package was installed
	find_deb_package "$PKG_NAME" "$OUT_VAR"

	if [ -z "${!OUT_VAR}" ]; then
		echo "$LOG_ZOO Failed to install '$PKG_NAME' deb package."
		return 1
	else
		echo "$LOG_ZOO Successfully installed '$PKG_NAME' deb package."
		return 0
	fi
}
#
# install Hello AI World
#
function install_jetson_inference()
{
	# Ask the user (via a dialog inputbox) where to clone/install
	# jetson-inference; currently only logs the chosen path.
	jetson_inference_path=~/jetson-inference
	jetson_inference_path=$(dialog --backtitle "$APP_TITLE" --output-fd 1 --inputbox "Path to install jetson-inference:" 20 80 $jetson_inference_path)
	jetson_inference_path_status=$?

	# pad the terminal so the dialog artifacts scroll away
	for i in {1..5}; do echo ""; done

	echo "$LOG_ZOO jetson-inference path selection exit status: $jetson_inference_path_status"

	# exit status 0 means the user confirmed (did not cancel) the inputbox
	if [ $jetson_inference_path_status = 0 ]; then
		echo "$LOG_ZOO jetson-inference path: $jetson_inference_path"
	fi
}
#
# package installation menu
#
function install_packages()
{
	# Show the add-on checklist and dispatch each selected entry.
	pkg_selected=$(dialog --backtitle "$APP_TITLE" \
	       --title "Install Add-On Packages" \
		  --checklist "Keys:\n  ↑↓  Navigate Menu\n  Space to Select Packages \n  Enter to Continue" 20 80 10 \
		  --output-fd 1 \
		  1 "Hello AI World (jetson-inference)" off \
		  2 "TensorFlow 1.13" off \
		  3 "PyTorch 1.0 (Python 2.7)" off \
		  4 "PyTorch 1.0 (Python 3.6)" off \
		  5 "Caffe2" off \
		  6 "MXNet" off \
		  7 "ROS Melodic" off \
		  8 "ros_deep_learning" off \
		  9 "Gazebo" off \
		  10 "AWS Greengrass" off )

	pkg_selection_status=$?
	clear

	{
		echo "$LOG_ZOO Packages selection status:  $pkg_selection_status"

		if [ $pkg_selection_status = 0 ]; then
			if [ -z $pkg_selected ]; then
				echo "$LOG_ZOO No packages were selected for installation."
			else
				echo "$LOG_ZOO Packages selected for installation:  $pkg_selected"

				# BUGFIX: the handler messages for entries 5-7 did not match
				# the checklist labels (5 logged MXNet, 6 logged Greengrass),
				# and entries 8-10 were silently ignored.  Each branch now
				# mirrors its menu label; most are log-only placeholders.
				for pkg in $pkg_selected
				do
					if [ $pkg = 1 ]; then
						echo "$LOG_ZOO Installing Hello AI World (jetson-inference)..."
						install_jetson_inference
					elif [ $pkg = 2 ]; then
						echo "$LOG_ZOO Installing TensorFlow 1.13 (Python 3.6)..."
					elif [ $pkg = 3 ]; then
						echo "$LOG_ZOO Installing PyTorch 1.0 (Python 2.7)..."
					elif [ $pkg = 4 ]; then
						echo "$LOG_ZOO Installing PyTorch 1.0 (Python 3.6)..."
					elif [ $pkg = 5 ]; then
						echo "$LOG_ZOO Installing Caffe2..."
					elif [ $pkg = 6 ]; then
						echo "$LOG_ZOO Installing MXNet..."
					elif [ $pkg = 7 ]; then
						echo "$LOG_ZOO Installing ROS Melodic..."
					elif [ $pkg = 8 ]; then
						echo "$LOG_ZOO Installing ros_deep_learning..."
					elif [ $pkg = 9 ]; then
						echo "$LOG_ZOO Installing Gazebo..."
					elif [ $pkg = 10 ]; then
						echo "$LOG_ZOO Installing AWS Greengrass..."
					fi
				done
			fi
		else
			echo "$LOG_ZOO Package selection cancelled."
		fi

		echo "$LOG_ZOO Press Enter key to quit."

	} > >(tee -a -i $LOG_FILE) 2>&1
}
#
# retrieve jetson board info
#
function read_jetson_info()
{
	# Probe sysfs/devicetree/CLI tools and populate the JETSON_* globals
	# consumed later by jetson_info().  Returns 1 on a non-Jetson host.

	# verify architecture
	JETSON_ARCH=$(uname -i)
	ARCH_REQUIRED="aarch64"

	echo "$LOG_ZOO System Architecture:  $JETSON_ARCH"

	if [ $JETSON_ARCH != $ARCH_REQUIRED ]; then
		echo "$LOG_ZOO $JETSON_ARCH architecture detected, $ARCH_REQUIRED required"
		echo "$LOG_ZOO Please run jetson-zoo from the Jetson ($ARCH_REQUIRED)"
		return 1
	fi

	# Tegra Chip ID (maps the fuse value to the marketing name)
	JETSON_CHIP_ID=$(cat /sys/module/tegra_fuse/parameters/tegra_chip_id)

	case $JETSON_CHIP_ID in
		64)
			JETSON_CHIP_ID="T124"
			JETSON_CHIP="TK1" ;;
		33)
			JETSON_CHIP_ID="T210"
			JETSON_CHIP="TX1" ;;
		24)
			JETSON_CHIP_ID="T186"
			JETSON_CHIP="TX2" ;;
		25)
			JETSON_CHIP_ID="T194"
			JETSON_CHIP="Xavier" ;;
		*)
			JETSON_CHIP="UNKNOWN" ;;
	esac

	echo "$LOG_ZOO Jetson Chip ID:  $JETSON_CHIP ($JETSON_CHIP_ID)"

	# Board model
	JETSON_MODEL=$(tr -d '\0' < /sys/firmware/devicetree/base/model)	# remove NULL bytes to avoid bash warning
	echo "$LOG_ZOO Jetson Board Model:  $JETSON_MODEL"

	# Serial number
	JETSON_SERIAL=$(tr -d '\0' < /sys/firmware/devicetree/base/serial-number)	# remove NULL bytes to avoid bash warning
	echo "$LOG_ZOO Jetson Serial Number:  $JETSON_SERIAL"

	# Active power mode
	# NOTE(review): '\N' in this sed expression is likely meant as a literal
	# "NV Power Mode: " prefix strip — confirm it behaves on the target sed.
	JETSON_POWER_MODE=$(nvpmodel -q | head -n 1 | sed 's/\NV Power Mode: //g')
	echo "$LOG_ZOO Jetson Active Power Mode:  $JETSON_POWER_MODE"

	# Memory capacity/usage (in MB, from free --mega)
	JETSON_MEMORY=$(free --mega | awk '/^Mem:/{print $2}')
	JETSON_MEMORY_USED=$(free --mega | awk '/^Mem:/{print $3}')
	JETSON_MEMORY_FREE=$(expr $JETSON_MEMORY - $JETSON_MEMORY_USED)

	echo "$LOG_ZOO Jetson Memory Total:  $JETSON_MEMORY MB"
	echo "$LOG_ZOO Jetson Memory Used:   $JETSON_MEMORY_USED MB"
	echo "$LOG_ZOO Jetson Memory Free:   $JETSON_MEMORY_FREE MB"

	# Disk storage (stat gives blocks * block-size = bytes, converted below)
	JETSON_DISK=$(($(stat -f --format="%b*%S" .)))
	JETSON_DISK_FREE=$(($(stat -f --format="%f*%S" .)))
	JETSON_DISK_USED=$(expr $JETSON_DISK - $JETSON_DISK_FREE)

	JETSON_DISK=$(expr $JETSON_DISK / 1048576)	# convert bytes to MB
	JETSON_DISK_FREE=$(expr $JETSON_DISK_FREE / 1048576)
	JETSON_DISK_USED=$(expr $JETSON_DISK_USED / 1048576)

	echo "$LOG_ZOO Jetson Disk Total:  $JETSON_DISK MB"
	echo "$LOG_ZOO Jetson Disk Used:   $JETSON_DISK_USED MB"
	echo "$LOG_ZOO Jetson Disk Free:   $JETSON_DISK_FREE MB"

	# Kernel version
	JETSON_KERNEL=$(uname -r)
	echo "$LOG_ZOO Jetson L4T Kernel Version:  $JETSON_KERNEL"

	# L4T version (parsed from the first line of /etc/nv_tegra_release)
	local JETSON_L4T_STRING=$(head -n 1 /etc/nv_tegra_release)

	JETSON_L4T_RELEASE=$(echo $JETSON_L4T_STRING | cut -f 2 -d ' ' | grep -Po '(?<=R)[^;]+')
	JETSON_L4T_REVISION=$(echo $JETSON_L4T_STRING | cut -f 2 -d ',' | grep -Po '(?<=REVISION: )[^;]+')

	JETSON_L4T="$JETSON_L4T_RELEASE.$JETSON_L4T_REVISION"

	echo "$LOG_ZOO Jetson L4T BSP Version:  L4T R$JETSON_L4T"

	# JetPack version (lookup table from the L4T release number)
	case $JETSON_L4T in
		"32.1.0") JETSON_JETPACK="4.2" ;;
		"31.1.0") JETSON_JETPACK="4.1.1" ;;
		"31.0.2") JETSON_JETPACK="4.1" ;;
		"31.0.1") JETSON_JETPACK="4.0" ;;
		"28.2.1") JETSON_JETPACK="3.3 | 3.2.1" ;;
		"28.2" | "28.2.0" ) JETSON_JETPACK="3.2" ;;
		"28.1") JETSON_JETPACK="3.1" ;;
		"27.1") JETSON_JETPACK="3.0" ;;
		"24.2") JETSON_JETPACK="2.3" ;;
		"24.1") JETSON_JETPACK="2.2.1 | 2.2" ;;
		"23.2") JETSON_JETPACK="2.1" ;;
		"23.1") JETSON_JETPACK="2.0" ;;
		"21.5") JETSON_JETPACK="2.3.1 | 2.3" ;;
		"21.4") JETSON_JETPACK="2.2 | 2.1 | 2.0 | 1.2" ;;
		"21.3") JETSON_JETPACK="1.1" ;;
		"21.2") JETSON_JETPACK="1.0" ;;
		*) JETSON_JETPACK="UNKNOWN" ;;
	esac

	echo "$LOG_ZOO Jetson JetPack Version:  JetPack $JETSON_JETPACK"

	# CUDA version (version.txt is absent when CUDA isn't installed)
	if [ -f /usr/local/cuda/version.txt ]; then
		JETSON_CUDA=$(cat /usr/local/cuda/version.txt | sed 's/\CUDA Version //g')
	else
		JETSON_CUDA="NOT_INSTALLED"
	fi

	echo "$LOG_ZOO Jetson CUDA Version:  CUDA $JETSON_CUDA"
	return 0
}
# display jetson board info
# display jetson board info
function jetson_info()
{
	# Render the JETSON_* globals (set by read_jetson_info) in a dialog
	# msgbox using box-drawing characters; the printf widths keep the
	# right-hand box borders aligned.
	echo "$LOG_ZOO Jetson Board Information"

	local mem_total_str=$(printf "│    Total:  %4d MB │" ${JETSON_MEMORY})
	local mem_used_str=$(printf "│    Used:   %4d MB │" ${JETSON_MEMORY_USED})
	local mem_free_str=$(printf "│    Free:   %4d MB │" ${JETSON_MEMORY_FREE})

	local disk_total_str=$(printf "│    Total:  %5d MB │" ${JETSON_DISK})
	local disk_used_str=$(printf "│    Used:   %5d MB │" ${JETSON_DISK_USED})
	local disk_free_str=$(printf "│    Free:   %5d MB │" ${JETSON_DISK_FREE})

	local sw_kernel_str=$(printf "│    Linux Kernel:      %-23s│" "$JETSON_KERNEL ($JETSON_ARCH)")
	local sw_l4t_str=$(printf "│    L4T Version:       %-23s│" "$JETSON_L4T")
	local sw_jetpack_str=$(printf "│    JetPack Version:   %-23s│" "$JETSON_JETPACK")
	local sw_cuda_str=$(printf "│    CUDA Version:      %-23s│" "$JETSON_CUDA")

	# \Zb / \ZB are dialog --colors escapes for bold on/off
	local info_str="Part Name:   $JETSON_MODEL\n
Chip Arch:   $JETSON_CHIP ($JETSON_CHIP_ID)\n
Serial No:   $JETSON_SERIAL\n
Power Mode:  $JETSON_POWER_MODE\n\n
┌─\ZbSoftware Configuration\ZB────────────────────┐\n
${sw_kernel_str}\n
${sw_l4t_str}\n
${sw_jetpack_str}\n
${sw_cuda_str}\n
└───────────────────────────────────────────┘\n\n
┌─\ZbMemory\ZB────────────┐  ┌─\ZbDisk Storage\ZB───────┐\n
${mem_total_str}  ${disk_total_str}\n
${mem_used_str}  ${disk_used_str}\n
${mem_free_str}  ${disk_free_str}\n
└───────────────────┘  └────────────────────┘\n"

	dialog --backtitle "$APP_TITLE" \
		   --title "Jetson Board Information" \
		   --colors \
		   --msgbox "$info_str" 22 85
}
# initial config
{
	#
	# run sections that we want logged in a subshell
	#
	echo "$LOG_ZOO `date`"
	echo "$LOG_ZOO Logging to:  $LOG_FILE"

	#
	# retrieve jetson info and verify architecture; abort on non-Jetson hosts
	#
	read_jetson_info

	if [ $? != 0 ]; then
		exit $?
	fi

	#
	# check for the dialog package (required for all menus below);
	# install_deb_package writes INSTALLED into FOUND_DIALOG
	#
	install_deb_package "dialog" FOUND_DIALOG
	echo "$LOG_ZOO FOUND_DIALOG=$FOUND_DIALOG"

	#
	# check for updates (return code 1 signals a newer version exists)
	#
	check_updates
	version_updated=$?

} > >(tee -i $LOG_FILE) 2>&1	# clear the log on first subshell (tee without -a)
# use customized RC config for dialog theming
export DIALOGRC=./jetson-zoo.rc

# if an update occured, offer to apply it and exit this instance
# (removed leftover debug output 'echo "TEST TEST"')
echo "$LOG_ZOO version updated:  $version_updated"

if [ $version_updated != 0 ]; then
	dialog --backtitle "$APP_TITLE" --title "Update Notification" --yesno "\nAn updated version of this script is available.\n\nWould you like for it to be downloaded now?" 10 55
	update_status=$?

	if [ $update_status == 0 ]; then
		# NEW_PATH/CUR_PATH were set globally by check_updates;
		# the actual copy/restart is still disabled below
		echo "$LOG_ZOO applying update ($NEW_PATH -> $CUR_PATH)..."
		#cp $NEW_PATH $CUR_PATH
		#$CUR_PATH $@
		exit 0
	fi

	#echo "$LOG_ZOO finished updating, restarting script"
fi
#
# main menu
#
# Main menu loop: keeps re-displaying the menu until the user quits.
while true; do
	menu_selected=$(dialog --backtitle "$APP_TITLE" \
	       --title "Main Menu" \
		  --cancel-label "Quit" \
		  --menu "Keys:\n  ↑↓  Navigate Menu\n  Enter to Continue" 20 80 7 \
		  --output-fd 1 \
		  1 "Install Add-On Packages" \
		  2 "Uninstall Add-On Packages" \
		  3 "View Installed Add-Ons" \
		  4 "View Board Information" \
		  5 "Check for Updates" )

	menu_status=$?
	clear

	{
		echo "$LOG_ZOO Menu status:  $menu_status"

		# non-zero exit code means the user quit
		if [ $menu_status != 0 ]; then
			echo "$LOG_ZOO Press Enter key to exit"
			exit 0
		fi

		echo "$LOG_ZOO Menu selected:  $menu_selected"

	} > >(tee -a -i $LOG_FILE) 2>&1	# 'tee -a' (append to log on further subshells)

	# execute the selected menu option
	# (options 2, 3 and 5 are currently log-only placeholders)
	case $menu_selected in
		1)
			echo "$LOG_ZOO Install Add-On Packages"
			install_packages ;;
		2)
			echo "$LOG_ZOO Uninstall Add-On Packages" ;;
		3)
			echo "$LOG_ZOO View Installed Add-Ons" ;;
		4)
			echo "$LOG_ZOO View Board Information"
			jetson_info ;;
		5)
			echo "$LOG_ZOO Check for Updates" ;;
		*)
			echo "$LOG_ZOO Unknown Menu Option" ;;
	esac
done
|
########################
# Intall NodeJS
#
# user:root
# path:/opt
#
########################
# NOTE(review): this shebang is not the first line of the file, so it is
# inert — the script runs under whatever shell invokes it; confirm intent.
#!/bin/sh
cd /opt
# check node install: skip the build entirely if a node binary is on PATH
if which node 2>/dev/null; then
    echo "node exists!"
else
    # install node build dependencies (CentOS/RHEL toolchain)
    yum install -y gcc gcc-c++ make
    # Download node source tar gz (pinned to v6.11.0)
    wget https://nodejs.org/dist/v6.11.0/node-v6.11.0.tar.gz
    # unzip
    tar -zxvf node-v6.11.0.tar.gz
    # change folder
    cd node-v6.11.0
    # set configure
    ./configure
    # make
    make
    # make install (installs to /usr/local/bin)
    make install
    # copy command so node is found via /usr/sbin as well
    cp /usr/local/bin/node /usr/sbin/
    # test node command
    node --version
fi
|
# Register the 'throne' SysV init service to start at boot with default
# runlevel links, then enable it.
sudo update-rc.d throne defaults
sudo update-rc.d throne enable
|
from geist.backends.windows import get_all_windows
# Finder for the HOMECARE Domiciliary Care application window.
# NOTE(review): WindowFinder (and presumably LocationFinderFilter/Location/
# BaseFinder) are defined *below* this statement in the visible chunk —
# evaluating this at import time would raise NameError; confirm definition
# order against the full file.
coldharbour_window_finder = LocationFinderFilter(
    lambda loc: loc.window.title.startswith(u'HOMECARE Domiciliary Care'),
    WindowFinder()
)
class _WindowLocation(Location):
    """
    Adds window to Location object.

    Wraps a native window object so finder pipelines can carry it
    alongside positional data.
    """
    def __init__(self, window, parent):
        # the underlying OS window object (from geist.backends.windows)
        self.window = window
        # fixed placeholder geometry; assumes Location takes (x, y, w, h)
        # — TODO confirm against geist's Location signature
        Location.__init__(self, 1, 1, 10, 10, parent=parent)
class WindowFinder(BaseFinder):
    """
    Finds all windows in the given location.

    Yields one _WindowLocation per window reported by the Windows backend;
    windows whose _WindowLocation construction raises ValueError are
    silently skipped.
    """
    def find(self, in_location):
        for window in get_all_windows():
            try:
                yield _WindowLocation(window, in_location)
            except ValueError:
                # skip windows that cannot be wrapped as a Location
                pass
class WindowTitleFinder(BaseFinder):
    """
    Creates a Finder which finds Windows whose title contains the given
    substring (delegates to WindowFinder filtered on ``title in window.title``).
    """
    def __init__(self, title):
        self.finder = LocationFinderFilter(
            lambda loc: (title in loc.window.title),
            WindowFinder()
        )
class WindowTitleClassnameFinder(BaseFinder):
    """
    Creates a Finder which finds Windows with a particular title substring
    AND an exactly matching window class name.
    """
    def __init__(self, title, classname):
        self.finder = LocationFinderFilter(
            lambda loc: (title in loc.window.title
                         and classname == loc.window.classname),
            WindowFinder()
        )
|
import React, {useState} from 'react';
const Calculator = () => {
const [num1, setNum1] = useState(0);
const [num2, setNum2] = useState(0);
const [operation, setOperation] = useState('add');
const [result, setResult] = useState(0);
const handleOperation = (e) => {
setOperation(e.target.value);
};
const calculate = () => {
let res = 0;
switch (operation) {
case 'add':
res = num1 + num2;
break;
case 'subtract':
res = num1 - num2;
break;
case 'multiply':
res = num1 * num2;
break;
case 'divide':
res = num1 / num2;
break;
default:
break;
}
setResult(res);
};
return (
<div>
<input
type='number'
value={num1}
onChange={(e) => setNum1(e.target.value)}
/>
<input
type='number'
value={num2}
onChange={(e) => setNum2(e.target.value)}
/>
<select onChange={handleOperation}>
<option value='add'>+</option>
<option value='subtract'>-</option>
<option value='multiply'>*</option>
<option value='divide'>/</option>
</select>
<button onClick={calculate}>Calculate</button>
<p>Result: {result}</p>
</div>
);
};
export default Calculator;
|
#!/bin/sh
# The MIT License (MIT)
#
# Copyright (c) 2017 Eficode Oy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
VERSION="2.2.1"

# NOTE(review): this 'set --' saves TIMEOUT/QUIET/PROTOCOL/HOST/PORT/result
# onto the argument list for wait_for's exec handoff, but here it runs
# BEFORE any of those variables are assigned — the saved values are empty
# and the appended '--' terminates option parsing early.  In upstream
# wait-for this line sits immediately before the final 'wait_for "$@"';
# moving it requires a coordinated change at the bottom of the script.
set -- "$@" -- "$TIMEOUT" "$QUIET" "$PROTOCOL" "$HOST" "$PORT" "$result"
TIMEOUT=15
QUIET=0
# The protocol to make the request with, either "tcp" or "http"
PROTOCOL="tcp"
# Print all arguments to stderr unless quiet mode (-q) is enabled.
echoerr() {
  if [ "$QUIET" -ne 1 ]; then
    printf "%s\n" "$*" 1>&2
  fi
}
usage() {
  # Print usage to stderr and exit with the given status code.
  exitcode="$1"
  cat << USAGE >&2
Usage:
  $0 host:port|url [-t timeout] [-- command args]
  -q | --quiet                        Do not output any status messages
  -t TIMEOUT | --timeout=timeout      Timeout in seconds, zero for no timeout
  -v | --version                      Show the version of this tool
  -- COMMAND ARGS                     Execute command with args after the test finishes
USAGE
  exit "$exitcode"
}
wait_for() {
  # Poll the target until it responds or TIMEOUT_END passes, then either
  # exec the saved command (when one follows the saved state on "$@") or
  # exit with the probe's status.

  # Pre-flight: verify the tool for the selected protocol exists.
  # BUGFIX: the second branch previously matched 'wget)', but $PROTOCOL is
  # only ever "tcp" or "http", so the wget availability check never ran.
  case "$PROTOCOL" in
    tcp)
      if ! command -v nc >/dev/null; then
        echoerr 'nc command is missing!'
        exit 1
      fi
      ;;
    http)
      if ! command -v wget >/dev/null; then
        echoerr 'wget command is missing!'
        exit 1
      fi
      ;;
  esac

  TIMEOUT_END=$(($(date +%s) + TIMEOUT))

  while :; do
    # one probe attempt, output suppressed; only the exit status matters
    case "$PROTOCOL" in
      tcp)
        nc -w 1 -z "$HOST" "$PORT" > /dev/null 2>&1
        ;;
      http)
        wget --timeout=1 -q "$HOST" -O /dev/null > /dev/null 2>&1
        ;;
      *)
        echoerr "Unknown protocol '$PROTOCOL'"
        exit 1
        ;;
    esac

    result=$?

    if [ $result -eq 0 ] ; then
      # Success.  If a command precedes the 7 saved trailing args
      # (-- TIMEOUT QUIET PROTOCOL HOST PORT result), rotate it to the
      # front, restore the saved variables, and exec it.
      if [ $# -gt 7 ] ; then
        for result in $(seq $(($# - 7))); do
          result=$1
          shift
          set -- "$@" "$result"
        done

        TIMEOUT=$2 QUIET=$3 PROTOCOL=$4 HOST=$5 PORT=$6 result=$7
        shift 7
        exec "$@"
      fi
      exit 0
    fi

    if [ $(date +%s) -ge $TIMEOUT_END ]; then
      echo "Operation timed out" >&2
      exit 1
    fi

    sleep 1
  done
}
# Option parsing: consumes "$@" until '--' (command separator) or exhaustion.
while :; do
  case "$1" in
    # a URL selects HTTP probing
    http://*|https://*)
      HOST="$1"
      PROTOCOL="http"
      shift 1
      ;;
    # host:port selects TCP probing
    *:* )
      HOST=$(printf "%s\n" "$1"| cut -d : -f 1)
      PORT=$(printf "%s\n" "$1"| cut -d : -f 2)
      shift 1
      ;;
    -v | --version)
      echo $VERSION
      exit
      ;;
    -q | --quiet)
      QUIET=1
      shift 1
      ;;
    # '-q-...' is malformed (e.g. '-q--quiet'); report it loudly
    -q-*)
      QUIET=0
      echoerr "Unknown option: $1"
      usage 1
      ;;
    # bundled short options starting with -q: peel off the q and re-queue
    # the remainder (e.g. '-qt5' -> QUIET=1 then parse '-t5')
    -q*)
      QUIET=1
      result=$1
      shift 1
      set -- -"${result#-q}" "$@"
      ;;
    -t | --timeout)
      TIMEOUT="$2"
      shift 2
      ;;
    # '-t5' form: value attached to the flag
    -t*)
      TIMEOUT="${1#-t}"
      shift 1
      ;;
    --timeout=*)
      TIMEOUT="${1#*=}"
      shift 1
      ;;
    # everything after '--' is the command to exec once the wait succeeds
    --)
      shift
      break
      ;;
    --help)
      usage 0
      ;;
    -*)
      QUIET=0
      echoerr "Unknown option: $1"
      usage 1
      ;;
    *)
      QUIET=0
      echoerr "Unknown argument: $1"
      usage 1
      ;;
  esac
done
# Validate inputs: the arithmetic test doubles as an integer check
# (non-numeric TIMEOUT makes '[' fail, stderr suppressed).
if ! [ "$TIMEOUT" -ge 0 ] 2>/dev/null; then
  echoerr "Error: invalid timeout '$TIMEOUT'"
  usage 3
fi

# Each protocol requires its own minimum set of target parameters.
case "$PROTOCOL" in
  tcp)
    if [ "$HOST" = "" ] || [ "$PORT" = "" ]; then
      echoerr "Error: you need to provide a host and port to test."
      usage 2
    fi
    ;;
  http)
    if [ "$HOST" = "" ]; then
      echoerr "Error: you need to provide a host to test."
      usage 2
    fi
    ;;
esac

# Remaining args are the post-success command (if any) plus saved state.
wait_for "$@"
|
#!/bin/bash
#SBATCH --job-name=W4F_TEST_US_INIT_W4AO
#SBATCH --cluster=invest
#SBATCH --partition=lchong
#SBATCH --nodes=1
#SBATCH --ntasks-per-node=4
#SBATCH --mem=16g
#SBATCH --time=24:00:00
#SBATCH --mail-user=dty7@pitt.edu
#SBATCH --mail-type=END,FAIL
#SBATCH --output=slurm_init.out
#SBATCH --error=slurm_init.err

# Umbrella-sampling window setup for residue W4F (a TRP variant).
# Dihedral atom index lists for the phi/psi torsions, plus the window
# centers (both 5 degrees here).
# TRP
PHI_IAT="5,7,9,29"
PSI_IAT="7,9,29,31"
PHI=5
PSI=5

# environment setup and the gen_window helper definition
source ~/.setup.sh
source ../../scripts/gen_window_fun_mpi.sh

# run each window in its own CONFS/<phi>_<psi> directory
mkdir CONFS
mkdir -p CONFS/${PHI}_${PSI}
cd CONFS/${PHI}_${PSI}

#          RES  PHI   PHIIAT    PSI   PSIIAT   NCPU
gen_window W4F  $PHI  $PHI_IAT  $PSI  $PSI_IAT 4
|
#include <math.h>
#include <assert.h>
#include "bam.h"
#include "bam_maqcns.h"
#include "ksort.h"
#include "kaln.h"
/* instantiate ksort's generic introsort for plain uint32_t keys */
KSORT_INIT_GENERIC(uint32_t)
/* flanking window (bp) examined around a candidate indel */
#define INDEL_WINDOW_SIZE 50
#define INDEL_EXT_DEP 0.9
/* per-caller scratch: a growable array of packed per-base records
   (see bam_maqcns_glfgen for the bit layout) */
typedef struct __bmc_aux_t {
	int max;
	uint32_t *info;
} bmc_aux_t;
/* per-site accumulators, one slot per nucleotide A/C/G/T */
typedef struct {
	float esum[4], fsum[4];
	uint32_t c[4];
	uint32_t rms_mapQ;
} glf_call_aux_t;
/* map 4-bit nt16 base codes to 2-bit A/C/G/T (4 = ambiguous/other) */
char bam_nt16_nt4_table[] = { 4, 0, 1, 4, 2, 4, 4, 4, 3, 4, 4, 4, 4, 4, 4, 4 };
/*
P(<b1,b2>) = \theta \sum_{i=1}^{N-1} 1/i
P(D|<b1,b2>) = \sum_{k=1}^{N-1} p_k 1/2 [(k/N)^n_2(1-k/N)^n_1 + (k/N)^n1(1-k/N)^n_2]
p_k = 1/k / \sum_{i=1}^{N-1} 1/i
*/
/* Build the 256x256 heterozygote log-likelihood table aa->lhet (indexed by
 * n1<<8|n2, the two allele counts) per the formulas in the comment above,
 * and derive the phred-scaled het prior aa->q_r used by glf2cns(). */
static void cal_het(bam_maqcns_t *aa)
{
	int k, n1, n2;
	double sum_harmo; // harmonic sum
	double poly_rate;
	free(aa->lhet);
	aa->lhet = (double*)calloc(256 * 256, sizeof(double));
	sum_harmo = 0.0;
	/* \sum_{i=1}^{N-1} 1/i over the n_hap haplotypes */
	for (k = 1; k <= aa->n_hap - 1; ++k)
		sum_harmo += 1.0 / k;
	for (n1 = 0; n1 < 256; ++n1) {
		for (n2 = 0; n2 < 256; ++n2) {
			long double sum = 0.0;
			double lC = aa->is_soap? 0 : lgamma(n1+n2+1) - lgamma(n1+1) - lgamma(n2+1); // \binom{n1+n2}{n1}
			for (k = 1; k <= aa->n_hap - 1; ++k) {
				/* p_k = (1/k) / sum_harmo, mixed over both strand layouts */
				double pk = 1.0 / k / sum_harmo;
				double log1 = log((double)k/aa->n_hap);
				double log2 = log(1.0 - (double)k/aa->n_hap);
				sum += pk * 0.5 * (expl(log1*n2) * expl(log2*n1) + expl(log1*n1) * expl(log2*n2));
			}
			aa->lhet[n1<<8|n2] = lC + logl(sum);
		}
	}
	poly_rate = aa->het_rate * sum_harmo;
	/* -4.343 = -10/ln(10): convert natural log to phred scale */
	aa->q_r = -4.343 * log(2.0 * poly_rate / (1.0 - poly_rate));
}
/** initialize the helper structure */
/** initialize the helper structure: the dependency weights aa->fk and the
 * MAQ error-model coefficient table aa->coef (indexed q<<16|n<<8|k for
 * quality q, depth n, mismatch count k).  Skipped for the SOAP model. */
static void cal_coef(bam_maqcns_t *aa)
{
	int k, n, q;
	long double sum_a[257], b[256], q_c[256], tmp[256], fk2[256];
	double *lC;
	// aa->lhet will be allocated and initialized
	free(aa->fk); free(aa->coef);
	aa->coef = 0;
	/* fk[n]: geometric error-dependency weight for the n-th duplicate base */
	aa->fk = (double*)calloc(256, sizeof(double));
	aa->fk[0] = fk2[0] = 1.0;
	for (n = 1; n != 256; ++n) {
		aa->fk[n] = pow(aa->theta, n) * (1.0 - aa->eta) + aa->eta;
		fk2[n] = aa->fk[n>>1]; // this is an approximation, assuming reads equally likely come from both strands
	}
	if (aa->is_soap) return;
	aa->coef = (double*)calloc(256*256*64, sizeof(double));
	/* lC[n<<8|k] = log binomial coefficient C(n, k) */
	lC = (double*)calloc(256 * 256, sizeof(double));
	for (n = 1; n != 256; ++n)
		for (k = 1; k <= n; ++k)
			lC[n<<8|k] = lgamma(n+1) - lgamma(k+1) - lgamma(n-k+1);
	for (q = 1; q != 64; ++q) {
		double e = pow(10.0, -q/10.0);  /* phred q -> error probability */
		double le = log(e);
		double le1 = log(1.0-e);
		for (n = 1; n != 256; ++n) {
			double *coef = aa->coef + (q<<16|n<<8);
			sum_a[n+1] = 0.0;
			for (k = n; k >= 0; --k) { // a_k = \sum_{i=k}^n C^n_k \epsilon^k (1-\epsilon)^{n-k}
				sum_a[k] = sum_a[k+1] + expl(lC[n<<8|k] + k*le + (n-k)*le1);
				b[k] = sum_a[k+1] / sum_a[k];
				if (b[k] > 0.99) b[k] = 0.99;  /* cap the tail ratio */
			}
			for (k = 0; k != n; ++k) // log(\bar\beta_{nk}(\bar\epsilon)^{f_k})
				q_c[k] = -4.343 * fk2[k] * logl(b[k] / e);
			for (k = 1; k != n; ++k) q_c[k] += q_c[k-1]; // \prod_{i=0}^k c_i
			for (k = 0; k <= n; ++k) { // powl() in 64-bit mode seems broken on my Mac OS X 10.4.9
				tmp[k] = -4.343 * logl(1.0 - expl(fk2[k] * logl(b[k])));
				coef[k] = (k? q_c[k-1] : 0) + tmp[k]; // this is the final c_{nk}
			}
		}
	}
	free(lC);
}
/* Allocate a consensus caller initialized with the default MAQ model
 * parameters; pair with bam_maqcns_destroy(). */
bam_maqcns_t *bam_maqcns_init()
{
	bam_maqcns_t *bm = (bam_maqcns_t*)calloc(1, sizeof(bam_maqcns_t));
	bm->aux = (bmc_aux_t*)calloc(1, sizeof(bmc_aux_t));
	/* default model parameters (see cal_coef/cal_het for their use) */
	bm->het_rate = 0.001;
	bm->theta    = 0.85;
	bm->n_hap    = 2;
	bm->eta      = 0.03;
	bm->cap_mapQ = 60;
	return bm;
}
/* Precompute the coefficient and heterozygote tables; must be called
 * after the parameters are set and before any consensus calls. */
void bam_maqcns_prepare(bam_maqcns_t *bm)
{
	cal_coef(bm);
	cal_het(bm);
}
/* Free a caller and every table it owns; passing a null pointer is a no-op. */
void bam_maqcns_destroy(bam_maqcns_t *bm)
{
	if (bm == 0) return;
	free(bm->lhet);
	free(bm->fk);
	free(bm->coef);
	free(bm->aux->info);
	free(bm->aux);
	free(bm);
}
/* Compute per-genotype likelihoods (a glf1_t record) for one pileup column
 * of _n reads, under either the MAQ model or, if bm->is_soap, the SOAP
 * model.  Caller owns and frees the returned record. */
glf1_t *bam_maqcns_glfgen(int _n, const bam_pileup1_t *pl, uint8_t ref_base, bam_maqcns_t *bm)
{
	glf_call_aux_t *b;
	int i, j, k, w[8], c, n;
	glf1_t *g = (glf1_t*)calloc(1, sizeof(glf1_t));
	/* NOTE: p[] here is shadowed by 'const bam_pileup1_t *p' inside the
	 * pack loop below — the two are unrelated. */
	float p[16], min_p = 1e30;
	uint64_t rms;
	g->ref_base = ref_base;
	if (_n == 0) return g;

	// construct aux array: one packed uint32_t per usable base
	// bits 24-31: min(base qual, mapQ); 21: usable flag; 18: strand;
	// bits 16-17: 2-bit base; 8-13: base qual; 0-7: mapQ
	if (bm->aux->max < _n) {
		bm->aux->max = _n;
		kroundup32(bm->aux->max);
		bm->aux->info = (uint32_t*)realloc(bm->aux->info, 4 * bm->aux->max);
	}
	for (i = n = 0; i < _n; ++i) {
		const bam_pileup1_t *p = pl + i;
		uint32_t q, x = 0, qq;
		if (p->is_del || (p->b->core.flag&BAM_FUNMAP)) continue;
		q = (uint32_t)bam1_qual(p->b)[p->qpos];
		x |= (uint32_t)bam1_strand(p->b) << 18 | q << 8 | p->b->core.qual;
		if (p->b->core.qual < q) q = p->b->core.qual;  /* cap by mapQ */
		x |= q << 24;
		qq = bam1_seqi(bam1_seq(p->b), p->qpos);
		q = bam_nt16_nt4_table[qq? qq : ref_base];
		if (!p->is_del && q < 4) x |= 1 << 21 | q << 16;
		bm->aux->info[n++] = x;
	}
	/* sort so highest effective-quality bases are processed last below */
	ks_introsort(uint32_t, n, bm->aux->info);
	// generate esum and fsum
	b = (glf_call_aux_t*)calloc(1, sizeof(glf_call_aux_t));
	for (k = 0; k != 8; ++k) w[k] = 0;  /* per (strand,base) duplicate count */
	rms = 0;
	for (j = n - 1; j >= 0; --j) { // calculate esum and fsum
		uint32_t info = bm->aux->info[j];
		int tmp;
		/* floor the effective quality at 4 for bases with nonzero qual */
		if (info>>24 < 4 && (info>>8&0x3f) != 0) info = 4<<24 | (info&0xffffff);
		k = info>>16&7;  /* strand<<2 | base */
		if (info>>24 > 0) {
			/* weight each additional same-strand same-base read by fk[] */
			b->esum[k&3] += bm->fk[w[k]] * (info>>24);
			b->fsum[k&3] += bm->fk[w[k]];
			if (w[k] < 0xff) ++w[k];
			++b->c[k&3];
		}
		/* accumulate capped mapQ for the RMS */
		tmp = (int)(info&0xff) < bm->cap_mapQ? (int)(info&0xff) : bm->cap_mapQ;
		rms += tmp * tmp;
	}
	b->rms_mapQ = (uint8_t)(sqrt((double)rms / n) + .499);
	// rescale ->c[] so the total count fits the 8-bit lhet table index
	for (j = c = 0; j != 4; ++j) c += b->c[j];
	if (c > 255) {
		for (j = 0; j != 4; ++j) b->c[j] = (int)(254.0 * b->c[j] / c + 0.5);
		for (j = c = 0; j != 4; ++j) c += b->c[j];
	}
	if (!bm->is_soap) {
		// generate likelihood (MAQ model); p[j<<2|k] is genotype (j,k)
		for (j = 0; j != 4; ++j) {
			// homozygous: penalty from all bases disagreeing with j
			float tmp1, tmp3;
			int tmp2, bar_e;
			for (k = 0, tmp1 = tmp3 = 0.0, tmp2 = 0; k != 4; ++k) {
				if (j == k) continue;
				tmp1 += b->esum[k]; tmp2 += b->c[k]; tmp3 += b->fsum[k];
			}
			if (tmp2) {
				/* bar_e: weighted-average error quality of the mismatches */
				bar_e = (int)(tmp1 / tmp3 + 0.5);
				if (bar_e < 4) bar_e = 4; // should not happen
				if (bar_e > 63) bar_e = 63;
				p[j<<2|j] = tmp1 + bm->coef[bar_e<<16|c<<8|tmp2];
			} else p[j<<2|j] = 0.0; // all the bases are j
			// heterozygous: mismatches are bases that are neither j nor k
			for (k = j + 1; k < 4; ++k) {
				for (i = 0, tmp2 = 0, tmp1 = tmp3 = 0.0; i != 4; ++i) {
					if (i == j || i == k) continue;
					tmp1 += b->esum[i]; tmp2 += b->c[i]; tmp3 += b->fsum[i];
				}
				if (tmp2) {
					bar_e = (int)(tmp1 / tmp3 + 0.5);
					if (bar_e < 4) bar_e = 4;
					if (bar_e > 63) bar_e = 63;
					p[j<<2|k] = p[k<<2|j] = -4.343 * bm->lhet[b->c[j]<<8|b->c[k]] + tmp1 + bm->coef[bar_e<<16|c<<8|tmp2];
				} else p[j<<2|k] = p[k<<2|j] = -4.343 * bm->lhet[b->c[j]<<8|b->c[k]]; // all the bases are either j or k
			}
			// clamp: likelihood penalties cannot be negative
			for (k = 0; k != 4; ++k)
				if (p[j<<2|k] < 0.0) p[j<<2|k] = 0.0;
		}

		{ // fix p[k<<2|k]: keep the best homozygote strictly below the runner-up
			float max1, max2, min1, min2;
			int max_k, min_k;
			max_k = min_k = -1;
			max1 = max2 = -1.0; min1 = min2 = 1e30;
			for (k = 0; k < 4; ++k) {
				if (b->esum[k] > max1) {
					max2 = max1; max1 = b->esum[k]; max_k = k;
				} else if (b->esum[k] > max2) max2 = b->esum[k];
			}
			for (k = 0; k < 4; ++k) {
				if (p[k<<2|k] < min1) {
					min2 = min1; min1 = p[k<<2|k]; min_k = k;
				} else if (p[k<<2|k] < min2) min2 = p[k<<2|k];
			}
			if (max1 > max2 && (min_k != max_k || min1 + 1.0 > min2))
				p[max_k<<2|max_k] = min1 > 1.0? min1 - 1.0 : 0.0;
		}
	} else { // apply the SOAP model: raw esum penalties, no coef table
		// generate likelihood
		for (j = 0; j != 4; ++j) {
			float tmp;
			// homozygous
			for (k = 0, tmp = 0.0; k != 4; ++k)
				if (j != k) tmp += b->esum[k];
			p[j<<2|j] = tmp;
			// heterozygous
			for (k = j + 1; k < 4; ++k) {
				for (i = 0, tmp = 0.0; i != 4; ++i)
					if (i != j && i != k) tmp += b->esum[i];
				p[j<<2|k] = p[k<<2|j] = -4.343 * bm->lhet[b->c[j]<<8|b->c[k]] + tmp;
			}
		}
	}

	// convert necessary information to glf1_t: store likelihoods relative
	// to the minimum, clamped to one byte each
	g->ref_base = ref_base; g->max_mapQ = b->rms_mapQ;
	g->depth = n > 16777215? 16777215 : n;
	for (j = 0; j != 4; ++j)
		for (k = j; k < 4; ++k)
			if (p[j<<2|k] < min_p) min_p = p[j<<2|k];
	g->min_lk = min_p > 255.0? 255 : (int)(min_p + 0.5);
	for (j = c = 0; j != 4; ++j)
		for (k = j; k < 4; ++k)
			g->lk[c++] = p[j<<2|k]-min_p > 255.0? 255 : (int)(p[j<<2|k]-min_p + 0.5);

	free(b);
	return g;
}
/* Pick the consensus genotype from a glf1_t record; q_r is the phred-scaled
 * prior penalty applied to heterozygotes.  Returns a packed word:
 * bits 28-31 best genotype (nt16 mask), 24-27 second best, 16-23 RMS mapQ,
 * 8-15 quality gap best->second, 0-7 gap second->third. */
uint32_t glf2cns(const glf1_t *g, int q_r)
{
	int i, j, k, tmp[16], min = 10000, min2 = 10000, min3 = 10000, min_g = -1, min_g2 = -1;
	uint32_t x = 0;
	/* unpack the 10 stored likelihoods into the upper triangle of tmp[];
	 * the lower triangle is marked unused with -1 */
	for (i = k = 0; i < 4; ++i)
		for (j = i; j < 4; ++j) {
			tmp[j<<2|i] = -1;
			tmp[i<<2|j] = g->lk[k++] + (i == j? 0 : q_r);
		}
	/* track the three smallest penalties and the two best genotypes */
	for (i = 0; i < 16; ++i) {
		if (tmp[i] < 0) continue;
		if (tmp[i] < min) {
			min3 = min2; min2 = min; min = tmp[i]; min_g2 = min_g; min_g = i;
		} else if (tmp[i] < min2) {
			min3 = min2; min2 = tmp[i]; min_g2 = i;
		} else if (tmp[i] < min3) min3 = tmp[i];
	}
	/* 0xf (all four bits) marks an undetermined genotype */
	x = min_g >= 0? (1U<<(min_g>>2&3) | 1U<<(min_g&3)) << 28 : 0xf << 28;
	x |= min_g2 >= 0? (1U<<(min_g2>>2&3) | 1U<<(min_g2&3)) << 24 : 0xf << 24;
	x |= (uint32_t)g->max_mapQ << 16;
	x |= min2 < 10000? (min2 - min < 256? min2 - min : 255) << 8 : 0xff << 8;
	x |= min2 < 10000 && min3 < 10000? (min3 - min2 < 256? min3 - min2 : 255) : 0xff;
	return x;
}
/* Convenience wrapper: compute likelihoods for one pileup column and return
 * the packed consensus word from glf2cns().  With no reads, both genotype
 * nibbles are set to 0xf (undetermined). */
uint32_t bam_maqcns_call(int n, const bam_pileup1_t *pl, bam_maqcns_t *bm)
{
	uint32_t x;
	if (n == 0) {
		x = 0xfU<<28 | 0xfU<<24;
	} else {
		glf1_t *g = bam_maqcns_glfgen(n, pl, 0xf, bm);
		x = glf2cns(g, (int)(bm->q_r + 0.5));
		free(g);
	}
	return x;
}
/************** *****************/
/* Allocate an indel-caller option struct populated with default values. */
bam_maqindel_opt_t *bam_maqindel_opt_init()
{
	bam_maqindel_opt_t *mi = (bam_maqindel_opt_t*)calloc(1, sizeof(bam_maqindel_opt_t));
	/* model priors */
	mi->q_indel = 40;
	mi->r_indel = 0.00015;
	mi->r_snp = 0.001;
	/* alignment scoring parameters */
	mi->mm_penalty = 3;
	mi->indel_err = 4;
	mi->ambi_thres = 10;
	return mi;
}
/* Free an indel-call result, including both owned sequence buffers;
 * a null pointer is a no-op. */
void bam_maqindel_ret_destroy(bam_maqindel_ret_t *mir)
{
	if (mir == 0) return;
	free(mir->s[0]);
	free(mir->s[1]);
	free(mir);
}
/* Walk the CIGAR to convert a target (reference) coordinate tpos into the
 * corresponding read coordinate.  *_tpos receives the reference position
 * actually matched; for deletions/skips it is snapped to the left or right
 * edge depending on is_left.  Returns the read offset. */
int bam_tpos2qpos(const bam1_core_t *c, const uint32_t *cigar, int32_t tpos, int is_left, int32_t *_tpos)
{
	/* x: current reference pos; y: current read pos; last_y: read pos at
	 * the end of the last match op */
	int k, x = c->pos, y = 0, last_y = 0;
	*_tpos = c->pos;
	for (k = 0; k < c->n_cigar; ++k) {
		int op = cigar[k] & BAM_CIGAR_MASK;
		int l = cigar[k] >> BAM_CIGAR_SHIFT;
		if (op == BAM_CMATCH) {
			/* read starts beyond tpos: nothing to map */
			if (c->pos > tpos) return y;
			if (x + l > tpos) {
				/* tpos falls inside this match run */
				*_tpos = tpos;
				return y + (tpos - x);
			}
			x += l; y += l;
			last_y = y;
		} else if (op == BAM_CINS || op == BAM_CSOFT_CLIP) y += l;  /* read-only ops */
		else if (op == BAM_CDEL || op == BAM_CREF_SKIP) {  /* ref-only ops */
			if (x + l > tpos) {
				/* tpos falls inside the deletion/skip: snap to an edge */
				*_tpos = is_left? x : x + l;
				return y;
			}
			x += l;
		}
	}
	/* tpos lies beyond the alignment end */
	*_tpos = x;
	return last_y;
}
#define MINUS_CONST 0x10000000
/* Call a short indel at reference position `pos` from one pileup column.
 *
 * n, pl:    number of reads and the pileup column itself
 * mi:       caller options (see bam_maqindel_opt_init)
 * ref:      NUL-terminated reference sequence
 * _n_types, _types: optional caller-proposed candidate indel sizes; when
 *           _n_types == 0 candidates are collected from the alignments.
 *
 * Indel sizes are signed: insertion > 0, deletion < 0, 0 = no indel.
 * Each read is re-aligned (banded global alignment) against the reference
 * modified by each candidate indel; the two best-supported candidates are
 * reported.  Returns a heap-allocated result (free with
 * bam_maqindel_ret_destroy) or NULL when no indel is observed.
 */
bam_maqindel_ret_t *bam_maqindel(int n, int pos, const bam_maqindel_opt_t *mi, const bam_pileup1_t *pl, const char *ref,
                                 int _n_types, int *_types)
{
    int i, j, n_types, *types, left, right, max_rd_len = 0;
    bam_maqindel_ret_t *ret = 0;
    // if there is no proposed indel, check if there is an indel from the alignment
    if (_n_types == 0) {
        for (i = 0; i < n; ++i) {
            const bam_pileup1_t *p = pl + i;
            if (!(p->b->core.flag&BAM_FUNMAP) && p->indel != 0) break;
        }
        if (i == n) return 0; // no indel
    }
    { // calculate how many types of indels are available (set n_types and types)
        int m;
        uint32_t *aux;
        /* MINUS_CONST biases signed indel sizes into unsigned space so they
         * sort correctly as uint32_t. */
        aux = (uint32_t*)calloc(n + _n_types + 1, 4);
        m = 0;
        aux[m++] = MINUS_CONST; // zero indel is always a type
        for (i = 0; i < n; ++i) {
            const bam_pileup1_t *p = pl + i;
            if (!(p->b->core.flag&BAM_FUNMAP) && p->indel != 0)
                aux[m++] = MINUS_CONST + p->indel;
            j = bam_cigar2qlen(&p->b->core, bam1_cigar(p->b));
            if (j > max_rd_len) max_rd_len = j;
        }
        if (_n_types) // then also add this to aux[]
            for (i = 0; i < _n_types; ++i)
                if (_types[i]) aux[m++] = MINUS_CONST + _types[i];
        ks_introsort(uint32_t, m, aux);
        // squeeze out identical types
        for (i = 1, n_types = 1; i < m; ++i)
            if (aux[i] != aux[i-1]) ++n_types;
        /* types[] ends up sorted ascending: deletions first, then 0, then
         * insertions; types[n_types-1] is therefore the largest insertion. */
        types = (int*)calloc(n_types, sizeof(int));
        j = 0;
        types[j++] = aux[0] - MINUS_CONST;
        for (i = 1; i < m; ++i) {
            if (aux[i] != aux[i-1])
                types[j++] = aux[i] - MINUS_CONST;
        }
        free(aux);
    }
    { // calculate left and right boundary
        left = pos > INDEL_WINDOW_SIZE? pos - INDEL_WINDOW_SIZE : 0;
        right = pos + INDEL_WINDOW_SIZE;
        if (types[0] < 0) right -= types[0]; // extend window by the largest deletion
        // in case the alignments stand out the reference
        for (i = pos; i < right; ++i)
            if (ref[i] == 0) break;
        right = i;
    }
    { // the core part
        char *ref2, *rs, *inscns = 0;
        int qr_snp, k, l, *score, *pscore, max_ins = types[n_types-1];
        qr_snp = (int)(-4.343 * log(mi->r_snp) + .499); /* phred of SNP prior */
        if (max_ins > 0) { // get the consensus of inserted sequences
            int *inscns_aux = (int*)calloc(4 * n_types * max_ins, sizeof(int));
            // count occurrences
            for (i = 0; i < n_types; ++i) {
                if (types[i] <= 0) continue; // not insertion
                for (j = 0; j < n; ++j) {
                    const bam_pileup1_t *p = pl + j;
                    if (!(p->b->core.flag&BAM_FUNMAP) && p->indel == types[i]) {
                        for (k = 1; k <= p->indel; ++k) {
                            int c = bam_nt16_nt4_table[bam1_seqi(bam1_seq(p->b), p->qpos + k)];
                            if (c < 4) ++inscns_aux[i*max_ins*4 + (k-1)*4 + c];
                        }
                    }
                }
            }
            // construct the consensus of inserted sequence
            inscns = (char*)calloc(n_types * max_ins, sizeof(char));
            for (i = 0; i < n_types; ++i) {
                for (j = 0; j < types[i]; ++j) {
                    /* majority vote per inserted base; 15 (nt16 'N') if no
                     * read voted for any concrete base */
                    int max = 0, max_k = -1, *ia = inscns_aux + i*max_ins*4 + j*4;
                    for (k = 0; k < 4; ++k) {
                        if (ia[k] > max) {
                            max = ia[k];
                            max_k = k;
                        }
                    }
                    inscns[i*max_ins + j] = max? 1<<max_k : 15;
                }
            }
            free(inscns_aux);
        }
        // calculate score
        ref2 = (char*)calloc(right - left + types[n_types-1] + 2, 1);
        rs = (char*)calloc(right - left + max_rd_len + types[n_types-1] + 2, 1);
        score = (int*)calloc(n_types * n, sizeof(int));   /* alignment scores, [type][read] */
        pscore = (int*)calloc(n_types * n, sizeof(int));  /* phred-ish penalties, [type][read] */
        for (i = 0; i < n_types; ++i) {
            ka_param_t ap = ka_param_blast;
            ap.band_width = 2 * types[n_types - 1] + 2;
            ap.gap_end = 0;
            // write ref2: reference with candidate indel i applied just after pos
            for (k = 0, j = left; j <= pos; ++j)
                ref2[k++] = bam_nt16_nt4_table[bam_nt16_table[(int)ref[j]]];
            if (types[i] <= 0) j += -types[i]; // deletion: skip ref bases
            else for (l = 0; l < types[i]; ++l) // insertion: splice in consensus
                ref2[k++] = bam_nt16_nt4_table[(int)inscns[i*max_ins + l]];
            if (types[0] < 0) { // mask deleted sequences
                /* pad with 'N' (4) so every ref2 has the same effective span
                 * regardless of the deletion size of this type */
                int jj, tmp = types[i] >= 0? -types[0] : -types[0] + types[i];
                for (jj = 0; jj < tmp && j < right && ref[j]; ++jj, ++j)
                    ref2[k++] = 4;
            }
            for (; j < right && ref[j]; ++j)
                ref2[k++] = bam_nt16_nt4_table[bam_nt16_table[(int)ref[j]]];
            if (j < right) right = j;
            // calculate score for each read
            for (j = 0; j < n; ++j) {
                const bam_pileup1_t *p = pl + j;
                int qbeg, qend, tbeg, tend;
                if (p->b->core.flag & BAM_FUNMAP) continue;
                qbeg = bam_tpos2qpos(&p->b->core, bam1_cigar(p->b), left, 0, &tbeg);
                qend = bam_tpos2qpos(&p->b->core, bam1_cigar(p->b), right, 1, &tend);
                assert(tbeg >= left);
                for (l = qbeg; l < qend; ++l)
                    rs[l - qbeg] = bam_nt16_nt4_table[bam1_seqi(bam1_seq(p->b), l)];
                {
                    int x, y, n_acigar, ps;
                    uint32_t *acigar;
                    ps = 0;
                    if (tend - tbeg + types[i] <= 0) { // modified ref segment vanished
                        score[i*n+j] = -(1<<20);
                        pscore[i*n+j] = 1<<20;
                        continue;
                    }
                    /* banded global re-alignment of the read slice against the
                     * modified reference slice */
                    acigar = ka_global_core((uint8_t*)ref2 + tbeg - left, tend - tbeg + types[i], (uint8_t*)rs, qend - qbeg, &ap, &score[i*n+j], &n_acigar);
                    /* accumulate the penalty: capped base quality per mismatch,
                     * q_indel per internal gap base */
                    x = tbeg - left; y = 0;
                    for (l = 0; l < n_acigar; ++l) {
                        int op = acigar[l]&0xf;
                        int len = acigar[l]>>4;
                        if (op == BAM_CMATCH) {
                            int k;
                            for (k = 0; k < len; ++k)
                                if (ref2[x+k] != rs[y+k] && ref2[x+k] < 4)
                                    ps += bam1_qual(p->b)[y+k] < qr_snp? bam1_qual(p->b)[y+k] : qr_snp;
                            x += len; y += len;
                        } else if (op == BAM_CINS || op == BAM_CSOFT_CLIP) {
                            if (op == BAM_CINS && l > 0 && l < n_acigar - 1) ps += mi->q_indel * len;
                            y += len;
                        } else if (op == BAM_CDEL) {
                            if (l > 0 && l < n_acigar - 1) ps += mi->q_indel * len;
                            x += len;
                        }
                    }
                    pscore[i*n+j] = ps;
                    /*if (1) { // for debugging only
                        fprintf(stderr, "id=%d, pos=%d, type=%d, j=%d, score=%d, psore=%d, %d, %d, %d, %d, %d, ",
                                j, pos+1, types[i], j, score[i*n+j], pscore[i*n+j], tbeg, tend, qbeg, qend, mi->q_indel);
                        for (l = 0; l < n_acigar; ++l) fprintf(stderr, "%d%c", acigar[l]>>4, "MIDS"[acigar[l]&0xf]);
                        fprintf(stderr, "\n");
                        for (l = 0; l < tend - tbeg + types[i]; ++l) fputc("ACGTN"[ref2[l+tbeg-left]], stderr);
                        fputc('\n', stderr);
                        for (l = 0; l < qend - qbeg; ++l) fputc("ACGTN"[rs[l]], stderr);
                        fputc('\n', stderr);
                    }*/
                    free(acigar);
                }
            }
        }
        { // get final result
            int *sum, max1, max2, max1_i, max2_i;
            // pick up the best two score
            sum = (int*)calloc(n_types, sizeof(int));
            for (i = 0; i < n_types; ++i)
                for (j = 0; j < n; ++j)
                    sum[i] += -pscore[i*n+j]; // lower total penalty == better type
            max1 = max2 = -0x7fffffff; max1_i = max2_i = -1;
            for (i = 0; i < n_types; ++i) {
                if (sum[i] > max1) {
                    max2 = max1; max2_i = max1_i; max1 = sum[i]; max1_i = i;
                } else if (sum[i] > max2) {
                    max2 = sum[i]; max2_i = i;
                }
            }
            free(sum);
            // write ret
            ret = (bam_maqindel_ret_t*)calloc(1, sizeof(bam_maqindel_ret_t));
            ret->indel1 = types[max1_i]; ret->indel2 = types[max2_i];
            ret->s[0] = (char*)calloc(abs(ret->indel1) + 2, 1);
            ret->s[1] = (char*)calloc(abs(ret->indel2) + 2, 1);
            // write indel sequence: "+SEQ" insertion, "-SEQ" deletion, "*" none
            if (ret->indel1 > 0) {
                ret->s[0][0] = '+';
                for (k = 0; k < ret->indel1; ++k)
                    ret->s[0][k+1] = bam_nt16_rev_table[(int)inscns[max1_i*max_ins + k]];
            } else if (ret->indel1 < 0) {
                ret->s[0][0] = '-';
                for (k = 0; k < -ret->indel1 && ref[pos + k + 1]; ++k)
                    ret->s[0][k+1] = ref[pos + k + 1];
            } else ret->s[0][0] = '*';
            if (ret->indel2 > 0) {
                ret->s[1][0] = '+';
                for (k = 0; k < ret->indel2; ++k)
                    ret->s[1][k+1] = bam_nt16_rev_table[(int)inscns[max2_i*max_ins + k]];
            } else if (ret->indel2 < 0) {
                ret->s[1][0] = '-';
                for (k = 0; k < -ret->indel2 && ref[pos + k + 1]; ++k)
                    ret->s[1][k+1] = ref[pos + k + 1];
            } else ret->s[1][0] = '*';
            // write count
            for (i = 0; i < n; ++i) {
                const bam_pileup1_t *p = pl + i;
                if (p->indel == ret->indel1) ++ret->cnt1;
                else if (p->indel == ret->indel2) ++ret->cnt2;
                else ++ret->cnt_anti;
            }
            { // write gl[]
                int tmp, seq_err = 0;
                double x = 1.0;
                /* NOTE(review): the extension-dependency factor uses the
                 * difference of type *indices* (max1_i - max2_i), not of
                 * indel lengths -- confirm this is intended. */
                tmp = max1_i - max2_i;
                if (tmp < 0) tmp = -tmp;
                for (j = 0; j < tmp + 1; ++j) x *= INDEL_EXT_DEP;
                seq_err = mi->q_indel * (1.0 - x) / (1.0 - INDEL_EXT_DEP);
                ret->gl[0] = ret->gl[1] = 0;
                /* per-read penalty difference between the two best types,
                 * capped at seq_err */
                for (j = 0; j < n; ++j) {
                    int s1 = pscore[max1_i*n + j], s2 = pscore[max2_i*n + j];
                    //fprintf(stderr, "id=%d, %d, %d, %d, %d, %d\n", j, pl[j].b->core.pos+1, types[max1_i], types[max2_i], s1, s2);
                    if (s1 > s2) ret->gl[0] += s1 - s2 < seq_err? s1 - s2 : seq_err;
                    else ret->gl[1] += s2 - s1 < seq_err? s2 - s1 : seq_err;
                }
            }
            // write cnt_ref and cnt_ambi
            if (max1_i != 0 && max2_i != 0) {
                /* NOTE(review): score[j] reads row 0 of score[], i.e. the
                 * smallest indel type; that row is the reference (zero-indel)
                 * row only when no deletion candidate exists -- confirm. */
                for (j = 0; j < n; ++j) {
                    int diff1 = score[j] - score[max1_i * n + j];
                    int diff2 = score[j] - score[max2_i * n + j];
                    if (diff1 > 0 && diff2 > 0) ++ret->cnt_ref;
                    else if (diff1 == 0 || diff2 == 0) ++ret->cnt_ambi;
                }
            }
        }
        free(score); free(pscore); free(ref2); free(rs); free(inscns);
    }
    { // call genotype
        int q[3], qr_indel = (int)(-4.343 * log(mi->r_indel) + 0.5);
        int min1, min2, min1_i;
        /* NOTE(review): `(cond? 0 : 0)` and `(cond? 1 : 1)` pick the same
         * value on both branches, so the conditions are dead; this looks
         * like a leftover from an edited prior model -- confirm. */
        q[0] = ret->gl[0] + (ret->s[0][0] != '*'? 0 : 0) * qr_indel;
        q[1] = ret->gl[1] + (ret->s[1][0] != '*'? 0 : 0) * qr_indel;
        q[2] = n * 3 + (ret->s[0][0] == '*' || ret->s[1][0] == '*'? 1 : 1) * qr_indel;
        min1 = min2 = 0x7fffffff; min1_i = -1;
        for (i = 0; i < 3; ++i) {
            if (q[i] < min1) {
                min2 = min1; min1 = q[i]; min1_i = i;
            } else if (q[i] < min2) min2 = q[i];
        }
        ret->gt = min1_i;                 /* 0/1: homozygous best/second; 2: het */
        ret->q_cns = min2 - min1;         /* consensus quality: margin to runner-up */
        // set q_ref
        if (ret->gt < 2) ret->q_ref = (ret->s[ret->gt][0] == '*')? 0 : q[1-ret->gt] - q[ret->gt] - qr_indel - 3;
        else ret->q_ref = (ret->s[0][0] == '*')? q[0] - q[2] : q[1] - q[2];
        if (ret->q_ref < 0) ret->q_ref = 0;
    }
    free(types);
    return ret;
}
|
/*
* Range.java
*
* Copyright (c) 2015 <NAME>
* Released under the MIT license.
* https://github.com/npedotnet/NPESDK_GWT/blob/master/LICENSE
*
* English document
* https://github.com/npedotnet/NPESDK_GWT/blob/master/README.md
*
* Japanese document
* http://3dtech.jp/wiki/index.php?NPESDK_GWT
*
*/
package net.npe.gwt.user.client.ui;
import net.npe.gwt.event.dom.client.HasInputHandlers;
import net.npe.gwt.event.dom.client.InputEvent;
import net.npe.gwt.event.dom.client.InputHandler;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.dom.client.ChangeEvent;
import com.google.gwt.event.dom.client.ChangeHandler;
import com.google.gwt.event.dom.client.HasChangeHandlers;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.user.client.ui.Widget;
/**
 * GWT widget wrapping an HTML {@code <input type="range">} slider element.
 *
 * <p>Exposes the element's {@code value}, {@code min}, {@code max} and
 * {@code step} properties and dispatches both {@link ChangeEvent}s and the
 * custom {@link InputEvent}s (fired continuously while dragging).
 */
public class Range extends Widget implements HasChangeHandlers, HasInputHandlers {

    /**
     * Creates a range input with the given bounds and initial value.
     * Bounds and step are applied before the value — presumably so the
     * browser does not clamp the initial value against default bounds
     * (TODO confirm).
     *
     * @param value initial slider value
     * @param min   lower bound
     * @param max   upper bound
     * @param step  value granularity
     */
    public Range(double value, double min, double max, double step) {
        super();
        Element element = Document.get().createElement("input").<Element>cast();
        element.setPropertyString("type", "range");
        setElement(element);
        setMin(min);
        setMax(max);
        setStep(step);
        setValue(value);
    }

    /** Creates a range input with the HTML defaults: 0..100, step 1, value 50. */
    public Range() {
        this(50, 0, 100, 1);
    }

    /** Registers a handler fired when the user commits a value change. */
    @Override
    public HandlerRegistration addChangeHandler(ChangeHandler handler) {
        return addDomHandler(handler, ChangeEvent.getType());
    }

    /** Registers a handler fired on each input event while the slider moves. */
    @Override
    public HandlerRegistration addInputHandler(InputHandler handler) {
        return addDomHandler(handler, InputEvent.getType());
    }

    /** @return the current slider value */
    public double getValue() {
        return getElement().getPropertyDouble("value");
    }

    /** @return the lower bound */
    public double getMin() {
        return getElement().getPropertyDouble("min");
    }

    /** @return the upper bound */
    public double getMax() {
        return getElement().getPropertyDouble("max");
    }

    /** @return the value granularity */
    public double getStep() {
        return getElement().getPropertyDouble("step");
    }

    /** Sets the slider value. */
    public void setValue(double value) {
        getElement().setPropertyDouble("value", value);
    }

    /** Sets the lower bound. */
    public void setMin(double min) {
        getElement().setPropertyDouble("min", min);
    }

    /** Sets the upper bound. */
    public void setMax(double max) {
        getElement().setPropertyDouble("max", max);
    }

    /** Sets the value granularity. */
    public void setStep(double step) {
        getElement().setPropertyDouble("step", step);
    }
}
|
//
// Created by ooooo on 2020/1/20.
//
#ifndef CPP_0744__SOLUTION2_H_
#define CPP_0744__SOLUTION2_H_
#include <iostream>
#include <vector>
using namespace std;
/**
* loop
*/
class Solution {
 public:
  // Returns the first letter strictly greater than `target` in the linear
  // scan order of `letters`; wraps around to the first element when no such
  // letter exists (LeetCode 744, loop variant).
  char nextGreatestLetter(vector<char> &letters, char target) {
    for (size_t i = 0; i < letters.size(); ++i) {
      if (letters[i] > target) {
        return letters[i];
      }
    }
    return letters.front();  // wrap-around: smallest letter overall
  }
};
#endif //CPP_0744__SOLUTION2_H_
|
<reponame>zzhdhz/chatroom
package com.client.window;
import com.client.ClientMain;
import com.client.listener.ChatListener;
import javax.swing.*;
import java.awt.*;
/**
* Created by form on 2017-07-20.
*/
public class RoomWindow {

    /**
     * Tears down the previous window, builds the chat-room frame, lays out
     * its components, starts the chat client, and finally shows the frame.
     */
    public void open() {
        WindowController.dispose();

        JFrame frame = WindowController.getFrame();
        frame.setTitle("Chat Room");
        // Fixed window geometry, centered on screen.
        frame.setSize(500, 600);
        frame.setLocationRelativeTo(null);
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);

        JPanel panel = WindowController.getPanel();
        frame.add(panel);
        placeComponents(panel);

        // Start the chat client before the window becomes visible.
        ClientMain.getInstance().run();

        frame.setVisible(true);
    }

    /** Lays out the chat transcript area, the input label and the input field. */
    private static void placeComponents(JPanel panel) {
        // Absolute positioning: no layout manager.
        panel.setLayout(null);

        // Read-only transcript area.
        JTextArea transcript = new JTextArea();
        transcript.setName("area");
        transcript.setForeground(Color.BLACK);
        transcript.setBounds(10, 10, 460, 500);
        transcript.setEnabled(false);
        panel.add(transcript);

        // "Input:" label (user-facing text kept in Chinese).
        JLabel inputLabel = new JLabel("输入:");
        inputLabel.setBounds(10, 525, 30, 25);
        panel.add(inputLabel);

        // Message entry field; ChatListener handles send-on-key.
        JTextField inputField = new JTextField(20);
        inputField.setName("message");
        inputField.setBounds(50, 525, 400, 25);
        inputField.addKeyListener(new ChatListener());
        panel.add(inputField);
    }
}
|
<reponame>m-wrona/hevicado<filename>fe/gulpfile.js
// IMPORT PLUGINS (just regular node modules)
var gulp = require('gulp'),
    // static analysis
    jshint = require('gulp-jshint'),
    jscs = require('gulp-jscs'),
    // unit-test runner
    karma = require('karma').server,
    // release pipeline: concat/minify assets referenced from index.html
    usemin = require('gulp-usemin'),
    uglify = require('gulp-uglify'),
    minifyCss = require('gulp-minify-css'),
    // stylesheet preprocessing
    less = require('gulp-less'),
    autoprefixer = require('gulp-autoprefixer'),
    // cache-busting revisions and release-dir cleanup
    rev = require('gulp-rev'),
    clean = require('gulp-clean'),
    // dev server with live reload
    browserSync = require('browser-sync'),
    reloadBrowser = browserSync.reload,
    // Angular template cache generation and index.html injection
    templateCache = require('gulp-angular-templatecache'),
    inject = require('gulp-inject'),
    debug = require('gulp-debug');
// LINTS JS CODE
// Runs JSHint (rules from .jshintrc) over app and test sources; the
// returned stream lets gulp await completion.
gulp.task('lint', function () {
    return gulp
        .src(['./app/modules/**/js/*.js', 'test/**/*.js'])
        .pipe(jshint('.jshintrc'))
        .pipe(jshint.reporter("default"));
});
// JS CHECK STYLE
// NOTE(review): no jscs reporter is attached here, so style violations may
// not be printed or fail the task -- confirm this is intended.
gulp.task('jscs', function () {
    return gulp
        .src(['./app/modules/**/js/*.js', 'test/**/*.js'])
        .pipe(jscs());
});
// RUN UNIT TESTS
// Single karma run; the `done` callback signals async completion to gulp.
gulp.task('test', function (done) {
    karma.start({
        configFile: __dirname + '/test/karma.conf.js',
        singleRun: true
    }, done);
});
// DELETES THE RELEASE DIR
// {read: false} skips reading file contents since they are only deleted.
gulp.task('clean', function () {
    return gulp
        .src('release', {read: false})
        .pipe(clean({force: true}));
});
// CREATE AN ANGULAR TEMPLATE CACHE
// Bundles all module templates into ./app/tmp/js/templates.js for the
// 'hevicado' Angular module; the 'release' task injects that file later.
gulp.task('views', function () {
    return gulp
        .src('./app/modules/**/*.html')
        .pipe(templateCache({
            module: 'hevicado',
            root: 'modules'
        }))
        .pipe(gulp.dest('./app/tmp/js'));
});
// COPY INDEPENDENT ASSETS TO RELEASE DIR (own and 3rd party)
// NOTE(review): none of these streams is returned, so gulp cannot await
// their completion; tasks depending on 'copy:assets' may start before the
// copies finish -- confirm whether this race matters here.
gulp.task('copy:assets', function () {
    gulp.src('./app/css/*.css')
        .pipe(gulp.dest('release/css'));
    gulp.src('./app/fonts/*')
        .pipe(gulp.dest('release/fonts'));
    gulp.src('./app/images/**/*')
        .pipe(gulp.dest('release/images'));
    gulp.src('./app/lang/**/*')
        .pipe(gulp.dest('release/lang'));
    gulp.src('./app/*.txt')
        .pipe(gulp.dest('release'));
});
// CREATE A RELEASE TO THE RELEASE DIR
// Injects the generated template cache into index.html, then runs usemin to
// concat/minify the CSS and JS bundles declared in index.html's build blocks.
// NOTE(review): gulp 3 runs the dependency tasks in parallel, so 'clean' can
// race with 'views'/'less'/'copy:assets' -- confirm ordering is safe.
gulp.task('release', ['clean', 'views', 'less', 'copy:assets'], function () {
    return gulp
        .src('./app/index.html')
        .pipe(inject(
            gulp.src('./app/tmp/js/templates.js', {read: false}),
            {
                starttag: '<!-- inject:templates:js -->',
                ignorePath: './app/tmp/',
                relative: true
            }
        ))
        .pipe(usemin(
            {
                css: [minifyCss(), 'concat'],
                libJs: [uglify(), rev()],
                kunishuJs: [uglify(), rev()]
            }
        ))
        .pipe(gulp.dest('release/'));
});
// STARTS A WEB SERVER FOR THE RELEASE BUILD (for testing the release build locally)
gulp.task('serve:release', function () {
    browserSync({
        server: {
            baseDir: './release'
        },
        port: 8444
    });
});
// PRE-PROCESSES LESS FILE AND OUTPUTS CSS
// Compiled CSS is written back next to the sources under ./app/css.
gulp.task('less', function () {
    return gulp
        .src('./app/css/*.less')
        .pipe(less())
        // auto-prefixes css so you don't need to use vendor prefixes.
        .pipe(autoprefixer({
            browsers: ['last 2 versions', 'IE 9']
        }))
        .pipe(gulp.dest('./app/css'))
        // Stream changes to browser if browserSync is on
        .pipe(reloadBrowser({stream: true}));
});
// STARTS A DEVELOPMENT WEB SERVER WHICH RELOADS BROWSER ON CHANGES
// Watches LESS, CSS, language JSON, JS and templates; serves ./app directly.
gulp.task('serve:dev', ['less'], function () {
    // TODO: add js linting (lint) once we have less errors
    // TODO: add js code style check (jscs) once coding conventions are ok
    // TODO: ...
    // Watch for Less changes
    gulp.watch('./app/**/*.less', ['less']);
    // Watch for CSS changes
    gulp.watch('./app/css/*.css',
        function () {
            reloadBrowser();
        }
    );
    //Watch for language files changes
    gulp.watch('./app/lang/*.json',
        function () {
            reloadBrowser();
        }
    );
    // Watch for JS or template changes
    gulp.watch([
        './app/**/*.js',
        './app/**/*.html',
        './app/index.html'
    ], function () {
        reloadBrowser();
    }
    );
    browserSync({
        server: {
            baseDir: './app'
        },
        port: 8444
    });
});
// START THE DEVELOPMENT WEB SERVER TASK BY DEFAULT IF NO TARGET TASK IS GIVEN
gulp.task('default', ['serve:dev']);
|
<filename>build/esm/shaders/glsl/surface.position.normal.js
// GLSL chunk exported as a JS template string: samples a surface's position
// via the external getPosition() hook and estimates its normal from finite
// differences of neighbouring samples (3-tap variant; the 5-tap version is
// kept commented out inside the shader).
// NOTE(review): this file sits under build/esm and appears to be generated
// build output -- prefer changing the corresponding source shader.
export default /* glsl */ `uniform vec4 mapSize;
uniform vec4 geometryResolution;
uniform vec4 geometryClip;
attribute vec4 position4;
attribute vec2 surface;
// External
vec3 getPosition(vec4 xyzw, float canonical);
void getSurfaceGeometry(vec4 xyzw, float edgeX, float edgeY, out vec3 left, out vec3 center, out vec3 right, out vec3 up, out vec3 down) {
vec4 deltaX = vec4(1.0, 0.0, 0.0, 0.0);
vec4 deltaY = vec4(0.0, 1.0, 0.0, 0.0);
/*
// high quality, 5 tap
center = getPosition(xyzw, 1.0);
left = (edgeX > -0.5) ? getPosition(xyzw - deltaX, 0.0) : center;
right = (edgeX < 0.5) ? getPosition(xyzw + deltaX, 0.0) : center;
down = (edgeY > -0.5) ? getPosition(xyzw - deltaY, 0.0) : center;
up = (edgeY < 0.5) ? getPosition(xyzw + deltaY, 0.0) : center;
*/
// low quality, 3 tap
center = getPosition(xyzw, 1.0);
left = center;
down = center;
right = (edgeX < 0.5) ? getPosition(xyzw + deltaX, 0.0) : (2.0 * center - getPosition(xyzw - deltaX, 0.0));
up = (edgeY < 0.5) ? getPosition(xyzw + deltaY, 0.0) : (2.0 * center - getPosition(xyzw - deltaY, 0.0));
}
vec3 getSurfaceNormal(vec3 left, vec3 center, vec3 right, vec3 up, vec3 down) {
vec3 dx = right - left;
vec3 dy = up - down;
vec3 n = cross(dy, dx);
if (length(n) > 0.0) {
return normalize(n);
}
return vec3(0.0, 1.0, 0.0);
}
varying vec3 vNormal;
varying vec3 vLight;
varying vec3 vPosition;
vec3 getSurfacePositionNormal() {
vec3 left, center, right, up, down;
vec4 p = min(geometryClip, position4);
getSurfaceGeometry(p, surface.x, surface.y, left, center, right, up, down);
vNormal = getSurfaceNormal(left, center, right, up, down);
vLight = normalize((viewMatrix * vec4(1.0, 2.0, 2.0, 0.0)).xyz); // hardcoded directional light
vPosition = -center;
#ifdef POSITION_UV
#ifdef POSITION_UV_INT
vUV = -.5 + (position4.xy * geometryResolution.xy) * mapSize.xy;
#else
vUV = position4.xy * geometryResolution.xy;
#endif
#endif
return center;
}
`;
|
package com.mc.user.service.impl;
import com.mc.common.model.SysMenu;
import com.mc.common.service.impl.SuperServiceImpl;
import com.mc.user.mapper.SysRoleMenuMapper;
import com.mc.user.model.SysRoleMenu;
import com.mc.user.service.ISysRoleMenuService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
import java.util.Set;
/**
* [SysRoleMenuServiceImpl ]
*
* @author likai
* @version 1.0
* @date 2019/12/11 0011 15:50
* @company Gainet
* @copyright copyright (c) 2019
*/
@Slf4j
@Service
public class SysRoleMenuServiceImpl extends SuperServiceImpl<SysRoleMenuMapper, SysRoleMenu> implements ISysRoleMenuService {

    // Mapper backing every operation; this service is a thin delegation layer.
    @Resource
    private SysRoleMenuMapper sysRoleMenuMapper;

    /** Persists a role–menu association; returns the affected row count. */
    @Override
    public int save(Long roleId, Long menuId) {
        return sysRoleMenuMapper.save(roleId, menuId);
    }

    /** Removes a role–menu association; returns the affected row count. */
    @Override
    public int delete(Long roleId, Long menuId) {
        return sysRoleMenuMapper.delete(roleId, menuId);
    }

    /** Looks up menus of the given type reachable from any of the role IDs. */
    @Override
    public List<SysMenu> findMenusByRoleIds(Set<Long> roleIds, Integer type) {
        return sysRoleMenuMapper.findMenusByRoleIds(roleIds, type);
    }

    /** Looks up menus of the given type reachable from any of the role codes. */
    @Override
    public List<SysMenu> findMenusByRoleCodes(Set<String> roleCodes, Integer type) {
        return sysRoleMenuMapper.findMenusByRoleCodes(roleCodes, type);
    }
}
|
module Kernel
  # Runs +blk+, suppressing STDOUT while it executes when +cond+ is truthy.
  def silence_stdout_if(cond, &blk)
    silence_stream_if(cond, STDOUT, &blk)
  end

  # Runs +blk+, suppressing STDERR while it executes when +cond+ is truthy.
  def silence_stderr_if(cond, &blk)
    silence_stream_if(cond, STDERR, &blk)
  end

  # Dispatches to silence_stream when +cond+ is truthy, otherwise just calls
  # the block.  Returns the block's value either way.
  def silence_stream_if(cond, stream, &blk)
    cond ? silence_stream(stream, &blk) : blk.call
  end

  unless method_defined?(:silence_stream)
    # Temporarily redirects +stream+ to the null device for the duration of
    # the block; the original stream is restored even if the block raises.
    def silence_stream(stream)
      saved = stream.dup
      stream.reopen(File::NULL)
      stream.sync = true
      yield
    ensure
      stream.reopen(saved)
      saved.close
    end
  end
end
|
/// Extracts the HTTP response carried by a `ResponseError`.
/// Both enum cases store the raw response as their third associated value.
/// - Warning: force-casts; traps if the stored value is not an
///   `HTTPURLResponse`, exactly like the original implementation.
func processResponseError(_ error: ResponseError) -> Foundation.HTTPURLResponse {
    let raw: Any
    switch error {
    case .badResponse(_, _, let response):
        raw = response
    case .stringEncoding(_, _, let response):
        raw = response
    }
    return raw as! Foundation.HTTPURLResponse
}
|
<reponame>jameseden1/lorawan-stack
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: lorawan-stack/api/applicationserver_packages.proto
package ttnpb
import (
context "context"
fmt "fmt"
_ "github.com/envoyproxy/protoc-gen-validate/validate"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
types "github.com/gogo/protobuf/types"
golang_proto "github.com/golang/protobuf/proto"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = golang_proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
type ApplicationPackage struct {
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
DefaultFPort uint32 `protobuf:"varint,2,opt,name=default_f_port,json=defaultFPort,proto3" json:"default_f_port,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ApplicationPackage) Reset() { *m = ApplicationPackage{} }
func (m *ApplicationPackage) String() string { return proto.CompactTextString(m) }
func (*ApplicationPackage) ProtoMessage() {}
func (*ApplicationPackage) Descriptor() ([]byte, []int) {
return fileDescriptor_aa4ce58e965b6ca0, []int{0}
}
func (m *ApplicationPackage) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ApplicationPackage.Unmarshal(m, b)
}
func (m *ApplicationPackage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ApplicationPackage.Marshal(b, m, deterministic)
}
func (m *ApplicationPackage) XXX_Merge(src proto.Message) {
xxx_messageInfo_ApplicationPackage.Merge(m, src)
}
func (m *ApplicationPackage) XXX_Size() int {
return xxx_messageInfo_ApplicationPackage.Size(m)
}
func (m *ApplicationPackage) XXX_DiscardUnknown() {
xxx_messageInfo_ApplicationPackage.DiscardUnknown(m)
}
var xxx_messageInfo_ApplicationPackage proto.InternalMessageInfo
func (m *ApplicationPackage) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *ApplicationPackage) GetDefaultFPort() uint32 {
if m != nil {
return m.DefaultFPort
}
return 0
}
type ApplicationPackages struct {
Packages []*ApplicationPackage `protobuf:"bytes,1,rep,name=packages,proto3" json:"packages,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ApplicationPackages) Reset() { *m = ApplicationPackages{} }
func (m *ApplicationPackages) String() string { return proto.CompactTextString(m) }
func (*ApplicationPackages) ProtoMessage() {}
func (*ApplicationPackages) Descriptor() ([]byte, []int) {
return fileDescriptor_aa4ce58e965b6ca0, []int{1}
}
func (m *ApplicationPackages) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ApplicationPackages.Unmarshal(m, b)
}
func (m *ApplicationPackages) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ApplicationPackages.Marshal(b, m, deterministic)
}
func (m *ApplicationPackages) XXX_Merge(src proto.Message) {
xxx_messageInfo_ApplicationPackages.Merge(m, src)
}
func (m *ApplicationPackages) XXX_Size() int {
return xxx_messageInfo_ApplicationPackages.Size(m)
}
func (m *ApplicationPackages) XXX_DiscardUnknown() {
xxx_messageInfo_ApplicationPackages.DiscardUnknown(m)
}
var xxx_messageInfo_ApplicationPackages proto.InternalMessageInfo
func (m *ApplicationPackages) GetPackages() []*ApplicationPackage {
if m != nil {
return m.Packages
}
return nil
}
type ApplicationPackageAssociationIdentifiers struct {
EndDeviceIds *EndDeviceIdentifiers `protobuf:"bytes,1,opt,name=end_device_ids,json=endDeviceIds,proto3" json:"end_device_ids,omitempty"`
FPort uint32 `protobuf:"varint,2,opt,name=f_port,json=fPort,proto3" json:"f_port,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ApplicationPackageAssociationIdentifiers) Reset() {
*m = ApplicationPackageAssociationIdentifiers{}
}
func (m *ApplicationPackageAssociationIdentifiers) String() string { return proto.CompactTextString(m) }
func (*ApplicationPackageAssociationIdentifiers) ProtoMessage() {}
func (*ApplicationPackageAssociationIdentifiers) Descriptor() ([]byte, []int) {
return fileDescriptor_aa4ce58e965b6ca0, []int{2}
}
func (m *ApplicationPackageAssociationIdentifiers) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ApplicationPackageAssociationIdentifiers.Unmarshal(m, b)
}
func (m *ApplicationPackageAssociationIdentifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ApplicationPackageAssociationIdentifiers.Marshal(b, m, deterministic)
}
func (m *ApplicationPackageAssociationIdentifiers) XXX_Merge(src proto.Message) {
xxx_messageInfo_ApplicationPackageAssociationIdentifiers.Merge(m, src)
}
func (m *ApplicationPackageAssociationIdentifiers) XXX_Size() int {
return xxx_messageInfo_ApplicationPackageAssociationIdentifiers.Size(m)
}
func (m *ApplicationPackageAssociationIdentifiers) XXX_DiscardUnknown() {
xxx_messageInfo_ApplicationPackageAssociationIdentifiers.DiscardUnknown(m)
}
var xxx_messageInfo_ApplicationPackageAssociationIdentifiers proto.InternalMessageInfo
func (m *ApplicationPackageAssociationIdentifiers) GetEndDeviceIds() *EndDeviceIdentifiers {
if m != nil {
return m.EndDeviceIds
}
return nil
}
func (m *ApplicationPackageAssociationIdentifiers) GetFPort() uint32 {
if m != nil {
return m.FPort
}
return 0
}
type ApplicationPackageAssociation struct {
Ids *ApplicationPackageAssociationIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
CreatedAt *types.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"`
UpdatedAt *types.Timestamp `protobuf:"bytes,3,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"`
PackageName string `protobuf:"bytes,4,opt,name=package_name,json=packageName,proto3" json:"package_name,omitempty"`
Data *types.Struct `protobuf:"bytes,5,opt,name=data,proto3" json:"data,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ApplicationPackageAssociation) Reset() { *m = ApplicationPackageAssociation{} }
func (m *ApplicationPackageAssociation) String() string { return proto.CompactTextString(m) }
func (*ApplicationPackageAssociation) ProtoMessage() {}
func (*ApplicationPackageAssociation) Descriptor() ([]byte, []int) {
return fileDescriptor_aa4ce58e965b6ca0, []int{3}
}
func (m *ApplicationPackageAssociation) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ApplicationPackageAssociation.Unmarshal(m, b)
}
func (m *ApplicationPackageAssociation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ApplicationPackageAssociation.Marshal(b, m, deterministic)
}
func (m *ApplicationPackageAssociation) XXX_Merge(src proto.Message) {
xxx_messageInfo_ApplicationPackageAssociation.Merge(m, src)
}
func (m *ApplicationPackageAssociation) XXX_Size() int {
return xxx_messageInfo_ApplicationPackageAssociation.Size(m)
}
func (m *ApplicationPackageAssociation) XXX_DiscardUnknown() {
xxx_messageInfo_ApplicationPackageAssociation.DiscardUnknown(m)
}
var xxx_messageInfo_ApplicationPackageAssociation proto.InternalMessageInfo
func (m *ApplicationPackageAssociation) GetIds() *ApplicationPackageAssociationIdentifiers {
if m != nil {
return m.Ids
}
return nil
}
func (m *ApplicationPackageAssociation) GetCreatedAt() *types.Timestamp {
if m != nil {
return m.CreatedAt
}
return nil
}
func (m *ApplicationPackageAssociation) GetUpdatedAt() *types.Timestamp {
if m != nil {
return m.UpdatedAt
}
return nil
}
func (m *ApplicationPackageAssociation) GetPackageName() string {
if m != nil {
return m.PackageName
}
return ""
}
func (m *ApplicationPackageAssociation) GetData() *types.Struct {
if m != nil {
return m.Data
}
return nil
}
type ApplicationPackageAssociations struct {
Associations []*ApplicationPackageAssociation `protobuf:"bytes,1,rep,name=associations,proto3" json:"associations,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ApplicationPackageAssociations) Reset() { *m = ApplicationPackageAssociations{} }
func (m *ApplicationPackageAssociations) String() string { return proto.CompactTextString(m) }
func (*ApplicationPackageAssociations) ProtoMessage() {}
func (*ApplicationPackageAssociations) Descriptor() ([]byte, []int) {
return fileDescriptor_aa4ce58e965b6ca0, []int{4}
}
func (m *ApplicationPackageAssociations) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ApplicationPackageAssociations.Unmarshal(m, b)
}
func (m *ApplicationPackageAssociations) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ApplicationPackageAssociations.Marshal(b, m, deterministic)
}
func (m *ApplicationPackageAssociations) XXX_Merge(src proto.Message) {
xxx_messageInfo_ApplicationPackageAssociations.Merge(m, src)
}
func (m *ApplicationPackageAssociations) XXX_Size() int {
return xxx_messageInfo_ApplicationPackageAssociations.Size(m)
}
func (m *ApplicationPackageAssociations) XXX_DiscardUnknown() {
xxx_messageInfo_ApplicationPackageAssociations.DiscardUnknown(m)
}
var xxx_messageInfo_ApplicationPackageAssociations proto.InternalMessageInfo
func (m *ApplicationPackageAssociations) GetAssociations() []*ApplicationPackageAssociation {
if m != nil {
return m.Associations
}
return nil
}
// GetApplicationPackageAssociationRequest is the request message for
// retrieving a single application package association.
type GetApplicationPackageAssociationRequest struct {
	// Ids identify the association to retrieve.
	Ids *ApplicationPackageAssociationIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
	// FieldMask selects which fields of the association to return.
	FieldMask *types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *GetApplicationPackageAssociationRequest) Reset() {
	*m = GetApplicationPackageAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *GetApplicationPackageAssociationRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*GetApplicationPackageAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*GetApplicationPackageAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{5}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *GetApplicationPackageAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetApplicationPackageAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *GetApplicationPackageAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetApplicationPackageAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *GetApplicationPackageAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetApplicationPackageAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *GetApplicationPackageAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_GetApplicationPackageAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *GetApplicationPackageAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetApplicationPackageAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetApplicationPackageAssociationRequest caches marshal/unmarshal
// metadata for GetApplicationPackageAssociationRequest.
var xxx_messageInfo_GetApplicationPackageAssociationRequest proto.InternalMessageInfo

// GetIds returns the Ids field; safe to call on a nil receiver.
func (m *GetApplicationPackageAssociationRequest) GetIds() *ApplicationPackageAssociationIdentifiers {
	if m != nil {
		return m.Ids
	}
	return nil
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *GetApplicationPackageAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ListApplicationPackageAssociationRequest is the request message for listing
// the application package associations of an end device, with pagination.
type ListApplicationPackageAssociationRequest struct {
	// Ids identify the end device whose associations are listed.
	Ids *EndDeviceIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
	// Limit the number of results per page.
	// Each page is ordered by the FPort.
	Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
	// Page number for pagination. 0 is interpreted as 1.
	Page uint32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
	// FieldMask selects which fields of the associations to return.
	FieldMask *types.FieldMask `protobuf:"bytes,4,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *ListApplicationPackageAssociationRequest) Reset() {
	*m = ListApplicationPackageAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *ListApplicationPackageAssociationRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*ListApplicationPackageAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*ListApplicationPackageAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{6}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *ListApplicationPackageAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListApplicationPackageAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *ListApplicationPackageAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListApplicationPackageAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *ListApplicationPackageAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListApplicationPackageAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *ListApplicationPackageAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_ListApplicationPackageAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *ListApplicationPackageAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListApplicationPackageAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListApplicationPackageAssociationRequest caches marshal/unmarshal
// metadata for ListApplicationPackageAssociationRequest.
var xxx_messageInfo_ListApplicationPackageAssociationRequest proto.InternalMessageInfo

// GetIds returns the Ids field; safe to call on a nil receiver.
func (m *ListApplicationPackageAssociationRequest) GetIds() *EndDeviceIdentifiers {
	if m != nil {
		return m.Ids
	}
	return nil
}

// GetLimit returns the Limit field; safe to call on a nil receiver.
func (m *ListApplicationPackageAssociationRequest) GetLimit() uint32 {
	if m != nil {
		return m.Limit
	}
	return 0
}

// GetPage returns the Page field; safe to call on a nil receiver.
func (m *ListApplicationPackageAssociationRequest) GetPage() uint32 {
	if m != nil {
		return m.Page
	}
	return 0
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *ListApplicationPackageAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// SetApplicationPackageAssociationRequest is the request message for creating
// or updating an application package association.
type SetApplicationPackageAssociationRequest struct {
	// Association carries the association to store.
	Association *ApplicationPackageAssociation `protobuf:"bytes,1,opt,name=association,proto3" json:"association,omitempty"`
	// FieldMask selects which fields of the association to update.
	FieldMask *types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *SetApplicationPackageAssociationRequest) Reset() {
	*m = SetApplicationPackageAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *SetApplicationPackageAssociationRequest) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*SetApplicationPackageAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*SetApplicationPackageAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{7}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *SetApplicationPackageAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SetApplicationPackageAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *SetApplicationPackageAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SetApplicationPackageAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *SetApplicationPackageAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SetApplicationPackageAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *SetApplicationPackageAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_SetApplicationPackageAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *SetApplicationPackageAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SetApplicationPackageAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_SetApplicationPackageAssociationRequest caches marshal/unmarshal
// metadata for SetApplicationPackageAssociationRequest.
var xxx_messageInfo_SetApplicationPackageAssociationRequest proto.InternalMessageInfo

// GetAssociation returns the Association field; safe to call on a nil receiver.
func (m *SetApplicationPackageAssociationRequest) GetAssociation() *ApplicationPackageAssociation {
	if m != nil {
		return m.Association
	}
	return nil
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *SetApplicationPackageAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ApplicationPackageDefaultAssociationIdentifiers uniquely identify a default
// association by application and FPort.
type ApplicationPackageDefaultAssociationIdentifiers struct {
	// ApplicationIds identify the application that owns the default association.
	ApplicationIds *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=application_ids,json=applicationIds,proto3" json:"application_ids,omitempty"`
	// FPort is the FPort of the default association.
	FPort uint32 `protobuf:"varint,2,opt,name=f_port,json=fPort,proto3" json:"f_port,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *ApplicationPackageDefaultAssociationIdentifiers) Reset() {
	*m = ApplicationPackageDefaultAssociationIdentifiers{}
}

// String returns the compact text representation of the message.
func (m *ApplicationPackageDefaultAssociationIdentifiers) String() string {
	return proto.CompactTextString(m)
}

// ProtoMessage marks the type as a protobuf message.
func (*ApplicationPackageDefaultAssociationIdentifiers) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*ApplicationPackageDefaultAssociationIdentifiers) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{8}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *ApplicationPackageDefaultAssociationIdentifiers) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *ApplicationPackageDefaultAssociationIdentifiers) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *ApplicationPackageDefaultAssociationIdentifiers) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *ApplicationPackageDefaultAssociationIdentifiers) XXX_Size() int {
	return xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *ApplicationPackageDefaultAssociationIdentifiers) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers caches
// marshal/unmarshal metadata for ApplicationPackageDefaultAssociationIdentifiers.
var xxx_messageInfo_ApplicationPackageDefaultAssociationIdentifiers proto.InternalMessageInfo

// GetApplicationIds returns the ApplicationIds field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociationIdentifiers) GetApplicationIds() *ApplicationIdentifiers {
	if m != nil {
		return m.ApplicationIds
	}
	return nil
}

// GetFPort returns the FPort field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociationIdentifiers) GetFPort() uint32 {
	if m != nil {
		return m.FPort
	}
	return 0
}
// ApplicationPackageDefaultAssociation is a default association between an
// application FPort and a package, with creation/update timestamps and
// package-specific configuration data.
type ApplicationPackageDefaultAssociation struct {
	// Ids identify the default association.
	Ids *ApplicationPackageDefaultAssociationIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
	// CreatedAt is the creation timestamp of the default association.
	CreatedAt *types.Timestamp `protobuf:"bytes,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"`
	// UpdatedAt is the last-update timestamp of the default association.
	UpdatedAt *types.Timestamp `protobuf:"bytes,3,opt,name=updated_at,json=updatedAt,proto3" json:"updated_at,omitempty"`
	// PackageName is the name of the associated package.
	PackageName string `protobuf:"bytes,4,opt,name=package_name,json=packageName,proto3" json:"package_name,omitempty"`
	// Data carries free-form configuration for the package as a protobuf Struct.
	Data *types.Struct `protobuf:"bytes,5,opt,name=data,proto3" json:"data,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *ApplicationPackageDefaultAssociation) Reset() { *m = ApplicationPackageDefaultAssociation{} }

// String returns the compact text representation of the message.
func (m *ApplicationPackageDefaultAssociation) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*ApplicationPackageDefaultAssociation) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*ApplicationPackageDefaultAssociation) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{9}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *ApplicationPackageDefaultAssociation) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ApplicationPackageDefaultAssociation.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *ApplicationPackageDefaultAssociation) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ApplicationPackageDefaultAssociation.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *ApplicationPackageDefaultAssociation) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationPackageDefaultAssociation.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *ApplicationPackageDefaultAssociation) XXX_Size() int {
	return xxx_messageInfo_ApplicationPackageDefaultAssociation.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *ApplicationPackageDefaultAssociation) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationPackageDefaultAssociation.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationPackageDefaultAssociation caches marshal/unmarshal
// metadata for ApplicationPackageDefaultAssociation.
var xxx_messageInfo_ApplicationPackageDefaultAssociation proto.InternalMessageInfo

// GetIds returns the Ids field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociation) GetIds() *ApplicationPackageDefaultAssociationIdentifiers {
	if m != nil {
		return m.Ids
	}
	return nil
}

// GetCreatedAt returns the CreatedAt field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociation) GetCreatedAt() *types.Timestamp {
	if m != nil {
		return m.CreatedAt
	}
	return nil
}

// GetUpdatedAt returns the UpdatedAt field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociation) GetUpdatedAt() *types.Timestamp {
	if m != nil {
		return m.UpdatedAt
	}
	return nil
}

// GetPackageName returns the PackageName field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociation) GetPackageName() string {
	if m != nil {
		return m.PackageName
	}
	return ""
}

// GetData returns the Data field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociation) GetData() *types.Struct {
	if m != nil {
		return m.Data
	}
	return nil
}
// ApplicationPackageDefaultAssociations is a list of default associations.
type ApplicationPackageDefaultAssociations struct {
	// Defaults holds the listed default associations.
	Defaults []*ApplicationPackageDefaultAssociation `protobuf:"bytes,1,rep,name=defaults,proto3" json:"defaults,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *ApplicationPackageDefaultAssociations) Reset() { *m = ApplicationPackageDefaultAssociations{} }

// String returns the compact text representation of the message.
func (m *ApplicationPackageDefaultAssociations) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks the type as a protobuf message.
func (*ApplicationPackageDefaultAssociations) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*ApplicationPackageDefaultAssociations) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{10}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *ApplicationPackageDefaultAssociations) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ApplicationPackageDefaultAssociations.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *ApplicationPackageDefaultAssociations) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ApplicationPackageDefaultAssociations.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *ApplicationPackageDefaultAssociations) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ApplicationPackageDefaultAssociations.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *ApplicationPackageDefaultAssociations) XXX_Size() int {
	return xxx_messageInfo_ApplicationPackageDefaultAssociations.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *ApplicationPackageDefaultAssociations) XXX_DiscardUnknown() {
	xxx_messageInfo_ApplicationPackageDefaultAssociations.DiscardUnknown(m)
}

// xxx_messageInfo_ApplicationPackageDefaultAssociations caches marshal/unmarshal
// metadata for ApplicationPackageDefaultAssociations.
var xxx_messageInfo_ApplicationPackageDefaultAssociations proto.InternalMessageInfo

// GetDefaults returns the Defaults field; safe to call on a nil receiver.
func (m *ApplicationPackageDefaultAssociations) GetDefaults() []*ApplicationPackageDefaultAssociation {
	if m != nil {
		return m.Defaults
	}
	return nil
}
// GetApplicationPackageDefaultAssociationRequest is the request message for
// retrieving a single default association.
type GetApplicationPackageDefaultAssociationRequest struct {
	// Ids identify the default association to retrieve.
	Ids *ApplicationPackageDefaultAssociationIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
	// FieldMask selects which fields of the default association to return.
	FieldMask *types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *GetApplicationPackageDefaultAssociationRequest) Reset() {
	*m = GetApplicationPackageDefaultAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *GetApplicationPackageDefaultAssociationRequest) String() string {
	return proto.CompactTextString(m)
}

// ProtoMessage marks the type as a protobuf message.
func (*GetApplicationPackageDefaultAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*GetApplicationPackageDefaultAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{11}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *GetApplicationPackageDefaultAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *GetApplicationPackageDefaultAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *GetApplicationPackageDefaultAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *GetApplicationPackageDefaultAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *GetApplicationPackageDefaultAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest caches
// marshal/unmarshal metadata for GetApplicationPackageDefaultAssociationRequest.
var xxx_messageInfo_GetApplicationPackageDefaultAssociationRequest proto.InternalMessageInfo

// GetIds returns the Ids field; safe to call on a nil receiver.
func (m *GetApplicationPackageDefaultAssociationRequest) GetIds() *ApplicationPackageDefaultAssociationIdentifiers {
	if m != nil {
		return m.Ids
	}
	return nil
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *GetApplicationPackageDefaultAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// ListApplicationPackageDefaultAssociationRequest is the request message for
// listing the default associations of an application, with pagination.
type ListApplicationPackageDefaultAssociationRequest struct {
	// Ids identify the application whose default associations are listed.
	Ids *ApplicationIdentifiers `protobuf:"bytes,1,opt,name=ids,proto3" json:"ids,omitempty"`
	// Limit the number of results per page.
	// Each page is ordered by the FPort.
	Limit uint32 `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
	// Page number for pagination. 0 is interpreted as 1.
	Page uint32 `protobuf:"varint,3,opt,name=page,proto3" json:"page,omitempty"`
	// FieldMask selects which fields of the default associations to return.
	FieldMask *types.FieldMask `protobuf:"bytes,4,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *ListApplicationPackageDefaultAssociationRequest) Reset() {
	*m = ListApplicationPackageDefaultAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *ListApplicationPackageDefaultAssociationRequest) String() string {
	return proto.CompactTextString(m)
}

// ProtoMessage marks the type as a protobuf message.
func (*ListApplicationPackageDefaultAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*ListApplicationPackageDefaultAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{12}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *ListApplicationPackageDefaultAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *ListApplicationPackageDefaultAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *ListApplicationPackageDefaultAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *ListApplicationPackageDefaultAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *ListApplicationPackageDefaultAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest caches
// marshal/unmarshal metadata for ListApplicationPackageDefaultAssociationRequest.
var xxx_messageInfo_ListApplicationPackageDefaultAssociationRequest proto.InternalMessageInfo

// GetIds returns the Ids field; safe to call on a nil receiver.
func (m *ListApplicationPackageDefaultAssociationRequest) GetIds() *ApplicationIdentifiers {
	if m != nil {
		return m.Ids
	}
	return nil
}

// GetLimit returns the Limit field; safe to call on a nil receiver.
func (m *ListApplicationPackageDefaultAssociationRequest) GetLimit() uint32 {
	if m != nil {
		return m.Limit
	}
	return 0
}

// GetPage returns the Page field; safe to call on a nil receiver.
func (m *ListApplicationPackageDefaultAssociationRequest) GetPage() uint32 {
	if m != nil {
		return m.Page
	}
	return 0
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *ListApplicationPackageDefaultAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// SetApplicationPackageDefaultAssociationRequest is the request message for
// creating or updating a default association.
type SetApplicationPackageDefaultAssociationRequest struct {
	// Default carries the default association to store.
	Default *ApplicationPackageDefaultAssociation `protobuf:"bytes,1,opt,name=default,proto3" json:"default,omitempty"`
	// FieldMask selects which fields of the default association to update.
	FieldMask *types.FieldMask `protobuf:"bytes,2,opt,name=field_mask,json=fieldMask,proto3" json:"field_mask,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset resets the message to its zero value.
func (m *SetApplicationPackageDefaultAssociationRequest) Reset() {
	*m = SetApplicationPackageDefaultAssociationRequest{}
}

// String returns the compact text representation of the message.
func (m *SetApplicationPackageDefaultAssociationRequest) String() string {
	return proto.CompactTextString(m)
}

// ProtoMessage marks the type as a protobuf message.
func (*SetApplicationPackageDefaultAssociationRequest) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and this message's index path.
func (*SetApplicationPackageDefaultAssociationRequest) Descriptor() ([]byte, []int) {
	return fileDescriptor_aa4ce58e965b6ca0, []int{13}
}

// XXX_Unmarshal decodes the wire-format bytes b into m.
func (m *SetApplicationPackageDefaultAssociationRequest) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest.Unmarshal(m, b)
}

// XXX_Marshal appends the wire-format encoding of m to b.
func (m *SetApplicationPackageDefaultAssociationRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest.Marshal(b, m, deterministic)
}

// XXX_Merge merges the fields of src into m.
func (m *SetApplicationPackageDefaultAssociationRequest) XXX_Merge(src proto.Message) {
	xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest.Merge(m, src)
}

// XXX_Size returns the serialized size of the message in bytes.
func (m *SetApplicationPackageDefaultAssociationRequest) XXX_Size() int {
	return xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest.Size(m)
}

// XXX_DiscardUnknown discards any unrecognized fields retained on m.
func (m *SetApplicationPackageDefaultAssociationRequest) XXX_DiscardUnknown() {
	xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest.DiscardUnknown(m)
}

// xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest caches
// marshal/unmarshal metadata for SetApplicationPackageDefaultAssociationRequest.
var xxx_messageInfo_SetApplicationPackageDefaultAssociationRequest proto.InternalMessageInfo

// GetDefault returns the Default field; safe to call on a nil receiver.
func (m *SetApplicationPackageDefaultAssociationRequest) GetDefault() *ApplicationPackageDefaultAssociation {
	if m != nil {
		return m.Default
	}
	return nil
}

// GetFieldMask returns the FieldMask field; safe to call on a nil receiver.
func (m *SetApplicationPackageDefaultAssociationRequest) GetFieldMask() *types.FieldMask {
	if m != nil {
		return m.FieldMask
	}
	return nil
}
// init registers every generated message type with both the gogo proto
// registry and the golang/protobuf registry under its fully-qualified name.
func init() {
	proto.RegisterType((*ApplicationPackage)(nil), "ttn.lorawan.v3.ApplicationPackage")
	golang_proto.RegisterType((*ApplicationPackage)(nil), "ttn.lorawan.v3.ApplicationPackage")
	proto.RegisterType((*ApplicationPackages)(nil), "ttn.lorawan.v3.ApplicationPackages")
	golang_proto.RegisterType((*ApplicationPackages)(nil), "ttn.lorawan.v3.ApplicationPackages")
	proto.RegisterType((*ApplicationPackageAssociationIdentifiers)(nil), "ttn.lorawan.v3.ApplicationPackageAssociationIdentifiers")
	golang_proto.RegisterType((*ApplicationPackageAssociationIdentifiers)(nil), "ttn.lorawan.v3.ApplicationPackageAssociationIdentifiers")
	proto.RegisterType((*ApplicationPackageAssociation)(nil), "ttn.lorawan.v3.ApplicationPackageAssociation")
	golang_proto.RegisterType((*ApplicationPackageAssociation)(nil), "ttn.lorawan.v3.ApplicationPackageAssociation")
	proto.RegisterType((*ApplicationPackageAssociations)(nil), "ttn.lorawan.v3.ApplicationPackageAssociations")
	golang_proto.RegisterType((*ApplicationPackageAssociations)(nil), "ttn.lorawan.v3.ApplicationPackageAssociations")
	proto.RegisterType((*GetApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.GetApplicationPackageAssociationRequest")
	golang_proto.RegisterType((*GetApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.GetApplicationPackageAssociationRequest")
	proto.RegisterType((*ListApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.ListApplicationPackageAssociationRequest")
	golang_proto.RegisterType((*ListApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.ListApplicationPackageAssociationRequest")
	proto.RegisterType((*SetApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.SetApplicationPackageAssociationRequest")
	golang_proto.RegisterType((*SetApplicationPackageAssociationRequest)(nil), "ttn.lorawan.v3.SetApplicationPackageAssociationRequest")
	proto.RegisterType((*ApplicationPackageDefaultAssociationIdentifiers)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociationIdentifiers")
	golang_proto.RegisterType((*ApplicationPackageDefaultAssociationIdentifiers)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociationIdentifiers")
	proto.RegisterType((*ApplicationPackageDefaultAssociation)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociation")
	golang_proto.RegisterType((*ApplicationPackageDefaultAssociation)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociation")
	proto.RegisterType((*ApplicationPackageDefaultAssociations)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociations")
	golang_proto.RegisterType((*ApplicationPackageDefaultAssociations)(nil), "ttn.lorawan.v3.ApplicationPackageDefaultAssociations")
	proto.RegisterType((*GetApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.GetApplicationPackageDefaultAssociationRequest")
	golang_proto.RegisterType((*GetApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.GetApplicationPackageDefaultAssociationRequest")
	proto.RegisterType((*ListApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.ListApplicationPackageDefaultAssociationRequest")
	golang_proto.RegisterType((*ListApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.ListApplicationPackageDefaultAssociationRequest")
	proto.RegisterType((*SetApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.SetApplicationPackageDefaultAssociationRequest")
	golang_proto.RegisterType((*SetApplicationPackageDefaultAssociationRequest)(nil), "ttn.lorawan.v3.SetApplicationPackageDefaultAssociationRequest")
}
// init registers the raw file descriptor with the gogo proto registry.
func init() {
	proto.RegisterFile("lorawan-stack/api/applicationserver_packages.proto", fileDescriptor_aa4ce58e965b6ca0)
}
// init registers the raw file descriptor with the golang/protobuf registry.
func init() {
	golang_proto.RegisterFile("lorawan-stack/api/applicationserver_packages.proto", fileDescriptor_aa4ce58e965b6ca0)
}
// fileDescriptor_aa4ce58e965b6ca0 is the gzip-compressed FileDescriptorProto
// for lorawan-stack/api/applicationserver_packages.proto. Generated data —
// never edit by hand.
var fileDescriptor_aa4ce58e965b6ca0 = []byte{
	// 1222 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe4, 0x58, 0x4f, 0x6c, 0xdb, 0x54,
	0x18, 0xd7, 0x4b, 0xd3, 0xfd, 0xf9, 0xda, 0x85, 0xed, 0xa1, 0xb1, 0x28, 0x40, 0x29, 0x5e, 0xa1,
	0xa1, 0x10, 0x7b, 0x4a, 0x99, 0x60, 0x9b, 0x58, 0x69, 0xe8, 0x36, 0x81, 0x60, 0x2a, 0x4e, 0x41,
	0xdd, 0xaa, 0x11, 0x5e, 0xe3, 0x17, 0xd7, 0x4a, 0x62, 0x1b, 0xbf, 0x97, 0x8c, 0x52, 0xf5, 0x52,
	0xc4, 0x05, 0x21, 0x24, 0xb4, 0x0b, 0x27, 0x2e, 0x9c, 0x38, 0x21, 0x71, 0x42, 0xbb, 0x8c, 0x0b,
	0x12, 0x17, 0x4e, 0x48, 0x5c, 0x38, 0xd2, 0x03, 0x9c, 0x90, 0x10, 0xa7, 0x5e, 0x40, 0x79, 0xb6,
	0x5b, 0x27, 0x76, 0x12, 0x3b, 0x55, 0xe1, 0xc0, 0xcd, 0xee, 0xfb, 0xbe, 0x2f, 0xdf, 0xef, 0xf7,
	0x7d, 0xdf, 0xef, 0x7d, 0x2e, 0x14, 0x1b, 0x96, 0x43, 0xee, 0x12, 0xb3, 0xc0, 0x38, 0xa9, 0xd6,
	0x15, 0x62, 0x1b, 0x0a, 0xb1, 0xed, 0x86, 0x51, 0x25, 0xdc, 0xb0, 0x4c, 0x46, 0x9d, 0x36, 0x75,
	0x2a, 0x36, 0xa9, 0xd6, 0x89, 0x4e, 0x99, 0x6c, 0x3b, 0x16, 0xb7, 0x70, 0x86, 0x73, 0x53, 0xf6,
	0xfc, 0xe4, 0xf6, 0x7c, 0x6e, 0x51, 0x37, 0xf8, 0x46, 0x6b, 0x5d, 0xae, 0x5a, 0x4d, 0x85, 0x9a,
	0x6d, 0x6b, 0xd3, 0x76, 0xac, 0xf7, 0x37, 0x15, 0x61, 0x5c, 0x2d, 0xe8, 0xd4, 0x2c, 0xb4, 0x49,
	0xc3, 0xd0, 0x08, 0xa7, 0x4a, 0xe8, 0xc1, 0x0d, 0x99, 0x2b, 0x04, 0x42, 0xe8, 0x96, 0x6e, 0xb9,
	0xce, 0xeb, 0xad, 0x9a, 0x78, 0x13, 0x2f, 0xe2, 0xc9, 0x33, 0x7f, 0x4c, 0xb7, 0x2c, 0xbd, 0x41,
	0xdd, 0x74, 0x4d, 0xd3, 0xe2, 0x6e, 0xb6, 0xde, 0xe9, 0xa3, 0xde, 0xe9, 0x7e, 0x0c, 0xda, 0xb4,
	0xf9, 0xa6, 0x77, 0x38, 0xdd, 0x7b, 0x58, 0x33, 0x68, 0x43, 0xab, 0x34, 0x09, 0xab, 0x7b, 0x16,
	0x4f, 0xf4, 0x5a, 0x70, 0xa3, 0x49, 0x19, 0x27, 0x4d, 0xbb, 0xe7, 0xd7, 0xf7, 0x0d, 0x18, 0x77,
	0x5a, 0x55, 0xee, 0x9d, 0x9e, 0x0f, 0x33, 0x6a, 0x68, 0xd4, 0xe4, 0x46, 0xcd, 0xa0, 0x8e, 0x97,
	0xa2, 0xf4, 0x21, 0x02, 0xbc, 0x78, 0xc0, 0xf3, 0xb2, 0x4b, 0x30, 0xbe, 0x02, 0x69, 0x93, 0x34,
	0x69, 0x16, 0x4d, 0xa3, 0xfc, 0xc9, 0xd2, 0xec, 0x5e, 0x69, 0xc6, 0x91, 0xb2, 0x33, 0xc5, 0xa9,
	0x77, 0xd6, 0x48, 0xe1, 0x83, 0x0b, 0x85, 0x4b, 0x77, 0xf2, 0x0b, 0x97, 0xd7, 0x0a, 0x77, 0x16,
	0xfc, 0xd7, 0x67, 0xb6, 0x8a, 0xcf, 0x6d, 0xcf, 0xa8, 0xc2, 0x09, 0x5f, 0x80, 0x8c, 0x46, 0x6b,
	0xa4, 0xd5, 0xe0, 0x95, 0x5a, 0xc5, 0xb6, 0x1c, 0x9e, 0x4d, 0x4d, 0xa3, 0xfc, 0xa9, 0x12, 0xec,
	0x95, 0x8e, 0xcf, 0x8d, 0x67, 0xff, 0x46, 0x79, 0xa4, 0x4e, 0x7a, 0x16, 0xd7, 0x97, 0x2d, 0x87,
	0x4b, 0x6f, 0xc1, 0xc3, 0xe1, 0x24, 0x18, 0xbe, 0x0a, 0x27, 0xfc, 0x8a, 0x67, 0xd1, 0xf4, 0x58,
	0x7e, 0xa2, 0x28, 0xc9, 0xdd, 0x25, 0x97, 0xc3, 0x6e, 0xea, 0xbe, 0x8f, 0xf4, 0x25, 0x82, 0x7c,
	0xd8, 0x60, 0x91, 0x31, 0xab, 0x6a, 0x88, 0xbf, 0xbc, 0x7a, 0xc0, 0x07, 0x5e, 0x81, 0x0c, 0x35,
	0xb5, 0x8a, 0x46, 0xdb, 0x46, 0x95, 0x56, 0x0c, 0x8d, 0x09, 0xf0, 0x13, 0xc5, 0x99, 0xde, 0x9f,
	0xbc, 0x66, 0x6a, 0x4b, 0xc2, 0x28, 0xe0, 0x5d, 0x3a, 0xb1, 0x57, 0x1a, 0xff, 0x18, 0xa5, 0x4e,
	0x23, 0x75, 0x92, 0x1e, 0x9c, 0x33, 0xfc, 0x24, 0x1c, 0xeb, 0xcb, 0xc1, 0x78, 0x4d, 0x80, 0xdf,
	0x4d, 0xc1, 0xe3, 0x03, 0xb3, 0xc4, 0x2b, 0x30, 0x76, 0x90, 0xcf, 0x8b, 0xc3, 0x29, 0x88, 0x46,
	0x18, 0xc8, 0xb1, 0x13, 0x0e, 0x5f, 0x02, 0xa8, 0x3a, 0x94, 0x70, 0xaa, 0x55, 0x88, 0x9b, 0xde,
	0x44, 0x31, 0x27, 0xbb, 0x2d, 0x25, 0xfb, 0x2d, 0x25, 0xaf, 0xf8, 0x3d, 0xa7, 0x9e, 0xf4, 0xac,
	0x17, 0x79, 0xc7, 0xb5, 0x65, 0x6b, 0xbe, 0xeb, 0xd8, 0x70, 0x57, 0xcf, 0x7a, 0x91, 0xe3, 0xd7,
	0x60, 0xd2, 0xab, 0x4f, 0x45, 0x74, 0x58, 0x3a, 0x59, 0x87, 0x4d, 0x78, 0xce, 0x37, 0x3b, 0x8d,
	0xf6, 0x2c, 0xa4, 0x35, 0xc2, 0x49, 0x76, 0x5c, 0x24, 0x70, 0x2e, 0x94, 0x40, 0x59, 0x8c, 0x83,
	0x2a, 0x8c, 0x24, 0x06, 0x53, 0x03, 0x99, 0x62, 0xf8, 0x4d, 0x98, 0x24, 0x81, 0x77, 0xaf, 0xe5,
	0x0a, 0x89, 0xf8, 0x56, 0xbb, 0x42, 0x48, 0xf7, 0x11, 0xcc, 0xde, 0xa0, 0x7c, 0xb0, 0x0b, 0x7d,
	0xaf, 0x45, 0x19, 0x3f, 0xba, 0x2a, 0x1f, 0x08, 0x4b, 0xdf, 0x2a, 0x5f, 0xef, 0x98, 0xbc, 0x41,
	0x58, 0x5d, 0x3d, 0x59, 0xf3, 0x1f, 0xa5, 0x5f, 0x10, 0xe4, 0x5f, 0x37, 0x58, 0xbc, 0xec, 0x5f,
	0x0e, 0x66, 0x9f, 0x74, 0x66, 0x44, 0xa6, 0x53, 0x30, 0xde, 0x30, 0x9a, 0x86, 0x3f, 0x29, 0x9d,
	0xd3, 0xb9, 0xb1, 0xec, 0x6f, 0xc7, 0x55, 0xf7, 0xcf, 0x18, 0x43, 0xda, 0x26, 0x3a, 0x15, 0xed,
	0x76, 0x4a, 0x15, 0xcf, 0x3d, 0xe8, 0xd2, 0x49, 0xd0, 0x3d, 0x40, 0x30, 0x5b, 0x8e, 0x59, 0x9a,
	0x5b, 0x30, 0x11, 0x28, 0xab, 0x07, 0x32, 0x59, 0x63, 0x04, 0xd0, 0x06, 0x63, 0x1d, 0xa6, 0x3e,
	0x5f, 0x23, 0x50, 0xc2, 0xbf, 0xb9, 0xe4, 0x0a, 0x6b, 0x1f, 0x95, 0xbb, 0x05, 0x0f, 0x05, 0xae,
	0xd5, 0x80, 0xcc, 0x3d, 0x3d, 0x00, 0x4d, 0x74, 0xd1, 0x32, 0x24, 0x68, 0x11, 0x4b, 0xea, 0xfe,
	0x4c, 0xc1, 0x4c, 0x9c, 0x8c, 0xf1, 0x5a, 0xb0, 0x9b, 0x16, 0x86, 0x13, 0x3d, 0x10, 0xf4, 0xff,
	0x5c, 0xf8, 0x36, 0xe1, 0xa9, 0x38, 0x84, 0x31, 0xbc, 0x0c, 0x27, 0xbc, 0x5b, 0xd9, 0xd7, 0xbe,
	0xe7, 0x47, 0x61, 0x5e, 0xdd, 0x8f, 0x22, 0xfd, 0x80, 0x40, 0x8e, 0x94, 0xbf, 0x08, 0x2f, 0x6f,
	0xd4, 0x8e, 0xba, 0xf2, 0xa3, 0x0e, 0xdb, 0x2e, 0x02, 0x25, 0x5a, 0x0c, 0xfb, 0x63, 0x29, 0x05,
	0xb1, 0x24, 0x1f, 0xb0, 0xff, 0x42, 0x15, 0xbf, 0x47, 0x20, 0x97, 0x93, 0x55, 0x6c, 0x15, 0x8e,
	0x7b, 0x05, 0xf7, 0x90, 0x8e, 0xd4, 0x35, 0x01, 0xdc, 0x7e, 0xb8, 0x43, 0x94, 0xab, 0xf8, 0xc5,
	0x19, 0xc8, 0x45, 0xec, 0x86, 0x54, 0x37, 0x18, 0x77, 0x36, 0xf1, 0x57, 0x08, 0xd2, 0x9d, 0x6a,
	0xe2, 0x58, 0x37, 0x55, 0xee, 0xfc, 0x70, 0x44, 0x4c, 0x7a, 0x7b, 0xe7, 0xa7, 0xdd, 0x7b, 0xa9,
	0x65, 0x7c, 0x53, 0x21, 0xac, 0xeb, 0xdb, 0x45, 0xd9, 0xea, 0x91, 0x5c, 0xb9, 0xfb, 0x7d, 0x5b,
	0x71, 0x97, 0x4e, 0xa6, 0x6c, 0xed, 0x6f, 0x9f, 0xdb, 0x8a, 0xbf, 0xc5, 0xe2, 0x7b, 0x29, 0xc8,
	0x74, 0x86, 0x28, 0x20, 0x8f, 0x2f, 0xf4, 0xe6, 0x13, 0x73, 0xc7, 0xc8, 0x25, 0xbb, 0xb3, 0xa4,
	0xcf, 0x91, 0xc0, 0xf4, 0x19, 0xc2, 0x9f, 0xa2, 0x30, 0xaa, 0x0e, 0x92, 0xee, 0x95, 0x59, 0x8e,
	0x0d, 0x34, 0xc2, 0x37, 0x02, 0xbb, 0x12, 0x5c, 0xa6, 0x5c, 0x27, 0xf7, 0x8a, 0xd9, 0xc6, 0xbf,
	0x23, 0x38, 0x2d, 0xe6, 0x31, 0xa8, 0x60, 0xa1, 0xad, 0x29, 0xee, 0xfa, 0x92, 0x93, 0x13, 0x11,
	0xc3, 0xa4, 0xba, 0x20, 0x86, 0xe2, 0x6a, 0x34, 0x2d, 0x89, 0x78, 0x18, 0x06, 0x1c, 0xdf, 0x4f,
	0x41, 0xa6, 0x3c, 0xa4, 0x03, 0xca, 0x47, 0xd3, 0x01, 0x0f, 0xdc, 0x0e, 0xf8, 0x16, 0xe5, 0xbe,
	0x89, 0xe8, 0x80, 0x40, 0x96, 0xf2, 0x61, 0xba, 0x61, 0x48, 0x9c, 0xe1, 0x9d, 0xd1, 0x1b, 0xc0,
	0xeb, 0x92, 0xcb, 0x68, 0x0e, 0xff, 0x85, 0xe0, 0xcc, 0x12, 0x6d, 0x50, 0xde, 0xf5, 0x49, 0x35,
	0xf2, 0x7e, 0x9d, 0x7b, 0x24, 0x24, 0x40, 0xd7, 0x3a, 0x5f, 0xf5, 0xd2, 0x27, 0x2e, 0x53, 0x1f,
	0xa1, 0xb9, 0x9d, 0x08, 0xa6, 0x46, 0x65, 0x26, 0x31, 0x13, 0xfe, 0x7c, 0xfc, 0x81, 0xe0, 0xec,
	0x0d, 0xca, 0x23, 0x76, 0xab, 0xab, 0xb1, 0xc4, 0xa3, 0xaf, 0xde, 0xe7, 0x46, 0x92, 0x77, 0xe9,
	0x5d, 0xc1, 0xce, 0x6d, 0xbc, 0x3a, 0xda, 0xc0, 0xc4, 0x50, 0x84, 0x9f, 0x11, 0x9c, 0xeb, 0xcc,
	0x7b, 0xd4, 0x6a, 0xb3, 0x10, 0x4f, 0x18, 0xfa, 0x83, 0xbe, 0x38, 0x0a, 0x68, 0x26, 0xbd, 0x22,
	0x50, 0xbf, 0x84, 0xaf, 0xc4, 0x41, 0xdd, 0x6f, 0xfc, 0x77, 0x52, 0x70, 0xb6, 0x1c, 0xaf, 0x94,
	0xe5, 0x7f, 0xa3, 0x94, 0x4c, 0x80, 0x6a, 0xe6, 0x36, 0xc2, 0xa0, 0xbc, 0x9b, 0x5b, 0x3e, 0x44,
	0x49, 0x83, 0x21, 0x02, 0x63, 0xfc, 0x23, 0x82, 0xac, 0x3b, 0xc6, 0x11, 0x3c, 0x1c, 0x76, 0x4f,
	0xec, 0x3b, 0xd4, 0xab, 0x02, 0xaa, 0x3a, 0xb7, 0x9c, 0xfc, 0x4e, 0x1f, 0x3c, 0x9f, 0xa5, 0x8b,
	0xdf, 0xfd, 0x3a, 0x85, 0x6e, 0x2b, 0xba, 0x25, 0xf3, 0x0d, 0xca, 0x37, 0x0c, 0x53, 0x67, 0xb2,
	0x49, 0xf9, 0x5d, 0xcb, 0xa9, 0x2b, 0xdd, 0xff, 0xb8, 0x6b, 0xcf, 0x2b, 0x76, 0x5d, 0x57, 0x38,
	0x37, 0xed, 0xf5, 0xf5, 0x63, 0x22, 0xc1, 0xf9, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa0, 0x6b,
	0x58, 0xd2, 0x2f, 0x15, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// ApplicationPackageRegistryClient is the client API for ApplicationPackageRegistry service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type ApplicationPackageRegistryClient interface {
	// List returns the available packages for the end device.
	List(ctx context.Context, in *EndDeviceIdentifiers, opts ...grpc.CallOption) (*ApplicationPackages, error)
	// GetAssociation returns the association registered on the FPort of the end device.
	GetAssociation(ctx context.Context, in *GetApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociation, error)
	// ListAssociations returns all of the associations of the end device.
	ListAssociations(ctx context.Context, in *ListApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociations, error)
	// SetAssociation updates or creates the association on the FPort of the end device.
	SetAssociation(ctx context.Context, in *SetApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociation, error)
	// DeleteAssociation removes the association on the FPort of the end device.
	DeleteAssociation(ctx context.Context, in *ApplicationPackageAssociationIdentifiers, opts ...grpc.CallOption) (*types.Empty, error)
	// GetDefaultAssociation returns the default association registered on the FPort of the application.
	GetDefaultAssociation(ctx context.Context, in *GetApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociation, error)
	// ListDefaultAssociations returns all of the default associations of the application.
	ListDefaultAssociations(ctx context.Context, in *ListApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociations, error)
	// SetDefaultAssociation updates or creates the default association on the FPort of the application.
	SetDefaultAssociation(ctx context.Context, in *SetApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociation, error)
	// DeleteDefaultAssociation removes the default association on the FPort of the application.
	DeleteDefaultAssociation(ctx context.Context, in *ApplicationPackageDefaultAssociationIdentifiers, opts ...grpc.CallOption) (*types.Empty, error)
}
// applicationPackageRegistryClient is the concrete gRPC client behind
// ApplicationPackageRegistryClient; all RPCs go through its connection.
type applicationPackageRegistryClient struct {
	cc *grpc.ClientConn // connection used by every Invoke call below
}
// NewApplicationPackageRegistryClient returns a client that performs
// ApplicationPackageRegistry RPCs over the supplied connection.
func NewApplicationPackageRegistryClient(cc *grpc.ClientConn) ApplicationPackageRegistryClient {
	return &applicationPackageRegistryClient{cc: cc}
}
// List invokes the List RPC and returns the available packages.
func (c *applicationPackageRegistryClient) List(ctx context.Context, in *EndDeviceIdentifiers, opts ...grpc.CallOption) (*ApplicationPackages, error) {
	res := new(ApplicationPackages)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/List", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// GetAssociation invokes the GetAssociation RPC.
func (c *applicationPackageRegistryClient) GetAssociation(ctx context.Context, in *GetApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociation, error) {
	res := new(ApplicationPackageAssociation)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/GetAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// ListAssociations invokes the ListAssociations RPC.
func (c *applicationPackageRegistryClient) ListAssociations(ctx context.Context, in *ListApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociations, error) {
	res := new(ApplicationPackageAssociations)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/ListAssociations", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// SetAssociation invokes the SetAssociation RPC.
func (c *applicationPackageRegistryClient) SetAssociation(ctx context.Context, in *SetApplicationPackageAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageAssociation, error) {
	res := new(ApplicationPackageAssociation)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/SetAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// DeleteAssociation invokes the DeleteAssociation RPC.
func (c *applicationPackageRegistryClient) DeleteAssociation(ctx context.Context, in *ApplicationPackageAssociationIdentifiers, opts ...grpc.CallOption) (*types.Empty, error) {
	res := new(types.Empty)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/DeleteAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// GetDefaultAssociation invokes the GetDefaultAssociation RPC.
func (c *applicationPackageRegistryClient) GetDefaultAssociation(ctx context.Context, in *GetApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociation, error) {
	res := new(ApplicationPackageDefaultAssociation)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/GetDefaultAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// ListDefaultAssociations invokes the ListDefaultAssociations RPC.
func (c *applicationPackageRegistryClient) ListDefaultAssociations(ctx context.Context, in *ListApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociations, error) {
	res := new(ApplicationPackageDefaultAssociations)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/ListDefaultAssociations", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// SetDefaultAssociation invokes the SetDefaultAssociation RPC.
func (c *applicationPackageRegistryClient) SetDefaultAssociation(ctx context.Context, in *SetApplicationPackageDefaultAssociationRequest, opts ...grpc.CallOption) (*ApplicationPackageDefaultAssociation, error) {
	res := new(ApplicationPackageDefaultAssociation)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/SetDefaultAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// DeleteDefaultAssociation invokes the DeleteDefaultAssociation RPC.
func (c *applicationPackageRegistryClient) DeleteDefaultAssociation(ctx context.Context, in *ApplicationPackageDefaultAssociationIdentifiers, opts ...grpc.CallOption) (*types.Empty, error) {
	res := new(types.Empty)
	if err := c.cc.Invoke(ctx, "/ttn.lorawan.v3.ApplicationPackageRegistry/DeleteDefaultAssociation", in, res, opts...); err != nil {
		return nil, err
	}
	return res, nil
}
// ApplicationPackageRegistryServer is the server API for ApplicationPackageRegistry service.
// Implementations may embed UnimplementedApplicationPackageRegistryServer for
// forward compatibility with newly added methods.
type ApplicationPackageRegistryServer interface {
	// List returns the available packages for the end device.
	List(context.Context, *EndDeviceIdentifiers) (*ApplicationPackages, error)
	// GetAssociation returns the association registered on the FPort of the end device.
	GetAssociation(context.Context, *GetApplicationPackageAssociationRequest) (*ApplicationPackageAssociation, error)
	// ListAssociations returns all of the associations of the end device.
	ListAssociations(context.Context, *ListApplicationPackageAssociationRequest) (*ApplicationPackageAssociations, error)
	// SetAssociation updates or creates the association on the FPort of the end device.
	SetAssociation(context.Context, *SetApplicationPackageAssociationRequest) (*ApplicationPackageAssociation, error)
	// DeleteAssociation removes the association on the FPort of the end device.
	DeleteAssociation(context.Context, *ApplicationPackageAssociationIdentifiers) (*types.Empty, error)
	// GetDefaultAssociation returns the default association registered on the FPort of the application.
	GetDefaultAssociation(context.Context, *GetApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociation, error)
	// ListDefaultAssociations returns all of the default associations of the application.
	ListDefaultAssociations(context.Context, *ListApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociations, error)
	// SetDefaultAssociation updates or creates the default association on the FPort of the application.
	SetDefaultAssociation(context.Context, *SetApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociation, error)
	// DeleteDefaultAssociation removes the default association on the FPort of the application.
	DeleteDefaultAssociation(context.Context, *ApplicationPackageDefaultAssociationIdentifiers) (*types.Empty, error)
}
// UnimplementedApplicationPackageRegistryServer can be embedded to have forward compatible implementations.
// Every method returns codes.Unimplemented until overridden.
type UnimplementedApplicationPackageRegistryServer struct {
}
// List always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) List(_ context.Context, _ *EndDeviceIdentifiers) (*ApplicationPackages, error) {
	return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
}
// GetAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) GetAssociation(_ context.Context, _ *GetApplicationPackageAssociationRequest) (*ApplicationPackageAssociation, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetAssociation not implemented")
}
// ListAssociations always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) ListAssociations(_ context.Context, _ *ListApplicationPackageAssociationRequest) (*ApplicationPackageAssociations, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListAssociations not implemented")
}
// SetAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) SetAssociation(_ context.Context, _ *SetApplicationPackageAssociationRequest) (*ApplicationPackageAssociation, error) {
	return nil, status.Errorf(codes.Unimplemented, "method SetAssociation not implemented")
}
// DeleteAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) DeleteAssociation(_ context.Context, _ *ApplicationPackageAssociationIdentifiers) (*types.Empty, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteAssociation not implemented")
}
// GetDefaultAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) GetDefaultAssociation(_ context.Context, _ *GetApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociation, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GetDefaultAssociation not implemented")
}
// ListDefaultAssociations always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) ListDefaultAssociations(_ context.Context, _ *ListApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociations, error) {
	return nil, status.Errorf(codes.Unimplemented, "method ListDefaultAssociations not implemented")
}
// SetDefaultAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) SetDefaultAssociation(_ context.Context, _ *SetApplicationPackageDefaultAssociationRequest) (*ApplicationPackageDefaultAssociation, error) {
	return nil, status.Errorf(codes.Unimplemented, "method SetDefaultAssociation not implemented")
}
// DeleteDefaultAssociation always fails with codes.Unimplemented.
func (*UnimplementedApplicationPackageRegistryServer) DeleteDefaultAssociation(_ context.Context, _ *ApplicationPackageDefaultAssociationIdentifiers) (*types.Empty, error) {
	return nil, status.Errorf(codes.Unimplemented, "method DeleteDefaultAssociation not implemented")
}
// RegisterApplicationPackageRegistryServer registers srv's handlers with the
// given gRPC server under the ttn.lorawan.v3.ApplicationPackageRegistry service.
func RegisterApplicationPackageRegistryServer(s *grpc.Server, srv ApplicationPackageRegistryServer) {
	s.RegisterService(&_ApplicationPackageRegistry_serviceDesc, srv)
}
// _ApplicationPackageRegistry_List_Handler decodes the request and dispatches
// List, optionally through the configured unary interceptor.
func _ApplicationPackageRegistry_List_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(EndDeviceIdentifiers)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).List(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).List(ctx, r.(*EndDeviceIdentifiers))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/List",
	}, handler)
}
// _ApplicationPackageRegistry_GetAssociation_Handler decodes the request and
// dispatches GetAssociation, optionally through the unary interceptor.
func _ApplicationPackageRegistry_GetAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(GetApplicationPackageAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).GetAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).GetAssociation(ctx, r.(*GetApplicationPackageAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/GetAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_ListAssociations_Handler decodes the request and
// dispatches ListAssociations, optionally through the unary interceptor.
func _ApplicationPackageRegistry_ListAssociations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(ListApplicationPackageAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).ListAssociations(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).ListAssociations(ctx, r.(*ListApplicationPackageAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/ListAssociations",
	}, handler)
}
// _ApplicationPackageRegistry_SetAssociation_Handler decodes the request and
// dispatches SetAssociation, optionally through the unary interceptor.
func _ApplicationPackageRegistry_SetAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(SetApplicationPackageAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).SetAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).SetAssociation(ctx, r.(*SetApplicationPackageAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/SetAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_DeleteAssociation_Handler decodes the request and
// dispatches DeleteAssociation, optionally through the unary interceptor.
func _ApplicationPackageRegistry_DeleteAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(ApplicationPackageAssociationIdentifiers)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).DeleteAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).DeleteAssociation(ctx, r.(*ApplicationPackageAssociationIdentifiers))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/DeleteAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_GetDefaultAssociation_Handler decodes the request
// and dispatches GetDefaultAssociation, optionally through the interceptor.
func _ApplicationPackageRegistry_GetDefaultAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(GetApplicationPackageDefaultAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).GetDefaultAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).GetDefaultAssociation(ctx, r.(*GetApplicationPackageDefaultAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/GetDefaultAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_ListDefaultAssociations_Handler decodes the
// request and dispatches ListDefaultAssociations, optionally intercepted.
func _ApplicationPackageRegistry_ListDefaultAssociations_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(ListApplicationPackageDefaultAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).ListDefaultAssociations(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).ListDefaultAssociations(ctx, r.(*ListApplicationPackageDefaultAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/ListDefaultAssociations",
	}, handler)
}
// _ApplicationPackageRegistry_SetDefaultAssociation_Handler decodes the request
// and dispatches SetDefaultAssociation, optionally through the interceptor.
func _ApplicationPackageRegistry_SetDefaultAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(SetApplicationPackageDefaultAssociationRequest)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).SetDefaultAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).SetDefaultAssociation(ctx, r.(*SetApplicationPackageDefaultAssociationRequest))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/SetDefaultAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_DeleteDefaultAssociation_Handler decodes the
// request and dispatches DeleteDefaultAssociation, optionally intercepted.
func _ApplicationPackageRegistry_DeleteDefaultAssociation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	req := new(ApplicationPackageDefaultAssociationIdentifiers)
	if err := dec(req); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(ApplicationPackageRegistryServer).DeleteDefaultAssociation(ctx, req)
	}
	handler := func(ctx context.Context, r interface{}) (interface{}, error) {
		return srv.(ApplicationPackageRegistryServer).DeleteDefaultAssociation(ctx, r.(*ApplicationPackageDefaultAssociationIdentifiers))
	}
	return interceptor(ctx, req, &grpc.UnaryServerInfo{
		Server:     srv,
		FullMethod: "/ttn.lorawan.v3.ApplicationPackageRegistry/DeleteDefaultAssociation",
	}, handler)
}
// _ApplicationPackageRegistry_serviceDesc maps each unary RPC method name to
// its handler; it is registered via RegisterApplicationPackageRegistryServer.
// The service exposes no streaming methods.
var _ApplicationPackageRegistry_serviceDesc = grpc.ServiceDesc{
	ServiceName: "ttn.lorawan.v3.ApplicationPackageRegistry",
	HandlerType: (*ApplicationPackageRegistryServer)(nil),
	Methods: []grpc.MethodDesc{
		{
			MethodName: "List",
			Handler:    _ApplicationPackageRegistry_List_Handler,
		},
		{
			MethodName: "GetAssociation",
			Handler:    _ApplicationPackageRegistry_GetAssociation_Handler,
		},
		{
			MethodName: "ListAssociations",
			Handler:    _ApplicationPackageRegistry_ListAssociations_Handler,
		},
		{
			MethodName: "SetAssociation",
			Handler:    _ApplicationPackageRegistry_SetAssociation_Handler,
		},
		{
			MethodName: "DeleteAssociation",
			Handler:    _ApplicationPackageRegistry_DeleteAssociation_Handler,
		},
		{
			MethodName: "GetDefaultAssociation",
			Handler:    _ApplicationPackageRegistry_GetDefaultAssociation_Handler,
		},
		{
			MethodName: "ListDefaultAssociations",
			Handler:    _ApplicationPackageRegistry_ListDefaultAssociations_Handler,
		},
		{
			MethodName: "SetDefaultAssociation",
			Handler:    _ApplicationPackageRegistry_SetDefaultAssociation_Handler,
		},
		{
			MethodName: "DeleteDefaultAssociation",
			Handler:    _ApplicationPackageRegistry_DeleteDefaultAssociation_Handler,
		},
	},
	Streams:  []grpc.StreamDesc{},
	Metadata: "lorawan-stack/api/applicationserver_packages.proto",
}
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Handlers dealing with network configurations
"""
import six
import traceback
import web
from nailgun.api.v1.handlers.base import BaseHandler
from nailgun.api.v1.handlers.base import content_json
from nailgun.objects.serializers.network_configuration \
import NeutronNetworkConfigurationSerializer
from nailgun.objects.serializers.network_configuration \
import NovaNetworkConfigurationSerializer
from nailgun.api.v1.validators.network \
import NeutronNetworkConfigurationValidator
from nailgun.api.v1.validators.network \
import NovaNetworkConfigurationValidator
from nailgun import consts
from nailgun import objects
from nailgun.errors import errors
from nailgun.logger import logger
from nailgun.openstack.common import jsonutils
from nailgun.task.manager import CheckNetworksTaskManager
from nailgun.task.manager import VerifyNetworksTaskManager
class ProviderHandler(BaseHandler):
    """Base class for network configuration handlers.

    Subclasses must define a ``provider`` class attribute naming the
    network provider they serve.
    """

    def check_net_provider(self, cluster):
        """Abort with HTTP 400 if the cluster uses a different net provider."""
        if cluster.net_provider == self.provider:
            return
        raise self.http(
            400, u"Wrong net provider - environment uses '{0}'".format(
                cluster.net_provider
            )
        )

    def check_if_network_configuration_locked(self, cluster):
        """Abort with HTTP 403 when the cluster's configuration is locked."""
        if cluster.is_locked:
            raise self.http(403, "Network configuration can't be changed "
                                 "after, or in deploy.")
class NovaNetworkConfigurationHandler(ProviderHandler):
    """Network configuration handler
    """
    # Validator/serializer pair and the provider name this handler serves.
    validator = NovaNetworkConfigurationValidator
    serializer = NovaNetworkConfigurationSerializer
    provider = "nova_network"

    @content_json
    def GET(self, cluster_id):
        """:returns: JSONized network configuration for cluster.
        :http: * 200 (OK)
               * 404 (cluster not found in db)
        """
        cluster = self.get_object_or_404(objects.Cluster, cluster_id)
        self.check_net_provider(cluster)
        return self.serializer.serialize_for_cluster(cluster)

    @content_json
    def PUT(self, cluster_id):
        """:returns: JSONized Task object.
        :http: * 202 (network checking task created)
               * 404 (cluster not found in db)
        """
        data = jsonutils.loads(web.data())
        # The admin network is managed by Fuel itself and must never be
        # updated through this API, so drop it from the payload.
        if data.get("networks"):
            data["networks"] = [
                n for n in data["networks"] if n.get("name") != "fuelweb_admin"
            ]
        cluster = self.get_object_or_404(objects.Cluster, cluster_id)
        self.check_net_provider(cluster)
        self.check_if_network_configuration_locked(cluster)
        # Run the network check task first; only apply the update when the
        # check did not already fail.
        task_manager = CheckNetworksTaskManager(cluster_id=cluster.id)
        task = task_manager.execute(data)
        if task.status != consts.TASK_STATUSES.error:
            try:
                if 'networks' in data:
                    self.validator.validate_networks_update(
                        jsonutils.dumps(data)
                    )
                if 'dns_nameservers' in data:
                    self.validator.validate_dns_servers_update(
                        jsonutils.dumps(data)
                    )
                objects.Cluster.get_network_manager(
                    cluster
                ).update(cluster, data)
            except Exception as exc:
                # set task status to error and update its corresponding data
                data = {'status': consts.TASK_STATUSES.error,
                        'progress': 100,
                        'message': six.text_type(exc)}
                objects.Task.update(task, data)
                logger.error(traceback.format_exc())
        # Always respond 202 with the (possibly failed) task attached.
        raise self.http(202, objects.Task.to_json(task))
class NeutronNetworkConfigurationHandler(ProviderHandler):
    """Neutron Network configuration handler
    """
    validator = NeutronNetworkConfigurationValidator
    serializer = NeutronNetworkConfigurationSerializer
    provider = "neutron"

    @content_json
    def GET(self, cluster_id):
        """:returns: JSONized network configuration for cluster.
        :http: * 200 (OK)
               * 404 (cluster not found in db)
        """
        cluster = self.get_object_or_404(objects.Cluster, cluster_id)
        self.check_net_provider(cluster)
        return self.serializer.serialize_for_cluster(cluster)

    @content_json
    def PUT(self, cluster_id):
        """:returns: JSONized Task object.
        :http: * 202 (network checking task created)
               * 404 (cluster not found in db)
        """
        data = jsonutils.loads(web.data())
        # The admin network is managed by Fuel itself and must never be
        # updated through this API, so drop it from the payload.
        if data.get("networks"):
            data["networks"] = [
                n for n in data["networks"] if n.get("name") != "fuelweb_admin"
            ]
        cluster = self.get_object_or_404(objects.Cluster, cluster_id)
        self.check_net_provider(cluster)
        self.check_if_network_configuration_locked(cluster)
        # Run the network check task first; only apply the update when the
        # check did not already fail.
        task_manager = CheckNetworksTaskManager(cluster_id=cluster.id)
        task = task_manager.execute(data)
        if task.status != consts.TASK_STATUSES.error:
            try:
                if 'networks' in data:
                    self.validator.validate_networks_update(
                        jsonutils.dumps(data)
                    )
                if 'networking_parameters' in data:
                    self.validator.validate_neutron_params(
                        jsonutils.dumps(data),
                        cluster_id=cluster_id
                    )
                objects.Cluster.get_network_manager(
                    cluster
                ).update(cluster, data)
            except Exception as exc:
                # Set task status to error and update its corresponding data.
                # Consistency fix: use consts.TASK_STATUSES.error instead of
                # the bare 'error' literal, matching the Nova handler above.
                data = {'status': consts.TASK_STATUSES.error,
                        'progress': 100,
                        'message': six.text_type(exc)}
                objects.Task.update(task, data)
                logger.error(traceback.format_exc())
        # Always respond 202 with the (possibly failed) task attached.
        raise self.http(202, objects.Task.to_json(task))
class NetworkConfigurationVerifyHandler(ProviderHandler):
    """Network configuration verify handler base

    Subclasses must provide ``validator`` and ``provider`` class attributes.
    """
    @content_json
    def PUT(self, cluster_id):
        """:IMPORTANT: this method should be rewritten to be more RESTful
        :returns: JSONized Task object.
        :http: * 202 (network checking task failed)
               * 200 (network verification task started)
               * 404 (cluster not found in db)
        """
        cluster = self.get_object_or_404(objects.Cluster, cluster_id)
        self.check_net_provider(cluster)
        # NOTE(review): the code always raises 202 regardless of outcome; the
        # 200 case documented above does not occur here — confirm intent.
        raise self.http(202, self.launch_verify(cluster))

    def launch_verify(self, cluster):
        """Validate the request body and start a network verification task."""
        data = self.validator.validate_networks_update(web.data())
        # The admin network is never verified through this API.
        data["networks"] = [
            n for n in data["networks"] if n.get("name") != "fuelweb_admin"
        ]
        # Collect, per network, the VLAN ids that verification should probe.
        vlan_ids = [{
            'name': n['name'],
            'vlans': objects.Cluster.get_network_manager(
                cluster
            ).generate_vlan_ids_list(
                data, cluster, n)
        } for n in data['networks']]
        task_manager = VerifyNetworksTaskManager(cluster_id=cluster.id)
        try:
            task = task_manager.execute(data, vlan_ids)
        except errors.CantRemoveOldVerificationTask:
            raise self.http(400, "You cannot delete running task manually")
        return objects.Task.to_json(task)
class NovaNetworkConfigurationVerifyHandler(NetworkConfigurationVerifyHandler):
    """Nova-Network configuration verify handler
    """
    # Concrete validator/provider for the nova_network flavour.
    validator = NovaNetworkConfigurationValidator
    provider = "nova_network"
class NeutronNetworkConfigurationVerifyHandler(
        NetworkConfigurationVerifyHandler):
    """Neutron network configuration verify handler
    """
    # Concrete validator/provider for the neutron flavour.
    validator = NeutronNetworkConfigurationValidator
    provider = "neutron"
|
<filename>routes/views/groupProduct.js
var keystone = require('keystone');
var GroupProduct = keystone.list('GroupProduct');
// Route: list, create and delete GroupProduct documents.
exports = module.exports = function (req, res) {
	var view = new keystone.View(req, res);
	var locals = res.locals;

	// NOTE(review): 'groupPrduct' looks like a typo for 'groupProduct' —
	// confirm against the navigation section keys before renaming.
	locals.section = 'groupPrduct';
	locals.GroupProduct = [];

	// Load all groups, sorted by name, for the template.
	view.on('init', function (next) {
		var q = GroupProduct.model.find().sort('name');
		q.exec(function (err, results) {
			locals.GroupProduct = results;
			next(err);
		});
	});

	// Create a GroupProduct from the posted form data.
	// (Removed the unused `var Body = req.body;` local.)
	view.on('post', {}, function (next) {
		var application = new GroupProduct.model();
		var updater = application.getUpdateHandler(req);
		updater.process(req.body, {
			flashErrors: true
		}, function (err) {
			if (err) {
				locals.validationErrors = err.errors;
			} else {
				locals.enquirySubmitted = true;
			}
			next();
		});
	});

	// Delete a GroupProduct identified by ?remove=GroupProduct&group=<id>.
	view.on('get', { remove: 'GroupProduct' }, function (next) {
		if (!req.user) {
			req.flash('error', 'Bạn phải đăng nhập trước khi muốn xóa sản phẩm.');
			return next();
		}
		GroupProduct.model.findOne({
			_id: req.query.group
		})
			.exec(function (err, group) {
				if (err) {
					// A CastError means the id was malformed, not a server fault.
					if (err.name === 'CastError') {
						req.flash('error', 'Sản phẩm ' + req.query.group + ' không thể tìm thấy.');
						return next();
					}
					return res.err(err);
				}
				if (!group) {
					req.flash('error', 'Sản phẩm ' + req.query.group + ' không thể tìm thấy.');
					return next();
				}
				group.remove(function (err) {
					if (err) return res.err(err);
					req.flash('success', 'Nhóm sản phẩm đã được xóa.');
					return res.redirect('/groupProduct');
				});
			});
	});

	view.render('product/groupProduct');
};
|
<gh_stars>1-10
// Grid panel listing movies, backed by the 'Movies' store.
Ext.define('GeekFlicks.view.Movies', {
    extend: 'Ext.grid.Panel',
    id: "movies_editor",
    alias: 'widget.movieseditor',
    store: 'Movies',
    initComponent: function () {
        //note: store removed
        // Two equal-width columns mapped to the 'title' and 'year' fields.
        this.columns = [{
            header: 'Title',
            dataIndex: 'title',
            flex: 1
        }, {
            header: 'Year',
            dataIndex: 'year',
            flex: 1
        }];
        this.callParent(arguments);
    }
});
|
#!/bin/bash
# Тестирование реальных сервисов
# Smoke-test of the chi CLI against a live memcache service.
server=127.0.0.1:11211
driver=memcache

# Run the chi CLI with the configured driver/server, forwarding all
# arguments verbatim.  Bug fix: "$@" preserves each argument as-is; the
# previous $* re-split arguments containing spaces or glob characters.
x_chi() {
    PYTHONPATH=. bin/chi -D $driver -S $server "$@"
}

x_chi set t:k1 -d Привет!
x_chi get t:k1
x_chi keys "t:*"
x_chi erase "t:*"
echo "Ключ удалён"
x_chi get t:k1
x_chi remove t:k1
x_chi get t:k1
|
// Copyright 2021 arcadium.dev <<EMAIL>>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sql
import (
"testing"
)
// TestOpenSuccess should cover the happy path of Open.
// FIXME: placeholder — not yet implemented.
func TestOpenSuccess(t *testing.T) {
	// FIXME
}
// TestOpenFailures should cover the failure modes of Open: bad driver name,
// bad DSN, and a failed migration.  All subtests are FIXME placeholders.
func TestOpenFailures(t *testing.T) {
	t.Parallel()
	t.Run("Test driver name failure", func(t *testing.T) {
		// FIXME
	})
	t.Run("Test dsn failure", func(t *testing.T) {
		// FIXME
	})
	t.Run("Test migration failure", func(t *testing.T) {
		// FIXME
	})
}
// TestConnectSuccess should cover the happy path of Connect.
// FIXME: placeholder — not yet implemented.
func TestConnectSuccess(t *testing.T) {
	// FIXME
}
// TestConnectFailure should cover Connect's failure path.
// FIXME: placeholder — not yet implemented.
func TestConnectFailure(t *testing.T) {
	// FIXME
}
|
clear
# Activate the project virtualenv, install the package in editable mode and
# run the Flask dev server.
source ./lawenv2/bin/activate
pip install --editable .
export FLASK_APP=BDLawsViz
export FLASK_DEBUG=true
python -m flask run
# Bug fix: 'deactivate' is a shell function that takes no arguments; the
# previous "deactivate lawenv2" argument was silently ignored.
deactivate
|
#!/bin/sh
# Generate a GNU-style ChangeLog from git history via git2cl.
# The first sed strips "* : " entry prefixes; the second obfuscates email
# addresses by rewriting "user@host" as "user[at]host".
git log --pretty --summary HEAD master . | git2cl | sed 's/\*\s\:\s//' | sed 's/\<\(.*\)\(@\)\(.*\)/\1[at]\3/'
|
# Pick the upload channel: tagged releases (v*) go to "main", everything
# else goes to "dev".
if [[ "$TRAVIS_TAG" == v* ]]; then
  export CHANNEL="main"
else
  export CHANNEL="dev"
fi

echo "Uploading to $CHANNEL"
# Robustness fix: quote the token, channel and $HOME so empty or
# space-containing values do not split into multiple arguments.  The
# build-artifact glob must stay unquoted so the shell expands it.
# NOTE(review): "**" only recurses when bash globstar is enabled — confirm
# the CI shell options.
anaconda -t "$ANACONDA_TOKEN" upload --force --user csdms --channel "$CHANNEL" "$HOME"/miniconda/conda-bld/**/basic-modeling-interface*bz2
echo "Done."
|
<gh_stars>0
/********************************************************************************
Copyright (C) 2013 <NAME> <<EMAIL>>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
********************************************************************************/
#include "SettingsPane.h"
//============================================================================
// SettingsPane class
/**
 * Constructs a pane with no dialog window attached yet (hwnd is set later
 * from WM_INITDIALOG in DialogProc).
 */
SettingsPane::SettingsPane()
	: hwnd(NULL)
{
}
/**
 * Default destructor; the pane does not own its dialog window.
 */
SettingsPane::~SettingsPane()
{
}
/**
 * Should be called whenever the user modifies a setting that has not been saved to disk.
 * Forwards the dirty flag to the application API.
 */
void SettingsPane::SetChangedSettings(bool isModified)
{
	API->SetChangedSettings(isModified);
}
/**
 * Asks the application to abort (or re-allow) applying the pending settings.
 */
void SettingsPane::SetAbortApplySettings(bool abort)
{
	API->SetAbortApplySettings(abort);
}
/**
 * Enables/disables the "optimize" button for this pane.  Warns because the
 * optimize button occupies the same UI slot as the defaults button.
 */
void SettingsPane::SetCanOptimizeSettings(bool canOptimize)
{
	if (HasDefaults() && canOptimize)
		AppWarning(L"Defaults button hidden by optimize button in %s", GetCategory());
	API->SetCanOptimizeSettings(canOptimize);
}
/**
 * Optimize hook; no-op by default, overridden by panes that support it.
 */
void SettingsPane::OptimizeSettings()
{
}
/**
 * Shared dialog procedure for all settings panes.  Routes every message to
 * the SettingsPane instance stored in the window's DWLP_USER slot.
 */
INT_PTR CALLBACK SettingsPane::DialogProc(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam)
{
	// Get the pointer to our class instance
	SettingsPane *instance = (SettingsPane *)GetWindowLongPtr(hwnd, DWLP_USER);

	// Initialize pane if this is the first message
	if(message == WM_INITDIALOG)
	{
		// lParam carries the SettingsPane* supplied at dialog creation.
		SetWindowLongPtr(hwnd, DWLP_USER, (LONG_PTR)lParam);
		instance = (SettingsPane *)lParam;
		instance->hwnd = hwnd; // Not sure which processes first, this message or returning from CreateWindow()
		instance->ProcMessage(message, wParam, lParam);
		return TRUE; // Always return true
	}

	// Forward message to pane
	if(instance)
		return instance->ProcMessage(message, wParam, lParam);
	return FALSE;
}
/**
 * Whether this pane offers a "restore defaults" action; false by default.
 */
bool SettingsPane::HasDefaults() const
{
	return false;
}
/**
 * Restores defaults; no-op by default, overridden by panes with defaults.
 */
void SettingsPane::SetDefaults()
{
}
|
#!/bin/bash
#colors vars
# ANSI escape codes used for colored terminal output.
LBLUE='\033[1;34m'
LRED='\033[1;31m'
LGREEN='\033[1;32m'
RED='\033[0;31m'
YELLOW='\033[1;33m'
NONE='\033[0m'
PURPLE='\033[1;35m'
CYAN='\033[0;36m'
GREEN='\033[0;32m'
#
# Sentinel selection value: deliberately larger than any menu entry so the
# prompt loop in output() runs at least once.
var=1000
# Show the selected interface, wait for confirmation, remove the temporary
# CSV and persist the selection to int.txt for later scripts.
end(){
    echo -e "\n${NONE}[${LGREEN}${int}${NONE}] Selected"
    read -r -p "$(tput setaf 7)Press Enter to Continue.."
    rm -rf wifiifaces.csv
    echo "$int" > int.txt
}
# Report an invalid selection, then fall through to end() with an error
# marker in $int.
notvalid(){
    clear
    # Bug fix: "unset $int" unset the variable NAMED BY int's value;
    # we want to clear the int variable itself.
    unset int
    echo -e "${LRED}Your selection was not valid"
    int="An Error has occured with your Interface"
    sleep 3
    end
}
# Accept the current selection: drop the leftover var.txt and finish via end().
valid(){
    rm -rf var.txt
    clear
    sleep 0.5
    end
}
# Reject selections containing spaces or double quotes.
inputvalidation(){
    # Robustness fix: quote "$int" so the shell cannot glob or word-split
    # the value before grep sees it.
    if echo "$int" | grep -E '[ "]' >/dev/null
    then
        notvalid
    else
        valid
    fi
}
# List wireless interfaces (via iwconfig) and prompt until the user picks a
# valid menu number; stores the chosen interface name in $int.
output(){
    # Bug fix: was "unset $int" (unset the variable named by int's value).
    unset int
    clear
    echo -e "Select an Interface:\n"
    iwconfig 2>&1 | grep -oP "^\w+" > wifiifaces.csv
    ##
    cut -d "," -f 14 wifiifaces.csv | nl -n ln -w 6
    # Bug fix: the lower-bound test used "${var}}" (stray brace), which made
    # it an invalid integer expression on every iteration; expansions are
    # also quoted now.
    while [ "${var}" -gt "$(wc -l wifiifaces.csv | cut -d " " -f 1)" ] || [ "${var}" -lt 1 ]; do
        echo -e "\nSelect a Network"
        read -r -p "$(tput setaf 7)" var
    done
    int=$(sed -n "${var}p" < wifiifaces.csv | cut -d "," -f 14 )
    rm -rf wifiifaces.csv
    clear
}
# Entry point: pick an interface, then validate the selection.
output
inputvalidation
|
#!/bin/bash
set -euo pipefail

TMP_DIR=$(mktemp -d /tmp/bsp-layout-install.XXXXX);

## Clone to local directory
# Bug fix: with "set -u", referencing a bare $1 aborts the script when no
# argument is given; default it to the empty string.  $TMP_DIR is quoted so
# an unusual TMPDIR cannot word-split.
if [[ ! "${1:-}" == "local" ]]; then
  git clone https://github.com/phenax/bsp-layout.git "$TMP_DIR";
  cd "$TMP_DIR";
fi

sudo make install || exit 1;

# Check for dependencies
for dep in bc bspc; do
  !(which $dep >/dev/null 2>&1) && echo "[Missing dependency] bsp-layout needs $dep installed";
done;

rm -rf "$TMP_DIR";
exit 0;
|
#!/usr/bin/env bash
# Launch an end-to-end fixture service registered against the local controller.
# The fixture/service_* variables are expected in the environment.
API_KEY=$(node test/end-to-end/commands/get-controller-api-key.js)
echo "CONTROLLER API KEY--------------> ${API_KEY}"
# Robustness fix: quote every expansion so empty or space-containing values
# stay single arguments.
node "test/end-to-end/fixtures/${fixture}/server.js" --name="${service_name}" --hostName="${service_host_name}" --port="${service_port}" --path="${domapic_path}" --controller="http://${controller_host_name}:3000" --auth=false --logLevel=debug --save
|
#!/bin/sh
# ref: http://apr.apache.org/download.cgi?Preferred=http%3A%2F%2Fftp.twaren.net%2FUnix%2FWeb%2Fapache%2F
# Downloads, builds and installs apr-util under /service/apr-util.
# Bug fix: every error path previously used a bare "exit" (status 0), so
# callers could not detect failure; all error exits now return status 1.
URL_APR_UTIL='http://ftp.twaren.net/Unix/Web/apache//apr/apr-util-1.6.1.tar.gz'
BASENAME='/usr/bin/basename'
MAKE='/usr/bin/make'
RM='/bin/rm'
SED='/bin/sed'
TAR='/bin/tar'
WGET='/usr/bin/wget'
# ----------------------------------------------------------------------
FILE_APR_UTIL=`${BASENAME} ${URL_APR_UTIL}`
DIR_APR_UTIL=`echo -n ${FILE_APR_UTIL} | ${SED} 's/\.tar\.gz//g'`
cd /tmp
# get source tarball
if [ ! -f "${FILE_APR_UTIL}" ]; then
    ${WGET} -4 ${URL_APR_UTIL}
    if [ ! -f "${FILE_APR_UTIL}" ]; then
        echo "Sorry, can't get ${FILE_APR_UTIL} for install apr-util now."
        exit 1
    fi
fi
# pre-check apr
if [ ! -d '/service/apr' -o ! -f '/service/apr/bin/apr-1-config' ]; then
    echo "Sorry, please install apr first."
    exit 1
fi
# ----------------------------------------------------------------------
# pre-install libs
if [ ! -f '/usr/share/doc/libssl-dev/copyright' ]; then
    /usr/bin/apt-get -y install libssl-dev
fi
# ----------------------------------------------------------------------
# remove old directory
if [ -d "${DIR_APR_UTIL}" ]; then
    ${RM} -rf ${DIR_APR_UTIL}
fi
# unpack source tarball
${TAR} xzvf ${FILE_APR_UTIL}
# ----------------------------------------------------------------------
# build and install
cd ${DIR_APR_UTIL}
./configure \
    --prefix=/service/apr-util \
    --with-crypto \
    --with-apr=/service/apr
if [ ! -f 'Makefile' -o ! -f 'test/Makefile' ]; then
    echo 'Sorry, error occurs before.'
    exit 1
fi
${MAKE}
if [ ! -f 'apu-config.out' ]; then
    echo 'Sorry, error occurs before.'
    exit 1
fi
${MAKE} install
if [ ! -f '/service/apr-util/bin/apu-1-config' ]; then
    echo 'Sorry, error occurs before.'
    exit 1
fi
set -e
source "${BASH_SOURCE%/*}/utils/exists.sh"

# Delete local branches already merged into the current branch, then prune
# stale remote-tracking refs.
found=$(commandExists "git")
if [ "$found" -eq 0 ]; then
    echo "git is not available"
    # Bug fix: exit non-zero so callers can detect the missing dependency.
    exit 1
else
    current="\*"
    master="master"
    echo "Looking for local merged branches..."
    # "grep -E" replaces the deprecated egrep; "|| true" keeps set -e from
    # aborting when nothing matches.
    filtered=$(git branch --merged | grep -E -v "(^$current|$master)" || true)
    if [ -z "$filtered" ]
    then
        echo "Did not find any local merged branches."
    else
        echo "Cleaning local merged branches: $filtered"
        # $filtered is intentionally unquoted: word splitting passes each
        # branch name as a separate argument to git.
        git branch -d $filtered
    fi
    echo "Done"
    echo "Pruning remote branches"
    git fetch --prune
    echo "Done"
fi
|
cd openwrt/bin/targets/*/*
mkdir ssrp
# plugin list in ../../../packages/mipsel_24kc
pkglist="base/shadowsocksr-libev-alt_*.ipk base/pdnsd-alt_*.ipk base/microsocks_*.ipk base/dns2socks_*.ipk base/shadowsocksr-libev-ssr-local_*.ipk base/simple-obfs_*.ipk base/tcping_*.ipk base/v2ray-plugin_*.ipk base/xray_*.ipk base/trojan_*.ipk base/ipt2socks_*.ipk base/redsocks2_*.ipk base/luci-app-ssr-plus_*.ipk base/luci-i18n-ssr-plus-zh-cn_*.ipk luci/luci-compat_*.ipk base/xray-core_*.ipk gli_pub/shadowsocks-libev-ss-*.ipk"
for pkg in $pkglist
do
    # $file stays unquoted below on purpose: the package names are glob
    # patterns that must expand.
    file=../../../packages/mipsel_24kc/$pkg
    ls=`ls $file 2>/dev/null`
    # Bug fix: quote "$ls" — when the glob matches several files the
    # unquoted -z test fails with "too many arguments".
    if [ -z "$ls" ]
    then
        echo "$pkg does not exists."
    else
        echo "Copying $pkg to ssrp..."
        cp -f $file ./ssrp/
    fi
done
# Installer script bundled with the packages.
cat << EOF > ./ssrp/install-ssrp.sh
opkg update
opkg install luci luci-i18n-base-zh-cn ttyd luci-app-ttyd luci-i18n-ttyd-zh-cn luci-compat luci-lib-ipkg wget htop
opkg install ./shadowsocksr-libev-alt_*.ipk
opkg install ./pdnsd-alt_*.ipk
opkg install ./microsocks_*.ipk
opkg install ./dns2socks_*.ipk
opkg install ./shadowsocksr-libev-ssr-local_*.ipk
opkg install ./simple-obfs_*.ipk
opkg install ./tcping_*.ipk
opkg install ./v2ray-plugin_*.ipk
opkg install ./ipt2socks_*.ipk
opkg install ./xray-core_*.ipk
opkg install ./shadowsocks-libev-ss-local_*.ipk
opkg install ./shadowsocks-libev-ss-server_*.ipk
opkg install ./shadowsocks-libev-ss-redir_*.ipk
opkg install ./luci-app-ssr-plus_*.ipk
opkg install ./luci-i18n-ssr-plus-zh-cn_*.ipk
EOF
chmod +x ./ssrp/install-ssrp.sh
tar czvf mt1300-ssrp.tar.gz ./ssrp
rm -rf ./ssrp
|
// Returns an upper-cased copy of the given string.
// Fix: the original used Groovy-style `def`, which is a syntax error in
// JavaScript, and assigned `str` as an implicit global.
function toUpperCase(str) {
    return str.toUpperCase();
}

const str = "hello world!";
console.log(toUpperCase(str));
|
<filename>frontend/web/mixer/js/visualizer.js<gh_stars>10-100
// Visualizer stuff here
// Visualizer state: the analyser node, its 2D canvas context, and the pending
// animation-frame handle (null when no update loop is scheduled).
var analyser1;
var analyserCanvas1;
var rafID = null;

// Stops the visualizer animation loop. Prefers the standard
// cancelAnimationFrame and falls back to the legacy webkit-prefixed API
// (the original called only the prefixed form, which modern browsers lack).
function cancelVisualizerUpdates() {
    (window.cancelAnimationFrame || window.webkitCancelAnimationFrame).call( window, rafID );
}
// Draws one frame of the frequency-bar visualization: reads the analyser's
// byte frequency data and renders one colored bar per block of bins onto the
// given 2D canvas context. Removed the unused OFFSET/CUTOFF constants and the
// dead `magnitude2` local from the original.
function updateAnalyser( analyserNode, drawContext ) {
    var SPACER_WIDTH = 3;   // horizontal stride between bars (px)
    var BAR_WIDTH = 1;      // width of each bar (px)
    var CANVAS_WIDTH = 800; // assumes the canvas is 800x120 — TODO confirm against the markup
    var CANVAS_HEIGHT = 120;
    var numBars = Math.round(CANVAS_WIDTH / SPACER_WIDTH);
    var freqByteData = new Uint8Array(analyserNode.frequencyBinCount);
    analyserNode.getByteFrequencyData(freqByteData);
    drawContext.clearRect(0, 0, CANVAS_WIDTH, CANVAS_HEIGHT);
    drawContext.fillStyle = '#F6D565';
    drawContext.lineCap = 'round';
    // How many frequency bins feed each on-screen bar (may be fractional).
    var multiplier = analyserNode.frequencyBinCount / numBars;
    // Draw rectangle for each frequency bin.
    for (var i = 0; i < numBars; ++i) {
        var magnitude = 0;
        var offset = Math.floor( i * multiplier );
        // gotta sum/average the block, or we miss narrow-bandwidth spikes
        for (var j = 0; j< multiplier; j++)
            magnitude += freqByteData[offset + j];
        magnitude = magnitude / multiplier;
        // Hue sweeps 0..360 across the bars; bars grow upward from the bottom
        // edge (negative height).
        drawContext.fillStyle = "hsl( " + Math.round((i*360)/numBars) + ", 100%, 50%)";
        drawContext.fillRect(i * SPACER_WIDTH, CANVAS_HEIGHT, BAR_WIDTH, -magnitude);
    }
}
// Animation-loop tick: redraws the analyser and schedules the next frame.
// Prefers the standard requestAnimationFrame over the legacy webkit-prefixed
// API (the original used only the prefixed form).
function updateVisualizer(time) {
    updateAnalyser( analyser1, analyserCanvas1 );
    rafID = (window.requestAnimationFrame || window.webkitRequestAnimationFrame).call( window, updateVisualizer );
}
// Whether the drum-loop visualizer is currently playing, and the looping
// buffer-source node driving it (null while inactive).
var visualizerActive = false;
var visualizerNode = null;

// Toggles the drum-loop visualizer on the given <canvas> element.
// Returns the label the caller should show on its toggle button:
// "visualize!" after stopping, "stop!" after starting.
// Relies on module-level `audioContext` and `drumsBuffer` defined elsewhere.
// NOTE(review): noteOn/noteOff are the pre-spec Web Audio names; modern
// browsers only expose start()/stop() — confirm the target runtime.
function visualizeDrums(canvasElement) {
    if ( visualizerActive ) {
        // Already running: stop playback and release all visualizer state.
        cancelVisualizerUpdates();
        visualizerNode.noteOff(0);
        visualizerNode = null;
        analyser1 = null;
        analyserCanvas1 = null;
        visualizerActive = false;
        return "visualize!";
    }
    visualizerActive = true;
    // Loop the drum buffer and tap it through an analyser for the bar display.
    visualizerNode = audioContext.createBufferSource();
    visualizerNode.buffer = drumsBuffer;
    visualizerNode.loop = true;
    analyser1 = audioContext.createAnalyser();
    analyser1.fftSize = 2048;
    analyser1.maxDecibels = 0;
    analyserCanvas1 = canvasElement.getContext('2d');
    // Audible path to the speakers plus a side tap into the analyser.
    visualizerNode.connect( audioContext.destination );
    visualizerNode.connect( analyser1 );
    visualizerNode.noteOn(0);
    updateVisualizer(0);
    return "stop!";
}
|
# Verify Homebrew, refresh the formula index, list installed packages, and
# install wget. Fix: the last line carried a copy-pasted "$ " shell prompt,
# which made the script try to run a command named "$".
brew --version
brew update
brew list
brew install wget
|
<filename>src/maltreatment_nlp/patterns.py
import re

# Verb stems describing physical violence; spliced into larger patterns below.
hit_pat = r'hit|attack|struck|beat|punch'

# Building block matching a mention of a family member, optionally preceded by
# a possessive ("his", "pt's", "mom's", ...) and qualifiers ("older", "bio",
# "step", "ex", ...). The "{}" slot lets .format() splice extra relations
# (e.g. partners) into the alternation.
_family = (
    r'((his|her|their|(pt|patient|client)\W?s?|mom\W?s?|dad\W?s?)\W*)?'
    r'((older|younger|elder|bio|biological|adopted|adoptive)\W*)?'
    r'((step|ex)\W*)*'
    r'('
    r'father|dad|brother|bro|mom|mother|sis|sister|aunt|uncle|relative|parents?'
    r'{}|family|care\W?giver'
    r'|grandfather|grandpa|grandma|grandmother|cousin|nephew|niece'
    r')\b'
)
strict_family = _family.format('')  # no husband/wife/bf/gf in this context
# parent possibly has one of these:
family = _family.format(r'|bf|boy\W?friend|girl\W?friend|gf|husband|wife|partner')
# "by <family member>" / "by|from <family member>" agent phrases.
by_family = fr'by\W*(an?\W*)?{family}'
from_family = fr'(by|from)\W*(an?\W*)?{family}'
# The person on the receiving end of an action.
target = r'\b(him|her|them|me|child|son|daughter|pt|patient|client)\b'

# Explicit abuse statements tied to a family member, in either order:
# "<kind> abuse ... by <family>" or "<family> ... <kind> abuse".
ABUSE_PAT = re.compile(
    r'('
    rf'(emotion|child|physical|sexual|verbal)\w*\W*(abused?|abusive|haras|molest[ei])(\s+\w+){{0,5}}\s+{from_family}'
    rf'|{family}(\s+\w+){{0,2}}\s+(emotion|child|physical|sexual|verbal)\w*\W*(abused?|abusive|haras|molest[ei])'
    r')',
    re.I
)
# "abusive <relative/childhood>" or "<relative> abusive <adverb>" descriptions.
ABUSIVE_PAT = re.compile(
    r'('
    rf'abusive\s+({strict_family}|childhood|adolescence|growing\W?up)'
    rf'|{strict_family}\s+abusive\s+(emotionally|physically|verbally|sexually)'
    r')',
    re.I
)
# Broad catch-all for any abuse/molestation/assault word, no context required.
GENERAL_ABUSE_PAT = re.compile(
    rf'\b(abus[ei]|molest[ei]|assault)\w*',
    re.I
)
# "perpetrator(s) ... <family member>" within a short window.
PERPETRATOR_PAT = re.compile(
    rf'perpetrators?(\s+\w+){{0,5}}\s+{family}',
    re.I
)
# Physical signs (bruising) documented or attributed to a family member,
# including "bruises after returning home from <family>" phrasings.
SIGNS_PAT = re.compile(
    r'('
    r'document( (his|her))? bruis\w+'
    rf'|bruis\w+(\s+\w+){{0,5}}\s+caused\s+{from_family}'
    rf'|bruis\w+\s+{from_family}'
    rf'|bruis\w+(\s+\w+){{0,5}}\s+(home|back|return\w*|on( (his|her|their))? return)(\s+\w+){{0,5}}\s+{from_family}'
    rf'|(home|back|return\w*|on( (his|her|their))? return)(\s+\w+){{0,5}}\s+bruis\w+(\s+\w+){{0,5}}\s+{from_family}'
    r')',
    re.I
)
# Fear of a family member ("scared of his mom"). Not currently used.
# Fix: the original alternation contained "intimated", a typo for
# "intimidated"; the correct stem is added while the old token is kept so any
# text that matched before still matches.
FEAR_PAT = re.compile(  # not currently used
    rf'(fear(ful)?|scared|afraid|intimated|intimidat\w*)\s+(by|of)\s+{family}',
    re.I
)
# "history of / h/o / victim of <abuse-related verb>" within a short window.
HISTORY_PAT = re.compile(
    rf'('
    rf'\b((history|hx|signs|victim)\s+of|h/o)\w*(\s+\w+){{0,5}}\s+'
    rf'(abus|rap(e|ing)|maltreat|assault|haras|molest|{hit_pat})\w*'  # removed harm: always self-harm
    rf')',
    re.I
)
# Hedged/suspected abuse: "possible/alleged/suspicious ... abuse".
SUSPICIOUS_ABUSE_PAT = re.compile(
    rf'('
    rf'\b(possib|sometime|alleg|forc|disclos|suspici|past)\w*(\s+\w+){{0,5}}\s+'
    rf'(abus|maltreat|rap(e|ing)|assault|haras|molest)\w*'  # removed hit: too many FPs
    rf')',
    re.I
)
# "medical neglect" / "child neglect".
NEGLECT_PAT = re.compile(
    r'(medical|child)\W*neglect',
    re.I
)
# Physical violence phrasing in either direction:
# "<family> (has/did/...) hit <target>" or "hit by <family>".
HITTING_PAT = re.compile(
    rf'('
    # x hit pt
    rf'{family}(\s+(has|had|will|have|did|would|used to|possibly)){{0,5}}'
    rf'(\s+\w+\s+({target}\s+)?and)?'
    rf'\s+({hit_pat})'
    rf'\w*\s*{target}'
    # hit by
    rf'|\b({hit_pat})\s*{by_family}'
    rf')',
    re.I
)
# Mentions of Child Protective Services / DCFS involvement.
CPS_PAT = re.compile(
    r'('
    r'child\W*protect\w+\W*service'
    r'|\bcps\b'
    r'|dep(t|artment)(\W+\w+){0,3}\W*(child|family)(\W+\w+){0,3}\W*service'
    r'|dcfs\W*report'
    r')',
    re.I
)
# The literal phrase "child maltreatment".
CHILD_MALTREATMENT_PAT = re.compile(
    r'child\W*maltreatment',
    re.I
)
# Fixed diagnosis/billing-code style phrases for sexual abuse of a minor.
CODE_PAT = re.compile(
    r'('
    r'sexual\s+abuse\s+of\s+child\s+or\s+adolescent'
    r'|sexual\s+abuse\s+of\s+adolescent'
    r'|confirmed\s+victim\s+of\s+sexual\s+abuse\s+in\s+childhood'
    r'|sexual\s+abuse\s+victim'
    r')',
    re.I
)
# Registry mapping pattern names to the compiled patterns above; consumers can
# iterate this to tag text with every matching category.
ALL_PATTERNS = {
    'ABUSE_PAT': ABUSE_PAT,
    'CODE_PAT': CODE_PAT,
    'FEAR_PAT': FEAR_PAT,
    'ABUSIVE_PAT': ABUSIVE_PAT,
    'NEGLECT_PAT': NEGLECT_PAT,
    'HITTING_PAT': HITTING_PAT,
    'CPS_PAT': CPS_PAT,
    'SUSPICIOUS_ABUSE_PAT': SUSPICIOUS_ABUSE_PAT,
    'SIGNS_PAT': SIGNS_PAT,
    'GENERAL_ABUSE_PAT': GENERAL_ABUSE_PAT,
    'PERPETRATOR_PAT': PERPETRATOR_PAT,
    'HISTORY_PAT': HISTORY_PAT,
    'CHILD_MALTREATMENT_PAT': CHILD_MALTREATMENT_PAT,
}
# Convenience list of the compiled patterns, in registry order.
ALL = list(ALL_PATTERNS.values())
|
add_lunch_combo aim_kuntao-userdebug
|
<reponame>XiongAmao/webapp-scaffold
/**
* vw 适配方案
* 参考: https://www.j4ml.com/t/27847
*/
const postcssConfig = [
require('postcss-import')(),
/**
* browsersList在package.json中设置即可
*/
require('autoprefixer')(),
/**
* 可以用于绘制固定比例的容器
*/
require('postcss-aspect-ratio-mini')(),
/**
* 解决移动端1px线变粗的问题, 不支持圆角, 圆角可以用transform和伪类实现
*/
require('postcss-write-svg')({
utf8: false
}),
/**
* viewportWidth: 375, // 设计图尺寸
* unitPrecision: 3, // 指定`px`转换为视窗单位值的小数位数
* selectorBlackList: [], // 需要匹配某一些类或者标签不进行vw的转换,可添加多个类
* minPixelValue: 1 // 小于或等于`1px`不转换为视窗单位
*/
require('postcss-px-to-viewport')({
viewportWidth: 375,
unitPrecision: 3,
selectorBlackList: ['.ignore'],
minPixelValue: 1
})
]
module.exports = postcssConfig
|
#!/bin/bash
# Produce a production build with the runtime chunk kept external and source
# maps disabled (both flags are read by react-scripts via the environment).
run_build() {
    export INLINE_RUNTIME_CHUNK=false
    export GENERATE_SOURCEMAP=false
    yarn build
}

run_build
|
<filename>player/src/main/java/fr/unice/polytech/si3/qgl/soyouz/classes/marineland/entities/Reef.java<gh_stars>0
package fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities;
/**
 * A reef obstacle: a shaped, collidable entity that adds no state of its own
 * beyond what {@link ShapedEntity} provides.
 */
public class Reef extends ShapedEntity implements Entity, Collidable
{
    /**
     * Two reefs are equal when the other object is also a {@code Reef} and the
     * inherited shape state matches.
     */
    @Override
    public boolean equals(Object obj)
    {
        if (!(obj instanceof Reef))
        {
            return false;
        }
        return super.equals(obj);
    }

    /** Hash code is fully determined by the inherited shape state. */
    @Override
    public int hashCode()
    {
        return super.hashCode();
    }
}
|
<reponame>eshork/go-mongoid
package mongoid
import (
"mongoid/log"
"reflect"
"go.mongodb.org/mongo-driver/bson"
)
// makeDocument creates a new object of type docType, populated with the given srcDoc
// makeDocument allocates a fresh document of the concrete type described by
// docType and populates it from srcDoc.
func makeDocument(docType *ModelType, srcDoc bson.M) IDocumentBase {
	log.Trace("makeDocument()")
	// docType.rootTypeRef always points at a template object; Indirect yields
	// the underlying struct value so a brand-new instance can be allocated.
	templateValue := reflect.Indirect(reflect.ValueOf(docType.rootTypeRef))
	newDoc := reflect.New(templateValue.Type()).Interface().(IDocumentBase)
	// Let the document initialize itself from the source BSON.
	newDoc.initDocumentBase(newDoc, srcDoc)
	return newDoc
}
// initDocumentBase configures this IDocumentBase with a self-reference and an initial state
// Self-reference is used to :
// - store the original object-type
// - store a copy of the initial object values for future change tracking
// initDocumentBase configures this IDocumentBase with a self-reference and an initial state
// Self-reference is used to :
// - store the original object-type
// - store a copy of the initial object values for future change tracking
func (d *Base) initDocumentBase(selfRef IDocumentBase, initialBSON BsonDocument) {
	// Keep a handle to the concrete document so Base can reach the real type later.
	d.rootTypeRef = selfRef
	if d.rootTypeRef == nil {
		panic("cannot initDocumentBase without a valid selfRef handle")
	}
	if initialBSON != nil {
		// Copy the BSON values onto the concrete struct's fields.
		structValuesFromBsonM(selfRef, initialBSON)
		// benefits to using d.setPreviousValueBSON instead of d.refreshPreviousValueBSON here:
		// - skip a call to ToBson(), since we already have a BSON formatted representation of the desired state (ie, faster)
		// - tests have more opportunity to uncover issues with to/from bson converters and the value initialization code
		d.setPreviousValueBSON(initialBSON)
	}
}
|
<reponame>warlock2207/warlock
/**
* Copyright 2018-2020 stylefeng & fengshuonan (https://gitee.com/stylefeng)
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.company.warlock.modular.userHall.contorller;
import cn.hutool.core.collection.CollectionUtil;
import cn.stylefeng.roses.core.base.controller.BaseController;
import cn.stylefeng.roses.core.reqres.response.ResponseData;
import cn.stylefeng.roses.core.util.ToolUtil;
import cn.stylefeng.roses.kernel.model.exception.RequestEmptyException;
import cn.stylefeng.roses.kernel.model.exception.ServiceException;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.company.warlock.config.properties.GunsProperties;
import com.company.warlock.core.common.annotion.BussinessLog;
import com.company.warlock.core.common.annotion.Permission;
import com.company.warlock.core.common.constant.Const;
import com.company.warlock.core.common.constant.dictmap.UserDict;
import com.company.warlock.core.common.constant.factory.ConstantFactory;
import com.company.warlock.core.common.exception.BizExceptionEnum;
import com.company.warlock.core.common.page.LayuiPageFactory;
import com.company.warlock.core.log.LogObjectHolder;
import com.company.warlock.core.shiro.ShiroKit;
import com.company.warlock.core.util.Convert;
import com.company.warlock.modular.system.entity.Dict;
import com.company.warlock.modular.system.entity.User;
import com.company.warlock.modular.system.entity.UserDock;
import com.company.warlock.modular.system.factory.UserFactory;
import com.company.warlock.modular.system.model.UserDto;
import com.company.warlock.modular.system.service.UserService;
import com.company.warlock.modular.system.warpper.UserWrapper;
import com.company.warlock.modular.userHall.service.UserDockService;
import com.company.warlock.modular.userHall.service.UserLevelService;
import com.company.warlock.modular.userHall.wapper.UserDockWrapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Controller for the user-level ("sub user") hierarchy: pages and endpoints
 * for viewing, adding and editing the users below the logged-in user.
 *
 * @author fengshuonan
 * @Date 2017-01-11 13:08:17
 */
@Controller
@RequestMapping("/userLevel")
public class UserLevelController extends BaseController {

    // View-path prefix shared by every page this controller renders.
    private static String PREFIX = "/modular/userLevel/";

    @Autowired
    private GunsProperties gunsProperties;

    @Autowired
    private UserService userService;

    @Autowired
    private UserLevelService userLevelService;

    /**
     * Jump to the sub-user list page for the currently logged-in user.
     */
    @RequestMapping("")
    public String index() {
        // Look up the current user's level record.
        Long userId= ShiroKit.getUser().getId();
        Map map= userLevelService.selectUserlevel(userId);
        String level="2";
        String nextLevel="3";
        String nextLevelNum="3";
        if(map==null){
            // No record means no parent: a level-1 user; show the level-2 "add user" UI.
        }else{
            level= map.get("USER_LEVEL").toString();
            nextLevel=Long.toString(Long.valueOf(level)+1);
            nextLevelNum=nextLevel;
        }
        // Convert the numeric levels to their display representation.
        level = Convert.cvt(Long.valueOf(level),false);
        nextLevel=Convert.cvt(Long.valueOf(nextLevel),false);
        super.setAttr("level",level);
        super.setAttr("nextLevel",nextLevel);
        super.setAttr("nextLevelNum",nextLevelNum);
        return PREFIX + "userLevel.html";
    }

    /**
     * Jump to the sub-user list page for the given user (drill-down).
     */
    @RequestMapping("/nextUser")
    public String nextUser(@RequestParam(required = false) Long userId) {
        // Look up the given user's level record.
        //Long userId= ShiroKit.getUser().getId();
        Map map= userLevelService.selectUserlevel(userId);
        String level="2";
        String nextLevel="3";
        String nextLevelNum="3";
        if(map==null){
            // No record means no parent: a level-1 user; show the level-2 "add user" UI.
        }else{
            level= map.get("USER_LEVEL").toString();
            nextLevel=Long.toString(Long.valueOf(level)+1);
            nextLevelNum=nextLevel;
        }
        level = Convert.cvt(Long.valueOf(level),false);
        nextLevel=Convert.cvt(Long.valueOf(nextLevel),false);
        super.setAttr("level",level);
        super.setAttr("nextLevel",nextLevel);
        super.setAttr("nextLevelNum",nextLevelNum);
        super.setAttr("userId",Long.toString(userId));
        return PREFIX + "nextUserLevel.html";
    }

    /**
     * Navigate one level up the hierarchy from the given user; falls back to
     * the top-level index page when no parent record exists.
     */
    @RequestMapping("/backLevel")
    public String backLevel(@RequestParam(required = false) Long userId) {
        // Look up the given user's level record.
        //Long userId= ShiroKit.getUser().getId();
        Map map= userLevelService.selectUserlevel(userId);
        String level="2";
        String nextLevel="3";
        String nextLevelNum="3";
        String parentId="";
        if(map==null){
            return index();
            // No record means no parent: a level-1 user; show the level-2 "add user" UI.
        }else{
            // Move to the parent's record and compute levels relative to it.
            parentId= map.get("PARENT_ID").toString();
            Map map2= userLevelService.selectUserlevel(Long.valueOf(parentId));
            if(map2==null){
                return index();
            }
            level= map2.get("USER_LEVEL").toString();
            nextLevel=Long.toString(Long.valueOf(level)+1);
            nextLevelNum=nextLevel;
        }
        level = Convert.cvt(Long.valueOf(level),false);
        nextLevel=Convert.cvt(Long.valueOf(nextLevel),false);
        super.setAttr("level",level);
        super.setAttr("nextLevel",nextLevel);
        super.setAttr("nextLevelNum",nextLevelNum);
        super.setAttr("userId",parentId);
        return PREFIX + "nextUserLevel.html";
    }

    /**
     * Query the sub-user list (with per-user task counters) for the layui grid.
     * Defaults the time window to today when either bound is missing.
     */
    @RequestMapping("/list")
    @Permission
    @ResponseBody
    public Object list(@RequestParam(required = false) String name,
    @RequestParam(required = false) String beginTime,
    @RequestParam(required = false) String endTime,
    @RequestParam(required = false) String userId) {
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
        if(ToolUtil.isOneEmpty(beginTime,endTime)){
            Date date=new Date();
            beginTime =format.format(date);
            endTime=beginTime;
        }
        // NOTE(review): no separator between date and time — this yields
        // "yyyy-MM-dd00:00:00"; confirm the query layer expects exactly that.
        beginTime+="00:00:00";
        endTime+="23:59:59";
        // Only query users whose role is of the task-receiving type.
        // Resolve the user to list under: explicit param or the logged-in user.
        Long loginUserId=null;
        if(ToolUtil.isEmpty(userId)||"undefined".equals(userId)){
            loginUserId=ShiroKit.getUser().getId();
        }else{
            loginUserId=Long.valueOf(userId);
        }
        if (ToolUtil.isEmpty(loginUserId)) {
            throw new ServiceException(BizExceptionEnum.REQUEST_NULL);
        }
        Page<Map<String, Object>> users = userLevelService.selectLevelUsers(null, name, loginUserId,beginTime,endTime);
        List<Map<String, Object>> list= users.getRecords();
        // Attach task counters to each row; keys "1"/"2"/"3"/"6" are status
        // buckets returned by selectTaskCount — presumably finished / user
        // finished / overtime / total; confirm against the service query.
        for (int i = 0; i <list.size() ; i++) {
            Map<String, Object> map =list.get(i);
            String sonUserId= map.get("userId").toString();
            Map<String, Object> taskMap= userLevelService.selectTaskCount(sonUserId,beginTime,endTime);
            String taskFinish="0";
            String userFinish="0";
            String taskOvertime="0";
            String taskNum="0";
            if(taskMap.get("1")!=null){
                taskFinish=taskMap.get("1").toString();
            }
            if(taskMap.get("2")!=null){
                userFinish=taskMap.get("2").toString();
            }
            if(taskMap.get("3")!=null){
                taskOvertime=taskMap.get("3").toString();
            }
            if(taskMap.get("6")!=null){
                taskNum=taskMap.get("6").toString();
            }
            map.put("TASK_FINISH",taskFinish);
            map.put("USER_FINISH",userFinish);
            map.put("TASK_OVERTIME",taskOvertime);
            map.put("TASKNUM",taskNum);
        }
        Page wrapped = new UserWrapper(users).wrap();
        return LayuiPageFactory.createPageInfo(wrapped);
    }

    /**
     * Jump to the "add user" dialog, pre-seeded with the level to create at.
     *
     * @author fengshuonan
     * @Date 2018/12/24 22:43
     */
    @RequestMapping("/user_add")
    public String addView(@RequestParam("nextLevel") String level, Model model) {
        super.setAttr("level",level);
        super.setAttr("intlevel",Integer.parseInt(level));
        return PREFIX + "user_add.html";
    }

    /**
     * Jump to the "edit user" dialog for the given user.
     *
     * @author fengshuonan
     * @Date 2018/12/24 22:43
     */
    @Permission
    @RequestMapping("/user_edit")
    public String userEdit(@RequestParam Long userId) {
        if (ToolUtil.isEmpty(userId)) {
            throw new ServiceException(BizExceptionEnum.REQUEST_NULL);
        }
        User user = this.userService.getById(userId);
        // Stash the pre-edit entity for the business log diff.
        LogObjectHolder.me().set(user);
        return PREFIX + "user_edit.html";
    }

    /**
     * Get user details (sensitive fields stripped, role/dept names resolved).
     *
     * @author fengshuonan
     * @Date 2018/12/24 22:43
     */
    @RequestMapping("/getUserInfo")
    @ResponseBody
    public Object getUserInfo(@RequestParam Long userId) {
        if (ToolUtil.isEmpty(userId)) {
            throw new RequestEmptyException();
        }
        // Verify the target is a descendant of the logged-in user.
        this.userService.assertAuthSon(userId);
        User user = this.userService.getById(userId);
        Map<String, Object> map = UserFactory.removeUnSafeFields(user);
        HashMap<Object, Object> hashMap = CollectionUtil.newHashMap();
        hashMap.putAll(map);
        hashMap.put("roleName", ConstantFactory.me().getRoleName(user.getRoleId()));
        hashMap.put("deptName", ConstantFactory.me().getDeptName(user.getDeptId()));
        return ResponseData.success(hashMap);
    }

    /**
     * Add a sub-user at the given level under the logged-in user.
     *
     * @author fengshuonan
     * @Date 2018/12/24 22:44
     */
    @RequestMapping("/add")
    @BussinessLog(value = "添加用户", key = "account", dict = UserDict.class)
    @ResponseBody
    public ResponseData add(@Valid UserDto user, BindingResult result,String level) {
        if (result.hasErrors()) {
            throw new ServiceException(BizExceptionEnum.REQUEST_NULL);
        }
        // When level exceeds 3, inherit the region from the current user.
        // NOTE(review): the original comment said ">= 3" but the code uses a
        // strict ">" comparison — confirm which is intended.
        if(Integer.parseInt(level)>3){
            // Look up the current user's region.
            User u= userService.getById(ShiroKit.getUser().getId());
            user.setRegion(u.getRegion());
        }
        this.userLevelService.addUser(user,level);
        return SUCCESS_TIP;
    }

    /*
    *//**
    * Edit a sub-user.
    *
    * @author fengshuonan
    * @Date 2018/12/24 22:44
    */
    @RequestMapping("/edit")
    @BussinessLog(value = "修改管理员", key = "account", dict = UserDict.class)
    @ResponseBody
    public ResponseData edit(@Valid UserDto user, BindingResult result) {
        if (result.hasErrors()) {
            throw new ServiceException(BizExceptionEnum.REQUEST_NULL);
        }
        this.userService.editSonUser(user);
        return SUCCESS_TIP;
    }

    /**
    * Delete an administrator (logical delete) — currently disabled.
    *
    * @author fengshuonan
    * @Date 2018/12/24 22:44
    *//*
    @RequestMapping("/delete")
    @BussinessLog(value = "删除管理员", key = "userId", dict = UserDict.class)
    @Permission
    @ResponseBody
    public ResponseData delete(@RequestParam Long userId) {
        if (ToolUtil.isEmpty(userId)) {
            throw new ServiceException(BizExceptionEnum.REQUEST_NULL);
        }
        this.userService.deleteUser(userId);
        return SUCCESS_TIP;
    }*/

    /**
     * Return all regions as a {code -> name} JSON object for select widgets.
     */
    @ResponseBody
    @RequestMapping(value = "/selectServer" )
    public JSONObject selectServer(HttpServletRequest request){
        List<Dict> list= ConstantFactory.me().getALlRegion();
        JSONObject ServerJson =new JSONObject();
        for (int i = 0; i < list.size(); i++) {
            Dict dict=list.get(i);
            ServerJson.put(dict.getCode(),dict.getName());
        }
        return ServerJson;
    }
}
|
# Remount the root filesystem read-write, then halve Node-RED's V8 old-space
# heap limit (512 MB -> 256 MB) in its systemd unit file.
sudo mount -o remount,rw / && sudo sed -i 's/NODE_OPTIONS=--max_old_space_size=512/NODE_OPTIONS=--max_old_space_size=256/g' /lib/systemd/system/nodered.service
# Reload systemd so the edited unit file takes effect on the next (re)start.
sudo systemctl daemon-reload
|
import { getModelForClass, mapProp, prop } from '../../src/typegoose';

/** A short note with free-form content and an optional link. */
export class SideNote {
  @prop()
  public content: string;

  @prop()
  public link?: string;
}

/** Lifecycle states a project can be in. */
enum ProjectValue {
  WORKING = 'working',
  UNDERDEVELOPMENT = 'underdevelopment',
  BROKEN = 'broken'
}

/** Model exercising typegoose map properties. */
class InternetUser {
  // Map of network name -> handle; defaults to an empty map.
  @mapProp({ of: String, default: {} })
  public socialNetworks?: Map<string, string>;

  // Map of arbitrary keys -> embedded SideNote sub-documents.
  @mapProp({ of: SideNote })
  public sideNotes?: Map<string, SideNote>;

  // Map whose string values are constrained to the ProjectValue enum.
  // NOTE(review): unlike the other map props this one is not optional and has
  // no initializer — confirm that is intentional.
  @mapProp({ of: String, enum: ProjectValue })
  public projects: Map<string, ProjectValue>;
}

export const model = getModelForClass(InternetUser);
|
"""Train and score a bag-of-words random forest on labelled customer reviews."""
import pandas as pd
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier

# Load the labelled review data.
reviews = pd.read_csv('customer_reviews.csv')

# Separate the raw text from the labels.
texts = reviews['review']
labels = reviews['label']

# Hold out 20% of the data for evaluation (fixed seed for reproducibility).
train_texts, test_texts, train_labels, test_labels = train_test_split(
    texts, labels, test_size=0.2, random_state=42
)

# Bag-of-words features: the vocabulary is fitted on training data only.
vectorizer = CountVectorizer()
train_matrix = vectorizer.fit_transform(train_texts)
test_matrix = vectorizer.transform(test_texts)

# Fit a 100-tree random forest on the training matrix.
model = RandomForestClassifier(n_estimators=100)
model.fit(train_matrix, train_labels)

# Report held-out accuracy.
accuracy = model.score(test_matrix, test_labels)
print('Model accuracy: %f' % accuracy)
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.hadoop.rdf.io.input.readers;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
import org.apache.jena.hadoop.rdf.io.RdfIOConstants;
import org.apache.jena.hadoop.rdf.io.input.util.RdfIOUtils;
import org.apache.jena.hadoop.rdf.io.input.util.TrackableInputStream;
import org.apache.jena.hadoop.rdf.io.input.util.TrackedInputStream;
import org.apache.jena.hadoop.rdf.io.input.util.TrackedPipedRDFStream;
import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
import org.apache.jena.riot.Lang;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.riot.ReaderRIOT;
import org.apache.jena.riot.lang.PipedRDFIterator;
import org.apache.jena.riot.lang.PipedRDFStream;
import org.apache.jena.riot.system.ParserProfile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * An abstract implementation for a record reader that reads records from whole
 * files i.e. the whole file must be kept together to allow tuples to be
 * successfully read. This only supports reading from file splits currently.
 * <p>
 * The keys produced are the approximate position in the file at which a tuple
 * was found and the values will be node tuples. Positions are approximate
 * because they are recorded after the point at which the most recent tuple was
 * parsed from the input thus they reflect the approximate position in the
 * stream immediately after which the triple was found.
 * </p>
 * <p>
 * You should also be aware that with whole file formats syntax compressions in
 * the format may mean that there are multiple triples produced with the same
 * position and thus key.
 * </p>
 *
 * @param <TValue>
 *            Value type
 * @param <T>
 *            Tuple type
 */
public abstract class AbstractWholeFileNodeTupleReader<TValue, T extends AbstractNodeTupleWritable<TValue>> extends RecordReader<LongWritable, T> {
    // Fix: the logger was created against AbstractLineBasedNodeTupleReader
    // (copy-paste), so log output was attributed to the wrong class.
    private static final Logger LOG = LoggerFactory.getLogger(AbstractWholeFileNodeTupleReader.class);
    // Codec for the input file, or null when the input is uncompressed.
    private CompressionCodec compressionCodecs;
    private TrackedInputStream input;
    private LongWritable key;
    // Length used for progress reporting; for compressed input this is grown
    // on the fly because the uncompressed length is unknown up front.
    private long length;
    private T tuple;
    private TrackedPipedRDFStream<TValue> stream;
    private PipedRDFIterator<TValue> iter;
    private Thread parserThread;
    private boolean finished = false;
    private boolean ignoreBadTuples = true;
    // Parser thread completion state; guarded by synchronizing on parserThread.
    private boolean parserFinished = false;
    private Throwable parserError = null;

    @Override
    public void initialize(InputSplit genericSplit, TaskAttemptContext context) throws IOException {
        LOG.debug("initialize({}, {})", genericSplit, context);

        // Assuming file split
        if (!(genericSplit instanceof FileSplit))
            throw new IOException("This record reader only supports FileSplit inputs");
        FileSplit split = (FileSplit) genericSplit;

        // Configuration
        Configuration config = context.getConfiguration();
        this.ignoreBadTuples = config.getBoolean(RdfIOConstants.INPUT_IGNORE_BAD_TUPLES, true);
        if (this.ignoreBadTuples)
            LOG.warn(
                    "Configured to ignore bad tuples, parsing errors will be logged and further parsing aborted but no user visible errors will be thrown. Consider setting {} to false to disable this behaviour",
                    RdfIOConstants.INPUT_IGNORE_BAD_TUPLES);

        // Figure out what portion of the file to read; whole-file formats
        // require the split to cover the entire file.
        if (split.getStart() > 0)
            throw new IOException("This record reader requires a file split which covers the entire file");
        final Path file = split.getPath();
        long totalLength = file.getFileSystem(config).getFileStatus(file).getLen();
        CompressionCodecFactory factory = new CompressionCodecFactory(config);
        this.compressionCodecs = factory.getCodec(file);
        // Parameterized logging instead of String.format inside the log call.
        LOG.info("Got split with start {} and length {} for file with total length of {}", split.getStart(), split.getLength(), totalLength);
        if (totalLength > split.getLength())
            throw new IOException("This record reader requires a file split which covers the entire file");

        // Open the file and prepare the input stream
        FileSystem fs = file.getFileSystem(config);
        FSDataInputStream fileIn = fs.open(file);
        this.length = split.getLength();
        if (this.compressionCodecs != null) {
            // Compressed input
            input = new TrackedInputStream(this.compressionCodecs.createInputStream(fileIn));
        } else {
            // Uncompressed input
            input = new TrackedInputStream(fileIn);
        }

        // Set up background thread for parser
        iter = this.getPipedIterator();
        this.stream = this.getPipedStream(iter, this.input);
        ParserProfile profile = RdfIOUtils.createParserProfile(context, file);
        Runnable parserRunnable = this.createRunnable(this, this.input, stream, this.getRdfLanguage(), profile);
        this.parserThread = new Thread(parserRunnable);
        this.parserThread.setDaemon(true);
        this.parserThread.start();
    }

    /**
     * Gets the RDF iterator to use
     *
     * @return Iterator
     */
    protected abstract PipedRDFIterator<TValue> getPipedIterator();

    /**
     * Gets the RDF stream to parse to
     *
     * @param iterator
     *            Iterator
     * @return RDF stream
     */
    protected abstract TrackedPipedRDFStream<TValue> getPipedStream(PipedRDFIterator<TValue> iterator, TrackableInputStream input);

    /**
     * Gets the RDF language to use for parsing
     *
     * @return RDF language
     */
    protected abstract Lang getRdfLanguage();

    /**
     * Creates the runnable upon which the parsing will run
     *
     * @param input
     *            Input
     * @param stream
     *            Stream
     * @param lang
     *            Language to use for parsing
     * @return Parser runnable
     */
    private Runnable createRunnable(@SuppressWarnings("rawtypes") final AbstractWholeFileNodeTupleReader reader, final InputStream input,
            final PipedRDFStream<TValue> stream, final Lang lang, final ParserProfile profile) {
        return new Runnable() {
            @Override
            public void run() {
                try {
                    ReaderRIOT riotReader = RDFDataMgr.createReader(lang);
                    riotReader.setParserProfile(profile);
                    riotReader.read(input, null, lang.getContentType(), stream, null);
                    // Signal clean completion (null = no error).
                    reader.setParserFinished(null);
                } catch (Throwable e) {
                    reader.setParserFinished(e);
                }
            }
        };
    }

    /**
     * Sets the parser thread finished state
     *
     * @param e
     *            Error (if any)
     */
    private void setParserFinished(Throwable e) {
        synchronized (this.parserThread) {
            this.parserError = e;
            this.parserFinished = true;
        }
    }

    /**
     * Waits for the parser thread to have reported as finished
     *
     * @throws InterruptedException
     */
    private void waitForParserFinished() throws InterruptedException {
        do {
            synchronized (this.parserThread) {
                if (this.parserFinished)
                    return;
            }
            // Poll rather than wait/notify to keep the synchronization simple.
            Thread.sleep(50);
        } while (true);
    }

    /**
     * Creates an instance of a writable tuple from the given tuple value
     *
     * @param tuple
     *            Tuple value
     * @return Writable tuple
     */
    protected abstract T createInstance(TValue tuple);

    @Override
    public boolean nextKeyValue() throws IOException {
        // Reuse key for efficiency
        if (key == null) {
            key = new LongWritable();
        }
        if (this.finished)
            return false;

        try {
            if (this.iter.hasNext()) {
                Long l = this.stream.getPosition();
                if (l != null) {
                    this.key.set(l);
                    // For compressed input the actual length from which we
                    // calculate progress is likely less than the actual
                    // uncompressed length so we may need to increment the
                    // length as we go along
                    // We always add 1 more than the current length because we
                    // don't want to report 100% progress until we really have
                    // finished
                    if (this.compressionCodecs != null && l > this.length)
                        this.length = l + 1;
                }
                this.tuple = this.createInstance(this.iter.next());
                return true;
            } else {
                // Need to ensure that the parser thread has finished in order
                // to determine whether we finished without error
                this.waitForParserFinished();
                if (this.parserError != null) {
                    LOG.error("Error parsing whole file, aborting further parsing", this.parserError);
                    if (!this.ignoreBadTuples)
                        throw new IOException("Error parsing whole file at position " + this.input.getBytesRead() + ", aborting further parsing",
                                this.parserError);
                }

                this.key = null;
                this.tuple = null;
                this.finished = true;
                // This is necessary so that when compressed input is used we
                // report 100% progress once we've reached the genuine end of
                // the stream
                if (this.compressionCodecs != null)
                    this.length--;
                return false;
            }
        } catch (Throwable e) {
            // Failed to read the tuple on this line
            LOG.error("Error parsing whole file, aborting further parsing", e);
            if (!this.ignoreBadTuples) {
                this.iter.close();
                throw new IOException("Error parsing whole file at position " + this.input.getBytesRead() + ", aborting further parsing", e);
            }

            this.key = null;
            this.tuple = null;
            this.finished = true;
            return false;
        }
    }

    @Override
    public LongWritable getCurrentKey() {
        return this.key;
    }

    @Override
    public T getCurrentValue() {
        return this.tuple;
    }

    @Override
    public float getProgress() {
        float progress = 0.0f;
        if (this.key == null) {
            // We've either not started or we've finished
            progress = (this.finished ? 1.0f : 0.0f);
        } else if (this.key.get() == Long.MIN_VALUE) {
            // We don't have a position so we're either in-progress or finished
            progress = (this.finished ? 1.0f : 0.5f);
        } else {
            // We're some way through the file
            progress = this.key.get() / (float) this.length;
        }
        LOG.debug("getProgress() --> {}", progress);
        return progress;
    }

    @Override
    public void close() throws IOException {
        this.iter.close();
        this.input.close();
        this.finished = true;
    }
}
|
#!/bin/bash
# Generate the Erlang/C++ Thrift bindings, (re)build and start the tproxy
# Erlang release, then compile the C++ client and server against libthrift.
# Fix: without `set -e` a failed step (thrift codegen, rebar, g++) was
# silently ignored and the script kept going with stale or missing artifacts.
set -e

BASEDIR=$PWD
CPPOPTS="-fPIC -std=c++11 -Wall -I /usr/local/include"
LDOPTS="-L /usr/local/lib"

# Regenerate the Thrift bindings.
cd thrift
thrift -gen erl -gen cpp tproxy.thrift
cd "$BASEDIR"

# Rebuild and restart the Erlang release.
cd tproxy
if [ -e ./rel/tproxy/bin/tproxy ]; then
    ./rel/tproxy/bin/tproxy stop
fi
rm -rf rel
mkdir -p rel
./rebar compile
cd rel
../rebar create-node nodeid=tproxy
cp ../reltool.config .
../rebar generate && ./tproxy/bin/tproxy start
cd "$BASEDIR"

# Build the C++ client. $CPPOPTS/$CPP_FILES are intentionally unquoted so the
# shell splits them into separate arguments.
CPP_FILES="client.cpp gen-cpp/tproxy_constants.cpp gen-cpp/tproxy.cpp gen-cpp/tproxy_types.cpp"
cd client
for f in $CPP_FILES ; do
    g++ $CPPOPTS -c $f
done
g++ $LDOPTS -o client client.o tproxy*.o -lthrift
rm *.o
cd "$BASEDIR"

# Build the C++ server.
SV_CPP_FILES="server.cpp gen-cpp/tproxy_constants.cpp gen-cpp/tproxy.cpp gen-cpp/tproxy_types.cpp"
cd server
for f in $SV_CPP_FILES ; do
    g++ $CPPOPTS -c $f
done
g++ $LDOPTS -o server server.o tproxy*.o -lthrift
rm *.o
cd "$BASEDIR"
|
#!/bin/bash
#
# Adopted from https://github.com/tmcdonell/travis-scripts/blob/dfaac280ac2082cd6bcaba3217428347899f2975/update-accelerate-buildbot.sh
# Builds the libcuml conda package and uploads it to Anaconda Cloud.
# NOTE(review): the bare `return 0` statements below are only valid when this
# file is *sourced* by the CI driver; executed directly they would error out
# (under set -e) — confirm how CI invokes this script.
set -e

if [ "$BUILD_LIBCUML" == "1" ]; then
    # Major.minor CUDA version, e.g. "10.1" from "10.1.243".
    # NOTE(review): CUDA_REL is not referenced below — presumably consumed by a
    # sourcing script or left over; verify.
    CUDA_REL=${CUDA_VERSION%.*}

    # Ask conda-build for the path of the package it would produce.
    export UPLOADFILE=`conda build conda/recipes/libcuml -c conda-forge -c numba -c conda-forge/label/rc_ucx -c nvidia -c rapidsai -c pytorch -c defaults --python=${PYTHON} --output`

    LABEL_OPTION="--label main"
    echo "LABEL_OPTION=${LABEL_OPTION}"

    # Fail fast if the expected artifact was not built.
    test -e ${UPLOADFILE}

    # Restrict uploads to master branch
    if [ ${BUILD_MODE} != "branch" ]; then
        echo "Skipping upload"
        return 0
    fi

    if [ -z "$MY_UPLOAD_KEY" ]; then
        echo "No upload key"
        return 0
    fi

    echo "Upload"
    echo ${UPLOADFILE}
    # --skip-existing makes re-runs idempotent.
    anaconda -t ${MY_UPLOAD_KEY} upload -u ${CONDA_USERNAME:-rapidsai} ${LABEL_OPTION} --skip-existing ${UPLOADFILE}
fi
|
<gh_stars>0
package util
import (
"encoding/json"
"path/filepath"
"github.com/flosch/pongo2"
log "github.com/sirupsen/logrus"
"github.com/spf13/viper"
)
var (
inited bool
loader *pongo2.LocalFilesystemLoader
pageSet *pongo2.TemplateSet
)
type TemplateContext map[string]interface{}
// RenderHTMLTemplate renders an HTML page template under the "content" style
// directory. The full context is additionally serialized to JSON and exposed
// to the template as "json_params".
//
// NOTE(review): this mutates the caller's context map and will panic if a nil
// map is passed — confirm callers always supply a non-nil map.
func RenderHTMLTemplate(template string, context map[string]interface{}) (output []byte, renderErr error) {
	clientParams, renderErr := json.Marshal(context)
	if renderErr != nil {
		return
	}
	context["json_params"] = string(clientParams)
	return RenderTemplate("content", template, context)
}
func RenderEmailTemplate(template string, context map[string]interface{}) (subjectOutput, plainOutput, htmlOutput []byte, renderErr error) {
subjectPath := filepath.Join(template, "subject")
plainPath := filepath.Join(template, "plain")
htmlPath := filepath.Join(template, "html")
subjectOutput, renderErr = RenderTemplate("email", subjectPath, context)
if renderErr != nil {
return
}
plainOutput, renderErr = RenderTemplate("email", plainPath, context)
if renderErr != nil {
return
}
htmlOutput, renderErr = RenderTemplate("email", htmlPath, context)
return
}
// RenderTemplate loads the template at <template.path>/<style>/<template>
// from the shared pongo2 cache and executes it with the given context.
// Render errors are logged here and also returned to the caller.
func RenderTemplate(style, template string, context map[string]interface{}) (output []byte, renderErr error) {
	templatePath := filepath.Join(viper.GetString("template.path"), style, template)
	// Lazily initialize the shared loader/template set on first use.
	ensureTemplates()
	templateBody, templateError := pageSet.FromCache(templatePath)
	if templateError != nil {
		renderErr = templateError
		return
	}
	output, renderErr = templateBody.ExecuteBytes(context)
	if renderErr != nil {
		log.Error(renderErr)
	}
	return
}
// ensureTemplates performs one-time initialization of the package-level pongo2
// loader and template set from viper configuration (template.path,
// template.debug, template.globals).
//
// NOTE(review): the inited check is not synchronized; concurrent first calls
// could race — confirm single-goroutine initialization or guard with sync.Once.
func ensureTemplates() {
	if !inited {
		loader = pongo2.MustNewLocalFileSystemLoader(viper.GetString("template.path"))
		pageSet = pongo2.NewSet("KarmaChameleon", loader)
		pageSet.Debug = viper.GetBool("template.debug")
		// Copy configured globals into the template set so every template sees them.
		for index, element := range viper.GetStringMap("template.globals") {
			pageSet.Globals[index] = element
		}
		inited = true
	}
}
|
#!/usr/bin/env bats
# Hit Varnish on its default listen port (6081) and check that it answers
# with HTTP 503 (the status expected for this setup's backend state).
@test "Validate status code for varnish" {
  run curl -s -o /dev/null -w "%{http_code}" localhost:6081
  [[ $output = "503" ]]
}
|
#! /bin/sh
#
# Copyright (c) 2004-2006 The Trustees of Indiana University and Indiana
# University Research and Technology
# Corporation. All rights reserved.
# Copyright (c) 2004-2005 The Regents of the University of California.
# All rights reserved.
# Copyright (c) 2006-2012 Cisco Systems, Inc. All rights reserved.
# $COPYRIGHT$
#
# Additional copyrights may follow
#
# $HEADER$
#
#
# This file generates a Fortran code to bridge between an explicit F90
# generic interface and the F77 implementation.
#
# This file is automatically generated by either of the scripts
# ../xml/create_mpi_f90_medium.f90.sh or
# ../xml/create_mpi_f90_large.f90.sh
#
# Pull in the rank/kind lists ($ranks, $ikinds, $rkinds, $ckinds) discovered
# at configure time, plus the check_size helper.
. "$1/fortran_kinds.sh"

# This entire file is only generated in medium/large modules. So if
# we're not at least medium, bail now.
check_size medium
if test "$output" = "0"; then
exit 0
fi

# Ok, we should continue.
# Rank 0 (scalar) is handled in addition to the configured array ranks.
allranks="0 $ranks"

# Emit one F90 bridge subroutine for a given procedure/rank/type combination.
#   $1 = F77 procedure name   $2 = array rank
#   $3 = type suffix used in the generated subroutine name
#   $4 = Fortran type declaration (with dimension spec baked in)
# The heredoc below is the generated Fortran source; do not reformat it.
output() {
procedure=$1
rank=$2
type=$4
proc="$1$2D$3"
cat <<EOF
subroutine ${proc}(fh, buf, status, ierr)
include "mpif-config.h"
integer, intent(in) :: fh
${type}, intent(in) :: buf
integer, dimension(MPI_STATUS_SIZE), intent(out) :: status
integer, intent(out) :: ierr
call ${procedure}(fh, buf, status, ierr)
end subroutine ${proc}
EOF
}

# Generate bridges for every rank and for character/logical plus every
# integer/real/complex kind provided by fortran_kinds.sh.
for rank in $allranks
do
case "$rank" in 0) dim='' ; esac
case "$rank" in 1) dim=', dimension(*)' ; esac
case "$rank" in 2) dim=', dimension(1,*)' ; esac
case "$rank" in 3) dim=', dimension(1,1,*)' ; esac
case "$rank" in 4) dim=', dimension(1,1,1,*)' ; esac
case "$rank" in 5) dim=', dimension(1,1,1,1,*)' ; esac
case "$rank" in 6) dim=', dimension(1,1,1,1,1,*)' ; esac
case "$rank" in 7) dim=', dimension(1,1,1,1,1,1,*)' ; esac

output MPI_File_write_all_end ${rank} CH "character${dim}"
output MPI_File_write_all_end ${rank} L "logical${dim}"
for kind in $ikinds
do
output MPI_File_write_all_end ${rank} I${kind} "integer*${kind}${dim}"
done
for kind in $rkinds
do
output MPI_File_write_all_end ${rank} R${kind} "real*${kind}${dim}"
done
for kind in $ckinds
do
output MPI_File_write_all_end ${rank} C${kind} "complex*${kind}${dim}"
done
done
|
<filename>packages/react-core/src/components/ClipboardCopy/__tests__/ClipboardCopyButton.test.tsx
import React from 'react';
import { render, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { ClipboardCopyButton } from '../ClipboardCopyButton';
// Shared props exercising every public prop of ClipboardCopyButton,
// including tooltip timing/positioning and accessibility attributes.
const props = {
  id: 'my-id',
  textId: 'my-text-id',
  className: 'fancy-copy-button',
  onClick: jest.fn(),
  exitDelay: 1000,
  entryDelay: 2000,
  maxWidth: '500px',
  position: 'right' as 'right',
  'aria-label': 'click this button to copy text'
};

// Snapshot test: guards the rendered DOM structure against regressions.
test('copy button render', () => {
  const { asFragment } = render(<ClipboardCopyButton {...props}>Copy Me</ClipboardCopyButton>);
  expect(asFragment()).toMatchSnapshot();
});

// Behavior test: clicking the button must invoke the onClick handler.
test('copy button onClick', () => {
  const onclick = jest.fn();
  render(
    <ClipboardCopyButton {...props} onClick={onclick}>
      Copy to Clipboard
    </ClipboardCopyButton>
  );
  userEvent.click(screen.getByRole('button'));
  expect(onclick).toHaveBeenCalled();
});
|
#!/bin/bash
## Bash Script to deploy an F5 ARM template into Azure, using azure cli 1.0 ##
## Example Command: ./deploy_via_bash.sh --adminUsername azureuser --authenticationType password --adminPasswordOrKey <value> --dnsLabel <value> --instanceName f5vm01 --numberOfExternalIps 1 --instanceType Standard_DS3_v2 --imageName Best1Gbps --bigIpVersion 15.0.100000 --bigIpModules ltm:nominal --vnetAddressPrefix 10.0 --declarationUrl NOT_SPECIFIED --ntpServer 0.pool.ntp.org --timeZone UTC --customImage OPTIONAL --allowUsageAnalytics Yes --numberOfAdditionalNics 1 --additionalNicLocation <value> --resourceGroupName <value> --azureLoginUser <value> --azureLoginPassword <value>

# Assign Script Parameters and Define Variables
# Specify static items below, change these as needed or make them parameters
region="westus"
restrictedSrcAddress="*"
tagValues='{"application":"APP","environment":"ENV","group":"GROUP","owner":"OWNER","cost":"COST"}'

# Parse the command line arguments, primarily checking full params as short params are just placeholders
while [[ $# -gt 1 ]]; do
    case "$1" in
        --adminUsername)
            adminUsername=$2
            shift 2;;
        --authenticationType)
            authenticationType=$2
            shift 2;;
        --adminPasswordOrKey)
            adminPasswordOrKey=$2
            shift 2;;
        --dnsLabel)
            dnsLabel=$2
            shift 2;;
        --instanceName)
            instanceName=$2
            shift 2;;
        --numberOfExternalIps)
            numberOfExternalIps=$2
            shift 2;;
        --instanceType)
            instanceType=$2
            shift 2;;
        --imageName)
            imageName=$2
            shift 2;;
        --bigIpVersion)
            bigIpVersion=$2
            shift 2;;
        --bigIpModules)
            bigIpModules=$2
            shift 2;;
        --vnetAddressPrefix)
            vnetAddressPrefix=$2
            shift 2;;
        --declarationUrl)
            declarationUrl=$2
            shift 2;;
        --ntpServer)
            ntpServer=$2
            shift 2;;
        --timeZone)
            timeZone=$2
            shift 2;;
        --customImage)
            customImage=$2
            shift 2;;
        --restrictedSrcAddress)
            restrictedSrcAddress=$2
            shift 2;;
        --tagValues)
            tagValues=$2
            shift 2;;
        --allowUsageAnalytics)
            allowUsageAnalytics=$2
            shift 2;;
        --numberOfAdditionalNics)
            numberOfAdditionalNics=$2
            shift 2;;
        --additionalNicLocation)
            additionalNicLocation=$2
            shift 2;;
        --resourceGroupName)
            resourceGroupName=$2
            shift 2;;
        --region)
            region=$2
            shift 2;;
        --azureLoginUser)
            azureLoginUser=$2
            shift 2;;
        --azureLoginPassword)
            azureLoginPassword=$2
            shift 2;;
        --)
            shift
            break;;
        *)
            # BUGFIX: without a default arm an unrecognized argument was never
            # shifted, so the while loop spun forever. Warn and skip it instead.
            echo "Warning: ignoring unknown parameter: $1" >&2
            shift;;
    esac
done

#If a required parameter is not passed, the script will prompt for it below
required_variables="adminUsername authenticationType adminPasswordOrKey dnsLabel instanceName numberOfExternalIps instanceType imageName bigIpVersion bigIpModules vnetAddressPrefix declarationUrl ntpServer timeZone customImage allowUsageAnalytics numberOfAdditionalNics additionalNicLocation resourceGroupName "
for variable in $required_variables
do
    if [ -z ${!variable} ] ; then
        read -p "Please enter value for $variable:" $variable
    fi
done

echo "Disclaimer: Scripting to Deploy F5 Solution templates into Cloud Environments are provided as examples. They will be treated as best effort for issues that occur, feedback is encouraged."
sleep 3

# Login to Azure, for simplicity in this example using username and password supplied as script arguments --azureLoginUser and --azureLoginPassword
# Perform Check to see if already logged in
az account show > /dev/null 2>&1
if [[ $? != 0 ]] ; then
    az login -u $azureLoginUser -p $azureLoginPassword
fi

# Create ARM Group
az group create -n $resourceGroupName -l $region

# Deploy ARM Template, right now cannot specify parameter file and parameters inline via Azure CLI
template_file="./azuredeploy.json"
parameter_file="./azuredeploy.parameters.json"

az group deployment create --verbose --no-wait --template-file $template_file -g $resourceGroupName -n $resourceGroupName --parameters "{\"adminUsername\":{\"value\":\"$adminUsername\"},\"authenticationType\":{\"value\":\"$authenticationType\"},\"adminPasswordOrKey\":{\"value\":\"$adminPasswordOrKey\"},\"dnsLabel\":{\"value\":\"$dnsLabel\"},\"instanceName\":{\"value\":\"$instanceName\"},\"numberOfExternalIps\":{\"value\":$numberOfExternalIps},\"instanceType\":{\"value\":\"$instanceType\"},\"imageName\":{\"value\":\"$imageName\"},\"bigIpVersion\":{\"value\":\"$bigIpVersion\"},\"bigIpModules\":{\"value\":\"$bigIpModules\"},\"vnetAddressPrefix\":{\"value\":\"$vnetAddressPrefix\"},\"declarationUrl\":{\"value\":\"$declarationUrl\"},\"ntpServer\":{\"value\":\"$ntpServer\"},\"timeZone\":{\"value\":\"$timeZone\"},\"customImage\":{\"value\":\"$customImage\"},\"restrictedSrcAddress\":{\"value\":\"$restrictedSrcAddress\"},\"tagValues\":{\"value\":$tagValues},\"allowUsageAnalytics\":{\"value\":\"$allowUsageAnalytics\"},\"numberOfAdditionalNics\":{\"value\":$numberOfAdditionalNics},\"additionalNicLocation\":{\"value\":\"$additionalNicLocation\"}}"
|
<filename>application/screens/PostDetails.js
import React, {Component} from 'react';
import { NavigationActions } from 'react-navigation';
import { Container, Header, Content, List, ListItem, Thumbnail, Text, Left, Body, Right, Button, Tab, Tabs } from 'native-base';
import { ImageBackground, Dimensions, View, TouchableOpacity, SafeAreaView, ScrollView, FlatList, StatusBar, AsyncStorage, Linking, Image } from 'react-native';
import Ionicons from 'react-native-vector-icons/Ionicons';
import Icon from 'react-native-vector-icons/SimpleLineIcons';
import { LinearGradient } from 'expo-linear-gradient';
import PostRating from '../components/PostRating';
import PostForm from '../forms/PostForm';
import CommentsCount from '../forms/CommentsCount';
import { KeyboardAwareScrollView } from 'react-native-keyboard-aware-scroll-view';
import * as firebase from 'firebase';
import ConfigApp from '../utils/ConfigApp';
import ColorsApp from '../utils/ColorsApp';
import Strings from '../utils/Strings';
import BannerAd from '../components/BannerAd';
import HTML from 'react-native-render-html';
import {Grid, Row, Col } from 'react-native-easy-grid';
import Modal from 'react-native-modalbox';
import Toast from 'react-native-root-toast';
var styles = require('../../assets/files/Styles');
var {height, width} = Dimensions.get('window');
export default class PostDetails extends Component {
static navigationOptions = {
headerShown: false
};
constructor(props) {
super(props)
const {params} = props.navigation.state;
this.state = {
item: params.item,
isLoading: true,
isVisible: false,
isOpen: false,
isDisabled: false,
swipeToClose: false,
sliderValue: 0.3
};
}
savePosts = async (post_id, post_title, post_image, post_date, tag_title, post_description, uid) => {
try {
let post = {
userId: uid,
post_id: post_id,
post_title: post_title,
post_image: post_image,
post_date: post_date,
tag_title: tag_title,
post_description: post_description
}
const posts = await AsyncStorage.getItem('posts') || '[]';
let postsFav = JSON.parse(posts);
postsItems = postsFav.filter(function(e){ return e.post_id !== post_id && e.userId == uid })
postsItems.push(post);
AsyncStorage.setItem('posts', JSON.stringify(postsItems)).then(() => {
Toast.show(Strings.ST53.toUpperCase(), {duration: Toast.durations.SHORT, position: Toast.positions.CENTER, shadow: false, animation: true})
});
} catch(error) {
}
};
closeModal(){
this.refs.modal3.close();
}
CommentsByPost=(post_id)=>
{
this.props.navigation.navigate('PostCommentsScreen', { postId: post_id});
}
render() {
const {item} = this.state;
var user = firebase.auth().currentUser;
return (
<Container style={styles.background_general}>
<StatusBar barStyle="light-content"/>
<LinearGradient colors={['rgba(0,0,0,1)', 'rgba(0,0,0,0.5)', 'rgba(0,0,0,0.0)']} style={{position: 'absolute', top: 0, zIndex: 100, paddingTop: 55, paddingHorizontal: 30, width: width}}>
<Grid >
<Col style={{alignItems: 'flex-start', alignContent: 'flex-start', justifyContent: 'flex-start'}}>
<TouchableOpacity onPress={() => this.props.navigation.goBack()}>
<Ionicons name="md-arrow-back" style={{fontSize: 27, color: '#FFFFFF'}}/>
</TouchableOpacity>
</Col>
<Col size={2} style={{alignItems: 'center', alignContent: 'center', justifyContent: 'center'}}>
<Text numberOfLines={1} style={{fontSize: 16, color: '#fff', fontWeight: 'bold' }}>{Strings.ST130.toUpperCase()}</Text>
</Col>
<Col style={{alignItems: 'flex-end', alignContent: 'flex-end', justifyContent: 'flex-end'}}>
<TouchableOpacity onPress={this.savePosts.bind(this, item.post_id, item.post_title, item.post_image, item.post_date, item.tag_title, item.post_description, user.uid)}>
<Ionicons name="md-star" style={{fontSize: 27, color: '#FFFFFF'}}/>
</TouchableOpacity>
</Col>
</Grid>
</LinearGradient>
<ScrollView>
<KeyboardAwareScrollView>
<ImageBackground source={{uri: ConfigApp.URL+'images/'+item.post_image}} style={styles.background_diets}>
<LinearGradient colors={['rgba(0,0,0,0.10)','rgba(0,0,0,0.45)', 'rgba(0,0,0,0.85)']} style={styles.gradient_diets}>
<Text style={styles.postDetail_tag}>{item.tag_title.toUpperCase()}</Text>
<Text style={styles.postDetail_title}>{item.post_title}</Text>
<PostRating postId={item.post_id}/>
<View style={{height: 10}}></View>
</LinearGradient>
</ImageBackground>
<View style={{backgroundColor: ColorsApp.PRIMARY, width: width, paddingVertical: 5}}>
<ListItem icon style={{borderBottomWidth: 0}}>
<Body style={{borderBottomWidth: 0}}>
<TouchableOpacity onPress={this.CommentsByPost.bind(this, item.post_id)}>
<Text style={{fontSize: 14, fontWeight: '300', color: '#fff' }}>{Strings.ST84.toUpperCase()} <CommentsCount postId={item.post_id} /></Text>
</TouchableOpacity>
</Body>
<Right style={{borderBottomWidth: 0}}>
<TouchableOpacity onPress={() => this.refs.modal3.open()} activeOpacity={1} style={{justifyContent: 'center', flexDirection: 'row' }}>
<Text style={{fontSize: 14, color: '#fff', fontWeight: '300', paddingRight: 6}}> {Strings.ST83.toUpperCase()}</Text>
<Ionicons active name="ios-add-circle" style={{ fontSize: 16, color: '#fff'}} />
</TouchableOpacity>
</Right>
</ListItem>
</View>
<View style={{margin: 15, marginBottom: 5}}>
<HTML html={item.post_description} onLinkPress={(evt, href) => { Linking.openURL(href); }} />
<View style={{marginTop: 15, height: 1, backgroundColor: '#eee'}}></View>
<View style={{marginTop: 15, paddingBottom: 15, alignContent: 'center', justifyContent: 'flex-start', flexDirection: 'row'}}>
<Ionicons active name="md-calendar" style={{ fontSize: 14, color: '#666', paddingRight: 10}} />
<Text style={styles.postDetail_date}>{item.post_date.toUpperCase()}</Text>
<Ionicons active name="md-folder-open" style={{ fontSize: 14, color: '#666', paddingRight: 10}} />
<Text style={styles.postDetail_date}>{item.tag_title.toUpperCase()}</Text>
</View>
<View style={{height: 1, backgroundColor: '#eee'}}></View>
</View>
<Modal style={[styles.modal, styles.modal3]} position={"center"} ref={"modal3"} swipeArea={20} swipeToClose={this.state.swipeToClose} onClosed={this.onClose} onOpened={this.onOpen} onClosingState={this.onClosingState} isDisabled={this.state.isDisabled} coverScreen={true}>
<View style={{marginTop: 8, marginBottom: 8}}>
<Text style={styles.commentTitle}>{Strings.ST83.toUpperCase()}</Text>
<PostForm postId={item.post_id} closeModal={() => this.closeModal()}/>
</View>
</Modal>
<View style={{height: 100}}/>
</KeyboardAwareScrollView>
</ScrollView>
<BannerAd/>
</Container>
);
}
}
|
<reponame>Doan-NV/realworld
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { Tag } from '../entity/tag.entity';
@Injectable()
export class TagsService {
  constructor(
    @InjectRepository(Tag)
    private readonly tagRepository: Repository<Tag>,
  ) {}

  /**
   * Returns all tags stored in the database.
   *
   * BUGFIX: the previous version awaited the query, logged it to the console,
   * and returned undefined (the real return was commented out).
   */
  async getTags(): Promise<Tag[]> {
    return this.tagRepository.find();
  }
}
|
#!/bin/bash -eux
{
    # Run the engine's search regression suites against several models, varying
    # the input layout (iNCHW/iNHWC), conv layout (cNCHW/cNHWC) and symmetry
    # index; tee each run's output into the expected-results files under tests/.
    ./main runsearchtests models/v53-140-5x64.txt.gz false false 0 false | tee tests/results/runSearchTests-iNCHW-cNCHW.txt
    ./main runsearchtests models/v53-140-5x64.txt.gz true false 0 false | tee tests/results/runSearchTests-iNHWC-cNCHW.txt
    ./main runsearchtests models/v53-140-5x64.txt.gz true true 0 false | tee tests/results/runSearchTests-iNHWC-cNHWC.txt
    ./main runsearchtests models/v53-140-5x64.txt.gz true false 1 false | tee tests/results/runSearchTests-s1.txt

    ./main runsearchtests models/run4-s67105280-d24430742-b6c96.txt.gz false false 0 false | tee tests/results/runSearchTests-r4-iNCHW-cNCHW.txt
    ./main runsearchtests models/run4-s67105280-d24430742-b6c96.txt.gz true false 0 false | tee tests/results/runSearchTests-r4-iNHWC-cNCHW.txt
    ./main runsearchtests models/run4-s67105280-d24430742-b6c96.txt.gz true true 0 false | tee tests/results/runSearchTests-r4-iNHWC-cNHWC.txt
    ./main runsearchtests models/run4-s67105280-d24430742-b6c96.txt.gz true false 1 false | tee tests/results/runSearchTests-r4-s1.txt

    ./main runsearchtestsv3 models/grun2-b6c96-s128700160-d49811312.txt.gz false false 0 false | tee tests/results/runSearchTestsV3-g2-iNCHW-cNCHW.txt
    ./main runsearchtestsv3 models/grun2-b6c96-s128700160-d49811312.txt.gz true false 0 false | tee tests/results/runSearchTestsV3-g2-iNHWC-cNCHW.txt
    ./main runsearchtestsv3 models/grun2-b6c96-s128700160-d49811312.txt.gz true true 0 false | tee tests/results/runSearchTestsV3-g2-iNHWC-cNHWC.txt
    ./main runsearchtestsv3 models/grun2-b6c96-s128700160-d49811312.txt.gz true false 1 false | tee tests/results/runSearchTestsV3-g2-s1.txt

    ./main runselfplayinittests models/grun50-b6c96-s156348160-d118286860.txt.gz | tee tests/results/runSelfplayInitTests.txt

    # With -e above, reaching this line means every suite succeeded.
    exit 0
}
|
from setuptools import setup

# Single source of truth for the release version. Keeps the package version
# and the GitHub archive download URL in sync (they previously disagreed:
# version 1.0.4 vs a 1.0.2 download_url).
VERSION = '1.0.4'

setup(
    name='event-bus',
    version=VERSION,
    packages=['event_bus'],
    url='https://github.com/summer-wu/event-bus',
    license='MIT',
    author='Your Name',
    author_email='your_email@example.com',
    description='A simple python event bus.',
    download_url='https://github.com/seanpar203/event-bus/archive/{}.tar.gz'.format(VERSION),
)
# Command to build the distribution file: python3 setup.py bdist_wheel
# Command to install the package using pip: pip3.7 install dist/event_bus-1.0.4-py3-none-any.whl
|
<reponame>huangbin082/Bin
package com.leetcode;
import org.testng.annotations.Test;
public class Solution_424Test {

    /**
     * Smoke-test for LeetCode 424 ("Longest Repeating Character Replacement"):
     * runs characterReplacement on a 100-character input with k = 4 and prints
     * the result.
     *
     * NOTE(review): the result is only printed, not asserted — consider adding
     * an Assert on the expected value so the test can actually fail.
     */
    @Test
    public void testCharacterReplacement() {
        Solution_424 solution_424 = new Solution_424();
        System.out.println(solution_424.characterReplacement("KRSCDCSONAJNHLBMDQGIFCPEKPOHQIHLTDIQGEKLRLCQNBOHNDQGHJPNDQPERNFSSSRDEQLFPCCCARFMDLHADJADAGNNSBNCJQOF", 4));
    }
}
|
// Smoothly scroll to the in-page anchor referenced by any element carrying
// the "scrollTo" class, instead of the browser's instant jump.
$(document).ready(function() {
    $('.scrollTo').click(function() {
        var target = $(this).attr('href');  // e.g. "#some-section"
        var durationMs = 1200;              // animation duration
        $('html, body').animate({ scrollTop: $(target).offset().top }, durationMs);
        return false;  // suppress the default anchor navigation
    });
});
|
# frozen_string_literal: true
require 'test_helper'
require 'fileutils'
require 'find'
require 'shellwords'
class SprocketsRailsTest < Minitest::Test
  # End-to-end check that sprockets-rails rewrites asset references with
  # digested filenames: precompiles the dummy app's assets in production mode,
  # then scans every compiled app*.css/js for url(...) references to images and
  # fonts and asserts each one carries a content-hash suffix.
  def test_sprockets_digest_asset_refs
    root = 'test/dummy_rails'
    compiled = Dir.chdir root do
      silence_stderr_if !ENV['VERBOSE'] do
        # Run the precompile inside a clean Bundler environment so the dummy
        # app uses this gem's Gemfile rather than the outer test process's.
        Bundler.with_original_env do
          system({ 'BUNDLE_GEMFILE' => File.join(GEM_PATH, 'Gemfile'),
                   'RAILS_ENV' => 'production' },
                 'bundle && bundle exec rake assets:precompile')
        end
      end
    end
    assert compiled, 'Could not precompile assets'
    Dir.glob(File.join(root, 'public', 'assets', 'app*.{css,js}')) do |path|
      File.read(path)
          .scan(/url\("?[^"]+\.(?:jpg|png|eot|woff2?|ttf|svg)[^"]*"?\)/) do |m|
        # Every asset URL must include a hex digest (at least 12 chars).
        assert_match(/-[0-9a-f]{12,}\./, m)
      end
    end
  ensure
    # Always clean up generated assets and caches, even when assertions fail.
    FileUtils.rm_rf %W[#{root}/public/assets/ #{root}/tmp/cache/], secure: true
  end
end
|
<gh_stars>0
import { coerce } from '../bytes.js'
import * as Digest from './digest.js'
// Multiformats "identity" multihash: the digest is the input itself.
export const code = 0x0
export const name = 'identity'
/**
 * Computes the identity digest, i.e. wraps the (coerced) input bytes
 * unchanged in a Digest with code 0x0.
 *
 * @param {Uint8Array} input
 * @returns {Digest.Digest<typeof code, number>}
 */
export const digest = (input) => Digest.create(code, coerce(input))
/** @type {import('./interface').SyncMultihashHasher<typeof code>} */
export const identity = { code, name, digest }
|
<reponame>schroedtert/jpsreport-python
import logging
# Shared logger for all JPSreport modules.
logger = logging.getLogger("JPSreport")

FORMAT = "[%(asctime)s] [%(levelname)s]: %(message)s"
logging.basicConfig(format=FORMAT)


def log_debug(msg, *args, **kwargs):
    """Log ``msg`` at DEBUG level on the shared JPSreport logger.

    Accepts the same printf-style ``*args`` and ``**kwargs`` as
    ``logging.Logger.debug`` (the previous wrappers dropped ``*args``,
    breaking lazy formatting like ``log_debug("x=%s", x)``).
    """
    logger.debug(msg, *args, **kwargs)


def log_info(msg, *args, **kwargs):
    """Log ``msg`` at INFO level on the shared JPSreport logger."""
    logger.info(msg, *args, **kwargs)


def log_warning(msg, *args, **kwargs):
    """Log ``msg`` at WARNING level on the shared JPSreport logger."""
    logger.warning(msg, *args, **kwargs)


def log_error(msg, *args, **kwargs):
    """Log ``msg`` at ERROR level on the shared JPSreport logger."""
    logger.error(msg, *args, **kwargs)
|
-- Teardown script for the IM (instant messaging) module schema (Oracle).
-- CASCADE CONSTRAINTS removes dependent foreign keys so drop order is safe.
DROP TABLE B_IM_CHAT CASCADE CONSTRAINTS
/
DROP TABLE B_IM_MESSAGE CASCADE CONSTRAINTS
/
DROP TABLE B_IM_RELATION CASCADE CONSTRAINTS
/
DROP TABLE B_IM_RECENT CASCADE CONSTRAINTS
/
-- Drop the ID sequences that backed the tables above.
DROP SEQUENCE SQ_B_IM_CHAT
/
DROP SEQUENCE SQ_B_IM_MESSAGE
/
DROP SEQUENCE SQ_B_IM_RELATION
/
DROP SEQUENCE SQ_B_IM_RECENT
/
|
from tqdm import tqdm
def apply_with_progress_bar(desc=None):
    """Decorator factory: wrap a per-item function so that calling the wrapper
    with an iterable applies the function to every element while showing a
    tqdm progress bar.

    :param desc: optional description shown next to the progress bar.
    :return: a decorator for functions taking a single element.
    """
    def decorator(key_func):
        import functools

        # functools.wraps preserves the wrapped function's name/docstring.
        @functools.wraps(key_func)
        def func_wrapper(iterable):
            # len() is required here, so the iterable must be sized.
            pbar = tqdm(total=len(iterable), desc=desc)
            try:
                for obj in iterable:
                    pbar.update()
                    key_func(obj)
            finally:
                # Close the bar even if key_func raises, so the terminal
                # is not left with a dangling progress line.
                pbar.close()
        return func_wrapper
    return decorator
|
#!/usr/bin/env bats
load helpers
function setup() {
teardown_busybox
setup_busybox
}
function teardown() {
teardown_busybox
}
@test "events --stats" {
# run busybox detached
runc run -d --console /dev/pts/ptmx test_busybox
[ "$status" -eq 0 ]
# check state
wait_for_container 15 1 test_busybox
# generate stats
runc events --stats test_busybox
[ "$status" -eq 0 ]
[[ "${lines[0]}" == [\{]"\"type\""[:]"\"stats\""[,]"\"id\""[:]"\"test_busybox\""[,]* ]]
[[ "${lines[0]}" == *"data"* ]]
}
@test "events --interval default " {
# run busybox detached
runc run -d --console /dev/pts/ptmx test_busybox
[ "$status" -eq 0 ]
# check state
wait_for_container 15 1 test_busybox
# spawn two sub processes (shells)
# the first sub process is an event logger that sends stats events to events.log
# the second sub process waits for an event that incudes test_busybox then
# kills the test_busybox container which causes the event logger to exit
(__runc events test_busybox > events.log) &
(
retry 10 1 eval "grep -q 'test_busybox' events.log"
teardown_running_container test_busybox
) &
wait # wait for the above sub shells to finish
[ -e events.log ]
run cat events.log
[ "$status" -eq 0 ]
[[ "${lines[0]}" == [\{]"\"type\""[:]"\"stats\""[,]"\"id\""[:]"\"test_busybox\""[,]* ]]
[[ "${lines[0]}" == *"data"* ]]
}
@test "events --interval 1s " {
# run busybox detached
runc run -d --console /dev/pts/ptmx test_busybox
[ "$status" -eq 0 ]
# check state
wait_for_container 15 1 test_busybox
# spawn two sub processes (shells)
# the first sub process is an event logger that sends stats events to events.log once a second
# the second sub process tries 3 times for an event that incudes test_busybox
# pausing 1s between each attempt then kills the test_busybox container which
# causes the event logger to exit
(__runc events --interval 1s test_busybox > events.log) &
(
retry 3 1 eval "grep -q 'test_busybox' events.log"
teardown_running_container test_busybox
) &
wait # wait for the above sub shells to finish
[ -e events.log ]
run eval "grep -q 'test_busybox' events.log"
[ "$status" -eq 0 ]
}
@test "events --interval 100ms " {
# run busybox detached
runc run -d --console /dev/pts/ptmx test_busybox
[ "$status" -eq 0 ]
# check state
wait_for_container 15 1 test_busybox
#prove there is no carry over of events.log from a prior test
[ ! -e events.log ]
# spawn two sub processes (shells)
# the first sub process is an event logger that sends stats events to events.log once every 100ms
# the second sub process tries 3 times for an event that incudes test_busybox
# pausing 100s between each attempt then kills the test_busybox container which
# causes the event logger to exit
(__runc events --interval 100ms test_busybox > events.log) &
(
retry 3 0.100 eval "grep -q 'test_busybox' events.log"
teardown_running_container test_busybox
) &
wait # wait for the above sub shells to finish
[ -e events.log ]
run eval "grep -q 'test_busybox' events.log"
[ "$status" -eq 0 ]
}
|
#!/bin/bash
# Wrapper around the ROOT macro dumpSignificanceABCD.C: computes ABCD-method
# significances from a MET-vs-time histogram file and writes them to a dump file.

## config
# positional args with defaults: input/output ROOT file, x/y bin boundaries,
# data-blinding flag (1 = blinded), and the text file receiving the dump.
iofilename=${1:-"met_vs_time.root"}
xbin_boundary=${2:-"3"}
ybin_boundary=${3:-"200"}
blind_data=${4:-1}
signif_dump=${5:-"tmp_dump.txt"}

## run macro
# -l no splash, -b batch (no graphics), -q quit after running the macro.
root -l -b -q dumpSignificanceABCD.C\(\"${iofilename}\",\"${xbin_boundary}\",\"${ybin_boundary}\",${blind_data},\"${signif_dump}\"\)

## Final message
echo "Finished DumpingSignificanceABCD for: ${iofilename}"
|
-- Return the three highest salary values.
-- NOTE(review): duplicate salaries are not collapsed; if "top 3 distinct
-- salaries" is intended, SELECT DISTINCT would be needed — confirm requirements.
SELECT salaries.salary
FROM salaries
ORDER BY salaries.salary DESC
LIMIT 3;
|
#!/usr/bin/env bash
# Regenerate ExpoKit's dynamic macros and build constants for the iOS project,
# then clean up the temporary macro state.
set -e

# Directory containing this script; quoted so checkouts under paths with
# spaces work (the previous version left ${BASH_SOURCE[0]} unquoted).
scriptdir=$(dirname "${BASH_SOURCE[0]}")
pushd "$scriptdir"

mkdir -p ../ios/Exponent/Generated/
node_modules/.bin/gulp generate-dynamic-macros --platform ios --buildConstantsPath ../ios/Exponent/Supporting/EXBuildConstants.plist --infoPlistPath ../ios/Exponent/Supporting --expoKitPath ..
node_modules/.bin/gulp cleanup-dynamic-macros --platform ios --infoPlistPath ../ios/Exponent/Supporting --expoKitPath ..
popd
|
<gh_stars>1-10
import request from '@/utils/request'
// 获取购物车列表
export function huoQuGouWuCheList(query) {
return request({
url: '/cart/list',
method: 'get',
params:query
})
}
// 移出购物车
export function yiChuGouWuChe(query) {
return request({
url: '/cart/del?cartId='+query.cartId,
method: 'get',
})
}
// 生成订单
export function shengChengDingDan(data) {
return request({
url: '/order/add',
method: 'post',
headers:{
"Content-Type":"application/json"
},
data:data
})
}
|
# Point JAVA_HOME at the JDK install location (default c:/jdk, Git-Bash style
# path) and put its bin/ first on PATH.
export JAVA_HOME=${JAVA_HOME:-/c/jdk}
export PATH=${JAVA_HOME}/bin:${PATH}
# Install JDK 8 via Chocolatey into c:\jdk, matching the JAVA_HOME default above.
choco install jdk8 -params 'installdir=c:\\jdk' -y
|
# Manages a user's financial accounts within a budget: listing, linking an
# account to an item collection, and toggling auto/manual sync.
class AccountsController < ApplicationController
  respond_to :html
  before_action :prepare

  # Loads the current budget (from budget_id) and account (from account_id or
  # id) scoped to the signed-in user, and builds the breadcrumb trail.
  def prepare
    if params[:budget_id]
      @budget = current_user.budgets.where(id: params[:budget_id]).first
      add_crumb 'Budgets', budgets_path
      add_crumb @budget.name, budget_path(@budget)
      add_crumb 'Accounts', budget_accounts_path(@budget)
    end
    account_id = params[:account_id] ||= params[:id]
    if account_id
      @account = current_user.accounts.where(id: account_id).first
    end
  end

  # Lists accounts for the current user, optionally restricted to one budget.
  # NOTE(review): these conditions use sanitize_sql's '%s' format tokens;
  # prefer hash conditions (e.g. where(budgets: { user_id: ... })) — confirm
  # before changing, as behavior depends on AR sanitization.
  def index
    if @budget
      @accounts = Account.joins(:budget).where("budgets.user_id = '%s' AND budget_id = '%s'", current_user.id, @budget.id).order('budgets.name ASC')
    else
      @accounts = Account.joins(:budget).where("budgets.user_id = '%s'", current_user.id).order('budgets.name ASC')
    end
  end

  # Edit form: offers only collections not already linked to another account
  # in this budget.
  def edit
    @collections = current_user.collections.where.not(id: current_user.accounts.where('collection_id is not null').where(budget_id: @budget.id).pluck(:collection_id))
    render_not_found if @account.nil?
  end

  # Shows an account's transactions plus, when linked, its collection's items.
  def show
    add_crumb @account.name, budget_account_path(@budget, @account)
    @transactions = Transaction.where(account_id: @account.id).order('date DESC')
    unless @account.collection_id.blank?
      @items = Item.joins(:collection).where("collections.id = '%s'", @account.collection_id).order('date DESC')
    else
      @items = []
    end
  end

  # Links the account to a collection chosen in the form; the chosen
  # collection must belong to the current user.
  def update
    @collection = current_user.collections.where(id: params[:account][:collection_id]).where.not(id: current_user.accounts.where('collection_id is null').pluck(:id)).first
    render_not_found and return if @account.nil?
    render_forbidden and return if @collection.nil?
    @account.collection = @collection
    @account.save!
    redirect_to budget_accounts_path(@budget)
  end

  # Detaches the account from its collection.
  def unlink
    render_not_found and return if @account.nil?
    @account.collection = nil
    @account.save!
    redirect_to budget_accounts_path(@budget)
  end

  # Enables automatic syncing for the account.
  def auto_sync
    render_not_found and return if @account.nil?
    @account.auto_sync = true
    @account.save!
    redirect_to budget_account_path(@budget, @account)
  end

  # Disables automatic syncing for the account.
  def manual_sync
    render_not_found and return if @account.nil?
    @account.auto_sync = false
    @account.save!
    redirect_to budget_account_path(@budget, @account)
  end

  # Triggers a full budget sync for the current user.
  def sync
    Budget.sync_budget(current_user)
    redirect_to budgets_path
  end
end
|
package com.sach.reflections.service;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
 * Minimal dependency-injection factory: reads a "name,className" mapping from
 * a configuration resource and instantiates one bean per line via reflection.
 */
public class DiFactory {

    /** Bean-name to instance mapping loaded from the configuration resource. */
    private Map<String, Object> beans;

    /**
     * Builds the factory by reading the bean configuration from the given URL.
     * Each non-comment, non-blank line has the form: beanName,fully.qualified.ClassName
     *
     * @param url location of the configuration resource
     * @throws Exception if the resource cannot be read or a class cannot be
     *                   found or instantiated
     */
    public DiFactory(URL url) throws Exception {
        beans = new HashMap<>();
        loadBeans(url);
    }

    private void loadBeans(URL url) throws Exception {
        try (BufferedReader br = new BufferedReader(new InputStreamReader(url.openStream()))) {
            String line = null;
            while ((line = br.readLine()) != null) {
                // Skip commented and blank lines (a blank line previously caused
                // an ArrayIndexOutOfBoundsException on parts[1]).
                if (line.startsWith("#") || line.trim().isEmpty()) {
                    continue;
                }
                String[] parts = line.split(",");
                Class<?> beanClass = Class.forName(parts[1].trim());
                // getDeclaredConstructor().newInstance() replaces the deprecated
                // Class.newInstance(), which swallowed constructor exception wrapping.
                beans.put(parts[0].trim(), beanClass.getDeclaredConstructor().newInstance());
            }
        }
    }

    /**
     * Returns the bean registered under the given name, cast to the caller's
     * expected type.
     *
     * @param type bean name as used in the configuration file
     * @return the bean instance, or null if no bean with that name exists
     */
    @SuppressWarnings("unchecked")
    public <T> T getBean(String type) {
        return (T) beans.get(type);
    }
}
|
#!/bin/bash
# This script reapplies an GCC header fix to the limits.h header so that the fixed header
# recurse down to the real libc limits.h.
#
# This script is needed because the code, in the GCC code base, that applies this patch
# checks for a already-installed libc installed limits.h header, but due to the order of
# installation we needed an (cut-down) installation of GCC before we can build libc.

# Print an error message and abort the script.
die() {
    echo "Error: $1"
    exit 1
}

# The 'variables' file must define TARGET_ARCH (the cross toolchain prefix).
[ ! -f variables ] && die "'variables': No such file"
. variables
[ -z "${TARGET_ARCH}" ] && die "TARGET_ARCH not defined."

echo "Applying limits.h header patch..."

# Locate libgcc for the target, then the header fragments shipped next to it.
LIBGCC_FILE=$(${TARGET_ARCH}-gcc -print-libgcc-file-name)
LIBGCC_PATH=$(dirname ${LIBGCC_FILE})

LIMITX_HEADER=${LIBGCC_PATH}/plugin/include/limitx.h
LIMITY_HEADER=${LIBGCC_PATH}/plugin/include/limity.h
GLIMIT_HEADER=${LIBGCC_PATH}/plugin/include/glimits.h
FIXED_LIMITS_HEADER=${LIBGCC_PATH}/include-fixed/limits.h

# Rebuild the fixed limits.h as prologue (limitx) + base (glimits) + epilogue
# (limity), the same layout GCC's own fixincludes step produces.
cat ${LIMITX_HEADER} ${GLIMIT_HEADER} ${LIMITY_HEADER} > ${FIXED_LIMITS_HEADER}

echo "Done."
|
<gh_stars>0
package de.patrick246.dhbw.cg.learnopengl.renderable;
import de.patrick246.dhbw.cg.learnopengl.Material;
import de.patrick246.dhbw.cg.learnopengl.RenderPass;
import de.patrick246.dhbw.cg.learnopengl.opengl.Shader;
import de.patrick246.dhbw.cg.learnopengl.opengl.Vao;
import org.joml.Matrix4f;
import org.joml.Vector3f;
import static org.lwjgl.opengl.GL43C.GL_TRIANGLES;
import static org.lwjgl.opengl.GL43C.glDrawArrays;
/**
 * A unit quad (two triangles spanning [-0.5, 0.5] in X/Y at Z = 0) with
 * texture coordinates and per-vertex normals, rendered with a given shader
 * and material. Transform methods (scale/translate/rotate) mutate the model
 * matrix and return this for chaining.
 */
public class Plane implements Renderable {
    private Shader shader;
    private Matrix4f modelMatrix;
    private Material material;
    private Vao vao;

    public Plane(Shader shader, Material material) {
        this.shader = shader;
        this.material = material;
        this.modelMatrix = new Matrix4f().identity();
        // Upload positions, texture coords and normals into one VAO.
        this.vao = new Vao(generateVertices())
                .setTextureCoords(generateTextureCoords())
                .setNormals(generateNormals())
                .fillBuffer();
    }

    // Two CCW triangles forming the quad (6 vertices, xyz each).
    private float[] generateVertices() {
        return new float[]{
                -0.5f, 0.5f, 0f,
                0.5f, -0.5f, 0f,
                -0.5f, -0.5f, 0f,
                0.5f, -0.5f, 0f,
                -0.5f, 0.5f, 0f,
                0.5f, 0.5f, 0f,
        };
    }

    // All normals point along -Z for every vertex of the quad.
    private float[] generateNormals() {
        return new float[]{
                0.0f, 0.0f, -1.0f,
                0.0f, 0.0f, -1.0f,
                0.0f, 0.0f, -1.0f,
                0.0f, 0.0f, -1.0f,
                0.0f, 0.0f, -1.0f,
                0.0f, 0.0f, -1.0f,
        };
    }

    // UVs matching the vertex order above (one uv pair per vertex).
    private float[] generateTextureCoords() {
        return new float[]{
                1f, 0f,
                0f, 1f,
                1f, 1f,
                0f, 1f,
                1f, 0f,
                0f, 0f,
        };
    }

    /** Scales the model matrix by the given per-axis factor. */
    public Plane scale(Vector3f factor) {
        this.modelMatrix.scale(factor);
        return this;
    }

    /** Translates the model matrix by the given offset. */
    public Plane translate(Vector3f translation) {
        this.modelMatrix.translate(translation);
        return this;
    }

    /** Rotates the model matrix by angle (radians, per JOML) around the axis. */
    public Plane rotate(float angle, Vector3f axis) {
        this.modelMatrix.rotate(angle, axis);
        return this;
    }

    /** Rescales the UVs uniformly (e.g. for tiling textures) and re-uploads them. */
    public Plane setTextureScale(float scale) {
        float[] textureCoords = generateTextureCoords();
        for(int i = 0; i < textureCoords.length; i++) {
            textureCoords[i] *= scale;
        }
        this.vao.setTextureCoords(textureCoords).fillBuffer();
        return this;
    }

    @Override
    public Shader getShader() {
        return shader;
    }

    @Override
    public void render(Shader s, RenderPass currentPass) {
        // Upload the model matrix and its inverse-transpose (for normal
        // transformation under non-uniform scaling), apply the material,
        // then draw the 6-vertex quad.
        s
                .setMat4f("model", modelMatrix)
                .setMat4f("modelNormal", modelMatrix.invert(new Matrix4f()).transpose());
        this.material.apply(s);
        this.vao.bind();
        glDrawArrays(GL_TRIANGLES, 0, 6);
    }
}
|
# Post-install setup script for a Fedora workstation: repositories, shell,
# terminal tools, browsers, media players and developer utilities.

# rpm fusion — enable the free/nonfree repositories.
sudo dnf install https://download1.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm
sudo dnf install https://download1.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$(rpm -E %fedora).noarch.rpm

# UnitedRPMs repository (extra multimedia packages).
sudo rpm --import https://raw.githubusercontent.com/UnitedRPMs/unitedrpms/master/URPMS-GPG-PUBLICKEY-Fedora
sudo dnf -y install https://github.com/UnitedRPMs/unitedrpms/releases/download/19/unitedrpms-$(rpm -E %fedora)-19.fc$(rpm -E %fedora).noarch.rpm

# Base tools; make zsh the login shell.
sudo dnf install terminator unrar zsh git
sudo dnf install htop
chsh -s $(which zsh)

# https://github.com/zsh-users/antigen
# FIX: ~/.plugins must exist before curl can write into it, otherwise the
# redirection fails with "No such file or directory".
mkdir -p ~/.plugins
curl -L https://raw.githubusercontent.com/zsh-users/antigen/master/bin/antigen.zsh > ~/.plugins/antigen.zsh
# NOTE(review): the two rc files below are written to the *current* directory
# (.zshrc / .antigenrc) — confirm the script is meant to be run from $HOME.
echo "# Load Antigen
source ~/.plugins/antigen.zsh
# Load Antigen configurations
antigen init ~/.antigenrc
source ~/.profile" > .zshrc
echo "antigen use oh-my-zsh
# Load bundles from the default repo (oh-my-zsh)
antigen bundle git
antigen bundle command-not-found
antigen bundle docker
# Load bundles from external repos
antigen bundle zsh-users/zsh-completions
antigen bundle zsh-users/zsh-autosuggestions
antigen bundle zsh-users/zsh-syntax-highlighting
# Select theme
antigen theme romkatv/powerlevel10k
# Tell Antigen that you're done
antigen apply
" > .antigenrc
## https://github.com/EliverLara/terminator-themes

# Use 'most' as the pager.
sudo dnf install most
echo 'export PAGER='most'' >> .profile

sudo dnf install gparted flameshot

# Brave browser repository + install.
sudo dnf config-manager --add-repo https://brave-browser-rpm-release.s3.brave.com/x86_64/
sudo rpm --import https://brave-browser-rpm-release.s3.brave.com/brave-core.asc
sudo dnf install brave-browser

# install chrome
sudo dnf install fedora-workstation-repositories
sudo dnf config-manager --set-enabled google-chrome
sudo dnf install google-chrome-stable

sudo dnf install telegram

# skype
sudo curl -o /etc/yum.repos.d/skype-stable.repo https://repo.skype.com/rpm/stable/skype-stable.repo
sudo dnf install skypeforlinux

# Backup tools.
sudo dnf install timeshift
sudo dnf install backintime-gnome

# install cpu-autofreq to manage battery life
# https://github.com/AdnanHodzic/auto-cpufreq
sudo dnf install thermald
# for cpu usuage

# https://github.com/fzerorubigd/persian-fonts-linux
sh -c "$(curl -fsSL https://raw.githubusercontent.com/fzerorubigd/persian-fonts-linux/master/farsifonts.sh)"

# cht.sh command-line cheat-sheet client.
curl https://cht.sh/:cht.sh | sudo tee /usr/local/bin/cht.sh
chmod +x /usr/local/bin/cht.sh

# install tldr ++
curl -OL https://github.com/isacikgoz/tldr/releases/download/v0.5.0/tldr_0.5.0_linux_amd64.tar.gz
sudo tar xvf tldr_0.5.0_linux_amd64.tar.gz -C /usr/bin

# Git configuration: merge tool, identity, credential manager.
sudo dnf install kdiff3
git config --global merge.tool "kdiff3"
git config --global user.name "Amir Pourmand"
git config --global user.email "pourmand1376@gmail.com"
curl -LO https://raw.githubusercontent.com/GitCredentialManager/git-credential-manager/main/src/linux/Packaging.Linux/install-from-source.sh &&
sh ./install-from-source.sh &&
git-credential-manager-core configure
git config --global credential.credentialStore secretservice

# Media players.
sudo dnf install vlc
sudo dnf install smplayer
sudo dnf install smplayer-themes

# Touchpad gestures.
sudo dnf copr enable joseexposito/touchegg
sudo dnf install touchegg
sudo flatpak install touche

# sudo dnf install vokoscreenNG
# I use obs-studio instead

#docker
sudo dnf config-manager \
    --add-repo \
    https://download.docker.com/linux/fedora/docker-ce.repo
sudo dnf install docker-ce docker-ce-cli containerd.io

# https://github.com/agalwood/Motrix/releases/
# download motrix rpm
cd /tmp && wget https://github.com/agalwood/Motrix/releases/download/v1.6.11/Motrix-1.6.11.x86_64.rpm
sudo rpm -i Motrix-1.6.11.x86_64.rpm

# install kite for python syntax help
bash -c "$(wget -q -O - https://linux.kite.com/dls/linux/current)"

# Video Recording software
sudo dnf install obs-studio
# for editing Kdenlive
# for audio -> audacity
|
// Karma configuration for the AngularJS unit-test suite.
// Exported as a function so Karma can inject its `config` object.
module.exports = function(config){
    config.set({
        // All patterns below resolve relative to this directory.
        basePath : './',
        // Load order matters: angular core first, then its plugins,
        // then third-party widgets, then application sources.
        files : [
            'public/bower_components/angular/angular.js',
            'public/bower_components/angular-route/angular-route.js',
            'public/bower_components/angular-sanitize/angular-sanitize.js',
            'public/bower_components/angular-animate/angular-animate.js',
            'public/bower_components/angular-cookies/angular-cookies.js',
            'public/bower_components/angular-bootstrap/ui-bootstrap.js',
            'public/bower_components/angular-bootstrap/ui-bootstrap-tpls.js',
            'public/bower_components/angular-translate/angular-translate.js',
            'public/bower_components/angular-translate-loader-static-files/angular-translate-loader-static-files.js',
            'public/bower_components/angular-translate-loader-partial/angular-translate-loader-partial.js',
            'public/bower_components/textAngular/dist/textAngular-rangy.min.js',
            'public/bower_components/textAngular/dist/textAngular-sanitize.min.js',
            'public/bower_components/textAngular/dist/textAngular.min.js',
            'public/bower_components/angularjs-geolocation/dist/angularjs-geolocation.min.js',
            'public/bower_components/ngmap/build/scripts/ng-map.min.js',
            'public/bower_components/angular-loading-bar/build/loading-bar.min.js',
            'public/bower_components/bootstrap-ui-datetime-picker/dist/datetime-picker.min.js',
            'public/bower_components/ng-sortable/dist/ng-sortable.min.js',
            'public/bower_components/angular-mocks/angular-mocks.js',
            'public/app/**/*.js',
            'public/app/app.js'
        ],
        // Never serve these to the browser.
        exclude: [
            'public/lib/xlsx.full.min.js'
        ],
        // Re-run tests automatically when watched files change.
        autoWatch : true,
        colors: true,
        frameworks: ['jasmine'],
        browsers: ['PhantomJS'],
        //browsers : ['Chrome'],
        //browsers : ['Chrome','PhantomJS'],
        //browsers : ['Chrome','PhantomJS', 'Firefox'],
        plugins : [
            'karma-chrome-launcher',
            'karma-phantomjs-launcher',
            'karma-firefox-launcher',
            'karma-jasmine',
            'karma-junit-reporter',
            'karma-coverage'
        ],
        // preprocess matching files before serving them to the browser
        // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
        preprocessors: {
            // do not include tests or libraries
            // (these files will be instrumented by Istanbul)
            'public/app/controllers/**/*.js': 'coverage',
            'public/app/directives/**/*.js': 'coverage',
            'public/app/filters/**/*.js': 'coverage',
            'public/app/services/**/*.js': 'coverage',
            'public/app/app.js': 'coverage'
        },
        reporters: [
            'progress',
            'junit',
            'coverage', ],
        // configure the reporter
        coverageReporter: {
            reporters: [
                { type: 'html' }
            ]
        },
        // level of logging
        // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
        logLevel: config.LOG_INFO,
        //logLevel: config.LOG_DEBUG,
        //loggers: [{type: 'console'}],
        // JUnit XML output consumed by the CI server.
        junitReporter : {
            outputFile: 'test_out/unit.xml',
            suite: 'unit'
        }
    });
};
|
function fibonacci(n) {
if (n <= 1) return n;
let fnMinus2 = 0;
let fnMinus1 = 1;
let fn = 1;
for (let i = 2; i <= n; i++) {
fn = fnMinus2 + fnMinus1;
fnMinus2 = fnMinus1;
fnMinus1 = fn;
}
return fn;
}
|
#!/bin/bash
# Runs the greedy entropy-based outlier detector on the lymphography data set
# for several k values and reports how many of the rare-class records
# (classes 1 and 4) were found ("coverage").

# Run from the directory containing this script regardless of invocation cwd.
parent_path=$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd -P)
cd "${parent_path}"
EXPERIMENTS_DIR="./experiments"
mkdir -p ${EXPERIMENTS_DIR}
FEATURES_DIR="./data/features/"
mkdir -p ${FEATURES_DIR}
LYMPH_DATA="./data/lymphography.data"
LYMPH_DATA_FEATURES="${FEATURES_DIR}/lymphography.data.features"
# Rare classes we treat as outliers; values of one are ignorable, the map only needs to be non-empty.
declare -a LYMPH_RARE_CLASSES=([1]=0 [4]=0)
echo "$(date -u): Running on lymphography data (${LYMPH_DATA_FEATURES})"
echo "$(date -u): Preprocessing: ignore first column (class)"
echo "$(date -u): Resulting features are located at ${LYMPH_DATA_FEATURES}"
# Strip the class label (column 1) so only features remain.
cut -d, -f1 --complement ${LYMPH_DATA} > ${LYMPH_DATA_FEATURES}
# get total number of rare classes 1 and 4
# (count per-class occurrences, drop rows 2-3 of the sorted counts, sum the rest)
TOTAL_OUTLIERS=$(cut -f1 -d, ${LYMPH_DATA} | sort | uniq -c | sed 's/^ \+//g'\
 | cut -f1 -d ' ' | sed '2,3d' | awk '{ SUM += $1 } END { print SUM }')
for k in 7 15 16 22 30; do
    echo -e "$(date -u): Running with k=${k}"
    ./greedy_outlier_entropy -k ${k} -d ${LYMPH_DATA_FEATURES} > "${EXPERIMENTS_DIR}/result.lymphography.k${k}"
    # compute coverage
    ## read outlier indices into an array
    ## (line 4 of the result file holds "Outliers: i, j, ...")
    outliers=$(sed '4q;d' ${EXPERIMENTS_DIR}/result.lymphography.k${k}\
     | sed 's/Outliers: //'\
     | sed 's/, /\\n/g')
    readarray outlierIndices <<< $(echo -e ${outliers})
    ## compute percentage of covered outliers
    covered=0
    for i in ${outlierIndices[@]}; do
        index=$((${i} + 1)) # output of outlier detection tool is zero-indexed; sed is 1-indexed
        class=$(cut -d, -f1 ${LYMPH_DATA} | sed "${index}q;d")
        if [[ -n "${LYMPH_RARE_CLASSES[${class}]}" ]]; then
            ((++covered))
        fi
    done
    # awk handles the floating-point division bash cannot do.
    coverage=$(awk "BEGIN { print ${covered} / ${TOTAL_OUTLIERS} }")
    echo -e "$(date -u): \tOutlier coverage = ${coverage}. Absolute = ${covered}"
done
echo "$(date -u): Finished lymphography data. Results are located at ${EXPERIMENTS_DIR}/"
|
#! /bin/bash
# Uploads the Elasticsearch cluster definition files to S3, replacing any
# previous copy of each file.

# Destination bucket; must be edited before running.
bucket=_replace_with_your_bucket_name_

for file in od4es.json network.json seed.json data-nodes.json master-nodes.json client-nodes.json; do
    echo "Sending $file"
    # Remove the old object first, then upload the fresh copy.
    # Expansions are quoted so unusual bucket/file names cannot word-split.
    aws s3 rm "s3://$bucket/$file"
    aws s3 cp "$file" "s3://$bucket/$file"
done
|
"use strict";
// NOTE(review): this file appears to be TypeScript compiler output (see the
// __decorate helper below and the sourceMappingURL footer) — prefer editing
// the original app.module.ts rather than this generated artifact.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AppModule = void 0;
const path_1 = require("path");
const common_1 = require("@nestjs/common");
const typeorm_1 = require("@nestjs/typeorm");
const serve_static_1 = require("@nestjs/serve-static");
const auth_module_1 = require("./modules/auth.module");
const acme_module_1 = require("./modules/acme.module");
const users_module_1 = require("./modules/users.module");
const database_module_1 = require("./modules/database.module");
const tls_service_1 = require("./services/tls.service");
const proxy_service_1 = require("./services/proxy.service");
const settings_service_1 = require("./services/settings.service");
const tls_model_1 = require("./models/tls.model");
const proxy_model_1 = require("./models/proxy.model");
const settings_model_1 = require("./models/settings.model");
const tls_controller_1 = require("./controllers/tls.controller");
const response_service_1 = require("./services/response.service");
const magic_proxy_service_1 = require("./services/magic.proxy.service");
const proxy_controller_1 = require("./controllers/proxy.controller");
const settings_controller_1 = require("./controllers/settings.controller");
// Root NestJS module: wires up the feature modules, TypeORM entities,
// static asset serving, and the proxy/settings/TLS services + controllers.
let AppModule = class AppModule {
};
AppModule = __decorate([
    (0, common_1.Module)({
        imports: [
            database_module_1.DatabaseModule,
            users_module_1.UsersModule,
            auth_module_1.AuthModule,
            acme_module_1.AcmeModule,
            typeorm_1.TypeOrmModule.forFeature([proxy_model_1.ProxyModel]),
            typeorm_1.TypeOrmModule.forFeature([settings_model_1.SettingsModel]),
            typeorm_1.TypeOrmModule.forFeature([tls_model_1.TlsModel]),
            serve_static_1.ServeStaticModule.forRoot({
                rootPath: (0, path_1.join)(__dirname, 'assets')
            })
        ],
        providers: [
            proxy_service_1.ProxyService,
            settings_service_1.SettingsService,
            tls_service_1.TlsService,
            magic_proxy_service_1.MagicProxyService,
            response_service_1.ResponseService
        ],
        controllers: [
            proxy_controller_1.ProxyController,
            settings_controller_1.SettingsController,
            tls_controller_1.TlsController
        ],
        exports: [
            proxy_service_1.ProxyService,
            settings_service_1.SettingsService,
            tls_service_1.TlsService,
            magic_proxy_service_1.MagicProxyService,
            response_service_1.ResponseService
        ]
    })
], AppModule);
exports.AppModule = AppModule;
//# sourceMappingURL=app.module.js.map
|
#!/bin/bash
# Fetches, builds and installs MIRP (MolSSI integral reference project)
# together with its dependencies, all relative to the current directory.

# Abort if an error is encountered or on undefined variables
set -eu

# Download MIRP sources
git clone https://github.com/MolSSI/MIRP.git

# Build dependencies in a separate directory
mkdir mirp_deps
cd mirp_deps
bash ../MIRP/build_scripts/build_deps.sh native
cd ../
# mirp_deps directory should now contain
# mirp_deps_native subdirectory

# Now build MIRP (change v0.5 to whichever version you have)
mkdir mirp_build
cd mirp_build
cmake -DCMAKE_PREFIX_PATH="$(pwd)/../mirp_deps/mirp_deps_v0.5_native" \
      -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ \
      -DCMAKE_INSTALL_PREFIX="$(pwd)/../mirp_install" \
      -DCMAKE_BUILD_TYPE=Release \
      ../MIRP
make install
|
// Touches the standard static members of Int16Array; the values are
// discarded — presumably this exists only to exercise/verify that these
// properties are present (e.g. in a type-checking or conformance suite).
var _;
_ = Int16Array.length;
_ = Int16Array.name;
_ = Int16Array.prototype;
_ = Int16Array.BYTES_PER_ELEMENT;
|
/* eslint arrow-body-style: ["error", "as-needed"] */
import { sykepengesoknadstatuser } from '@navikt/digisyfo-npm';
import { selectSoknaderData } from '../../../sykepengesoknad/data/soknader/soknaderSelectors';

const { NY, FREMTIDIG } = sykepengesoknadstatuser;

// Root slice for the sykepengesoknader state.
const selectSlice = state => state.sykepengesoknader;

// Sykepengesoknader that are NOT already present in the soknader slice.
// The id lookup is built once per call; the previous version re-mapped
// all soknader inside filter for every element (O(n·m)).
export const selectSykepengesoknaderData = (state) => {
    const soknadIds = new Set(selectSoknaderData(state).map(s => s.id));
    return selectSlice(state).data.filter(soknad => !soknadIds.has(soknad.id));
};

const selectHenter = state => selectSlice(state).henter;
const selectHentet = state => selectSlice(state).hentet;
const selectHentingFeilet = state => selectSlice(state).hentingFeilet;

// True when every remaining sykepengesoknad has status NY or FREMTIDIG.
export const erForsteSykepengesoknad = (state) => {
    const soknader = selectSykepengesoknaderData(state);
    return soknader
        .map(soknad => soknad.status)
        .filter(status => [NY, FREMTIDIG].includes(status))
        .length === soknader.length;
};

// Fetch only when no request is in flight, none has completed, and none failed.
export const skalHenteSykepengesoknader = state => !selectHenter(state)
    && !selectHentet(state)
    && !selectHentingFeilet(state);
|
<gh_stars>0
module DashboardHelper
  # Takes the hash used by the line graph, whose keys are [status, x] pairs,
  # and relabels status 200 as 'Successful' and anything else as 'Failed',
  # returning a newly constructed hash with the same values.
  #
  # Uses each_with_object instead of abusing #map for its side effects.
  # NOTE: if two distinct failing statuses share the same second key element,
  # the later entry overwrites the earlier one (same as the original).
  def sanitise_hash_for_line_graph(hash)
    hash.each_with_object({}) do |((status, x), value), result|
      label = status == 200 ? 'Successful' : 'Failed'
      result[[label, x]] = value
    end
  end
end
|
#!/usr/bin/env bash
# Prepares a host for Subutai LXC containers: installs ZFS, creates the
# 'subutai' pool on the main-zfs volume and mounts its filesystem at the
# LXC container root.
DEBIAN_FRONTEND=noninteractive apt-get -q -y install zfsutils-linux
/sbin/modprobe zfs
# -f forces creation even if the device carries an old label.
zpool create -f subutai /dev/mapper/main-zfs
zfs create -o mountpoint="/var/lib/lxc" subutai/fs
# Grow the pool automatically when the underlying device grows.
zpool set autoexpand=on subutai
DEBIAN_FRONTEND=noninteractive apt-get -q -y install lxc dirmngr
# Key C6B2AC7FBEB649F1 is needed to verify the Subutai apt repository.
apt-key adv --recv-keys --keyserver keyserver.ubuntu.com C6B2AC7FBEB649F1
|
#!/usr/bin/env python3
import os
import sys
import numpy as np
from .tagcode import tagcode_to_unicode
def samples_from_gnt(f):
    """Yield (bitmap, tagcode) pairs from an open binary .gnt file object.

    Each record begins with a 10-byte header: bytes 0-3 hold the total
    record size (little-endian), bytes 4-5 the tag code (high byte first),
    bytes 6-7 the width and bytes 8-9 the height (both little-endian),
    followed by width*height grayscale pixel bytes.
    """
    HEADER_SIZE = 10
    while True:
        header = np.fromfile(f, dtype='uint8', count=HEADER_SIZE)
        if not header.size:
            break  # end of file: no further records
        sample_size = int.from_bytes(header[0:4].tobytes(), 'little')
        # Tag code is stored with its high byte first.
        tagcode = int.from_bytes(header[4:6].tobytes(), 'big')
        width = int.from_bytes(header[6:8].tobytes(), 'little')
        height = int.from_bytes(header[8:10].tobytes(), 'little')
        # The declared record size must equal header plus pixel payload.
        assert HEADER_SIZE + width * height == sample_size
        bitmap = np.fromfile(f, dtype='uint8', count=width * height).reshape((height, width))
        yield bitmap, tagcode
def read_gnt_in_directory(gnt_dirpath):
    """Yield (bitmap, tagcode) pairs from every .gnt file in a directory."""
    for entry in os.listdir(gnt_dirpath):
        if not entry.endswith('.gnt'):
            continue  # skip anything that is not a .gnt sample file
        with open(os.path.join(gnt_dirpath, entry), 'rb') as gnt_file:
            yield from samples_from_gnt(gnt_file)
def main():
    """CLI entry point: extract every sample in a .gnt file as PNG images.

    Usage: <prog> gntfile outputdir
    Each sample is written as '<character> <index>.png' in outputdir.
    """
    import png  # third-party 'pypng'; imported lazily so library users don't need it
    if len(sys.argv) != 3:
        print("usage: {} gntfile outputdir".format(sys.argv[0]))
        # Previously execution fell through and crashed on the unpack below.
        sys.exit(1)
    _, gntfile, outputdir = sys.argv
    os.makedirs(outputdir, exist_ok=True)
    # .gnt files are binary; text mode would corrupt np.fromfile reads.
    with open(gntfile, 'rb') as f:
        for i, (bitmap, tagcode) in enumerate(samples_from_gnt(f)):
            character = tagcode_to_unicode(tagcode)
            png.from_array(bitmap, 'L').save(
                os.path.join(outputdir, '{} {}.png'.format(character, i)))

if __name__ == "__main__":
    main()
|
#!/bin/csh
# Preprocesses seismic waveform data (EWF) for one earthquake and phase:
# reads parameters from INFILE_<PHASE>, prepares the data, extracts polarity
# information, then builds and runs the EWM C program.

# Positional arguments.
set EQ = $1          # earthquake identifier
set DATADIR = $2     # root data directory
set PLOTDIR = $3     # plot output directory
set C_DIR = $4       # location of the C sources (EWM)
set SHELL_DIR = $5   # location of the helper shell scripts
set PHASE = $6       # seismic phase (e.g. S, Sdiff, P, Pdiff)
set work_dir = $DATADIR/${EQ}
set SRCDIR = $SHELL_DIR
set script_name = c01.preprocess_of_${PHASE}_for_$EQ
echo "---> preprocess for EWF for $EQ PHASE:$PHASE "
set INFILE = $work_dir/INFILE_${PHASE}
set event_file = eventStation.$EQ
# NOTE(review): PHASE is re-read from INFILE here, overriding argument $6 —
# confirm the two are always consistent.
set PHASE = `grep -w PHASE $INFILE |awk '{print $2}'`
set COMP = `grep -w COMP $INFILE |awk '{print $2}'`
set polarity_file = eventinfo.polarity.${PHASE}.${COMP}
# Window parameters from INFILE.
set LONG_BEG = `grep -w LONG_BEG $INFILE |awk '{print $2}'`
set LONG_LEN = `grep -w LONG_LEN $INFILE |awk '{print $2}'`
set PHASE_BEG = `grep -w PHASE_BEG $INFILE |awk '{print $2}'`
set PHASE_LEN = `grep -w PHASE_LEN $INFILE |awk '{print $2}'`
set NOISE_BEG = `grep -w NOISE_BEG $INFILE |awk '{print $2}'`
# NOTE(review): NOISE_LEN greps for PHASE_LEN, not NOISE_LEN — confirm the
# noise window is deliberately the same length as the phase window.
set NOISE_LEN = `grep -w PHASE_LEN $INFILE |awk '{print $2}'`
set DISTMIN = `grep -w DIST_MIN_MAX $INFILE |awk '{print $2}'`
# for Sdiff get min dist
if($PHASE == "Sdiff" && $DISTMIN == "non") then
    set DISTMIN = `csh $SHELL_DIR/c06.get_S_Sdiff_boundary_distance.sh $EQ $work_dir $PHASE $SRCDIR|awk '{print $2+1}'`
    echo "---> Sdiff min distance is $DISTMIN"
endif
if($PHASE == "Pdiff" && $DISTMIN == "non") then
    set DISTMIN = `csh $SHELL_DIR/c06.get_S_Sdiff_boundary_distance.sh $EQ $work_dir $PHASE $SRCDIR|awk '{print $2+1}'`
    echo "---> Pdiff min distance is $DISTMIN"
endif
set DISTMAX = `grep -w DIST_MIN_MAX $INFILE |awk '{print $3}'`
# for S get max dist
if($PHASE == "S" && $DISTMAX == "non") then
    set DISTMAX = `csh $SHELL_DIR/c06.get_S_Sdiff_boundary_distance.sh $EQ $work_dir $PHASE $SRCDIR|awk '{print $2}'`
    echo "---> S max distance is $DISTMAX"
endif
if($PHASE == "P" && $DISTMAX == "non") then
    set DISTMAX = `csh $SHELL_DIR/c06.get_S_Sdiff_boundary_distance.sh $EQ $work_dir $PHASE $SRCDIR|awk '{print $2}'`
    echo "---> P max distance is $DISTMAX"
endif
# Processing options and thresholds.
set velocity_or_displacement = `grep -w velocity_or_displacement $INFILE |awk 'NR==1 {print $2}'`
set SNR_CUT = `grep -w SNR_CUT $INFILE | awk 'NR==1 {print $2}'`
set CCC_CUT = `grep -w CCC_CUT $INFILE | awk 'NR==1 {print $2}'`
set filter_flag = `grep filter_flag $INFILE | awk '{print $2}'`
set DELTA = `grep -w DELTA $work_dir/INFILE | awk 'NR==1 {print $2}'`
set Reprocessing_Flag = `grep Reprocessing_Flag $work_dir/INFILE | awk 'NR==1 {print $2}'`
# ===================================================
# copy data from eq2 to local server
# ===================================================
set INPUT = ( $EQ $DATADIR $work_dir $PLOTDIR $SRCDIR)
set c0_INPUT = ( $INPUT $DISTMIN $DISTMAX $PHASE $COMP $filter_flag )
date
# P phases use a dedicated preparation script.
if( `echo $PHASE |grep P` == "") then
    csh $SRCDIR/c0.prepare_data.sh $c0_INPUT
else
    csh $SRCDIR/c0.prepare_data_for_P.sh $c0_INPUT
endif
date
# ===================================================
# work on polarity info
# ===================================================
set INPUT = ( $EQ $DATADIR $PLOTDIR $C_DIR $SHELL_DIR )
set c15_INPUT = ($INPUT $DISTMIN $DISTMAX $PHASE $COMP $filter_flag )
# for postprocessing, we use polarity flag from picks
csh $SRCDIR/c15.get_polarity.sh $c15_INPUT
date
# ===================================================
# work on c code and go for it
# ===================================================
# Build the EWM binary once and copy it into the work directory.
if(! -e $work_dir/EWM ) then
    cd $C_DIR/lib
    echo "we are current making source"
    make clean > & /dev/null
    make > & /dev/null
    cp EWM $work_dir/
endif
echo "makign source done"
cd $work_dir
set c_input = $work_dir/input.c
echo $EQ $PHASE $DELTA > ! $c_input
date
set c_logfile = $work_dir/logfile.c
./EWM $c_input >& $c_logfile
date
exit 0
|
#!/usr/bin/env bash
# GitHub Action entry point: runs vip-go-ci PHPCS + lint against the pull
# request commit, posting results back via the rtBot token.
cd $GITHUB_WORKSPACE

# The PR head commit to scan, taken from the event payload.
COMMIT_ID=$(cat $GITHUB_EVENT_PATH | jq -r '.pull_request.head.sha')
echo "COMMIT ID: $COMMIT_ID"

# Allow authors to opt out of scanning via a marker in the PR body.
PR_BODY=$(cat "$GITHUB_EVENT_PATH" | jq -r .pull_request.body)
if [[ "$PR_BODY" == *"[do-not-scan]"* ]]; then
  echo "[do-not-scan] found in PR description. Skipping PHPCS scan."
  exit 0
fi

# NOTE(review): 'stars' is never used below — confirm it can be removed
# (printf "%-30s" "*" also yields one star padded to 30 columns, not 30 stars).
stars=$(printf "%-30s" "*")
export RTBOT_WORKSPACE="/home/rtbot/github-workspace"
hosts_file="$GITHUB_WORKSPACE/.github/hosts.yml"

# Delete all the folders to be skipped to ignore them from being scanned.
if [[ -n "$SKIP_FOLDERS" ]]; then
  folders=(${SKIP_FOLDERS//,/ })
  for folder in ${folders[@]}; do
    path_of_folder="$GITHUB_WORKSPACE/$folder"
    [[ -d "$path_of_folder" ]] && rm -rf $path_of_folder
  done
fi

# Copy the workspace and the vip-go-ci tooling into the rtbot home.
rsync -a "$GITHUB_WORKSPACE/" "$RTBOT_WORKSPACE"
rsync -a /root/vip-go-ci-tools/ /home/rtbot/vip-go-ci-tools
chown -R rtbot:rtbot /home/rtbot/

# Split "owner/repo" into its two parts.
GITHUB_REPO_NAME=${GITHUB_REPOSITORY##*/}
GITHUB_REPO_OWNER=${GITHUB_REPOSITORY%%/*}

# Fetch the bot token from Vault when Vault credentials are available.
if [[ -n "$VAULT_GITHUB_TOKEN" ]] || [[ -n "$VAULT_TOKEN" ]]; then
  export GH_BOT_TOKEN=$(vault read -field=token secret/rtBot-token)
fi

# Pick the PHPCS standard: a config file in the repo wins, then the action's
# first argument, then the 'WordPress' default.
phpcs_standard=''
defaultFiles=(
  '.phpcs.xml'
  'phpcs.xml'
  '.phpcs.xml.dist'
  'phpcs.xml.dist'
)
phpcsfilefound=1
for phpcsfile in "${defaultFiles[@]}"; do
  if [[ -f "$RTBOT_WORKSPACE/$phpcsfile" ]]; then
    phpcs_standard="--phpcs-standard=$RTBOT_WORKSPACE/$phpcsfile"
    phpcsfilefound=0
  fi
done
if [[ $phpcsfilefound -ne 0 ]]; then
  if [[ -n "$1" ]]; then
    phpcs_standard="--phpcs-standard=$1"
  else
    phpcs_standard="--phpcs-standard=WordPress"
  fi
fi
[[ -z "$SKIP_FOLDERS" ]] && skip_folders_option='' || skip_folders_option="--skip-folders='$SKIP_FOLDERS'"
/usr/games/cowsay "Hello world"
/usr/games/cowsay "Running with the flag $phpcs_standard"
echo "Running the following command"
echo "/home/rtbot/vip-go-ci-tools/vip-go-ci/vip-go-ci.php --repo-owner=$GITHUB_REPO_OWNER --repo-name=$GITHUB_REPO_NAME --commit=$COMMIT_ID --token=\$GH_BOT_TOKEN --phpcs-path=/home/rtbot/vip-go-ci-tools/phpcs/bin/phpcs --local-git-repo=/home/rtbot/github-workspace --phpcs=true $phpcs_standard $skip_folders_option --lint=true"
# Run the scan as the unprivileged rtbot user.
gosu rtbot bash -c "/home/rtbot/vip-go-ci-tools/vip-go-ci/vip-go-ci.php --repo-owner=$GITHUB_REPO_OWNER --repo-name=$GITHUB_REPO_NAME --commit=$COMMIT_ID --token=$GH_BOT_TOKEN --phpcs-path=/home/rtbot/vip-go-ci-tools/phpcs/bin/phpcs --local-git-repo=/home/rtbot/github-workspace --phpcs=true $phpcs_standard $skip_folders_option --lint=true"
|
#!/bin/bash
# Prints every element of every HOSTS_* array defined in etc/hosts.cfg.
# The variable names are discovered by grepping the config file itself,
# then expanded indirectly via eval.
source etc/hosts.cfg
for var in $(grep -o "HOSTS_.*=" etc/hosts.cfg|sed 's/=//g'); do
    # qtd = number of elements in the array named by $var.
    eval qtd="\${#${var}[@]}"
    for((i=0;$i<${qtd};i++)); do
        # Print element i of the indirectly-referenced array.
        eval echo \${${var}[$i]}
    done
done
|
from .CFactory import CFactory
from .CNetwork import CNetwork
|
"use strict";
// Icon definition module (appears to be generated output — the structure
// matches an SVG converted to a data object: viewBox plus path children).
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.superscript2 = void 0;
// 'superscript 2' icon: an 'A' glyph with a raised exponent mark.
var superscript2 = {
  "viewBox": "0 0 16 16",
  "children": [{
    "name": "path",
    "attribs": {
      "fill": "#000000",
      "d": "M3.032 13l0.9-3h4.137l0.9 3h1.775l-3-10h-3.488l-3 10h1.776zM5.432 5h1.137l0.9 3h-2.937l0.9-3zM11 13l2.5-4 2.5 4h-5z"
    }
  }, {
    "name": "path",
    "attribs": {
      "fill": "#000000",
      "d": "M13.5 2h-1c-0.276 0-0.5-0.224-0.5-0.5s0.224-0.5 0.5-0.5h2c0.276 0 0.5-0.224 0.5-0.5s-0.224-0.5-0.5-0.5h-2c-0.827 0-1.5 0.673-1.5 1.5 0 0.384 0.145 0.734 0.383 1 0.275 0.307 0.674 0.5 1.117 0.5h1c0.276 0 0.5 0.224 0.5 0.5s-0.224 0.5-0.5 0.5h-2c-0.276 0-0.5 0.224-0.5 0.5s0.224 0.5 0.5 0.5h2c0.827 0 1.5-0.673 1.5-1.5 0-0.384-0.145-0.734-0.383-1-0.275-0.307-0.674-0.5-1.117-0.5z"
    }
  }]
};
exports.superscript2 = superscript2;
|
class Location < ActiveRecord::Base
  has_many :variants, inverse_of: :location

  # Orders locations by chromosome, then by start position.
  #
  # Attempt to cast both chromosomes to integers: if either fails, that one
  # sorts after the numeric chromosomes; otherwise compare int to int or
  # string to string. Ties are broken by the start coordinate.
  def <=>(other)
    cmp = Location.chromosome_sort_val(
      self.chromosome,
      other.chromosome
    )
    cmp.zero? ? self.start <=> other.start : cmp
  end

  # Compares two chromosome labels: numeric labels sort before non-numeric
  # ones; like types compare directly.
  #
  # NOTE: a bare `private` has no effect on `def self.` methods, so the
  # `private` keyword that used to precede this definition was a no-op and
  # has been removed. The method must stay publicly callable anyway, since
  # #<=> above invokes it with an explicit `Location.` receiver.
  def self.chromosome_sort_val(a, b)
    chr1 = Integer(a) rescue a
    chr2 = Integer(b) rescue b
    if chr1.class == chr2.class
      chr1 <=> chr2
    elsif chr1.is_a?(String)
      1
    else
      -1
    end
  end
end
|
#!/bin/bash
# (c) Artur.Klauser@computer.org
#
# This script installs support for building multi-architecture docker images
# with docker buildx on CI/CD pipelines like Github Actions or Travis. It is
# assumed that you start of with a fresh VM every time you run this and have to
# install everything necessary to support 'docker buildx build' from scratch.
#
# Example usage in Travis stage:
#
# jobs:
# include:
# - stage: Deploy docker image
# script:
# - source ./multi-arch-docker-ci.sh
# - set -ex; build_ci_images::main; set +x
#
# Platforms: linux/amd64, linux/arm64, linux/riscv64, linux/ppc64le,
# linux/s390x, linux/386, linux/arm/v7, linux/arm/v6
# More information about Linux environment constraints can be found at:
# https://nexus.eddiesinentropy.net/2020/01/12/Building-Multi-architecture-Docker-Images-With-Buildx/
# Setup ci environment
# Normalize a dotted version string into a zero-padded, lexically sortable
# number, e.g. "4.8" -> "0408" and "4.19" -> "0419". printf errors on
# non-numeric input are suppressed.
function _version() {
  printf '%02d' $(printf '%s\n' "$1" | tr . ' ' | sed -e 's/ 0*/ /g') 2>/dev/null
}
# Installs an up-to-date docker with buildx support on a fresh CI VM,
# enables experimental features, registers QEMU for foreign architectures
# and creates a multi-arch buildx builder. Requires kernel >= 4.8.
function setup_ci_environment::install_docker_buildx() {
  # Check kernel version.
  local -r kernel_version="$(uname -r)"
  if [[ "$(_version "$kernel_version")" < "$(_version '4.8')" ]]; then
    echo "Kernel $kernel_version too old - need >= 4.8."
    exit 1
  fi
  ## Install up-to-date version of docker, with buildx support.
  local -r docker_apt_repo='https://download.docker.com/linux/ubuntu'
  curl -fsSL "${docker_apt_repo}/gpg" | sudo apt-key add -
  local -r os="$(lsb_release -cs)"
  sudo add-apt-repository "deb [arch=amd64] $docker_apt_repo $os stable"
  sudo apt-get update
  sudo apt-get -y -o Dpkg::Options::="--force-confnew" install docker-ce
  # Enable docker daemon experimental support (for 'docker pull --platform').
  local -r config='/etc/docker/daemon.json'
  if [[ -e "$config" ]]; then
    # Config exists: splice the experimental flag into the existing JSON.
    sudo sed -i -e 's/{/{ "experimental": true, /' "$config"
  else
    echo '{ "experimental": true }' | sudo tee "$config"
  fi
  sudo systemctl restart docker
  # Install QEMU multi-architecture support for docker buildx.
  docker run --rm --privileged multiarch/qemu-user-static --reset -p yes
  # Enable docker CLI experimental support (for 'docker buildx').
  export DOCKER_CLI_EXPERIMENTAL=enabled
  # Instantiate docker buildx builder with multi-architecture support.
  docker buildx create --name mybuilder
  docker buildx use mybuilder
  # Start up buildx and verify that all is OK.
  docker buildx inspect --bootstrap
}
# Log in to Docker Hub for deployment.
# Env:
# DOCKER_USERNAME ... user name of Docker Hub account
# DOCKER_PASSWORD ... password of Docker Hub account
# Logs in to Docker Hub using the DOCKER_USERNAME / DOCKER_PASSWORD
# environment variables; the password is passed via stdin so it never
# appears on a command line.
function setup_ci_environment::login_to_docker_hub() {
  echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
}
# Run buildx build and push.
# Env:
# DOCKER_PLATFORMS ... space separated list of Docker platforms to build.
# Args:
# Optional additional arguments for 'docker buildx build'.
# Builds and pushes a multi-arch image with 'docker buildx build'.
# DOCKER_PLATFORMS (space separated) is converted to buildx's comma form.
# Any extra arguments (e.g. -t tags) are forwarded to buildx.
function build_ci_images::buildx() {
  docker buildx build \
    --platform "${DOCKER_PLATFORMS// /,}" \
    --push \
    --progress plain \
    -f Dockerfile.multi-arch \
    "$@" \
    .
}
# Build and push docker images for all tags.
# Env:
# DOCKER_PLATFORMS ... space separated list of Docker platforms to build.
# DOCKER_BASE ........ docker image base name to build
# TAGS ............... space separated list of docker image tags to build.
# Builds and pushes one multi-arch image per tag in $TAGS (space separated;
# intentionally unquoted so the shell splits it into individual tags).
function build_ci_images::build_and_push_all() {
  for tag in $TAGS; do
    build_ci_images::buildx -t "$DOCKER_BASE:$tag"
  done
}
# Test all pushed docker images.
# Env:
# DOCKER_PLATFORMS ... space separated list of Docker platforms to test.
# DOCKER_BASE ........ docker image base name to test
# TAGS ............... space separated list of docker image tags to test.
# Pulls every pushed tag for every platform and prints the architecture
# reported inside the container, as a smoke test of the multi-arch push.
function build_ci_images::test_all() {
  for platform in $DOCKER_PLATFORMS; do
    for tag in $TAGS; do
      image="${DOCKER_BASE}:${tag}"
      msg="Testing docker image $image on platform $platform"
      # Underline the message with a matching-length separator.
      line="${msg//?/=}"
      printf '\n%s\n%s\n%s\n' "${line}" "${msg}" "${line}"
      docker pull -q --platform "$platform" "$image"
      echo -n "Image architecture: "
      docker run --rm --entrypoint /bin/sh "$image" -c 'uname -m'
      # Run test on the built image.
      #docker run --rm --entrypoint [] "$image" command yarn version
    done
  done
}
# Setup ci environment
# Setup ci environment: stage the Dockerfile used for multi-arch builds,
# then install buildx and authenticate against Docker Hub.
function setup_ci_environment::main() {
  cp Dockerfile Dockerfile.multi-arch
  setup_ci_environment::install_docker_buildx
  setup_ci_environment::login_to_docker_hub
}
# Build images
# Build images: derive the image base name from the Travis repo slug,
# then build/push all tags and smoke-test the results.
function build_ci_images::main() {
  # Set platforms to build.
  export DOCKER_BASE=${DOCKER_REGISTRY}/${TRAVIS_REPO_SLUG#*/}
  # NOTE(review): DOCKER_SLUG is never assigned in this script — this echo
  # likely meant ${DOCKER_BASE}; confirm before changing.
  echo ${DOCKER_SLUG}
  build_ci_images::build_and_push_all
  build_ci_images::test_all
}
|
#! /usr/bin/env bats
#
# Author: Bert Van Vreckem <bert.vanvreckem@gmail.com>
#
# Test a Vsftpd server
#
# Variables
#
sut_ip=172.16.0.11      # IP of the System Under Test
admin_user=hilmi        # User with admin privileges
admin_password=hilmiemrebayat
testfile="tst${RANDOM}" # Unique scratch name used by the write-access probes
# Useful return codes
# FTP
ftp_pathname_created=257
# curl
curl_ok=0
curl_err_access_denied=9
# NOTE(review): identical to curl_err_access_denied above — curl documents a
# distinct exit code for a failed RETR; confirm the intended value.
curl_err_retr_failed=9
curl_err_failed_to_log_in=67
curl_err_resource_doesnt_exist=78
#
# {{{Helper functions
# Check that a user has read acces to a share
# Usage: assert_read_access SHARE USER PASSWORD
# Check that a user has read access to a share: listing the share's
# directory over FTP must succeed (curl exit code 0).
# Usage: assert_read_access SHARE USER PASSWORD
assert_read_access() {
  local share=${1}
  local user=${2}
  local password=${3}
  # Credentials are quoted so passwords containing spaces don't word-split.
  run curl "ftp://${sut_ip}/${share}/" --user "${user}:${password}"
  [ "${curl_ok}" -eq "${status}" ]
}
# Check that a user has NO read access to a share
# Usage: assert_no_read_access SHARE USER PASSWORD
# Check that a user has NO read access to a share: listing must fail with
# curl's "access denied" exit code.
# Usage: assert_no_read_access SHARE USER PASSWORD
assert_no_read_access() {
  local share=${1}
  local user=${2}
  local password=${3}
  # Credentials are quoted so passwords containing spaces don't word-split.
  run curl "ftp://${sut_ip}/${share}/" --user "${user}:${password}"
  [ "${curl_err_access_denied}" -eq "${status}" ]
}
# Check that a user has write access to a share.
# Usage: assert_write_access SHARE USER PASSWORD
# Check that a user has write access to a share: create a scratch directory
# via FTP MKD, verify the "pathname created" (257) reply, then remove it.
# NOTE(review): the grep looks for "RETR response" although the command was
# MKD — confirm this matches curl's actual verbose output. The status of the
# cleanup RMD request is intentionally not asserted.
# Usage: assert_write_access SHARE USER PASSWORD
assert_write_access() {
  local share=${1}
  local user=${2}
  local password=${3}
  run curl "ftp://${sut_ip}/${share}/" \
    --request "MKD ${testfile}" \
    --user ${user}:${password}
  echo "${output}" | grep "RETR response: ${ftp_pathname_created}"
  run curl "ftp://${sut_ip}/${share}/" \
    --request "RMD ${testfile}" \
    --user ${user}:${password}
}
# Check that a user has NO write access to a share.
# Writing can be blocked in (at least) two ways:
# - the USER has no read access => curl gives an "access denied" error
# - the USER has read acces, but can't write => curl gives a "RETR failed"
# error with an FTP error code denoting "file unavailable"
# Usage: assert_no_write_access USER SHARE
# Check that a user has NO write access to a share.
# Writing can be blocked in (at least) two ways:
# - the USER has no read access => curl gives an "access denied" error
# - the USER has read access, but can't write => curl gives a "RETR failed"
#   error with an FTP error code denoting "file unavailable"
# NOTE(review): ${ftp_file_unavailable} is never defined in this file, so the
# grep below matches anything — define it (FTP reply 550?) or the second
# branch is vacuous.
# Usage: assert_no_write_access SHARE USER PASSWORD
assert_no_write_access() {
  local share=${1}
  local user=${2}
  local password=${3}
  run curl "ftp://${sut_ip}/${share}/" \
    --request "MKD ${testfile}" \
    --user ${user}:${password}
  if [ "${curl_err_access_denied}" -eq "${status}" ]; then
    # user has no read access
    return 0
  elif [ "${curl_err_retr_failed}" -eq "${status}" ]; then
    # user can read, but has no write access
    echo ${output} | grep "${ftp_file_unavailable}"
  fi
}
# }}}
#
# Tests
#
# Preliminaries
# Sanity checks: the FTP daemon must be active, enabled, and curl available.
@test 'VSFTPD service should be running' {
  sudo systemctl status vsftpd.service
}
@test 'VSFTPD service should be enabled at boot' {
  sudo systemctl is-enabled vsftpd.service
}
@test 'The ’curl’ command should be installed' {
  which curl
}
@test 'The SELinux status should be ‘enforcing’' {
  # The pipe must run inside the command substitution. Previously
  # "| grep 'enforcing'" was part of the quoted string, so the argument to
  # -n was always non-empty and the test could never fail.
  [ -n "$(sestatus | grep 'enforcing')" ]
}
# The firewalld public zone must list the ftp service.
@test 'FTP traffic should pass through the firewall' {
  firewall-cmd --list-all | grep 'services.*ftp\b'
}
# Configuration
# Configuration
# vsftpd prints diagnostics for a bad config; an empty output means OK.
@test 'VSFTPD configuration should be syntactically correct' {
  # skip # slow test
  run sudo vsftpd -olisten=NO /etc/vsftpd/vsftpd.conf
  [ -z "${output}" ]
}
# Anonymous logins must be rejected outright.
@test 'Anonymous user should not be able to see shares' {
  # skip #slow test
  run curl ftp://${sut_ip}/
  [ "${curl_err_failed_to_log_in}" -eq "${status}" ]
}
# Read/write access
@test 'read access for share ‘public’' {
    # All regular users (password == login) plus the admin account can read
    # the public share.
    local user
    for user in alexanderd anc benoitp christophev elenaa evyt krisv \
                leend nehirb stefaanv stevenh stevenv svena; do
        assert_read_access public "${user}" "${user}"
    done
    assert_read_access public ${admin_user} ${admin_password}
}
@test 'write access for share ‘public’' {
    # All regular users (password == login) plus the admin account can write
    # to the public share.
    local user
    for user in alexanderd anc benoitp christophev elenaa evyt krisv \
                leend nehirb stefaanv stevenh stevenv svena; do
        assert_write_access public "${user}" "${user}"
    done
    assert_write_access public ${admin_user} ${admin_password}
}
@test 'read access for share ‘management’' {
    # Only these users may read the management share; everyone else,
    # including the admin account, must be denied.
    local user
    for user in elenaa krisv stevenh; do
        assert_read_access management "${user}" "${user}"
    done
    for user in alexanderd anc benoitp christophev evyt leend nehirb \
                stefaanv stevenv svena; do
        assert_no_read_access management "${user}" "${user}"
    done
    assert_no_read_access management ${admin_user} ${admin_password}
}
@test 'write access for share ‘management’' {
    # Only these users may write to the management share; everyone else,
    # including the admin account, must be denied.
    local user
    for user in elenaa krisv stevenh; do
        assert_write_access management "${user}" "${user}"
    done
    for user in alexanderd anc benoitp christophev evyt leend nehirb \
                stefaanv stevenv svena; do
        assert_no_write_access management "${user}" "${user}"
    done
    assert_no_write_access management ${admin_user} ${admin_password}
}
@test 'read access for share ‘technical’' {
    # All regular users plus the admin account can read the technical share.
    local user
    for user in alexanderd anc benoitp christophev elenaa evyt krisv \
                leend nehirb stefaanv stevenh stevenv svena; do
        assert_read_access technical "${user}" "${user}"
    done
    assert_read_access technical ${admin_user} ${admin_password}
}
@test 'write access for share ‘technical’' {
    # Only these users may write to the technical share; everyone else,
    # including the admin account, must be denied.
    local user
    for user in alexanderd anc evyt leend stefaanv stevenv; do
        assert_write_access technical "${user}" "${user}"
    done
    for user in benoitp christophev elenaa krisv nehirb stevenh svena; do
        assert_no_write_access technical "${user}" "${user}"
    done
    assert_no_write_access technical ${admin_user} ${admin_password}
}
@test 'read access for share ‘sales’' {
    # Share User Password
    # NOTE(review): unlike every other share test in this file, there is no
    # assertion for user alexanderd here -- confirm whether that omission is
    # intentional and, if not, which assertion (read / no-read) applies.
    assert_no_read_access sales anc anc
    assert_read_access sales benoitp benoitp
    assert_no_read_access sales christophev christophev
    assert_read_access sales elenaa elenaa
    assert_no_read_access sales evyt evyt
    assert_read_access sales krisv krisv
    assert_no_read_access sales leend leend
    assert_no_read_access sales nehirb nehirb
    assert_no_read_access sales stefaanv stefaanv
    assert_read_access sales stevenh stevenh
    assert_no_read_access sales stevenv stevenv
    assert_read_access sales svena svena
    assert_no_read_access sales ${admin_user} ${admin_password}
}
@test 'write access for share ‘sales’' {
    # Only these users may write to the sales share; everyone else,
    # including the admin account, must be denied.
    local user
    for user in benoitp svena; do
        assert_write_access sales "${user}" "${user}"
    done
    for user in alexanderd anc christophev elenaa evyt krisv leend \
                nehirb stefaanv stevenh stevenv; do
        assert_no_write_access sales "${user}" "${user}"
    done
    assert_no_write_access sales ${admin_user} ${admin_password}
}
@test 'read access for share ‘it’' {
    # Only these users plus the admin account may read the it share;
    # everyone else must be denied.
    local user
    for user in christophev elenaa krisv nehirb stevenh; do
        assert_read_access it "${user}" "${user}"
    done
    for user in alexanderd anc benoitp evyt leend stefaanv stevenv svena; do
        assert_no_read_access it "${user}" "${user}"
    done
    assert_read_access it ${admin_user} ${admin_password}
}
@test 'write access for share ‘it’' {
    # Only these users plus the admin account may write to the it share;
    # everyone else must be denied.
    local user
    for user in christophev nehirb; do
        assert_write_access it "${user}" "${user}"
    done
    for user in alexanderd anc benoitp elenaa evyt krisv leend stefaanv \
                stevenh stevenv svena; do
        assert_no_write_access it "${user}" "${user}"
    done
    assert_write_access it ${admin_user} ${admin_password}
}