text stringlengths 1 1.05M |
|---|
<reponame>chylex/Hardcore-Ender-Expansion
package chylex.hee.test;
import java.util.Collection;
import chylex.hee.system.util.MathUtil;
import com.google.common.base.Objects;
/**
 * Minimal assertion helper used by the mod's self-tests.
 *
 * <p>Every check reports its outcome through two globally configurable
 * callbacks: {@link #onSuccess} (a no-op by default) and {@link #onFail}
 * (rethrows the {@link IllegalStateException} by default). The callbacks are
 * static, so a test harness installs them once and all assertions in the
 * process report through them.
 */
public final class Assert{
    /** Invoked after every passing assertion. */
    @FunctionalInterface
    public static interface AssertionSuccess{
        void call();
    }

    /** Invoked with the failure exception after every failing assertion. */
    @FunctionalInterface
    public static interface AssertionFailure{
        void call(IllegalStateException ex);
    }

    // Defaults: passing checks are silent, failing checks throw.
    private static AssertionSuccess onSuccess = () -> {};
    private static AssertionFailure onFail = (ex) -> { throw ex; };

    /** Replaces the callback run after each passing assertion. */
    public static void setSuccessCallback(AssertionSuccess callback){
        Assert.onSuccess = callback;
    }

    /** Replaces the callback run after each failing assertion. */
    public static void setFailCallback(AssertionFailure callback){
        Assert.onFail = callback;
    }

    /**
     * Always fails.
     */
    public static void fail(){
        fail("Triggered failure.");
    }

    /**
     * Always fails.
     */
    public static void fail(String message){
        onFail.call(new IllegalStateException(message));
    }

    /**
     * Fails if the provided value is not true.
     */
    public static void isTrue(boolean value){
        isTrue(value, "Expected value to be true.");
    }

    /**
     * Fails if the provided value is not true.
     */
    public static void isTrue(boolean value, String message){
        if (value)onSuccess.call();
        else onFail.call(new IllegalStateException(message));
    }

    /**
     * Fails if the provided value is not false.
     */
    public static void isFalse(boolean value){
        isFalse(value, "Expected value to be false.");
    }

    /**
     * Fails if the provided value is not false.
     */
    public static void isFalse(boolean value, String message){
        if (!value)onSuccess.call();
        else onFail.call(new IllegalStateException(message));
    }

    /**
     * Fails if the provided value is not null.
     */
    public static void isNull(Object value){
        isNull(value, "Expected value to be null, got $ instead.");
    }

    /**
     * Fails if the provided value is not null.
     * Use $ to put the string value of the object into the message.
     */
    public static void isNull(Object value, String message){
        if (value == null)onSuccess.call();
        else onFail.call(new IllegalStateException(message.replace("$", value.toString())));
    }

    /**
     * Fails if the provided value is null.
     */
    public static void notNull(Object value){
        notNull(value, "Expected value to not be null.");
    }

    /**
     * Fails if the provided value is null.
     */
    public static void notNull(Object value, String message){
        if (value != null)onSuccess.call();
        else onFail.call(new IllegalStateException(message));
    }

    /**
     * Fails if the value is not an instance of targetClass.
     */
    public static void instanceOf(Object value, Class<?> targetClass){
        instanceOf(value, targetClass, "Expected value to be instance of $2, instead the class is $1.");
    }

    /**
     * Fails if the value is not an instance of targetClass.
     * Use $1 to put the current object class into the message, and $2 for the target class.
     * Subclasses of targetClass pass (isAssignableFrom); a null value always fails.
     */
    public static void instanceOf(Object value, Class<?> targetClass, String message){
        if (value != null && targetClass.isAssignableFrom(value.getClass()))onSuccess.call();
        else onFail.call(new IllegalStateException(message.replace("$1", value == null ? "<null>" : value.getClass().getName()).replace("$2", targetClass.getName())));
    }

    /**
     * Fails if the value is not present in the collection.
     */
    public static void contains(Collection<?> collection, Object value){
        contains(collection, value, "Expected object $1 to be present in the collection. Collection contents: $2");
    }

    /**
     * Fails if the value is not present in the collection.
     * Use $1 to substitute the value and $2 to substitute the collection contents in the message.
     * A null collection fails rather than throwing.
     */
    public static void contains(Collection<?> collection, Object value, String message){
        if (collection != null && collection.contains(value))onSuccess.call();
        else onFail.call(new IllegalStateException(message.replace("$1", value == null ? "<null>" : value.toString()).replace("$2", collection == null ? "<null>" : collection.toString())));
    }

    /**
     * Fails if the value is present in the collection, or if the collection is null.
     */
    public static void notContains(Collection<?> collection, Object value){
        notContains(collection, value, "Expected object $1 to not be present in the collection. Collection contents: $2");
    }

    /**
     * Fails if the value is not present in the collection, or if the collection is null.
     * Use $1 to substitute the value and $2 to substitute the collection contents in the message.
     * Note: a null collection actually passes here (see the condition below); the javadoc
     * above is the original wording -- NOTE(review): doc and behavior disagree, confirm intent.
     */
    public static void notContains(Collection<?> collection, Object value, String message){
        if (collection == null || !collection.contains(value))onSuccess.call();
        else onFail.call(new IllegalStateException(message.replace("$1", value == null ? "<null>" : value.toString()).replace("$2", collection.toString())));
    }

    /**
     * Fails if the value is not equal to target (supports wrapped primitives).
     */
    public static void equal(Object value, Object target){
        equal(value, target, "Expected value to be equal to $2, got $1 instead.");
    }

    /**
     * Fails if the value is not equal to target (supports wrapped primitives).
     * Use $1 to substitute the value and $2 to substitute the target in the message.
     * Falls back to numeric/boolean/char comparison so that e.g. Integer(1) equals Long(1).
     */
    public static void equal(Object value, Object target, String message){
        if (Objects.equal(value, target) || arePrimitivesEqual(value, target))onSuccess.call();
        else onFail.call(new IllegalStateException(message.replace("$1", value == null ? "null" : value.toString()).replace("$2", target == null ? "null" : target.toString())));
    }

    /**
     * Compares two objects using their primitive values.
     * Numbers are compared as floats (via MathUtil.floatEquals) when either side is
     * floating-point, otherwise as longs.
     * NOTE(review): Double values are narrowed to float before comparison, so doubles
     * differing only beyond float precision compare equal -- presumably MathUtil has
     * no double overload; confirm.
     */
    private static boolean arePrimitivesEqual(Object value1, Object value2){
        if (value1 instanceof Number && value2 instanceof Number){
            if (value1 instanceof Double || value1 instanceof Float)return MathUtil.floatEquals(((Number)value1).floatValue(), ((Number)value2).floatValue());
            else return ((Number)value1).longValue() == ((Number)value2).longValue();
        }
        else if (value1 instanceof Boolean && value2 instanceof Boolean){
            return ((Boolean)value1).booleanValue() == ((Boolean)value2).booleanValue();
        }
        else if (value1 instanceof Character && value2 instanceof Character){
            return ((Character)value1).charValue() == ((Character)value2).charValue();
        }
        else return false;
    }

    // Utility class: no instances.
    private Assert(){}
}
|
#!/bin/bash
# Build a small MPI "hello world" package for Azure Batch:
# write the C source and a run script, compile, and zip the result.
mkdir -p ./mpi_batch
cd ./mpi_batch || exit 1
echo "Writing mpi_hello_world.c file"
cat << 'EOF' > mpi_hello_world.c
#include <mpi.h>
#include <stdio.h>

int main(int argc, char** argv) {
    // Initialize the MPI environment
    MPI_Init(NULL, NULL);

    // Get the number of processes
    int world_size;
    MPI_Comm_size(MPI_COMM_WORLD, &world_size);

    // Get the rank of the process
    int world_rank;
    MPI_Comm_rank(MPI_COMM_WORLD, &world_rank);

    // Get the name of the processor
    char processor_name[MPI_MAX_PROCESSOR_NAME];
    int name_len;
    MPI_Get_processor_name(processor_name, &name_len);

    // Print off a hello world message
    printf("Hello world from processor %s, rank %d out of %d processors\n",
           processor_name, world_rank, world_size);

    // Finalize the MPI environment.
    MPI_Finalize();
}
EOF
echo "Writing run_mpi.sh file"
cat << 'EOF' > run_mpi.sh
#!/bin/bash
if [ -f /etc/bashrc ]; then
. /etc/bashrc
fi
module load gcc-9.2.0
module load mpi/hpcx
# Create host file from the semicolon-separated Azure Batch node list
batch_hosts=hosts.batch
rm -rf $batch_hosts
IFS=';' read -ra ADDR <<< "$AZ_BATCH_NODE_LIST"
for i in "${ADDR[@]}"; do echo $i >> $batch_hosts;done
# Determine hosts to run on
src=$(tail -n1 $batch_hosts)
dst=$(head -n1 $batch_hosts)
echo "Src: $src"
echo "Dst: $dst"
NP=$(($NODES*$PPN))
# Echo the command about to run. Fixed: previously printed '-oversubscripe'
# (typo, single dash), which did not match the '--oversubscribe' flag that is
# actually used below.
echo "mpirun -np $NP --oversubscribe --host ${src}:${PPN},${dst}:${PPN} --map-by ppr:${PPN}:node --mca btl tcp,vader,self --mca coll_hcoll_enable 0 --mca btl_tcp_if_include lo,eth0 --mca pml ^ucx ${AZ_BATCH_APP_PACKAGE_mpi_batch_1_0_0}/mpi_batch/mpi_hello_world"
# Run two node MPI tests
mpirun -np $NP --oversubscribe --host ${src}:${PPN},${dst}:${PPN} --map-by ppr:${PPN}:node --mca btl tcp,vader,self --mca coll_hcoll_enable 0 --mca btl_tcp_if_include lo,eth0 --mca pml ^ucx ${AZ_BATCH_APP_PACKAGE_mpi_batch_1_0_0}/mpi_batch/mpi_hello_world
EOF
if [ -f /etc/bashrc ]; then
    . /etc/bashrc
fi
module load gcc-9.2.0
module load mpi/hpcx
echo "Compiling mpi_hello_world.c file"
mpicc -o mpi_hello_world mpi_hello_world.c
echo "Deleting source file"
rm mpi_hello_world.c
cd ..
echo "Creating zip file"
zip -r mpi_batch.zip mpi_batch
|
"""Rare words in company purposes.
This script requires the `dasem` module
"""
from __future__ import print_function
from os import write
import signal
from six import b
from nltk import WordPunctTokenizer
from dasem.fullmonty import Word2Vec
from dasem.text import Decompounder
from cvrminer.cvrmongo import CvrMongo
from cvrminer.text import PurposeProcessor
from cvrminer.virksomhed import Virksomhed
# Ignore broken pipe errors
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
decompounder = Decompounder()
purpose_processor = PurposeProcessor()
w2v = Word2Vec()
word_tokenizer = WordPunctTokenizer()
n = 1
cvr_mongo = CvrMongo()
for company in cvr_mongo.iter_companies():
virksomhed = Virksomhed(company)
purposes = virksomhed.formaal
for purpose in purposes:
cleaned_purpose = purpose_processor.clean(purpose)
words = word_tokenizer.tokenize(cleaned_purpose)
for word in words:
word = word.lower()
if word not in w2v.model:
phrase = decompounder.decompound_word(word)
for subphrase in phrase.split(' '):
if subphrase not in w2v.model:
write(1, subphrase.encode('utf-8') + b('\n'))
|
// Produce an array of `length` pseudo-random integers, each in [0, 100).
const generateRandomNumbers = (length) =>
  Array.from({ length }, () => Math.floor(Math.random() * 100));

console.log(generateRandomNumbers(10));
<reponame>KyllianGautier/treasure-map
import { TreasureMap } from './treasure-map';
import { Player } from './player';
import { Mountain } from './mountain';
import { Treasure } from './treasure';
// Unit tests for Player: orientation changes, movement rules (map edges,
// mountains, other players), treasure pickup, and the action queue.
// Private methods are reached via bracket access (player['turnRound']).
describe('Player', () => {
  let treasureMap: TreasureMap;

  // Fresh 5x6 map before every test so no state leaks between cases.
  beforeEach(() => {
    treasureMap = new TreasureMap(5, 6);
  });

  describe('Player orientation', () => {
    // 'D' turns clockwise: South -> West -> North -> East -> South.
    it('Change direction right', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      player['turnRound']('D');
      expect(player.direction).toBe('West');
      player['turnRound']('D');
      expect(player.direction).toBe('North');
      player['turnRound']('D');
      expect(player.direction).toBe('East');
      player['turnRound']('D');
      expect(player.direction).toBe('South');
    });

    // 'G' turns counter-clockwise: South -> East -> North -> West -> South.
    it('Change direction left', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      player['turnRound']('G');
      expect(player.direction).toBe('East');
      player['turnRound']('G');
      expect(player.direction).toBe('North');
      player['turnRound']('G');
      expect(player.direction).toBe('West');
      player['turnRound']('G');
      expect(player.direction).toBe('South');
    });
  });

  describe('Player movement', () => {
    // Facing South increments the row; column stays put.
    it('Move forward', async () => {
      const player = new Player('Alphonse', 3, 3, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      player['goForward']();
      expect(player.column).toBe(3);
      expect(player.row).toBe(4);
    });

    // The four wall cases: stepping past any edge of the 5x6 map is a no-op.
    it('Move on a wall north', async () => {
      const player = new Player('Alphonse', 3, 0, 'North', [], treasureMap);
      treasureMap.addPlayer(player);
      player['goForward']();
      expect(player.column).toBe(3);
      expect(player.row).toBe(0);
    });

    it('Move on a wall east', async () => {
      const player = new Player('Alphonse', 4, 3, 'East', [], treasureMap);
      treasureMap.addPlayer(player);
      player['goForward']();
      expect(player.column).toBe(4);
      expect(player.row).toBe(3);
    });

    it('Move on a wall south', async () => {
      const player = new Player('Alphonse', 2, 5, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      player['goForward']();
      expect(player.column).toBe(2);
      expect(player.row).toBe(5);
    });

    it('Move on a wall west', async () => {
      const player = new Player('Alphonse', 0, 3, 'West', [], treasureMap);
      treasureMap.addPlayer(player);
      player['goForward']();
      expect(player.column).toBe(0);
      expect(player.row).toBe(3);
    });

    // Mountains and other players block movement the same way walls do.
    it('Move on a mountain', async () => {
      const player = new Player('Alphonse', 2, 2, 'South', [], treasureMap);
      const mountain = new Mountain(2, 3);
      treasureMap.addPlayer(player);
      treasureMap.addMountain(mountain);
      player['goForward']();
      expect(player.column).toBe(2);
      expect(player.row).toBe(2);
    });

    it('Move on a player', async () => {
      const player = new Player('Alphonse', 2, 2, 'North', [], treasureMap);
      const opponent = new Player('Germaine', 2, 1, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      treasureMap.addPlayer(opponent);
      player['goForward']();
      expect(player.column).toBe(2);
      expect(player.row).toBe(2);
    });
  });

  describe('Player gets treasure', () => {
    // Walking onto a treasure cell transfers one unit to the player.
    it('Get simple treasure', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', [], treasureMap);
      const treasure = new Treasure(2, 4, 1);
      treasureMap.addPlayer(player);
      treasureMap.addTreasure(treasure);
      player['goForward']();
      expect(player.treasureCount).toBe(1);
      expect(treasure.quantity).toBe(0);
    });

    // A depleted treasure (quantity 0) yields nothing.
    it('Get empty treasure', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', [], treasureMap);
      const treasure = new Treasure(2, 4, 0);
      treasureMap.addPlayer(player);
      treasureMap.addTreasure(treasure);
      player['goForward']();
      expect(player.treasureCount).toBe(0);
      expect(treasure.quantity).toBe(0);
    });
  });

  describe('Player action', () => {
    // doAction consumes one queued action ('A' = advance).
    it('Player does an action', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', ['A'], treasureMap);
      treasureMap.addPlayer(player);
      player.doAction();
      expect(player.actions.length).toBe(0);
    });

    // Calling doAction on an empty queue must be safe.
    it('Player has no more action to do', async () => {
      const player = new Player('Alphonse', 2, 3, 'South', [], treasureMap);
      treasureMap.addPlayer(player);
      player.doAction();
      expect(player.actions.length).toBe(0);
    });
  });
});
|
#!/bin/bash
#
# Auto-Install Apps and Tools for Manjaro/ArchLinux
#
# Resources:
# https://wiki.archlinux.org/index.php/Secure_Shell#Protection
#
# @author Dumitru Uzun (DUzun.me)
#
if ! pacman -Qi fakeroot > /dev/null; then
sudo pacman -Sq base-devel
fi
if ! command -v yay > /dev/null; then
sudo pacman -Sq yay
fi
_i_='yay -S --noconfirm'
_d_=$(dirname "$0");
# NumLock On at boot
sudo "$_d_/numlock_on_boot.sh";
# Setup ~/.bin
sudo "$_d_/set_home_bin_path.sh";
# Set some values in sysctl
sudo "$_d_/sysctl.sh";
# Enable BBR tcp_congestion_control
sudo "$_d_/enable_bbr.sh";
# The file /usr/bin/x-terminal-emulator is usually nonexistent on ArchLinux Systems, you have to link it manually.
[ -x /usr/bin/x-terminal-emulator ] || sudo ln -sT xterm /usr/bin/x-terminal-emulator
[ -x ./.bin/pacman-refresh-keys ] && ./.bin/pacman-refresh-keys
# Dropdown console
$_i_ guake
guake &
if ! ps -C rngd > /dev/null; then
$_i_ rng-tools
# echo RNGD_OPTS="-r /dev/urandom" | sudo tee /etc/conf.d/rngd
sudo systemctl enable rngd
sudo systemctl start rngd
fi
$_i_ dialog
$_i_ git
curl -L https://raw.github.com/git/git/master/contrib/completion/git-prompt.sh > ~/.bash_git
grep "/.bash_git" ~/.bashrc || {
echo >> ~/.bashrc;
echo '[ -f ~/.bash_git ] && . ~/.bash_git' >> ~/.bashrc;
}
# Unlock id_rsa key with KWallet
f=~/.config/autostart-scripts/ssh-add.sh
if [ ! -s "$f" ] && [ -s "$_d_/autostart-scripts/ssh-add.sh" ];
then
cat "$_d_/autostart-scripts/ssh-add.sh" > "$f"
chmod +x "$f"
fi
# $_i_ redshift # not required any more, see "Night Mode" in settings
$_i_ synergy1-bin
# $_i_ dropbox
# $_i_ kde-servicemenus-dropbox
# Create an account at https://e.pcloud.com/#page=register&invite=BOUkZ4oWYRy
$_i_ pcloud-drive # pCloud drive client on Electron
$_i_ speedcrunch # advanced calculator
# $_i_ odrive-bin # Google Drive client on Electron
# $_i_ yandex-disk
# $_i_ yandex-disk-indicator
$_i_ brave-browser
# $_i_ google-chrome
# https://chrome.google.com/webstore/detail/plasma-integration/cimiefiiaegbelhefglklhhakcgmhkai
# https://addons.mozilla.org/en-US/firefox/addon/plasma-integration/
$_i_ plasma-browser-integration
$_i_ sshfs
$_i_ fuseiso
$_i_ ifuse
$_i_ cdemu-daemon
$_i_ cdemu-client
$_i_ autofs
sudo cp -R "$_d_"/autofs/* /etc/autofs/
sudo systemctl enable autofs
sudo systemctl start autofs
$_i_ etcher # write ISO to USB-Storage
$_i_ open-fuse-iso
$_i_ gparted # alternative to KDE Partition Manager
$_i_ kdiskmark # Measure storage read/write performance
$_i_ diffuse
$_i_ meld
$_i_ kdiff3
$_i_ terminator
$_i_ xorg-xkill # xkill any window app
$_i_ plasma5-applets-caffeine-plus # Prevents the desktop becoming idle in full-screen mode
$_i_ wmctrl # Window control utility
# $_i_ pamac-gtk # this is now the default GUI package manager
# $_i_ pamac-tray-appindicator # Tray icon using appindicator which feets better in KDE
$_i_ krita # photo editor
$_i_ blender # video editor
# $_i_ xnviewmp # photo viewer
$_i_ kodi # video player
$_i_ celluloid # video player
# $_i_ kodi-addon-stream
$_i_ clementine # audio player
$_i_ kdegraphics-thumbnailers
$_i_ raw-thumbnailer
$_i_ raw-thumbnailer-entry
$_i_ webp-thumbnailer
# $_i_ ffmpegthumbnailer-mp3
$_i_ exe-thumbnailer
$_i_ appimage-thumbnailer-git
# $_i_ jar-thumbnailer-git
# Install a hook for minidlna
_sed_=$(command -v sed)
cat << EOS | sudo tee /etc/pacman.d/hooks/minidlna-unjail-home.hook > /dev/null
[Trigger]
Type = Package
Target = minidlna
Operation = Install
Operation = Upgrade
[Action]
Description = Unjail home for MiniDLNA service
When = PostTransaction
Exec = $_sed_ -i 's/ProtectHome=on/ProtectHome=off/' /lib/systemd/system/minidlna.service
EOS
$_i_ minidlna # Media Server
# sed -i 's/ProtectHome=on/ProtectHome=off/' /lib/systemd/system/minidlna.service
# Security
$_i_ rkhunter
$_i_ fail2ban
$_i_ unhide
$_i_ clamav # antivirus
$_i_ clamtk # GUI for clamav
# Fast reboot
$_i_ dash
$_i_ kexec-tools
f=~/.bin/krbt
if [ ! -s "$f" ] && [ -s "$_d_/.bin/krbt" ];
then
cat "$_d_/.bin/krbt" > "$f"
chmod +x "$f"
fi
# # https://wiki.archlinux.org/index.php/PPTP_Client
# $_i_ pptpclient
# # Create MikroTel VPN connection and daemonize it
# [ -x "$_d_/setup_mikrotel_pptp.sh" ] && sudo "$_d_/setup_mikrotel_pptp.sh";
# Install Cronie (if missing) and setup /etc/cron.minutely folder
[ -d /etc/cron.minutely ] || sudo mkdir /etc/cron.minutely
cat << EOF | sudo tee /etc/cron.d/0minutely > /dev/null
SHELL=/bin/bash
PATH=/sbin:/bin:/usr/sbin:/usr/bin
MAILTO=root
*/1 * * * * root run-parts /etc/cron.minutely #Runs a cron job script every minute
EOF
if ! ps -C crond > /dev/null; then
$_i_ cronie
sudo systemctl enable cronie
sudo systemctl start cronie
fi
$_i_ notepadqq # like notepad++
$_i_ vscodium-bin
# [ -d ~/.config/VSCodium/User ]
if [ -d ~/Dropbox/config/VSCodium/User/ ];
then
ln -sf ~/Dropbox/config/VSCodium/User ~/.config/VSCodium/
fi
# $_i_ sublime-text-dev
# sudo ln -sf /opt/sublime_text_3/sublime_text /usr/bin/subl
# [ -d ~/.config/sublime-text-3/Packages/User ]
if [ -d ~/Dropbox/config/Sublime/User/ ];
then
ln -sf ~/Dropbox/config/Sublime/User ~/.config/sublime-text-3/Packages/
fi
# File & Sync
$_i_ syncthing-gtk-python3
# Start syncthing delayed
f=~/.config/autostart-scripts/syncthing-delayed.sh
if [ ! -s "$f" ] && [ -s "$_d_/autostart-scripts/syncthing-delayed.sh" ];
then
cat "$_d_/autostart-scripts/syncthing-delayed.sh" > "$f"
chmod +x "$f"
fi
# systemctl enable --user syncthing
systemctl start --user syncthing
$_i_ freefilesync
$_i_ fslint
# $_i_ btsync-1.4
# sudo systemctl enable btsync
# sudo systemctl start btsync
# $_i_ btsync-gui
# google-chrome-stable http://localhost:8888 &
$_i_ qbittorrent
$_i_ gufw # GUI for ufw
# Like krunner
$_i_ rofi
# KDE VNC
$_i_ krfb
$_i_ krdc
# VNC
$_i_ tigervnc
# $_i_ tigervnc-viewer
$_i_ remmina
# Other Remote Desktop
$_i_ teamviewer
sudo systemctl enable teamviewerd
$_i_ telegram-desktop
$_i_ viber
# Start Viber and send it to the system tray
f=~/.config/autostart-scripts/viber-to-tray.sh
if [ ! -f "$f" ];
then
cat > "$f" << EOF
#!/bin/bash
viber StartMinimized &
sleep 3;
wid=\$(wmctrl -p -l | grep Viber | grep " \$! " | awk '{print \$1}') && \
wmctrl -i -c "\$wid"
# pidof /opt/viber/Viber
EOF
chmod +x "$f"
# Disable Viber autostart
f=~/.config/autostart/Viber.desktop
if [ -f "$f" ];
then
if ! grep -q 'Hidden=true' -- "$f";
then
echo 'Hidden=true' >> "$f"
fi
fi
fi
# $_i_ skypeforlinux-stable-bin
# cat > ~/.config/autostart/skypeforlinux.desktop << EOF
# [Desktop Entry]
# Name=Skype for Linux
# GenericName=Skype
# Exec=skypeforlinux
# Icon=skypeforlinux
# Terminal=false
# Type=Application
# Categories=Network;
# StartupNotify=false
# EOF
# SkypeForLinux doesn't use kwallet (yet?), but uses gnome-keyring instead
# Install gnome-keyring (SkypeForLinux and friends use it instead of KWallet)
# and, when SDDM is the display manager, wire the keyring into its PAM stack
# so the keyring is unlocked at login.
if $_i_ gnome-keyring;
then
    if command -v sddm > /dev/null;
    then
        if ! grep -lq pam_gnome_keyring.so -- /etc/pam.d/sddm;
        then
            # Add these lines to /etc/pam.d/sddm
            # -auth optional pam_gnome_keyring.so
            # -session optional pam_gnome_keyring.so auto_start
            #
            # The loop below rewrites the file line by line: `t` remembers
            # whether the *previous* line matched '^auth'; when the current
            # line no longer matches (s!=0) but the previous one did (t==0),
            # the -auth entry is emitted right after the auth block.
            t=-1;
            while read -r ln; do
                echo "$ln" | grep -q '^auth\s\+';
                s="$?"
                if [ "$s" -ne 0 ] && [ "$t" -eq 0 ]; then echo -e '-auth\t\toptional\tpam_gnome_keyring.so'; fi;
                t=$s;
                echo "$ln";
            # Write to a temp file and move it into place with sudo only if
            # both previous steps succeeded.
            done < /etc/pam.d/sddm > /tmp/etc_pam_sddm && \
            echo -e '-session\toptional\tpam_gnome_keyring.so\tauto_start' >> /tmp/etc_pam_sddm && \
            sudo mv -f /tmp/etc_pam_sddm /etc/pam.d/sddm
        fi
    fi
    # Let git store credentials in the keyring as well.
    git config --global credential.helper gnome-keyring
    # git config --global credential.modalprompt true
    $_i_ seahorse
fi
# Replace Yakuake with Guake
# You have to enable autostart in prefferences
cat > ~/.config/autostart/guake.desktop << EOF
[Desktop Entry]
Name=Guake Terminal
GenericName=Terminal
Comment=Use the command line in a Quake-like terminal
Exec=guake
Icon=guake
Terminal=false
Type=Application
Categories=GNOME;GTK;System;Utility;TerminalEmulator;
Encoding=UTF-8
StartupNotify=false
TryExec=guake
EOF
# Disable Yakuake autostart
f=~/.config/autostart/org.kde.yakuake.desktop
if [ -f "$f" ];
then
if ! grep -q 'Hidden=true' -- "$f";
then
echo 'Hidden=true' >> "$f"
fi
fi
# Ctrl+` opens Guake (global shortcut)
f=~/.config/kglobalshortcutsrc
if [ -f "$f" ];
then
# -F: match the literal string. As a regex, '[guake.desktop]' is a bracket
# expression matching any single one of those characters, so the guard was
# (almost) always true and the shortcut section was never appended.
if ! grep -qF '[guake.desktop]' -- "$f";
then
cat >> "$f" << EOF
[guake.desktop]
_k_friendly_name=Launch Guake Terminal
_launch=\\tMeta+\`,none,Launch Guake Terminal
EOF
fi
fi
# Login screen on two displays: append an xrandr layout call to SDDM's Xsetup
# script, but only if it does not already contain one.
f=/usr/share/sddm/scripts/Xsetup
x=$(grep "xrandr --output" $f)
if [ -z "$x" ]; then
# Monitor names come from `xrandr --listmonitors`; first listed becomes the
# primary, placed left of the last listed.
x=$(xrandr --listmonitors | awk '{print $4}' | grep -v '^$')
x1=$(echo "$x" | head -1)
x2=$(echo "$x" | tail -1)
# This heredoc is the if-block's stdout; the whole block is piped into tee
# below, so the line is appended only when the guard above fired.
cat << EOF
xrandr --output $x1 --primary --left-of $x2
EOF
fi | sudo tee -a $f > /dev/null
$_i_ doublecmd-gtk2
$_i_ sddm-config-editor-git
$_i_ kazam
$_i_ flameshot
$_i_ obs-studio # screen recording/streaming
$_i_ screenkey # show keystrokes on the screen
$_i_ kcolorchooser
#$_i_ winscp
$_i_ playonlinux
# if $_i_ crossover ;
# then
# $_i_ nss-mdns
# # On x64
# $_i_ lib32-nss-mdns
# $_i_ lib32-sdl2
# $_i_ lib32-vkd3d
# fi
$_i_ ttf-ms-fonts
# On x64
$_i_ lib32-libwbclient lib32-libxslt
$_i_ virtualbox virtualbox-host-dkms
$_i_ virt-manager virt-viewer qemu vde2 ebtables dnsmasq
sudo systemctl enable libvirtd
# sudo systemctl start libvirtd
# Enable nested virtualization. tee must run under sudo to write below /etc
# (previously it ran unprivileged and failed with "Permission denied").
echo "options kvm-intel nested=1" | sudo tee /etc/modprobe.d/kvm-intel.conf > /dev/null
# NOTE(review): bare 'qemu-kvm' launches an emulator with no disk/arguments;
# presumably intended as an install smoke-test -- confirm intent.
qemu-kvm
# If VMs doesn't start, try:
# yay --noconfirm linux-headers
# sudo modprobe vboxdrv
# depmod -a # Failed to start Load Kernel Modules
# Update virus signatures
sudo freshclam
|
def generate_documentation_list(doc_requires):
    """Render a Markdown section listing required documentation tools.

    Args:
        doc_requires: iterable of tool names.

    Returns:
        A string starting with the '## Required Documentation Tools' heading,
        followed by one '- <tool>' bullet per entry, each newline-terminated.
    """
    header = "## Required Documentation Tools\n"
    bullets = "".join(f"- {tool}\n" for tool in doc_requires)
    return header + bullets
# Load a local .env file (when python-dotenv is installed) before anything
# below reads environment variables; silently skip if the module is absent.
try:
    from dotenv import load_dotenv
    print("Found .env file, loading environment variables from it.")
    load_dotenv(override=True)
except ModuleNotFoundError:
    pass

import asyncio
import logging
import os
from functools import partial, partialmethod

import arrow
import sentry_sdk
from discord.ext import commands
from sentry_sdk.integrations.logging import LoggingIntegration
from sentry_sdk.integrations.redis import RedisIntegration

from bot import log, monkey_patches

# DEBUG+ log records become Sentry breadcrumbs; only WARNING+ become events.
sentry_logging = LoggingIntegration(
    level=logging.DEBUG,
    event_level=logging.WARNING
)

sentry_sdk.init(
    dsn=os.environ.get("BOT_SENTRY_DSN"),
    integrations=[
        sentry_logging,
        RedisIntegration()
    ],
    # Tag releases with the deployed commit; 'foobar' is the local-dev fallback.
    release=f"sir-lancebot@{os.environ.get('GIT_SHA', 'foobar')}"
)

log.setup()

# Set timestamp of when execution started (approximately)
start_time = arrow.utcnow()

# On Windows, the selector event loop is required for aiodns.
if os.name == "nt":
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

monkey_patches.patch_typing()

# This patches any convertors that use PartialMessage, but not the PartialMessageConverter itself
# as library objects are made by this mapping.
# https://github.com/Rapptz/discord.py/blob/1a4e73d59932cdbe7bf2c281f25e32529fc7ae1f/discord/ext/commands/converter.py#L984-L1004
commands.converter.PartialMessageConverter = monkey_patches.FixedPartialMessageConverter

# Monkey-patch the discord.py decorators to use the Command and Group subclasses
# which support root aliases. Must be patched before any cogs are added.
commands.command = partial(commands.command, cls=monkey_patches.Command)
commands.GroupMixin.command = partialmethod(commands.GroupMixin.command, cls=monkey_patches.Command)
commands.group = partial(commands.group, cls=monkey_patches.Group)
commands.GroupMixin.group = partialmethod(commands.GroupMixin.group, cls=monkey_patches.Group)
|
<gh_stars>0
import * as t from '../constants/ActionTypes';
import _ from 'lodash';
const initialState = { ecode: 0, collection: [], collection2JSON: '', options: {}, indexLoading: false, saveLoading: false };
// Redux reducer for the workflow-configuration screen: loading/saving the
// step collection and CRUD on steps and their actions.
//
// NOTE(review): several branches mutate `state` and `collection` in place
// before spreading, which violates Redux immutability expectations (stale
// renders are possible). Flagged only; behavior left unchanged.
export default function wfconfig(state = initialState, action) {
  const { collection } = state;
  switch (action.type) {
    case t.WFCONFIG_INDEX:
      return { ...state, indexLoading: true, collection: [] };
    case t.WFCONFIG_INDEX_SUCCESS:
      if (action.result.ecode === 0) {
        // NOTE(review): direct mutation of `state` fields here.
        state.collection = action.result.data.contents && action.result.data.contents.steps ? action.result.data.contents.steps : [];
        // Snapshot used later to detect/undo unsaved edits (see WFCONFIG_CANCEL).
        state.collection2JSON = JSON.stringify(state.collection);
        state.workflowId = action.result.data.id;
        state.workflowName = action.result.data.name;
        state.options = action.result.options;
      }
      return { ...state, indexLoading: false, ecode: action.result.ecode };
    case t.WFCONFIG_INDEX_FAIL:
      return { ...state, indexLoading: false, error: action.error };
    case t.WFCONFIG_SAVE:
      return { ...state, saveLoading: true };
    case t.WFCONFIG_SAVE_SUCCESS:
      return action.result.ecode === 0 ?
        { ...state, saveLoading: false, ecode: action.result.ecode, collection2JSON: JSON.stringify(state.collection) } :
        { ...state, saveLoading: false, ecode: action.result.ecode };
    case t.WFCONFIG_SAVE_FAIL:
      return { ...state, saveLoading: false, error: action.error };
    case t.WFCONFIG_STEP_CREATE:
      // NOTE(review): `_.max(coll, iteratee)` is the lodash v3/underscore
      // signature; lodash v4 ignores the iteratee (use `_.maxBy`), and on an
      // empty collection `_.max(...)` is undefined so `.id` would throw.
      // Confirm which lodash version is in use.
      const maxStep = _.max(collection, step => step.id).id || 0;
      collection.push({ id: maxStep+1, name: action.values.name, state: action.values.state, actions: [] });
      return { ...state, collection };
    case t.WFCONFIG_STEP_EDIT:
      const index = _.findIndex(collection, { id: action.values.id });
      collection[index]['name'] = action.values.name;
      collection[index]['state'] = action.values.state;
      return { ...state, collection };
    case t.WFCONFIG_STEP_DELETE:
      const inx = _.findIndex(collection, { id: action.id });
      collection.splice(inx, 1);
      return { ...state, collection: collection };
    case t.WFCONFIG_ACTION_ADD:
      const stepIndex = _.findIndex(collection, { id: action.stepId });
      if (!collection[stepIndex].actions)
      {
        collection[stepIndex].actions = [];
      }
      // Same lodash v3 `_.max` caveat as above.
      const maxAction = _.max(collection[stepIndex].actions, value => value.id).id || 0;
      // Action ids are namespaced by step: stepId*1000 + per-step counter.
      action.values.id = action.stepId * 1000 + maxAction % 1000 + 1;
      collection[stepIndex].actions.push(action.values);
      return { ...state, collection };
    case t.WFCONFIG_ACTION_EDIT:
      const stepInd = _.findIndex(collection, { id: action.stepId });
      const actionInd = _.findIndex(collection[stepInd].actions, { id: action.values.id })
      collection[stepInd].actions[actionInd] = action.values;
      return { ...state, collection };
    case t.WFCONFIG_ACTION_DELETE:
      const sInd = _.findIndex(collection, { id: action.stepId });
      collection[sInd].actions = _.filter(collection[sInd].actions, function(v) { return _.indexOf(action.values, v.id) === -1 });
      return { ...state, collection };
    case t.WFCONFIG_CANCEL:
      // Restore the last-saved snapshot, discarding unsaved edits.
      state.collection = JSON.parse(state.collection2JSON);
      return { ...state };
    default:
      return state;
  }
}
|
#!/bin/bash
# Run clang-tidy over every C/C++ source and header under ./src.
# Usage: .scripts/tidy.sh [clang-tidy options], e.g. '.scripts/tidy.sh --fix'
#
# Fixes: quote the -iname patterns (unquoted '*.h' etc. were glob-expanded by
# the shell against the current directory), group the -o alternatives so that
# '-type f' applies to every extension (find's -a binds tighter than -o), and
# quote "$@" so options with spaces survive.
find ./src -type f \( -iname '*.h' -o -iname '*.c' -o -iname '*.cpp' -o -iname '*.hpp' \) | xargs -I {} clang-tidy --quiet "$@" {}
|
#!/bin/bash
# Convenience wrapper: compile and run the server entry point.
# Run from the repository root so the relative path resolves.
go run cmd/server/main.go
|
import requests
class HttpException(Exception):
    # Raised when an API call fails: a 4xx/5xx response or a transport error.
    pass
def delete_resource(api_url: str, resource_id: int) -> dict:
    """Delete a resource via HTTP DELETE at ``{api_url}/{resource_id}``.

    Args:
        api_url: Base URL of the resource collection (no trailing slash).
        resource_id: Identifier of the resource to delete.

    Returns:
        ``{'status': 'deleted', 'resource_id': resource_id}`` on success.

    Raises:
        HttpException: on a 4xx/5xx response or any transport-level error.
    """
    try:
        response = requests.delete(f"{api_url}/{resource_id}")
        response.raise_for_status()  # Raise an HTTPError for 4xx or 5xx status codes
        return {'status': 'deleted', 'resource_id': resource_id}
    except requests.exceptions.HTTPError as e:
        # Chain with `from e` so the traceback keeps the root cause
        # (previously the original exception context was discarded).
        raise HttpException(f"A HTTP error response was received: {e}") from e
    except requests.exceptions.RequestException as e:
        raise HttpException(f"An error occurred while executing the API: {e}") from e
# Test the function
try:
result = delete_resource('https://api.example.com/resources', 123)
print(result) # Output: {'status': 'deleted', 'resource_id': 123}
except HttpException as e:
print(e) |
<filename>allrichstore/UI/Home/Message/C/MessageVC.h
//
//  MessageVC.h
//  allrichstore
//
//  Created by 任强宾 on 16/11/15.
//  Copyright © 2016年 allrich88. All rights reserved.
//
#import "BaseVC.h"

/// Message screen view controller; declares no public API beyond BaseVC.
@interface MessageVC : BaseVC
@end
|
#!/usr/bin/env bash
# Resolve the directory containing this script so the manifest path below
# works regardless of the caller's working directory.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Launch a Rail-RNA "prep" job on AWS EMR for SRA batch 82: spot-priced
# m3.xlarge instances, 20 core nodes, output to S3, region us-east-1.
rail-rna prep elastic -m $DIR/sra_batch_82.manifest --skip-bad-records --ignore-missing-sra-samples --core-instance-type m3.xlarge --master-instance-type m3.xlarge -o s3://rail-sra-hg38/sra_prep_batch_82 -c 20 --core-instance-bid-price 0.25 --master-instance-bid-price 0.25 -f --max-task-attempts 6 --region us-east-1 --ec2-key-name useast1
|
def maxOverlapSum(intervals):
    """Return the total length covered by the union of the given intervals.

    Overlapping, touching, or nested intervals are merged first, so every
    covered point is counted exactly once. Unlike the previous version this
    runs in O(n log n) instead of O(n^2), does not mutate the caller's
    interval lists, and handles duplicate intervals correctly (the old
    value-equality filter could drop an unprocessed duplicate).

    Args:
        intervals: list of [start, end] pairs with start <= end.

    Returns:
        Sum of (end - start) over the merged intervals; 0 for empty input.
    """
    if not intervals:
        return 0
    # Sort by start so overlapping intervals become adjacent; building tuples
    # also avoids touching the caller's lists.
    ordered = sorted((start, end) for start, end in intervals)
    total = 0
    cur_start, cur_end = ordered[0]
    for start, end in ordered[1:]:
        if start <= cur_end:
            # Overlaps (or touches) the current merged interval: extend it.
            cur_end = max(cur_end, end)
        else:
            # Gap found: bank the finished interval and start a new one.
            total += cur_end - cur_start
            cur_start, cur_end = start, end
    return total + (cur_end - cur_start)
intervals = [[1, 10], [2, 5], [8, 12], [15, 20]]
result = maxOverlapSum(intervals)
print(result) |
<gh_stars>1-10
import React from 'react';
import { StylePosterRandom } from './style';

// Static mock of a social-media post card: header (avatar, author, age),
// post image, like/comment/share counters, and the action buttons.
// All content is hard-coded placeholder data.
//
// NOTE(review): the <NAME> element inside the <h1> below is dataset-redaction
// residue (the original author name was stripped) and is not a valid React
// component -- restore the intended text before building.
const PosterRandom: React.FC =()=>{
  return(
    <StylePosterRandom>
      <div className="wrraper">
        <div className="cabecalho">
          <img src="/assets/kiba.jfif" alt="userPhoto" className="userPhoto" />
          <div className="text-cab">
            <h1><NAME></h1>
            <p>4 h. <i className="fas fa-globe-americas"></i></p>
          </div>
          <i className="fas fa-ellipsis-h"></i>
        </div>
        <img src="/assets/kibaPoster.jpg" alt="fotoPostada" className="fotoPostada" />
        <span className="separator"></span>
        <div className="infoCurt">
          <div className="esq">
            <i className="fas fa-thumbs-up"></i>
            <i className="fas fa-heart"></i>
            <span>1 mil</span>
          </div>
          <div className="dir">
            <span>300 Comentários</span>
            <span>500 Compartilhamentos</span>
          </div>
        </div>
        <span className="separator"></span>
        <div className="curti">
          <button>
            <i className="fas fa-thumbs-up"></i>
            Curtir
          </button>
          <button>
            <i className="far fa-comment-alt"></i>
            Comentar
          </button>
          <button>
            <i className="far fa-share-square"></i>
            Compartilhar
          </button>
        </div>
      </div>
    </StylePosterRandom>
  );
}

export default PosterRandom;
// Keep-alive handler for a message received on `channel`.
// An error aborts; 'ping' is answered with 'pong' and 'pong' with 'ping',
// so two peers running this keep the exchange going indefinitely.
function handleMessage(data, channel) {
  if (data.error) {
    console.log('# Something went wrong', data.error);
    return;
  }
  if (data.message === 'ping') {
    console.log('# Sending pong');
    channel.send('pong');
  }
  if (data.message === 'pong') {
    // Fixed: this branch handles an incoming *pong* but logged "Received ping".
    console.log('# Received pong');
    channel.send('ping');
  }
}
package com.alchemyapi.api;
public class AlchemyAPI_TextParams extends AlchemyAPI_Params{
private Boolean useMetaData;
private Boolean extractLinks;
public boolean isUseMetaData() {
return useMetaData;
}
public void setUseMetaData(boolean useMetaData) {
this.useMetaData = useMetaData;
}
public boolean isExtractLinks() {
return extractLinks;
}
public void setExtractLinks(boolean extractLinks) {
this.extractLinks = extractLinks;
}
public String getParameterString(){
String retString = super.getParameterString();
if(useMetaData!=null) retString+="&useMetaData="+(useMetaData?"1":"0");
if(extractLinks!=null) retString+="&extractLinks="+(extractLinks?"1":"0");
return retString;
}
}
|
<filename>src/main/java/com/netcracker/ncstore/dto/ReviewCreateDTO.java<gh_stars>0
package com.netcracker.ncstore.dto;
import com.netcracker.ncstore.model.Product;
import com.netcracker.ncstore.model.User;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
 * Immutable DTO carrying the data needed to create a product review.
 * Lombok generates the all-args constructor and the getters.
 */
@AllArgsConstructor
@Getter
public class ReviewCreateDTO {
    private final User author;     // user writing the review
    private final Product product; // product being reviewed
    private final int rating;      // numeric rating value
    private final String text;     // review body
}
|
import locales from '../i18n/locales.json';
import anime from 'animejs';
import React, {useState, useRef} from 'react';
import * as Icon from 'react-feather';
import {useTranslation} from 'react-i18next';
import {Link} from 'react-router-dom';
import {useSpring, animated} from 'react-spring';
import {useEffectOnce, useLockBodyScroll, useWindowSize} from 'react-use';
/**
 * Top navigation bar: language switcher (left), brand link (middle) and either
 * a Menu/Close toggle (narrow screens) or inline icon links + theme toggle
 * (wide screens) on the right. Fades in once via a react-spring opacity spring.
 */
function Navbar({
  pages,
  darkMode,
  showLanguageSwitcher,
  setShowLanguageSwitcher,
}) {
  const {i18n, t} = useTranslation();
  // Fall back to the configured fallback language when the active i18n
  // language has no entry in locales.json.
  const currentLanguage = Object.keys(locales).includes(i18n?.language)
    ? i18n?.language
    : i18n?.options?.fallbackLng[0];

  // Whether the slide-out <Expand> panel is open; page scroll is locked while open.
  const [expand, setExpand] = useState(false);
  useLockBodyScroll(expand);
  const windowSize = useWindowSize();

  // One-shot fade-in: spring starts at opacity 0, is immediately set to 1.
  const [spring, set, stop] = useSpring(() => ({opacity: 0}));
  set({opacity: 1});
  stop();

  return (
    <animated.div className="Navbar" style={spring}>
      <div
        className="navbar-left"
        onClick={() => {
          setShowLanguageSwitcher((prevState) => !prevState);
        }}
      >
        {locales[currentLanguage]}
      </div>
      <div className="navbar-middle">
        <Link
          to="/"
          onClick={() => {
            setExpand(false);
          }}
        >
          Covid19<span>India</span>
        </Link>
      </div>
      <div
        className="navbar-right"
        onClick={() => {
          setExpand(!expand);
        }}
        onMouseEnter={() => {
          // Desktop: open the menu on hover as well as click.
          if (window.innerWidth > 769) {
            setExpand(true);
          }
        }}
      >
        {windowSize.width < 769 && (
          <span>{expand ? t('Close') : t('Menu')}</span>
        )}
        {windowSize.width > 769 && (
          <React.Fragment>
            <span>
              <Link to="/">
                <Icon.Home {...activeNavIcon('/')} />
              </Link>
            </span>
            <span>
              <Link to="/demographics">
                <Icon.Users {...activeNavIcon('/demographics')} />
              </Link>
            </span>
            <span>
              <Link to="/essentials">
                <Icon.Package {...activeNavIcon('/essentials')} />
              </Link>
            </span>
            <span>
              <Link to="/about">
                <Icon.HelpCircle {...activeNavIcon('/about')} />
              </Link>
            </span>
            <span>
              {window.innerWidth > 768 && <SunMoon {...{darkMode}} />}
            </span>
          </React.Fragment>
        )}
      </div>
      {expand && (
        <Expand {...{expand, pages, setExpand, darkMode, windowSize}} />
      )}
    </animated.div>
  );
}
/**
 * Slide-out menu panel listing all pages flagged showInNavbar, plus the theme
 * toggle on mobile. Animates in from the side once on mount (animejs) and, on
 * desktop, closes itself when the pointer leaves it.
 */
function Expand({expand, pages, setExpand, darkMode, windowSize}) {
  const expandElement = useRef(null);
  const {t} = useTranslation();

  // Entry animation, run exactly once when the panel mounts.
  useEffectOnce(() => {
    anime({
      targets: expandElement.current,
      translateX: '10.5rem',
      easing: 'easeOutExpo',
      duration: 250,
    });
  });

  return (
    <div
      className="expand"
      ref={expandElement}
      onMouseLeave={() => {
        if (windowSize.width > 769) setExpand(false);
      }}
    >
      {pages.map((page, i) => {
        if (page.showInNavbar === true) {
          return (
            <Link
              to={page.pageLink}
              key={i}
              onClick={() => {
                setExpand(false);
              }}
            >
              <span
                {...navLinkProps(page.pageLink, page.animationDelayForNavbar)}
              >
                {t(page.displayName)}
              </span>
            </Link>
          );
        }
        return null;
      })}
      {window.innerWidth < 768 && <SunMoon {...{darkMode}} />}
      <div className="expand-bottom fadeInUp" style={{animationDelay: '1s'}}>
        <h5>{t('A crowdsourced initiative.')}</h5>
      </div>
    </div>
  );
}
export default Navbar;
// Props for a menu link: staggered fade-in animation plus a 'focused' class
// when the link points at the current location.
const navLinkProps = (path, animationDelay) => ({
  className: `fadeInUp ${window.location.pathname === path ? 'focused' : ''}`,
  style: {
    animationDelay: `${animationDelay}s`,
  },
});

// Highlights the icon of the currently active route with the accent color.
const activeNavIcon = (path) => ({
  style: {
    stroke: window.location.pathname === path ? '#4c75f2' : '',
  },
});

// Dark/light theme toggle button; `darkMode` comes from the use-dark-mode hook
// (value + toggle()).
const SunMoon = ({darkMode}) => {
  return (
    <div
      className="SunMoon"
      onClick={() => {
        darkMode.toggle();
      }}
    >
      <div>
        {darkMode.value ? <Icon.Sun color={'#ffc107'} /> : <Icon.Moon />}
      </div>
    </div>
  );
};
|
<reponame>madhusha2020/inventory-frontend-ngx
import {Component, OnInit} from '@angular/core';
import {ActivatedRoute, Router} from '@angular/router';
import {
Customer,
CustomerControllerService,
CustomerUser,
Role,
RoleControllerService,
User,
UserControllerService
} from '../../../service/rest';
import Swal from 'sweetalert2';
import {FormBuilder, FormGroup, Validators} from '@angular/forms';
import {ServiceUtil} from '../../../service/util/service-util';
import {TokenService} from '../../../service/auth/token.service';
@Component({
selector: 'ngx-customer-view',
templateUrl: './customer-view.component.html',
styleUrls: ['./customer-view.component.scss']
})
export class CustomerViewComponent implements OnInit {
editMode: boolean;
disabledProperty = 'disabled';
title: string;
customerForm: FormGroup;
customerPasswordForm: FormGroup;
customerUser: CustomerUser = {};
user: User = {};
customer: Customer = {};
customerTypes: Array<string> = ServiceUtil.getCustomerTypes();
roles: Array<Role> = [];
roleNameList: Array<string> = [];
assignedRoles: Map<string, Role> = new Map<string, Role>();
constructor(private formBuilder: FormBuilder,
private roleControllerService: RoleControllerService,
private userControllerService: UserControllerService,
private customerControllerService: CustomerControllerService,
private tokenService: TokenService,
private route: ActivatedRoute,
private router: Router) {
}
get name() {
return this.customerForm.get('name');
}
get userName() {
return this.customerForm.get('userName');
}
get oldPassword() {
return this.customerPasswordForm.get('oldPassword');
}
get password() {
return this.customerPasswordForm.get('password');
}
get confirmPassword() {
return this.customerPasswordForm.get('confirmPassword');
}
get type() {
return this.customerForm.get('type');
}
get address() {
return this.customerForm.get('address');
}
get contact1() {
return this.customerForm.get('contact1');
}
get contact2() {
return this.customerForm.get('contact2');
}
get fax() {
return this.customerForm.get('fax');
}
ngOnInit(): void {
this.editMode = false;
this.route.queryParams.subscribe(params => {
if (params.id) {
this.fetchCustomer(params.id);
} else {
Swal.fire('Error', 'Customer not found', 'error');
}
}
);
this.fetchRoles();
this.customerForm = this.formBuilder.group({
name: [null, [Validators.required]],
userName: [null, [Validators.required, Validators.pattern('^[a-z0-9._%+-]+@[a-z0-9.-]+\\.[a-z]{2,4}$')]],
type: [ServiceUtil.getExternalCustomerType(), [Validators.required]],
address: [null, [Validators.required]],
contact1: [null, [Validators.required, Validators.minLength(10), Validators.maxLength(13), Validators.pattern('^((\\+91?)|(\\+94?)|0)?[0-9]{10}$')]],
contact2: [null, [Validators.minLength(10), Validators.maxLength(13), Validators.pattern('^((\\+91?)|(\\+94?)|0)?[0-9]{10}$')]],
fax: [null, [Validators.minLength(10), Validators.maxLength(13), Validators.pattern('^((\\+91?)|(\\+94?)|0)?[0-9]{10}$')]],
});
this.customerPasswordForm = this.formBuilder.group({
password: [null, [Validators.required, Validators.minLength(8)]],
confirmPassword: [null, [Validators.required, Validators.minLength(8)]],
oldPassword: [null, [Validators.required, Validators.minLength(8)]],
});
}
fetchCustomer(id: string) {
this.userControllerService.getCustomerByIdUsingGET(id).subscribe(response => {
console.log('CustomerUser Data :', response);
this.customer = response.customer;
this.user = response.user;
this.roleNameList = response.roleNameList;
console.log('Customer Data :', this.customer);
console.log('User Data :', this.user);
console.log('Role Name List Data :', this.roleNameList);
this.setData();
});
}
setData() {
this.userName.setValue(this.user.userName);
this.name.setValue(this.customer.name);
this.address.setValue(this.customer.address);
this.contact1.setValue(this.customer.contact1);
this.contact2.setValue(this.customer.contact2);
this.fax.setValue(this.customer.fax);
this.type.setValue(this.customer.type);
this.roleNameList.forEach(role => {
this.assignedRoles.set(role, {name: role});
});
}
fetchRoles() {
this.roleControllerService.getAllRolesUsingGET().subscribe(response => {
console.log('Roles :', response);
this.roles = response.roles;
});
}
typeStateChange(event) {
console.log('Customer Type : ', event);
this.type.setValue(event);
}
roleStateChange(event, role: Role) {
if (event.target.checked) {
this.assignedRoles.set(role.name, role);
} else {
this.assignedRoles.delete(role.name);
}
console.log('Assigned Roles :', this.assignedRoles);
}
enableEditMode() {
this.editMode = true;
this.disabledProperty = null;
this.title = 'Edit';
}
disabledEditMode() {
this.editMode = false;
this.disabledProperty = 'disabled';
this.title = null;
this.setData();
}
updateCustomerDetails() {
this.updateCustomer();
}
updateCustomerPassword() {
this.user.password = <PASSWORD>;
this.user.oldPassword = <PASSWORD>;
this.updateCustomer();
}
suspend() {
console.log('Suspend customer');
if (this.customer.email != this.tokenService.getUserName()) {
Swal.fire({
title: 'Are you sure?',
text: 'Suspend customer : {0}'.replace('{0}', this.customer.name),
icon: 'warning',
showCancelButton: true,
confirmButtonText: 'Yes',
cancelButtonText: 'No'
}).then((result) => {
if (result.value) {
this.customerControllerService.suspendCustomerUsingPUT({id: this.customer.id, userId: this.tokenService.getUserName()}).subscribe(response => {
console.log('Suspend customer :', response);
Swal.fire('Success', 'Customer suspend successfully', 'success').then(ok => {
this.router.navigate(['/pages/customer/main']);
});
});
} else if (result.dismiss === Swal.DismissReason.cancel) {
// Canceled
}
});
} else {
Swal.fire('Error', 'You cannot suspend your own customer record', 'error');
}
}
activate() {
console.log('Activate customer');
if (this.customer.email != this.tokenService.getUserName()) {
Swal.fire({
title: 'Are you sure?',
text: 'Activate customer : {0}'.replace('{0}', this.customer.name),
icon: 'warning',
showCancelButton: true,
confirmButtonText: 'Yes',
cancelButtonText: 'No'
}).then((result) => {
if (result.value) {
this.customerControllerService.activateCustomerUsingPUT({id: this.customer.id, userId: this.tokenService.getUserName()}).subscribe(response => {
console.log('Activate customer :', response);
Swal.fire('Success', 'Customer activate successfully', 'success').then(ok => {
this.router.navigate(['/pages/customer/main']);
});
});
} else if (result.dismiss === Swal.DismissReason.cancel) {
// Canceled
}
});
} else {
Swal.fire('Error', 'You cannot activate your own customer record', 'error');
}
}
private updateCustomer() {
this.user.userId = this.tokenService.getUserName();
this.customer.name = this.name.value;
this.customer.email = this.userName.value;
this.customer.address = this.address.value;
this.customer.contact1 = this.contact1.value;
this.customer.contact2 = this.contact2.value;
this.customer.fax = this.fax.value;
this.customer.description = ServiceUtil.getUpdateCustomerDescription();
this.customer.type = this.type.value;
this.customer.userId = this.tokenService.getUserName();
this.customerUser.roleNameList = new Array<string>();
this.assignedRoles.forEach((value, key) => {
this.customerUser.roleNameList.push(key);
});
this.customerUser.user = this.user;
this.customerUser.customer = this.customer;
console.log('Customer User : ', this.customerUser);
Swal.fire({
title: 'Are you sure?',
text: 'Update customer : {0}'.replace('{0}', this.customer.name),
icon: 'warning',
showCancelButton: true,
confirmButtonText: 'Yes',
cancelButtonText: 'No'
}).then((result) => {
if (result.value) {
this.userControllerService.updateCustomerUsingPUT(this.customerUser).subscribe(response => {
console.log('Updated Customer :', response);
Swal.fire('Success', 'Customer successfully updated', 'success').then(value => {
this.router.navigate(['/pages/customer/main']);
});
});
} else if (result.dismiss === Swal.DismissReason.cancel) {
// Canceled
}
});
}
}
|
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* get_value.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: rle <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/05/05 22:03:03 by rle #+# #+# */
/* Updated: 2017/05/05 22:15:52 by rle ### ########.fr */
/* */
/* ************************************************************************** */
#include <ft_db.h>
/*
** Copies the text after the first ':' in `line` into a freshly allocated,
** zero-filled buffer of the field's value_size. Returns 1 on success,
** 0 when malloc fails or no ':' is present.
** Fixed: the copy loop is now bounded by value_size (the original wrote past
** the buffer when the value was longer), and malloc failure is checked.
*/
int		get_value(char *line, struct s_command *command,
			struct s_header *header)
{
	int			i;
	int			j;
	uint64_t	size;

	j = 0;
	i = 0;
	size = header->fields[command->field].value_size;
	command->value = (char *)malloc(sizeof(char) * size);
	if (command->value == NULL)
		return (0);
	ft_bzero(command->value, size);
	while (line[i] && line[i] != ':')
		i++;
	if (!line[i++])
		return (0);
	while (line[i] && (uint64_t)j < size)
	{
		((char *)command->value)[j] = line[i];
		i++;
		j++;
	}
	return (1);
}
/*
** Returns the number of characters after the first ':' in `line`,
** or (uint64_t)-1 when the line contains no ':' at all.
*/
uint64_t	value_size(char *line)
{
	uint64_t	len;

	while (*line && *line != ':')
		line++;
	if (*line == '\0')
		return ((uint64_t)-1);
	line++;
	len = 0;
	while (line[len])
		len++;
	return (len);
}
|
<html>
<head>
<title>Toggle between List and Card View</title>
</head>
<body>
<h3> Toggle between List and Card View </h3>
<!-- Radio buttons switch the container between list-view and card-view
     classes. NOTE(review): no CSS rules for these classes exist in this file,
     so the toggle currently has no visible effect. -->
<div class="container">
  <input type="radio" name="view" value="list" id="list-view">
  <label for="list-view">List View</label>
  <input type="radio" name="view" value="card" id="card-view">
  <label for="card-view">Card View</label>
  <ul class="items">
    <li>Item 1</li>
    <li>Item 2</li>
    <li>Item 3</li>
    <li>Item 4</li>
  </ul>
</div>
<script>
  const container = document.querySelector('.container');
  const cards = document.querySelectorAll('.items li');

  // Swap the container's view class; CSS is expected to restyle the items.
  function showListView() {
    container.classList.remove('card-view');
    container.classList.add('list-view');
  }

  function showCardView() {
    container.classList.remove('list-view');
    container.classList.add('card-view');
  }

  document.querySelector('#list-view').addEventListener('change', showListView);
  document.querySelector('#card-view').addEventListener('change', showCardView);
</script>
</body>
</html>
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
echo "=============================================================================================================="
echo "Please run the script as: "
echo "bash run.sh RANK_TABLE_FILE RANK_SIZE RANK_START /path/weight_file /path/OTB"
echo "For example: bash run_distributed_test.sh /path/rank_table.json 16 0 weight_file /data/OTB"
echo "It is better to use the absolute path."
echo "=============================================================================================================="

execute_path=$(pwd)
echo ${execute_path}
script_self=$(readlink -f "$0")
self_path=$(dirname "${script_self}")
echo ${self_path}

export RANK_TABLE_FILE=$1
export RANK_SIZE=$2
DEVICE_START=$3
WEIGHT_FILE=$4

# Launch one evaluation process per rank, each in its own working directory.
for((i=0;i<$RANK_SIZE;i++));
do
  export RANK_ID=$i
  export DEVICE_ID=$((DEVICE_START + i))
  echo "Start test for rank $RANK_ID, device $DEVICE_ID."
  if [ -d ${execute_path}/eval_device${DEVICE_ID} ]; then
    rm -rf ${execute_path}/eval_device${DEVICE_ID}
  fi
  mkdir ${execute_path}/eval_device${DEVICE_ID}
  cp -f eval.py ${execute_path}/eval_device${DEVICE_ID}
  cp -rf src ${execute_path}/eval_device${DEVICE_ID}
  cd ${execute_path}/eval_device${DEVICE_ID} || exit
  # Fixed: "$5>" ran the redirection glued to the argument; separate them.
  python3.7 -u eval.py --distributed 'True' --weight_file ${WEIGHT_FILE} --dataset_path "$5" > eval_log$i 2>&1 &
  cd ..
done
wait

filename=`echo ${WEIGHT_FILE##*/} |awk -F. '{print $1}'`
# NOTE(review): the original had the corrupted command substitution
# "$(unknown)" here; ${filename} (computed above and otherwise unused) is the
# most plausible intent — confirm against the upstream repository.
bboxes_folder="results_on_test_images_part2/${filename}.-0.5"
python3 create_plots.py --bboxes_folder ${execute_path}/${bboxes_folder} > eval_result.txt
|
#!/bin/bash
# Run the ENCODE ATAC-seq pipeline test case ENCSR356KRQ (no duplicate
# removal) locally via Cromwell, then list the filtered BAMs from shard 0.
# Assumes $CODE points at the checked-out pipeline repositories and that
# cromwell-30.1.jar is on PATH.
CROMWELL_JAR=$(which cromwell-30.1.jar)
WDL=$CODE/atac-seq-pipeline/atac.wdl
INPUT=$CODE/atac-seq-pipeline-test-data/scripts/ENCSR356KRQ_no_dup_removal.json
WF_OPT=$CODE/atac-seq-pipeline/workflow_opts/docker.json
BACKEND_CONF=$CODE/atac-seq-pipeline/backends/backend.conf
BACKEND=Local

# Work in a dedicated directory so Cromwell's outputs are isolated.
mkdir -p ENCSR356KRQ_no_dup_removal && cd ENCSR356KRQ_no_dup_removal
java -Dconfig.file=${BACKEND_CONF} -Dbackend.default=${BACKEND} -jar ${CROMWELL_JAR} run ${WDL} -i ${INPUT} -o ${WF_OPT}
# Show the deduplicated/filtered BAM outputs of the first shard.
find $PWD -name '*.filt.bam' | grep -v glob | grep shard-0
cd ..
# Middleman site configuration: Redcarpet markdown with fenced code blocks,
# smart quotes, tables, and intra-word underscores left alone.
set :markdown_engine, :redcarpet
set :markdown, fenced_code_blocks: true, smartypants: true, tables: true, no_intra_emphasis: true

# Asset directories.
set :css_dir, 'stylesheets'
set :js_dir, 'javascripts'
set :images_dir, 'images'

helpers do
  # Extracts the latest version tag (e.g. "v1.2.3") from the changelog;
  # memoized, returns nil if no match is found.
  def version
    @version ||= File.read('source/_changelog.md').match(/(v\d+\.[\w\.]*)/).try(:[], 1)
  end
end

# Minify assets only for production builds.
configure :build do
  activate :minify_css
  activate :minify_javascript
end
|
-- Customers who paid by credit card and purchased within the last 3 months
-- (inclusive of today).
SELECT *
FROM customers
WHERE payment_method = 'Credit Card' AND
      purchase_date BETWEEN CURDATE() - INTERVAL 3 MONTH AND CURDATE();
<reponame>amygdaloideum/browser-env-vars<filename>test/main.js<gh_stars>1-10
const expect = require('chai').expect;
const sinon = require('sinon');
const fs = require('fs');
const service = require('../index');
let s;
// Unit tests for browser-env-vars' generate(). All fs calls are stubbed with
// sinon, so no real files are touched; whatever generate() appends to its
// output file is captured in `output`.
// NOTE(review): readFileSyncStub is declared but never assigned.
describe('Generate()', function () {
  var readFileSyncStub, unlinkSyncStub, existsSyncStub;
  let output;

  // Stub fs.readFileSync to simulate the .env file's contents.
  const mockEnvFile = content => s.stub(fs, 'readFileSync').returns(content);
  // Control fs.existsSync per path (e.g. '.env', 'config.js').
  const setFileExists = (file, exists) => existsSyncStub.withArgs(file).returns(exists);

  beforeEach(function() {
    output = '';
    process.env = {}; // isolate each test from the real environment
    s = sinon.sandbox.create();
    s.stub(fs, 'appendFileSync').callsFake(function fakeFn(path, data) {
      output += data;
    });
    unlinkSyncStub = s.stub(fs, 'unlinkSync');
    existsSyncStub = s.stub(fs, 'existsSync');
  });

  afterEach(function() {
    s.restore();
  });

  it('should take values from .env', function () {
    setFileExists('.env', true);
    mockEnvFile('test=val\ntest2=val2');
    service.generate();
    expect(output).to.equal(`module.exports = {\n  "test": "val",\n  "test2": "val2"\n}`);
  });

  it('should use the whitelisted values from process.env', function () {
    setFileExists('.env', false);
    mockEnvFile('');
    process.env.test='value';
    process.env.test2='value2';
    process.env.notWhitelistedKey='notWhitelistedValue';
    const options = {
      whitelist: ['test', 'test2'],
    };
    service.generate(options);
    expect(output).to.equal('module.exports = {\n  "test": "value",\n  "test2": "value2"\n}');
  });

  it('should ignore whitelisted values that does not exist in the enviroment', function () {
    setFileExists('.env', false);
    mockEnvFile('');
    process.env.TEST='value';
    const options = {
      whitelist: ['TEST', 'VALUE_THAT_DOES_NOT_EXIST_IN_THE_ENV'],
    };
    service.generate(options);
    expect(output).to.equal('module.exports = {\n  "TEST": "value"\n}');
  });

  it('should prioritize values from env over values read from the .env file', function () {
    setFileExists('.env', true);
    mockEnvFile('DUPE=valueFromFile\nVALUE_FILE=fileValue');
    process.env.DUPE='valueFromProcessEnv';
    process.env.ENV_VALUE='envValue';
    process.env.notWhitelistedKey='notWhitelistedValue';
    const options = {
      whitelist: ['DUPE', 'ENV_VALUE'],
    };
    service.generate(options);
    expect(output).to.equal('module.exports = {\n  "DUPE": "valueFromProcessEnv",\n  "VALUE_FILE": "fileValue",\n  "ENV_VALUE": "envValue"\n}');
  });

  it('should delete the previous output file if it exists', function () {
    setFileExists('.env', false);
    setFileExists('config.js', true);
    service.generate();
    expect(unlinkSyncStub.called).to.be.true;
  });

  it('should not attempt to delete the previous output file if it does not exists', function () {
    setFileExists('.env', false);
    setFileExists('config.js', false);
    service.generate();
    expect(unlinkSyncStub.called).to.be.false;
  });

  it('should export a json file if the outFile option has a json file extension', function () {
    setFileExists('.env', false);
    setFileExists('config.js', false);
    process.env.TEST='value';
    const options = {
      whitelist: ['TEST'],
      outFile: 'config.json',
    };
    service.generate(options);
    expect(output).to.equal('{\n  "TEST": "value"\n}');
  });

  // NOTE(review): exact duplicate of the previous test (same name and body) —
  // likely a copy-paste leftover; consider removing or adapting it.
  it('should export a json file if the outFile option has a json file extension', function () {
    setFileExists('.env', false);
    setFileExists('config.js', false);
    process.env.TEST='value';
    const options = {
      whitelist: ['TEST'],
      outFile: 'config.json',
    };
    service.generate(options);
    expect(output).to.equal('{\n  "TEST": "value"\n}');
  });

  it('should read from the specified readFile path if provided', function () {
    setFileExists('.mycustomfile', true);
    setFileExists('config.js', false);
    mockEnvFile('TEST=value');
    const options = {
      whitelist: ['TEST'],
      readFile: '.mycustomfile'
    };
    service.generate(options);
    expect(output).to.equal('module.exports = {\n  "TEST": "value"\n}');
  });

  it('should output a ES6 module if the esm flag is set', function () {
    setFileExists('.env', false);
    setFileExists('config.js', false);
    process.env.TEST = 'value';
    const options = {
      whitelist: ['TEST'],
      esm: true,
    };
    service.generate(options);
    expect(output).to.equal('export default {\n  "TEST": "value"\n}');
  });
});
#!/usr/bin/env bash
# Build or run a SoftEther VPN docker image.
# Usage: script [build|up] [slim|alpine] [rtm|src] [version]
source ./docker/.env

COMMAND="${1:-build}"
IMAGE="${2:-slim}"
EDITION="${3:-rtm}"
# NOTE(review): the original default was "latest", which made the -z fallback
# below unreachable dead code; leaving VERSION empty restores the intended
# edition-specific default. Confirm "latest" was not relied upon.
VERSION="${4:-}"

if [[ $IMAGE != "alpine" && $IMAGE != "slim" ]]; then
  echo "Unsupported image $IMAGE"
  exit 5
fi

if [[ $EDITION != "src" && $EDITION != "rtm" ]]; then
  echo "Unsupported image $EDITION"
  exit 5
fi

# Default version depends on the edition (RTM beta vs source release).
if [[ -z "$VERSION" ]]; then
  VERSION=$([[ "$EDITION" == "rtm" ]] && echo "v4.34-9745-beta" || echo "5.01.9674")
fi

dockerfile="docker/dockerfile/sevpn.$EDITION.$IMAGE.Dockerfile"
tag="softethervpn:$IMAGE-$EDITION-$VERSION"

if [[ $COMMAND == "build" ]]; then
  docker build -f "$dockerfile" --build-arg VPN_VERSION="$VERSION" -t "$tag" ./docker
elif [[ $COMMAND == "up" ]]; then
  # Persist the chosen configuration for docker-compose.
  cat <<EOT >docker/dev-vpnserver.env
IMAGE=$IMAGE
EDITION=$EDITION
VERSION=$VERSION
EOT
  docker-compose -f docker/vpnserver-dkc.yml --env-file docker/dev-vpnserver.env up
fi
|
<filename>ruby/URI_1070.rb
# URI 1070: read an integer and print the six consecutive odd numbers that
# come strictly after it, one per line.
x = gets.to_i
n = x.even? ? x + 1 : x + 2
6.times do
  puts n
  n += 2
end
#!/bin/bash
# Use nc to create a bidirectional link between one IP address/port ${1}:${2} and another ${3}:${4}
IP_LEFT=$1
PORT_LEFT=$2
IP_RIGHT=$3
PORT_RIGHT=$4
ESPEC=$5
ISPEC=$6

# Two named pipes cross-connect the listener and the outgoing connection.
rm -f fifo*
mkfifo fifo-left
mkfifo fifo-right

nc -4 -k -l ${IP_LEFT} ${PORT_LEFT} < fifo-left | cat > fifo-right &
sleep 2
nc -4 ${IP_RIGHT} ${PORT_RIGHT} < fifo-right | cat > fifo-left &
sleep 2

# Count running netcat processes. Fixed: the original had a space after "N="
# (which ran the backquoted command instead of assigning it) and an
# unterminated quote in the grep pattern.
N=$(ps -ef | grep "nc -4" | grep -v grep | wc -l)
if [ "$N" -ne 2 ]
then
    echo "ERROR: 2 netcat processes should be started, only $N found"
else
    echo "SUCCESS"
fi
|
<gh_stars>1-10
# Use case: update an existing employer record found by FEIN, merging in any
# contact details, plan year, and attributes present in the request hash.
# Collaborators (repository and the four factories) are injected so the class
# stays persistence- and format-agnostic.
class UpdateEmployer
  def initialize(repository, address_factory, phone_factory, email_factory, plan_year_factory)
    @repository = repository
    @address_factory = address_factory
    @phone_factory = phone_factory
    @email_factory = email_factory
    @plan_year_factory = plan_year_factory
  end

  # Applies the request to the employer looked up by request[:fein] and saves.
  # Scalar attributes are only overwritten when present and non-empty.
  def execute(request)
    @employer = @repository.find_for_fein(request[:fein])
    @request = request
    @requested_contact = request[:contact]
    @employer.name = request[:name] unless (request[:name].nil? || request[:name].empty?)
    @employer.hbx_id = request[:hbx_id] unless (request[:hbx_id].nil? || request[:hbx_id].empty?)
    @employer.fein = request[:fein] unless (request[:fein].nil? || request[:fein].empty?)
    @employer.sic_code = request[:sic_code] unless (request[:sic_code].nil? || request[:sic_code].empty?)
    @employer.notes = request[:notes] unless (request[:notes].nil? || request[:notes].empty?)
    update_address
    update_phone
    update_email
    update_plan_year
    update_contact
    @employer.save!
  end

  private

  # Each update_* helper is a no-op when the request omits that section.
  def update_address
    return unless @requested_contact[:address]
    address = @address_factory.make(@requested_contact[:address])
    @employer.merge_address(address)
  end

  def update_phone
    return unless @requested_contact[:phone]
    phone = @phone_factory.make(@requested_contact[:phone])
    @employer.merge_phone(phone)
  end

  def update_email
    return unless @requested_contact[:email]
    email = @email_factory.make(@requested_contact[:email])
    @employer.merge_email(email)
  end

  # Builds a plan year from the request's enrollment/plan fields and merges it.
  def update_plan_year
    plan_year = @plan_year_factory.make({
      open_enrollment_start: @request[:open_enrollment_start],
      open_enrollment_end: @request[:open_enrollment_end],
      start_date: @request[:plan_year_start],
      end_date: @request[:plan_year_end],
      plans: @request[:plans],
      broker_npn: @request[:broker_npn],
      fte_count: @request[:fte_count],
      pte_count: @request[:pte_count]})
    @employer.merge_plan_year(plan_year)
  end

  def update_contact
    return unless @requested_contact[:name]
    @employer.update_contact(@request[:contact][:name])
  end
end
|
//============================================================================
// Copyright 2009-2018 ECMWF.
// This software is licensed under the terms of the Apache Licence version 2.0
// which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
// In applying this licence, ECMWF does not waive the privileges and immunities
// granted to it by virtue of its status as an intergovernmental organisation
// nor does it submit to any jurisdiction.
//============================================================================
#include "ModelColumn.hpp"
#include <map>
#include <QDebug>
#include "DiagData.hpp"
#include "VConfig.hpp"
#include "VConfigLoader.hpp"
#include "VProperty.hpp"
#include "VSettingsLoader.hpp"
// Global registry of all ModelColumn instances, keyed by their string id.
static std::map<std::string,ModelColumn*> defs;

// Registers the new column set in the global registry.
// diagStart_/diagEnd_ == -1 means "no diagnostic columns present".
ModelColumn::ModelColumn(const std::string& id) : id_(id), diagStart_(-1), diagEnd_(-1)
{
    defs[id_]=this;
}

// Looks up a column set by id; returns nullptr when it was never loaded.
ModelColumn* ModelColumn::def(const std::string& id)
{
    std::map<std::string,ModelColumn*>::const_iterator it=defs.find(id);
    if(it != defs.end())
        return it->second;
    return nullptr;
}

// Convenience accessor for the table view's column set.
ModelColumn* ModelColumn::tableModelColumn()
{
    return ModelColumn::def("table_columns");
}

//int ModelColumn::indexOf(const std::string& id) const
//{
//    return indexOf(QString::fromStdString(id));
//}

// Returns the position of the item with the given id, or -1 if absent.
int ModelColumn::indexOf(QString id) const
{
    for(int i=0; i< items_.count(); i++)
    {
        if(items_.at(i)->id_ == id)
            return i;
    }
    return -1;
}

// Creates a column item from a config property (label, tooltip, icon) and
// appends it to the item list.
void ModelColumn::loadItem(VProperty *p)
{
    auto* obj=new ModelColumnItem(p->strName());
    obj->label_=p->param("label");
    obj->tooltip_=p->param("tooltip");
    obj->icon_=p->param("icon");
    obj->index_=items_.count();
    items_ << obj;
}
// Adds a user-defined (editable) column. When diagnostic columns exist, the
// extra item is inserted just before them and the diag window is shifted.
void ModelColumn::loadExtraItem(QString name,QString label)
{
    auto* obj=new ModelColumnItem(name.toStdString(),true);
    obj->label_=label;
    obj->tooltip_=obj->label_;
    //obj->icon_=p->param("icon");
    obj->index_=items_.count();
    obj->editable_=true;
    if(hasDiag())
    {
        items_.insert(diagStart_,obj);
        diagStart_++;
        diagEnd_++;
    }
    else
    {
        items_ << obj;
    }
}

// Appends a (non-editable) diagnostic column to the end of the item list.
void ModelColumn::loadDiagItem(QString name,QString label)
{
    auto* obj=new ModelColumnItem(name.toStdString(),true);
    obj->label_=label;
    obj->tooltip_=obj->label_;
    //obj->icon_=p->param("icon");
    obj->index_=items_.count();
    obj->editable_=false;
    items_ << obj;
}

// Adds a new user-defined column (no-op if the id already exists), emitting
// the model signals around the insertion and persisting the change.
void ModelColumn::addExtraItem(QString name,QString label)
{
    if(indexOf(name) != -1)
        return;

    //Editable extra items are always inserted in front of the diag items
    int pos=items_.count();
    if(hasDiag())
    {
        pos=diagStart_;
        Q_ASSERT(pos >=0);
    }

    Q_EMIT addItemsBegin(pos,pos);
    //Q_EMIT appendItemBegin();
    loadExtraItem(name,label);
    //Q_EMIT appendItemEnd();
    Q_EMIT addItemsEnd(pos,pos);

    save();
}

// Renames an existing editable extra column in place (id must be unique).
void ModelColumn::changeExtraItem(int idx, QString name,QString label)
{
    if(indexOf(name) != -1)
        return;

    if(!isExtra(idx) || !isEditable(idx))
        return;

    Q_EMIT changeItemBegin(idx);
    items_[idx]->id_=name;
    items_[idx]->label_=label;
    Q_EMIT changeItemEnd(idx);

    save();
}

// Deletes an editable extra column by id, shifting the diag window back and
// persisting the change.
void ModelColumn::removeExtraItem(QString name)
{
    int idx=indexOf(name);
    if(idx != -1 &&
       items_[idx]->isExtra() && items_[idx]->isEditable())
    {
        Q_EMIT removeItemsBegin(idx,idx);
        ModelColumnItem* obj=items_[idx];
        items_.removeAt(idx);
        delete obj;
        if(hasDiag())
        {
            diagStart_--;
            diagEnd_--;
            Q_ASSERT(diagStart_ >= 0);
        }
        Q_EMIT removeItemsEnd(idx,idx);
        save();
    }
}
// True when the current diag window matches `diag` column-for-column, so
// setDiagData() can skip a rebuild.
bool ModelColumn::isSameDiag(DiagData *diag) const
{
    if(diagStart_ >=0 && diagEnd_ >=0 && diag->count() == diagEnd_-diagStart_+1)
    {
        for(int i=diagStart_; i <= diagEnd_; i++)
        {
            if(items_[i]->id_ !=
               QString::fromStdString(diag->columnName(i-diagStart_)))
            {
                return false;
            }
        }
        return true;
    }
    return false;
}

// Replaces the diagnostic columns with those described by `diag`: removes the
// old diag window (if any), then appends the new columns at the end,
// emitting the model signals around each structural change.
void ModelColumn::setDiagData(DiagData *diag)
{
    if(isSameDiag(diag))
        return;

    //Remove the current diag items
    if(diagStart_ >=0 && diagEnd_ >=0)
    {
        Q_EMIT removeItemsBegin(diagStart_,diagEnd_);
        for(int i=diagStart_; i <= diagEnd_; i++)
        {
            // Always erase at diagStart_: items shift left after each removal.
            ModelColumnItem* obj=items_[diagStart_];
            items_.removeAt(diagStart_);
            delete obj;
        }
        Q_EMIT removeItemsEnd(diagStart_,diagEnd_);
        diagStart_=-1;
        diagEnd_=-1;
    }

    //Add the current diag items to the back of the items
    if(diag->count() <=0)
        return;

    diagStart_=items_.count();
    diagEnd_=items_.count()+diag->count()-1;
    Q_ASSERT(diagStart_ >= 0);
    Q_ASSERT(diagStart_ <= diagEnd_);

    Q_EMIT addItemsBegin(diagStart_,diagEnd_);
    for(int i=0; i < diag->count(); i++)
    {
        QString n=QString::fromStdString(diag->columnName(i));
        loadDiagItem(n,n);// these are not editable items!!!
    }
    Q_EMIT addItemsEnd(diagStart_,diagEnd_);
}

// Persists the ids of the editable extra columns (joined with '/') into the
// config property; falls back to the property's default when there are none.
void ModelColumn::save()
{
    if(!configPath_.empty())
    {
        if(VProperty* prop=VConfig::instance()->find(configPath_))
        {
            QStringList lst;
            for(int i=0; i < items_.count(); i++)
            {
                if(items_[i]->isExtra() && items_[i]->isEditable())
                    lst << items_[i]->id_;
            }
            if(lst.isEmpty())
                prop->setValue(prop->defaultValue());
            else
                prop->setValue(lst.join("/"));
        }
    }
}

// Builds a ModelColumn from a config group: one item per child property,
// remembering the config path used for persisting user-added columns.
void ModelColumn::load(VProperty* group)
{
    Q_ASSERT(group);
    auto* m=new ModelColumn(group->strName());
    for(int i=0; i < group->children().size(); i++)
    {
        VProperty *p=group->children().at(i);
        m->loadItem(p);
    }

    //Define extra config property
    m->configPath_=group->param("__config__").toStdString();
}
//Called via VSettingsLoader after the users settings are read
void ModelColumn::loadSettings()
{
    for(auto it=defs.begin(); it != defs.end(); ++it)
    {
        it->second->loadUserSettings();
    }
}

//Load user defined settings
// Re-creates the user's extra columns from the persisted '/'-separated id
// list; "__none__" (or empty) means no extra columns were saved.
void ModelColumn::loadUserSettings()
{
    //Load extra config
    if(!configPath_.empty())
    {
        if(VProperty* p=VConfig::instance()->find(configPath_))
        {
            QString pval=p->valueAsString();
            if(!pval.isEmpty() && pval != "__none__")
            {
                Q_FOREACH(QString s,pval.split("/"))
                {
                    loadExtraItem(s,s);
                }
            }
        }
    }
}

// Item ctor: `extra` marks user-added columns; such columns start editable.
ModelColumnItem::ModelColumnItem(const std::string& id, bool extra) :
    id_(QString::fromStdString(id)),index_(-1), extra_(extra), editable_(extra)
{
}

// Static loaders register each column set with the config/settings machinery.
static SimpleLoader<ModelColumn> loaderQuery("query_columns");
static SimpleLoader<ModelColumn> loaderTable("table_columns");
static SimpleLoader<ModelColumn> loaderZombie("zombie_columns");
static SimpleLoader<ModelColumn> loaderTriggerGraph("trigger_graph_columns");
static SimpleLoader<ModelColumn> loaderOutput("output_columns");
static SimpleSettingsLoader<ModelColumn> settingsLoader;
|
#!/bin/bash
#
# @file wp_search_replace.sh
#
# Do a search and replace on a WP database using WP-CLI, including any
# serialized data (IMPORTANT!!!). Run from within anywhere in the site itself.
# This is most useful for fixing any hardcoded domains that WP creates during
# uploads, etc.
#
# USAGE: wp_search_replace.sh [-n] old_domain.com new_domain.com
#
# For **Multisite** pass the -n argument, its necessary
#
# @author @dbsinteractive 2014-11-31
#
#######################################################
#######################################################
wp=wp
# sometimes root is a good thing
#wp="wp --allow-root"

[ "$1" == "-n" ] && network=" --network" && shift

clear
! which wp > /dev/null && echo wp-cli not installed, aborting. FIXME! && exit 1
! [ "$1" ] && echo 'USAGE: wp_search_replace.sh [-n] old_domain.com new_domain.tld' && exit 1
! [ "$2" ] && echo 'USAGE: wp_search_replace.sh [-n] old_domain.com new_domain.tld' && exit 1

echo Your are about to update a WP database, please have a current backup handy.
# Fixed: the original ended the next line with a backslash continuation, which
# folded the db query into the echo's arguments; grep then matched nothing on
# the echoed text and "|| exit 1" aborted the script every time. Print the
# label, then actually run the query.
echo -n "Selected database is: "
$wp db query "select database()" | grep -v "database(\|*" || exit 1
echo
echo If this is not the correct database, cancel now ctrl-c, and use defined\(\'WP_CLI\'\)
echo to select the correct database in wp-config.php.
echo Press any key when ready, sir, or ctrl-c to cancel
read
clear
echo Let\'s do a dry run first, OK? Press any key.
read
$wp search-replace $network $1 $2 --dry-run
echo Look OK? If not, ctrl-c to cancel, anything else to give it a go for real this time.
read
echo running for real now ...
sleep 1
$wp search-replace $network $1 $2
echo Done.
|
<gh_stars>10-100
package chylex.hee.gui.helpers;
import gnu.trove.map.hash.TIntObjectHashMap;
import org.apache.commons.lang3.BooleanUtils;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;

/**
 * Client-side helper that remembers the held/released state of a small set
 * of key codes. Only keys registered via {@link #startTracking(int)} are
 * tracked; state updates for unknown keys are silently ignored.
 */
@SideOnly(Side.CLIENT)
public final class KeyState{
	private static final TIntObjectHashMap<Boolean> keyMap = new TIntObjectHashMap<>(8);
	
	/** Begins tracking the key; the initial state is "not held". */
	public static void startTracking(int keyCode){
		keyMap.putIfAbsent(keyCode, Boolean.FALSE);
	}
	
	/** Stops tracking the key and forgets its state. */
	public static void stopTracking(int keyCode){
		keyMap.remove(keyCode);
	}
	
	/** Records the new state, but only for keys that are already tracked. */
	public static void setState(int keyCode, boolean isHeld){
		if (keyMap.contains(keyCode))keyMap.put(keyCode, isHeld ? Boolean.TRUE : Boolean.FALSE);
	}
	
	/** Returns true only when the key is tracked and currently held. */
	public static boolean isHeld(int keyCode){
		return Boolean.TRUE.equals(keyMap.get(keyCode));
	}
}
|
# Copyright (c) 2017 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import random
import numpy as np
import os
import sys
import nnabla as nn
import nnabla.functions as F
import nnabla.parametric_functions as PF
import nnabla.solvers as S
from nnabla.ext_utils import get_extension_context
import nnabla.communicators as C
from nnabla.monitor import Monitor, MonitorSeries, MonitorTimeElapsed
import glob
import cv2
from utils import *
from models import rrdb_net
from utils.lr_scheduler import get_repeated_cosine_annealing_learning_rate, get_multistep_learning_rate
from datetime import datetime
from utils.util import array_to_image, calculate_psnr
from args import get_config
# Calculate PSNR and save validation image
def val_save(val_gt, val_lq, val_lq_path, idx, epoch, avg_psnr):
conf = get_config()
sr_img = rrdb_net(val_lq, 64, 23)
real_image = array_to_image(val_gt.data)
sr_image = array_to_image(sr_img.data)
img_name = os.path.splitext(
os.path.basename(val_lq_path[idx]))[0]
img_dir = os.path.join(
conf.val.save_results + "/results", img_name)
if not os.path.exists(img_dir):
os.makedirs(img_dir)
save_img_path = os.path.join(
img_dir, '{:s}_{:d}.png'.format(img_name, epoch))
cv2.imwrite(save_img_path, sr_image)
crop_size = conf.train.scale
cropped_sr_image = sr_image[crop_size:-
crop_size, crop_size:-crop_size, :]
cropped_real_image = real_image[crop_size:-
crop_size, crop_size:-crop_size, :]
avg_psnr += calculate_psnr(cropped_sr_image, cropped_real_image)
print("validating", img_name)
return avg_psnr
def main():
    """Train the RRDB/ESRGAN super-resolution model.

    Runs PSNR-oriented RRDB training by default; when ``conf.model.esrgan``
    is set, additionally builds and trains the ESRGAN discriminator with
    feature and GAN losses. Supports multi-process data-parallel training
    through ``CommunicatorWrapper``; rank 0 handles logging, validation and
    checkpointing.
    """
    conf = get_config()
    # Paired GT/LQ file lists; sorting keeps the pairs aligned by name.
    train_gt_path = sorted(glob.glob(conf.DIV2K.gt_train + "/*.png"))
    train_lq_path = sorted(glob.glob(conf.DIV2K.lq_train + "/*.png"))
    val_gt_path = sorted(glob.glob(conf.SET14.gt_val + "/*.png"))
    val_lq_path = sorted(glob.glob(conf.SET14.lq_val + "/*.png"))
    train_samples = len(train_gt_path)
    val_samples = len(val_gt_path)
    lr_g = conf.hyperparameters.lr_g
    lr_d = conf.hyperparameters.lr_d
    lr_steps = conf.train.lr_steps
    # Seed both RNGs for reproducibility.
    random.seed(conf.train.seed)
    np.random.seed(conf.train.seed)
    extension_module = conf.nnabla_context.context
    ctx = get_extension_context(
        extension_module, device_id=conf.nnabla_context.device_id)
    comm = CommunicatorWrapper(ctx)
    nn.set_default_context(comm.ctx)
    # data iterators for train and val data
    from data_loader import data_iterator_sr
    data_iterator_train = data_iterator_sr(
        train_samples, conf.train.batch_size, train_gt_path, train_lq_path, train=True, shuffle=True)
    data_iterator_val = data_iterator_sr(
        val_samples, conf.val.batch_size, val_gt_path, val_lq_path, train=False, shuffle=False)
    # Each process trains on its own slice of the training data.
    if comm.n_procs > 1:
        data_iterator_train = data_iterator_train.slice(
            rng=None, num_of_slices=comm.n_procs, slice_pos=comm.rank)
    # Input variables: GT at full size, LQ downscaled by the SR factor.
    train_gt = nn.Variable(
        (conf.train.batch_size, 3, conf.train.gt_size, conf.train.gt_size))
    train_lq = nn.Variable(
        (conf.train.batch_size, 3, conf.train.gt_size // conf.train.scale, conf.train.gt_size // conf.train.scale))
    # setting up monitors for logging
    monitor_path = './nnmonitor' + str(datetime.now().strftime("%Y%m%d%H%M%S"))
    monitor = Monitor(monitor_path)
    monitor_pixel_g = MonitorSeries(
        'l_g_pix per iteration', monitor, interval=100)
    monitor_val = MonitorSeries(
        'Validation loss per epoch', monitor, interval=1)
    monitor_time = MonitorTimeElapsed(
        "Training time per epoch", monitor, interval=1)
    # Start from pretrained generator weights.
    with nn.parameter_scope("gen"):
        nn.load_parameters(conf.train.gen_pretrained)
    fake_h = rrdb_net(train_lq, 64, 23)
    fake_h.persistent = True
    # L1 pixel loss between generated and ground-truth images.
    pixel_loss = F.mean(F.absolute_error(fake_h, train_gt))
    pixel_loss.persistent = True
    gen_loss = pixel_loss
    if conf.model.esrgan:
        # ESRGAN mode: total generator loss = weighted pixel + feature + GAN.
        from esrgan_model import get_esrgan_gen, get_esrgan_dis, get_esrgan_monitors
        gen_model = get_esrgan_gen(conf, train_gt, train_lq, fake_h)
        gen_loss = conf.hyperparameters.eta_pixel_loss * pixel_loss + conf.hyperparameters.feature_loss_weight * gen_model.feature_loss + \
            conf.hyperparameters.lambda_gan_loss * gen_model.loss_gan_gen
        dis_model = get_esrgan_dis(fake_h, gen_model.pred_d_real)
        # Set Discriminator parameters
        solver_dis = S.Adam(lr_d, beta1=0.9, beta2=0.99)
        with nn.parameter_scope("dis"):
            solver_dis.set_parameters(nn.get_parameters())
        esr_mon = get_esrgan_monitors()
    # Set generator Parameters
    solver_gen = S.Adam(alpha=lr_g, beta1=0.9, beta2=0.99)
    with nn.parameter_scope("gen"):
        solver_gen.set_parameters(nn.get_parameters())
    # Iterations per epoch per process.
    train_size = int(
        train_samples / conf.train.batch_size / comm.n_procs)
    total_epochs = conf.train.n_epochs
    start_epoch = 0
    current_iter = 0
    if comm.rank == 0:
        print("total_epochs", total_epochs)
        print("train_samples", train_samples)
        print("val_samples", val_samples)
        print("train_size", train_size)
    for epoch in range(start_epoch + 1, total_epochs + 1):
        index = 0
        # Training loop for psnr rrdb model
        while index < train_size:
            # NOTE(review): current_iter is only advanced on rank 0; the LR
            # schedules below read it on every rank - confirm this is intended.
            if comm.rank == 0:
                current_iter += comm.n_procs
            train_gt.d, train_lq.d = data_iterator_train.next()
            if not conf.model.esrgan:
                # PSNR phase: cosine-annealed generator LR with restarts.
                lr_g = get_repeated_cosine_annealing_learning_rate(
                    current_iter, conf.hyperparameters.eta_max, conf.hyperparameters.eta_min, conf.train.cosine_period,
                    conf.train.cosine_num_period)
            if conf.model.esrgan:
                # GAN phase: multistep LR decay for the generator, and a
                # forward pass of D on the real batch with its gradient
                # temporarily disabled for the generator update below.
                lr_g = get_multistep_learning_rate(
                    current_iter, lr_steps, lr_g)
                gen_model.var_ref.d = train_gt.d
                gen_model.pred_d_real.grad.zero()
                gen_model.pred_d_real.forward(clear_no_need_grad=True)
                gen_model.pred_d_real.need_grad = False
            # Generator update
            gen_loss.forward(clear_no_need_grad=True)
            solver_gen.zero_grad()
            # All-reduce gradients every 2MiB parameters during backward computation
            if comm.n_procs > 1:
                with nn.parameter_scope('gen'):
                    all_reduce_callback = comm.get_all_reduce_callback()
                gen_loss.backward(clear_buffer=True,
                                  communicator_callbacks=all_reduce_callback)
            else:
                gen_loss.backward(clear_buffer=True)
            solver_gen.set_learning_rate(lr_g)
            solver_gen.update()
            # Discriminator update (ESRGAN mode only)
            if conf.model.esrgan:
                gen_model.pred_d_real.need_grad = True
                lr_d = get_multistep_learning_rate(
                    current_iter, lr_steps, lr_d)
                solver_dis.zero_grad()
                dis_model.l_d_total.forward(clear_no_need_grad=True)
                if comm.n_procs > 1:
                    with nn.parameter_scope('dis'):
                        all_reduce_callback = comm.get_all_reduce_callback()
                    dis_model.l_d_total.backward(
                        clear_buffer=True, communicator_callbacks=all_reduce_callback)
                else:
                    dis_model.l_d_total.backward(clear_buffer=True)
                solver_dis.set_learning_rate(lr_d)
                solver_dis.update()
            index += 1
            if comm.rank == 0:
                monitor_pixel_g.add(
                    current_iter, pixel_loss.d.copy())
                # NOTE(review): elapsed time is logged inside the iteration
                # loop with an epoch-scaled index - confirm placement.
                monitor_time.add(epoch * comm.n_procs)
            if comm.rank == 0 and conf.model.esrgan:
                esr_mon.monitor_feature_g.add(
                    current_iter, gen_model.feature_loss.d.copy())
                esr_mon.monitor_gan_g.add(
                    current_iter, gen_model.loss_gan_gen.d.copy())
                esr_mon.monitor_gan_d.add(
                    current_iter, dis_model.l_d_total.d.copy())
                esr_mon.monitor_d_real.add(current_iter, F.mean(
                    gen_model.pred_d_real.data).data)
                esr_mon.monitor_d_fake.add(current_iter, F.mean(
                    gen_model.pred_g_fake.data).data)
        # Validation Loop (rank 0 only): average PSNR over the whole val set.
        if comm.rank == 0:
            avg_psnr = 0.0
            for idx in range(val_samples):
                val_gt_im, val_lq_im = data_iterator_val.next()
                val_gt = nn.NdArray.from_numpy_array(val_gt_im)
                val_lq = nn.NdArray.from_numpy_array(val_lq_im)
                with nn.parameter_scope("gen"):
                    avg_psnr = val_save(
                        val_gt, val_lq, val_lq_path, idx, epoch, avg_psnr)
            avg_psnr = avg_psnr / val_samples
            monitor_val.add(epoch, avg_psnr)
        # Save generator weights
        if comm.rank == 0:
            if not os.path.exists(conf.train.savemodel):
                os.makedirs(conf.train.savemodel)
            with nn.parameter_scope("gen"):
                nn.save_parameters(os.path.join(
                    conf.train.savemodel, "generator_param_%06d.h5" % epoch))
        # Save discriminator weights
        if comm.rank == 0 and conf.model.esrgan:
            with nn.parameter_scope("dis"):
                nn.save_parameters(os.path.join(
                    conf.train.savemodel, "discriminator_param_%06d.h5" % epoch))
# Entry-point guard: allows importing this module without starting training.
if __name__ == "__main__":
    main()
|
require_relative 'temp_dir'

module Inferno
  module Terminology
    module Tasks
      # Task that registers the UMLS database for the selected version, loads
      # the value sets from the terminology package, and builds validators for
      # bindings at or above the requested strength.
      class CreateValueSetValidators
        include TempDir

        attr_reader :minimum_binding_strength, :version, :delete_existing, :type

        def initialize(minimum_binding_strength:, version:, delete_existing:, type:)
          @type = type.to_sym
          @version = version
          @minimum_binding_strength = minimum_binding_strength
          # Any value other than the literal string 'false' enables deletion.
          @delete_existing = delete_existing != 'false'
        end

        def run
          Loader.register_umls_db(db_for_version)
          Loader.load_value_sets_from_directory(Inferno::Terminology::PACKAGE_DIR, true)
          Loader.create_validators(
            type: type,
            minimum_binding_strength: minimum_binding_strength,
            delete_existing: delete_existing
          )
        end

        # Path of the UMLS database inside the version-specific temp dir.
        def db_for_version
          File.join(versioned_temp_dir, 'umls.db')
        end
      end
    end
  end
end
|
/**
* SPDX-License-Identifier: Apache-2.0
*/
import PeerGraph from './PeerGraph';
const setup = () => {
const props = {
peerList: [
{
requests: "grpcs://127.0.0.1:7051",
server_hostname: "peer0.org1.example.com"
},
{
requests: "grpcs://127.0.0.1:8051",
server_hostname: "peer1.org1.example.com"
},
{
requests: "grpcs://127.0.0.1:9051",
server_hostname: "peer0.org2.example.com"
},
{
requests: "grpcs://127.0.0.1:10051",
server_hostname: "peer1.org2.example.com"
}
]
}
const wrapper = shallow(<PeerGraph {...props} />);
return{
props,
wrapper }
}
// Smoke test: the component should shallow-render without throwing.
describe('PeerGraph', () => {
  test("PeerGraph component should render", () => {
    const { wrapper } = setup();
    expect(wrapper.exists()).toBe(true);
  });
});
|
# Pull in shared helpers (download_compile, $GDB_VERSION, ...).
source global.sh

# Fetch and build GDB as a cross-debugger targeting bare-metal i386 ELF.
# (Removed a stray trailing "|" that left the command as an open pipe.)
download_compile https://ftp.gnu.org/gnu/gdb/gdb-$GDB_VERSION.tar.gz gdb-$GDB_VERSION "--target=i386-elf"
#!/usr/bin/env bash
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--adv_eval --epoch_delay 5 \
--adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
g1
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_input
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--adv_eval --epoch_delay 5 \
--adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
g2
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--adv_eval --epoch_delay 5 \
--adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 169018
cc@sat:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-04-14-04-57-058607111.txt
# no adversarial training
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_input
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 71214
cc@icml-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-04-14-11-03-294585424.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 119881
cc@icml-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-04-14-19-45-778648234.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 113589
cc@wifi:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-04-14-21-24-566295267.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_input
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--adv_eval --epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 74651
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-02-53-25-778977624.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--adv_eval --epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 66643
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-03-00-16-357489616.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 75452
cc@sat:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-03-21-57-962498895.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 73745
cc@f:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-03-25-28-247734754.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_input
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 121227
cc@z:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-03-27-16-250630896.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_adv_train_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 102368
cc@wifi:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-04-08-31-621040073.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 58848
cc@sat:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-05-37-23-891561239.txt
############### Configurations ########################
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 6965
cc@z:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-05-36-01-105993017.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 13099
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-10-33-49-546921320.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_both
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net_both
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
    --optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 100998
-end 29
cc@icml-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-16-40-40-277973375.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_adv_train_vanilla_resnet20
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
    --optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 71337
cc@f:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-16-51-32-572125767.txt
cc@f:~/code/bandlimited-cnns/cnns/nnlib/pytorch_architecture$
[1] Done PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} --data_path ${data_path} --arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} --epochs ${epochs} --learning_rate 0.1 --optimizer ${optimizer} --schedule 80 120 --gammas 0.1 0.1 --batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 --print_freq 100 --decay 0.0003 --momentum 0.9 --epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 (wd: ~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code)
(wd now: ~/code/bandlimited-cnns/cnns/nnlib/pytorch_architecture)
==>>[2020-02-05 21:58:12] [Epoch=159/160] [Need: 00:01:55] [LR=0.0010][M=0.90] [Best : Accuracy=84.15, Error=15.85] Epoch: [159][000/391] Time 0.492 (0.492) Data 0.199 (0.199) Loss 0.7769 (0.7769) Prec@1 93.750 (93.750) Prec@5 99.219 (99.219) [2020-02-05 21:58:12] Epoch: [159][100/391] Time 0.304 (0.312) Data 0.000 (0.002) Loss 0.7494 (0.7410) Prec@1 88.281 (88.877) Prec@5 100.000 (99.613) [2020-02-05 21:58:43]
Epoch: [159][200/391] Time 0.344 (0.300) Data 0.000 (0.001) Loss 0.8205 (0.7450) Prec@1 87.500 (89.043) Prec@5 98.438 (99.677) [2020-02-05 21:59:12]
Epoch: [159][300/391] Time 0.327 (0.299) Data 0.000 (0.001) Loss 0.6536 (0.7461) Prec@1 80.469 (88.741) Prec@5 100.000 (99.676) [2020-02-05 21:59:42] **Train** Prec@1 88.730 Prec@5 99.684 Error@1 11.270
**Adversarial Train** Prec@1 53.920 Prec@5 97.768 Error@1 46.080 **Test** Prec@1 83.760 Prec@5 99.270 Error@1 16.240
---- save figure the accuracy/loss curve of train/val into ./save//cifar10_vanilla_resnet20_160_SGD_train_layerwise_3e-4decay_robust_net/curve.png
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_input
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_no_adv_train_robust_net_input
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 130484
cc@icml-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-18-52-11-960855088.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 2 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 70468
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-19-41-09-712288797.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust # init 0.1 inner 0.1
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net_adv_train
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 79546
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-20-00-18-937669813.txt
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../ nohup /home/${USER}/anaconda3/bin/python3.6 vgg_train.py --paramNoise 0.0 --noiseInit 0.0 --noiseInner 0.0 --net 'vgg16-fft' --compress_rate 85.0 --initializeNoise 0.02 >> ${timestamp}.txt 2>&1 &
echo ${timestamp}.txt
[1] 4844
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/pytorch_architecture$ echo ${timestamp}.txt
2020-02-05-14-52-24-658630568.txt
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../ nohup /home/${USER}/anaconda3/bin/python3.6 vgg_train.py --paramNoise 0.0 --noiseInit 0.0 --noiseInner 0.0 --net 'vgg16-fft' --compress_rate 80.0 --initializeNoise 0.02 >> ${timestamp}.txt 2>&1 &
echo ${timestamp}.txt
[2] 100995
2020-02-05-21-14-46-235774981.txt
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../ nohup /home/${USER}/anaconda3/bin/python3.6 vgg_train.py --paramNoise 0.0 --noiseInit 0.0 --noiseInner 0.0 --net 'vgg16-fft' --compress_rate 70.0 --initializeNoise 0.02 >> ${timestamp}.txt 2>&1 &
echo ${timestamp}.txt
[3] 101291
cc@f:~/code/bandlimited-cnns/cnns/nnlib/pytorch_architecture$ echo ${timestamp}.txt
2020-02-05-21-15-47-337316762.txt
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../ nohup /home/${USER}/anaconda3/bin/python3.6 vgg_train.py --paramNoise 0.0 --noiseInit 0.0 --noiseInner 0.0 --net 'vgg16-fft' --compress_rate 70.0 --initializeNoise 0.02 >> ${timestamp}.txt 2>&1 &
echo ${timestamp}.txt
[4] 101366
cc@f:~/code/bandlimited-cnns/cnns/nnlib/pytorch_architecture$ echo ${timestamp}.txt
2020-02-05-21-16-27-074140847.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_robust_net_init_noise_0.15
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
PYTHONPATH=../../../../../ $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 77816
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-05-21-26-54-710179415.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_adv_train_vanilla_resnet20_plain
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 56312
cc@icml-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-05-30-25-346400537.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_adv_train_vanilla_resnet20_plain_no_adv
#dataset path
# NOTE(review): dataset=svhn above, but data_path points at the cifar10 directory — verify this run used the intended data
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 111266
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-47-05-931286438.txt
[1] 23458
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-16-33-52-502708202.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay_adv_train_vanilla_resnet20_plain
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 112258
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-49-34-905649455.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 62743
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-53-56-198637208.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_02
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 62419
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-53-08-232294426.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 120954
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-56-07-651468314.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_input
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/data/pytorch/cifar10"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 121461
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-06-07-58-23-439637304.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
# NOTE(review): dataset=svhn above, but data_path points at the tiny-224 (tiny-imagenet) directory — verify this run used the intended data
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 137953
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-03-38-56-802012326.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
g2
[2] 138292
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-03-39-59-518614900.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=imagenet
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_02
dataset=imagenet
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=imagenet
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=imagenet
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-224/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=imagenet
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets/tiny-64/"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 90458
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-10-11-635161494.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 90645
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-14-39-557431445.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 90825
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-15-13-385413955.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 91994 cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-16-37-444371274.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_02
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 92817
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-17-21-435364545.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 78569
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-27-54-835694871.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters=40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 79151
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-29-13-558159540.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 80315
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-30-08-183339905.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_02
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 80658
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-30-27-956193475.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 81609
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-36-24-783102383.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 6244
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-08-23-32-29-696181815.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters=40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 82117
cc@g-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-37-38-391999029.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_02
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 48591
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-39-46-508918889.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters=40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 48916
cc@g-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-40-19-667773368.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=stl10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters=40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 52832
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-43-24-525493119.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=stl10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters=40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 52920
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-44-27-931238848.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 78569
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-27-54-835694871.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-100-iters
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters=100 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 53312
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-49-47-737908358.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-100-iters
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters=100 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 54479
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-09-05-52-24-203653053.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-40
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 123446
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-17-24-21-783253335.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_01
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-7
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 123591
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-17-24-38-313852566.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-7
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 34326
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-19-50-26-595321624.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-7
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 171687
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-19-51-12-835446430.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-7-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 7 --adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 172810
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-19-53-04-872641562.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-attack-iters-7-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 --adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 173360
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-19-53-40-329671271.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_014
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-014-no-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 63686
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-21-09-46-773395157.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_014
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-014-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
--adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 20073
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-21-20-08-929333746.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_014
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-014-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 100 \
--adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 20619
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-21-20-44-493891179.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust_0-9_0-7
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.07 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 31204
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-16-11-56-427824622.txt
[1] 50935
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-22-13-32-685014413.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust_0-08_0-08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.08 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 52296
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-22-16-04-193327018.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust_0-1_0-09
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.1 \
--inner_noise 0.09 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 53931
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-22-18-15-051988181.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust_0-08_0-07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 54476
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-22-18-35-303368930.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_014
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-014-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
--adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 183636
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-25-44-048463992.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_014
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-014-no-adv-train-true
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 184099
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-26-39-189445535.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 185585
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-28-01-641968720.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_only
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 186864
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-29-13-520212968.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-013-adv-train
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 40 \
--adv_train \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 96554
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-31-58-108970222.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_013
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-robust-013-no-adv-train-true
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --attack_iters 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 96706
cc@p:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-32-19-562397893.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust_0-1_0-09
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.1 \
--inner_noise 0.09 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 71933
cc@nips:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-11-04-34-17-796533109.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar100
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust_0-1_0-09
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.1 \
--inner_noise 0.09 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 22472
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-10-22-37-30-279744125.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 114431
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-12-02-14-58-101825118.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 114820
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-12-02-15-51-126110622.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust-0.05-0.04
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.05 \
--inner_noise 0.04 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 125712
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-12-02-38-49-734586301.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_robust-0.05-0.04
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.05 \
--inner_noise 0.04 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 125856
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-12-02-39-14-445383221.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_only
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_iters 100 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 88151
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-15-57-04-273719851.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 88512
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-15-57-39-228843915.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust_0-14_0-10
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.14 \
--inner_noise 0.10 --adv_train \
--attack_iters 100 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[5] 90499
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-16-01-22-359838561.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_plain_robust_0-14_0-10
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.14 \
--inner_noise 0.10 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[6] 91406
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-16-03-10-170347657.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-train_only
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 86138
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-17-09-52-069740438.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 86237
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-17-10-11-386067412.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 87471
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-17-11-51-338507251.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 88092
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-25-17-12-25-568371784.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.05-0.04
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.05 \
--inner_noise 0.04 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 27527
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-26-21-31-01-483688075.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.04-0.03
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.04 \
--inner_noise 0.03 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 27661
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-26-21-31-58-096840788.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.03-0.02
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.03 \
--inner_noise 0.02 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 27777
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-26-21-32-38-278049122.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust_0-14_0-10
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.14 \
--inner_noise 0.10 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 8884
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-02-29-18-53-34-770940567.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train --attack_iters 40 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.04-0.03
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.04 \
--inner_noise 0.03 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 9462
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-01-21-31-27-485749832.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.04-0.04
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.04 \
--inner_noise 0.04 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 158873
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-32-16-704481602.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-without-adv-train_robust-0.03-0.03
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.03 \
--inner_noise 0.03 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 159247
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-33-06-302830089.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 --adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 160607
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-34-55-754752844.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train_robust-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 --adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 161556
cc@i-2:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-35-47-804009354.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train-0.09-0.08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.08 --adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 2952
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-37-37-294899008.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train-0.09-0.08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.08 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 192870
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-39-10-341006193.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 --adv_train \
--attack_iters 40 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 193327
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-40-29-748117820.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train-0.25-0.21
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.25 \
--inner_noise 0.21 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 195243
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-43-45-118528090.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train-0.3-0.2
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.3 \
--inner_noise 0.2 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[4] 195598
cc@i-1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-02-03-44-38-792221701.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 --adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 17097
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-03-17-52-43-686410941.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.03-0.02
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.03 \
--inner_noise 0.02 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 22511
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-03-11-59-03-207700069.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.02-0.01
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.02 \
--inner_noise 0.01 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 14028
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-04-11-23-31-285120306.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train-0.07-0.06
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.07 \
--inner_noise 0.06 --adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 34523
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-04-17-28-15-091439370.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 10730
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-04-14-42-18-135400803.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.09-0.08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.08 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 18056
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-05-19-53-35-772051178.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.09-0.08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.08 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 99000
cc@m-3:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-06-03-20-54-406689261.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.1-0.09
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.1 \
--inner_noise 0.09 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 101138
cc@m-3:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-06-03-29-25-885235788.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_strengths 0.031 \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 19729
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-18-22-11-23-453104281.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 13460
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-03-12-50-034552005.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 19729
ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-18-22-11-23-453104281.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 \
--attack_strengths 0.0 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 159439
cc@i:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-04-18-38-012564140.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=adv-train_robust-0.08-0.07
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.08 \
--inner_noise 0.07 \
--attack_strengths 0.031 \
--adv_train \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 160024
cc@i:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-04-20-12-960935239.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_strengths 0.031 \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[3] 161680
cc@i:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-04-22-24-078882321.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_weight
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=3 PYTHONPATH=../../../../../ nohup $PYTHON main.py --dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} --save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.09-0.08
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.09 \
--inner_noise 0.08 \
--attack_strengths 0.0 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 6776
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-16-04-22-388666599.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.1-0.09
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --init_noise 0.1 \
--inner_noise 0.09 \
--attack_strengths 0.0 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 31104
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-19-22-42-15-543433057.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no-adv-train_robust-0.2-0.1
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--init_noise 0.2 \
--inner_noise 0.1 \
--attack_strengths 0.0 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 43363
cc@rtx:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-03-20-01-24-37-190837110.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=no_adv_train_vanilla_pure
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--init_noise 0.0 \
--inner_noise 0.0 \
--attack_strengths 0.0 \
--attack_iters 0 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 95043
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-04-01-19-30-34-938456390.txt
PYTHON="/home/${USER}/anaconda3/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=vanilla_resnet20
dataset=svhn
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=2 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} --learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 --gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 --adv_train \
--attack_strengths 0.031 \
--attack_iters 7 >> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.2-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'laplace' \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 19042
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-04-30-21-58-55-598796642.txt
PYTHON='python'
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.2-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'laplace' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 20372
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-04-30-22-27-42-215337499.txt
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.2-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'laplace' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 48714
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-03-34-27-764514292.txt
mosh cc@129.114.109.80
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.2-0.1-gauss
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'gauss' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 6176 cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-03-46-30-811341935.txt
mosh cc@129.114.109.80
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train-0.2-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--noise_type 'laplace' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
--adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
cat train_2020-05-01-03-33-04-837271633.txt
cc@129.114.108.196
8809
cat train_2020-05-01-03-56-22-104907395.txt
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train-0.2-0.1-gauss
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--noise_type 'gauss' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
--adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
cc@129.114.108.196
[1] 9346
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-03-57-40-173017453.txt
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train-0.2-0.1-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--noise_type 'uniform' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
--adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 32490
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-04-00-11-980826924.txt
129.114.109.205
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.2-0.1-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 32601
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-04-00-31-640335413.txt
129.114.109.205
# PYTHON='python'
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train-0.2-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--noise_type 'laplace' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
--adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 4936
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-04-30-23-06-56-149173140.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-adv-train-0.15-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--noise_type 'laplace' \
--init_noise 0.15 \
--inner_noise 0.1 \
--limit_batch_number 0 \
--adv_train \
--attack_strengths 0.031 \
--attack_iters 7 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
PYTHON='python'
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.15-0.1-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'laplace' \
--init_noise 0.15 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 21470
cc@iclr:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-15-58-25-282431600.txt
# PYTHON='python'
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.17-0.09-laplace
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'laplace' \
--init_noise 0.17 \
--inner_noise 0.09 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 6357
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-01-11-00-45-221321170.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.25-0.15-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.25 \
--inner_noise 0.15 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 32210
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-03-10-43-46-487040710.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.20-0.16-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.20 \
--inner_noise 0.16 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 41438
cc@iclr:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-03-15-53-50-609959192.txt
# PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.3-0.15-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.3 \
--inner_noise 0.15 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 32583
cc@iclr:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-00-53-13-784249285.txt
129.114.108.241
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.3-0.2-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.3 \
--inner_noise 0.2 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 33184
cc@iclr:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-00-55-03-592820804.txt
129.114.108.241
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.25-0.20-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.25 \
--inner_noise 0.20 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 3808
(base) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-04-19-57-27-883190580.txt
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.3-0.25-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.3 \
--inner_noise 0.25 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 9549
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-01-04-40-695554020.txt
129.114.109.80
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.3-0.3-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.3 \
--inner_noise 0.3 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[2] 9906
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-01-05-37-887306655.txt
cc@129.114.109.80
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.35-0.3-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.35 \
--inner_noise 0.3 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
cc@129.114.109.80
[1] 47295
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-03-04-47-774546927.txt
PYTHON='python'
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.4-0.3-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=1 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.4 \
--inner_noise 0.3 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
cc@129.114.109.80
[2] 2863
cc@icml:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-05-03-17-37-095230986.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.4-0.4-uniform
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'uniform' \
--init_noise 0.4 \
--inner_noise 0.4 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 26048
(base) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-04-22-19-57-526786769.txt
# PYTHON='python'
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-0.22-0.0-gauss
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'gauss' \
--init_noise 0.22 \
--inner_noise 0.0 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 16860
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-10-15-33-59-432930458.txt
PYTHON="/home/${USER}/anaconda3/envs/abs/bin/python" # python environment
enable_tb_display=false # enable tensorboard display
model=noise_resnet20_robust_no_grad
dataset=cifar10
epochs=160
batch_size=128
optimizer=SGD
# add more labels as additional info into the saving path
label_info=train_layerwise_3e-4decay-no-adv-0.2-0.1-gauss-no-grad
#dataset path
data_path="/home/${USER}/code/bandlimited-cnns/cnns/nnlib/datasets"
timestamp=$(date +%Y-%m-%d-%H-%M-%S-%N)
CUDA_VISIBLE_DEVICES=0 PYTHONPATH=../../../../../ nohup $PYTHON main.py \
--dataset ${dataset} \
--data_path ${data_path} \
--arch ${model} \
--save_path ./save/${DATE}/${dataset}_${model}_${epochs}_${optimizer}_${label_info} \
--epochs ${epochs} \
--learning_rate 0.1 \
--optimizer ${optimizer} \
--schedule 80 120 \
--gammas 0.1 0.1 \
--batch_size ${batch_size} --workers 4 --ngpu 1 --gpu_id 0 \
--print_freq 100 --decay 0.0003 --momentum 0.9 \
--epoch_delay 5 \
--attack_strengths 0.0 \
--attack_iters 0 \
--noise_type 'gauss' \
--init_noise 0.2 \
--inner_noise 0.1 \
--limit_batch_number 0 \
>> train_${timestamp}.txt 2>&1 &
echo train_${timestamp}.txt
[1] 14739
(abs) ady@skr-compute1:~/code/bandlimited-cnns/cnns/nnlib/robustness/pni/code$ echo train_${timestamp}.txt
train_2020-05-20-22-09-40-446365138.txt
|
/**
 * Returns whether value {@code x} occurs anywhere in the binary tree rooted
 * at {@code root}.
 *
 * @param root root of the (sub)tree to search; may be null
 * @param x    value to look for
 * @return true if some node's {@code val} equals {@code x}
 */
public boolean search(Node root, int x)
{
    // Base case: fell off the tree without finding x.
    if (root == null)
        return false;
    if (root.val == x)
        return true;
    // Recur on the left subtree, then the right. Using || directly lets the
    // search short-circuit: the right subtree is not visited at all when the
    // left subtree already contains x (the original computed both results
    // unconditionally before combining them).
    return search(root.left, x) || search(root.right, x);
} |
<filename>src/PGTA/include/PGTA/akPGTAContext.inl
#ifndef AK_PGTA_CPP_H
#error "donut include pls"
#endif
namespace PGTA
{
// Thin C++ convenience wrapper around the PGTA C API: every method simply
// forwards to the corresponding pgta* free function with the stored handle.

// Wraps an existing context handle. Ownership is not taken here — the
// destructor below does not release the handle, so presumably the handle's
// lifetime is managed elsewhere (TODO confirm against the C API docs).
PGTAContext::PGTAContext(HPGTAContext context):
m_pgtaContext(context)
{
}
// Copying shares the same underlying raw handle (shallow copy).
PGTAContext::PGTAContext(const PGTAContext& other):
m_pgtaContext(other.m_pgtaContext)
{
}
// Intentionally empty: the wrapped handle is NOT destroyed, consistent with
// the shallow-copy semantics above.
PGTAContext::~PGTAContext()
{
}
// Advances the context by deltaSeconds and returns the buffer produced by
// pgtaUpdate.
PGTABuffer PGTAContext::Update(const float deltaSeconds)
{
return pgtaUpdate(m_pgtaContext, deltaSeconds);
}
// Returns the context's current output buffer without advancing time.
PGTABuffer PGTAContext::GetOutputBuffer()
{
return pgtaGetOutputBuffer(m_pgtaContext);
}
// Binds the given track to this context.
void PGTAContext::BindTrack(HPGTATrack track)
{
pgtaBindTrack(m_pgtaContext, track);
}
// Requests a transition toward the given track; percentAmount and
// durationSeconds are passed straight through to pgtaTransition.
void PGTAContext::Transition(HPGTATrack track, const float percentAmount, const float durationSeconds)
{
pgtaTransition(m_pgtaContext, track, percentAmount, durationSeconds);
}
}
|
#!/bin/bash
# Collects report data and generates a downloadable report package.
#
# Usage: <script> <projectId> <reportId> <authToken> <reportOptions>
projectId=$1
reportId=$2
authToken=$3
reportOptions=$4
###############################################################################
# Call the script to collect the data and generate the report
# This script will create a zip file containing the viewable file
# combined with another zip file that contains all report artifacts for
# download. Since this is executed from the tomcat/bin directory we need to
# use REPORTDIR to get the location of this shell script since the script is
# relative to that.
###############################################################################
REPORTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Quote every expansion: the auth token, ids, and the script path may contain
# spaces or shell metacharacters; unquoted they would undergo word splitting
# and globbing (only $reportOptions was quoted in the original).
python3 "${REPORTDIR}/create_report.py" -pid "$projectId" -rid "$reportId" -authToken "$authToken" -reportOpts "$reportOptions" |
<reponame>n-paukov/swengine<filename>sources/Game/Core/GameApplication.cpp
#include "GameApplication.h"
#include <spdlog/spdlog.h>
#include <glm/gtx/string_cast.hpp>
#include <Engine/Exceptions/EngineRuntimeException.h>
#include <Engine/Modules/Graphics/Resources/SkeletonResourceManager.h>
#include <Engine/Utility/files.h>
#include "Game/Screens/GameScreen.h"
#include "Game/Screens/MainMenuScreen.h"
#include "Game/Screens/MainMenuSettingsScreen.h"
#include "Game/Inventory/InventoryUI.h"
#include "Game/Dynamic/DialoguesUI.h"
#include "Game/Saving/SavingSystem.h"
#include "Game/Game.h"
// Forwards the command-line arguments to the engine base class, passing the
// literal "Game" as the application name.
GameApplication::GameApplication(int argc, char* argv[])
: BaseGameApplication(argc, argv, "Game")
{
}
// Empty out-of-line destructor; all members clean up via their own
// destructors (see unload() for explicit teardown ordering).
GameApplication::~GameApplication()
{
}
// Intentionally empty: this subclass draws nothing itself — rendering is
// presumably driven by the engine's rendering systems pipeline (see
// receiveEvent, which toggles m_renderingSystemsPipeline).
void GameApplication::render()
{
}
// One-time application setup: loads resource maps, registers component
// binders and XML component loaders, creates the screens, configures the GUI
// stylesheet and the deferred-accumulation shader pipeline, wires up saving,
// and finally switches to the main menu screen.
void GameApplication::load()
{
// Load resource declarations for the engine and for the game itself.
auto resourceMgr = m_resourceManagementModule->getResourceManager();
resourceMgr->loadResourcesMapFile("../resources/resources.xml");
resourceMgr->loadResourcesMapFile("../resources/game/resources.xml");
// Register binder factories for the game-specific components. Some binders
// need extra context injected (the game world, the resource manager).
m_gameWorld->registerComponentBinderFactory<ActorComponent>(
std::make_shared<GameObjectsComponentsGenericBindersFactory<ActorComponent, ActorComponentBinder>>());
m_gameWorld->registerComponentBinderFactory<InventoryComponent>(
std::make_shared<GameObjectsComponentsGenericBindersFactory<InventoryComponent, InventoryComponentBinder,
GameObjectsComponentsBinderInjectParameters::GameWorld>>(m_gameWorld));
m_gameWorld->registerComponentBinderFactory<InventoryItemComponent>(
std::make_shared<GameObjectsComponentsGenericBindersFactory<InventoryItemComponent, InventoryItemComponentBinder,
GameObjectsComponentsBinderInjectParameters::ResourcesManager>>(resourceMgr));
m_gameWorld->registerComponentBinderFactory<PlayerComponent>(
std::make_shared<GameObjectsComponentsGenericBindersFactory<PlayerComponent, PlayerComponentBinder>>());
// Register XML loaders so level files can declare these components by the
// string names below; each loader delegates to m_componentsLoader.
m_componentsLoader = std::make_unique<GameComponentsLoader>(m_gameWorld, resourceMgr);
m_levelsManager->getObjectsLoader().registerGenericComponentLoader("player",
[this](const pugi::xml_node& data) {
return m_componentsLoader->loadPlayerData(data);
});
m_levelsManager->getObjectsLoader().registerGenericComponentLoader("inventory_item",
[this](const pugi::xml_node& data) {
return m_componentsLoader->loadInventoryItemData(data);
});
m_levelsManager->getObjectsLoader().registerGenericComponentLoader("inventory",
[this](const pugi::xml_node& data) {
return m_componentsLoader->loadInventoryData(data);
});
m_levelsManager->getObjectsLoader().registerGenericComponentLoader("interactive",
[this](const pugi::xml_node& data) {
return m_componentsLoader->loadInteractiveData(data);
});
m_levelsManager->getObjectsLoader().registerGenericComponentLoader("actor",
[this](const pugi::xml_node& data) {
return m_componentsLoader->loadActorData(data);
});
// Create and register the application screens (game, main menu, settings).
auto gameScreen = std::make_shared<GameScreen>(m_inputModule,
getGameApplicationSystemsGroup(),
m_levelsManager,
m_graphicsScene,
m_guiSystem);
m_screenManager->registerScreen(gameScreen);
auto mainMenuGUILayout = m_guiSystem->loadScheme(
FileUtils::getGUISchemePath("screen_main_menu"));
m_screenManager->registerScreen(std::make_shared<MainMenuScreen>(
m_inputModule,
mainMenuGUILayout,
m_gameConsole));
auto mainMenuSettingsGUILayout = m_guiSystem->loadScheme(
FileUtils::getGUISchemePath("screen_main_menu_settings"));
m_screenManager->registerScreen(std::make_shared<MainMenuSettingsScreen>(mainMenuSettingsGUILayout));
// Apply the shared stylesheet to the GUI layout common to all screens.
GUIWidgetStylesheet commonStylesheet = m_guiSystem->loadStylesheet(
FileUtils::getGUISchemePath("common.stylesheet"));
m_screenManager->getCommonGUILayout()->applyStylesheet(commonStylesheet);
// Build the shader pipeline for the deferred accumulation pass (no geometry
// shader — hence the empty optional third argument).
std::shared_ptr deferredAccumulationPipeline = std::make_shared<GLShadersPipeline>(
resourceMgr->getResource<GLShader>("deferred_accum_pass_vertex_shader"),
resourceMgr->getResource<GLShader>("deferred_accum_pass_fragment_shader"),
std::optional<ResourceHandle<GLShader>>());
m_graphicsModule->getGraphicsContext()->setupDeferredAccumulationMaterial(deferredAccumulationPipeline);
// Hook up the saving system, subscribe to screen-switch events (handled in
// receiveEvent), and start on the main menu.
m_engineGameSystems->addGameSystem(std::make_shared<SavingSystem>(m_levelsManager,
gameScreen->getGame()));
m_gameWorld->subscribeEventsListener<ScreenSwitchEvent>(this);
m_screenManager->changeScreen(BaseGameScreen::getScreenName(GameScreenType::MainMenu));
}
// Teardown counterpart of load(): destroy the components loader (which was
// constructed with references to the game world and resource manager) before
// unsubscribing from screen-switch events.
void GameApplication::unload()
{
m_componentsLoader.reset();
m_gameWorld->unsubscribeEventsListener<ScreenSwitchEvent>(this);
}
// Toggles the gameplay-related systems depending on which screen becomes
// active: everything on while the "Game" screen is shown, everything off when
// leaving it. The isActive() guard avoids redundantly deactivating systems on
// switches between non-game screens.
EventProcessStatus GameApplication::receiveEvent(const ScreenSwitchEvent& event)
{
  const bool enteringGame = event.newScreen->getName() == "Game";

  if (enteringGame) {
    m_engineGameSystems->getGameSystem<SkeletalAnimationSystem>()->setActive(true);
    m_engineGameSystems->getGameSystem<PhysicsSystem>()->setActive(true);
    m_renderingSystemsPipeline->setActive(true);
    m_gameApplicationSystems->setActive(true);
  }
  else if (m_renderingSystemsPipeline->isActive()) {
    m_engineGameSystems->getGameSystem<SkeletalAnimationSystem>()->setActive(false);
    m_engineGameSystems->getGameSystem<PhysicsSystem>()->setActive(false);
    m_renderingSystemsPipeline->setActive(false);
    m_gameApplicationSystems->setActive(false);
  }

  return EventProcessStatus::Processed;
}
|
<gh_stars>1-10
// Namespace object for the Magic: The Gathering helpers below.
var mtg = {};
// Queries the magicthegathering.io card API for cards matching `name`.
//   cb   — invoked with the array of card objects on success.
//   fail — invoked with the jQuery error object on failure.
mtg.search = function (name, cb, fail) {
$.ajax({
"url": "https://api.magicthegathering.io/v1/cards",
"data": {
name: name
}
}).done(function (data) {
console.log(data.cards);
cb(data.cards);
}).fail(function (err) {
fail(err);
});
};
// Maps a counter's absolute value (1–6) to its die icon CSS class.
// Logs an error and returns undefined for any other value, exactly like the
// original switch-based implementation.
mtg.get_counter_class = function (counter) {
    var dieClasses = {
        1: "fi-die-one",
        2: "fi-die-two",
        3: "fi-die-three",
        4: "fi-die-four",
        5: "fi-die-five",
        6: "fi-die-six"
    };
    var magnitude = Math.abs(counter.val);
    if (dieClasses.hasOwnProperty(magnitude)) {
        return dieClasses[magnitude];
    }
    console.error('Dont know which class for ', counter.val);
};
// Renders a card's counters as a concatenated string of <li> elements, using
// the "pos"/"neg" class for positive/negative counters. Returns "" when the
// card has no counters.
mtg.get_counters = function (a_card) {
    if (!a_card.counters) {
        return "";
    }
    return a_card.counters.map(function (counter) {
        var dieClass = mtg.get_counter_class(counter);
        var polarity = counter.is_pos ? "pos" : "neg";
        return "<li><i class='counter " + polarity + " " + dieClass + "'></i></li>";
    }).join("");
};
// Builds the <li>-wrapped jQuery element for one card: counters list plus the
// card image, with the "card-tapped" class added for tapped cards.
mtg.get_card = function (a_card) {
    var markup = [
        "<a class='card'>",
        "<ul class='counters'>", mtg.get_counters(a_card), "</ul>",
        "<img src='", a_card.imageUrl, "'/>",
        "</a>"
    ].join("");
    var the_card = $(markup);
    if (a_card.is_tapped) {
        the_card.addClass("card-tapped");
    }
    return $("<li></li>").append(the_card);
};
// Renders a list of cards as a <ul class='hand'> wrapped in the
// playingCards container div.
mtg.make_hand = function (cards) {
    var hand = $("<ul class='hand'>");
    cards.forEach(function (card) {
        hand.append(mtg.get_card(card));
    });
    return hand.wrap("<div class='playingCards'>");
};
|
// Toolbar icon sets keyed by pixel size. updateIcon (defined later in this
// file) switches between them based on the active tab's URL.
const icons_disabled = {
"16": "/assets/icons-disabled/16.png",
"19": "/assets/icons-disabled/19.png",
"32": "/assets/icons-disabled/32.png",
"64": "/assets/icons-disabled/64.png",
"128": "/assets/icons-disabled/128.png"
}
const icons_enabled = {
"16": "/assets/icons/16.png",
"19": "/assets/icons/19.png",
"32": "/assets/icons/32.png",
"64": "/assets/icons/64.png",
"128": "/assets/icons/128.png"
}
// Answers "isDev" messages with a boolean: true when the extension was loaded
// unpacked (installType "development"). The response is sent asynchronously
// from the getSelf callback, which is why the listener must return true.
chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
if(request=="isDev") {
chrome.management.getSelf(self => {
sendResponse(self.installType=="development");
});
}
return true; // VERY IMPORTANT TO RETURN TRUE HERE. Because of asynchronous sendResponse.
});
/**
* Fires when the active tab in a window changes.
* Note that the tab's URL may not be set at the time this event fired,
* but you can listen to onUpdated events so as to be notified when a URL is
* https://developer.chrome.com/docs/extensions/reference/tabs/#event-onActivated
*/
chrome.tabs.onActivated.addListener(function(activeInfo) {
  // Fix: chrome.tabs.query passes an ARRAY of matching tabs to its callback.
  // The original callback named the parameter `tab` and read `tab.url`
  // directly on the array, which is always undefined, so the icon was never
  // updated from this event.
  chrome.tabs.query({active: true}, function(tabs) {
    if (tabs && tabs.length > 0) {
      updateIcon(tabs[0].url);
    }
  });
});
/**
 * Fired when a tab is updated.
 * https://developer.chrome.com/docs/extensions/reference/tabs/#event-onUpdated
 * We need to tell the tab that the URL has changed because the content script doesn't always automatically reload.
 */
chrome.tabs.onUpdated.addListener(function(tabId, changeInfo, tab) {
// changeInfo.url is only present when the tab's URL actually changed, so
// this block is skipped for other kinds of updates (loading state, title...).
if (changeInfo.url) {
chrome.tabs.sendMessage(tabId, "url_changed");
updateIcon(changeInfo.url);
}
});
/**
 * Change the popup icon based on the current URL
 * @param {string} url
 */
function updateIcon(url) {
if(!url) return;
// Enabled icons when the URL looks like an Amazon search page, disabled
// otherwise.
// NOTE(review): the dots in /amazon.*\/s/ are regex wildcards, not literal
// dots, so this matches any URL containing "amazon" anywhere before a "/s"
// segment (e.g. non-Amazon hosts too). Confirm whether a stricter, anchored
// pattern with an escaped dot is intended.
const icons = url.match(/amazon.*\/s/)
? icons_enabled
: icons_disabled;
chrome.browserAction.setIcon({ path : icons });
} |
<reponame>huangjianqin/bigdata
package org.kin.distributelock;
import io.lettuce.core.RedisClient;
import io.lettuce.core.RedisURI;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.sync.RedisCommands;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Condition;
/**
* 支持阻塞锁和超时锁+进程故障会自动释放锁(利用超时实现)
* Created by 健勤 on 2017/5/23.
*/
public class RedisDistributeLock implements DistributeLock {
private static final Logger log = LoggerFactory.getLogger(RedisDistributeLock.class);
// Interval between successive lock-acquisition attempts, in milliseconds.
private static final long LOCK_REQUEST_DURATION = 50;
// Expiry value used for blocking locks — large enough to effectively never
// time out.
private static final long MAX_TIMEOUT = Integer.MAX_VALUE;
// Name of the requested lock; used as the redis key.
private final String lockName;
// Host and port of the redis server.
private final String host;
private final int port;
// redis client and its connection, created in init() and released in destroy().
private RedisClient redisClient;
private StatefulRedisConnection<String, String> connection;
// Id of the thread that most recently acquired the lock (the original
// comment described this as "whether the lock is held").
private volatile long lockedThreadId;
public RedisDistributeLock(String host, String lockName) {
this(host, 6379, lockName);
}
public RedisDistributeLock(String host, int port, String lockName) {
this.host = host;
this.port = port;
this.lockName = lockName;
}
/**
* 初始化锁
* 初始化redis客户端
*/
@Override
public void init() {
RedisURI redisUri = RedisURI.builder()
.withHost(host)
.withPort(port)
.withTimeout(Duration.of(10, ChronoUnit.SECONDS))
.build();
redisClient = RedisClient.create(redisUri);
connection = redisClient.connect();
}
/**
* 销毁锁
* 关闭redis客户端
*/
@Override
public void destroy() {
connection.close();
redisClient.shutdown();
}
/**
* 获得锁的封装方法
* 阻塞式和超时锁都基于该方法获得锁
*/
private Boolean requestLock(boolean isBlock, Long time, TimeUnit unit) {
long start = System.currentTimeMillis();
//阻塞时一直尝试获得锁
//超时锁时判断获得锁的过程是否超时
while ((isBlock || System.currentTimeMillis() - start < unit.toMillis(time))) {
Thread currentThread = Thread.currentThread();
RedisCommands<String, String> redisCommands = connection.sync();
long now = System.currentTimeMillis();
//如果没有进程获得锁,redis上并没有这个key,setnx就会返回1,当前进程就可以获得锁
if (redisCommands.setnx(lockName, now + "," + (isBlock ? MAX_TIMEOUT : unit.toMillis(time)))) {
log.debug(currentThread.getName() + "命中锁");
lockedThreadId = currentThread.getId();
return true;
}
//否则,判断持有锁的进程是否超时,若是超时,则抢占锁
else {
String v = redisCommands.get(lockName);
if (v != null) {
long expireTime = Long.parseLong(v.split(",")[1]);
long lockedTime = Long.parseLong(v.split(",")[0]);
/**
* 该锁超时,尝试抢占
* 对于阻塞式锁,超时时间 = MAX_TIMEOUT,一般值比较大,很难会超时
*
* 这里会存在一个问题,假设两个进程同时判断锁超时
* 一个进程先获得锁,那么另外一个进程就会通过下面逻辑尝试获得锁
* 但是该进程执行了getset命令,该锁的值已经不是获得锁的进程设置的值
* 如果获得锁的进程正常释放,问题并不大,但是如果该进程挂了
* 那么锁真正的超时时间就长了
*/
if (now - lockedTime > expireTime) {
//尝试抢占,并校验锁有没被其他进程抢占了,也就是key对应的value改变了
String ov = redisCommands.getset(lockName, now + "," + (isBlock ? MAX_TIMEOUT : unit.toMillis(time)));
if (ov != null && ov.equals(v)) {
//设置成功, 并返回原来的value, 抢占成功
log.debug(currentThread.getName() + "命中锁");
lockedThreadId = currentThread.getId();
return true;
}
}
}
}
//睡眠一会再重试
try {
Thread.sleep(LOCK_REQUEST_DURATION);
} catch (InterruptedException e) {
}
}
return false;
}
/**
* 阻塞式获得锁
*/
@Override
public void lock() {
//阻塞
requestLock(true, null, null);
}
/**
* 可中断阻塞获得锁
*/
@Override
public void lockInterruptibly() throws InterruptedException {
//没经严格测试
if (Thread.interrupted()) {
throw new InterruptedException();
}
lock();
}
/**
* 尝试一次去获得锁,并返回结果
*/
@Override
public boolean tryLock() {
long now = System.currentTimeMillis();
RedisCommands<String, String> redisCommands = connection.sync();
return redisCommands.setnx(lockName, now + "," + MAX_TIMEOUT);
}
/**
* 超时尝试获得锁,超时后返回是否获得锁
*/
@Override
public boolean tryLock(long time, TimeUnit unit) {
return requestLock(false, time, unit);
}
/**
* 释放锁
* 也就是删除lockName的key
*/
@Override
public void unlock() {
Thread currentThread = Thread.currentThread();
if (currentThread.getId() == lockedThreadId) {
log.debug(currentThread.getName() + "释放锁");
/**
* 先释放锁, 再删除key, 不然会存在以下情况:
* A进程: 删除key, 但仍没有释放锁
* B进程: 获取了分布式锁并设置锁
* A进程: 释放锁状态
* 待B进程要释放锁时, 却无法执行删除key的逻辑, 导致死锁
*/
lockedThreadId = 0;
RedisCommands<String, String> redisCommands = connection.sync();
redisCommands.del(lockName);
}
}
/**
* 不支持
*/
@Override
public Condition newCondition() {
throw new UnsupportedOperationException("DistributedLock Base on Redis don't support now");
}
}
|
#!/bin/bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/node.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./node-debian.sh [directory to install nvm] [node version to install (use "none" to skip)] [non-root user] [Update rc files flag]
# Positional arguments (see header): install dir, node version, user, rc flag.
export NVM_DIR=${1:-"/usr/local/share/nvm"}
export NODE_VERSION=${2:-"lts/*"}
USERNAME=${3:-"automatic"}
UPDATE_RC=${4:-"true"}
set -e
if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# Determine the appropriate non-root user: first existing user among the
# well-known names plus whichever account has UID 1000; fall back to root.
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
            USERNAME=${CURRENT_USER}
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=root
    fi
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
    USERNAME=root
fi
# "none" means: install nvm but skip installing any node version.
if [ "${NODE_VERSION}" = "none" ]; then
    export NODE_VERSION=
fi
# Append a snippet to the system-wide bash (and zsh, when present) rc files.
function updaterc() {
    if [ "${UPDATE_RC}" = "true" ]; then
        echo "Updating /etc/bash.bashrc and /etc/zsh/zshrc..."
        echo -e "$1" >> /etc/bash.bashrc
        if [ -f "/etc/zsh/zshrc" ]; then
            echo -e "$1" >> /etc/zsh/zshrc
        fi
    fi
}
# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Install curl, apt-transport-https, tar, or gpg if missing
if ! dpkg -s apt-transport-https curl ca-certificates tar > /dev/null 2>&1 || ! type gpg > /dev/null 2>&1; then
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        apt-get update
    fi
    apt-get -y install --no-install-recommends apt-transport-https curl ca-certificates tar gnupg2
fi
# Install yarn
if type yarn > /dev/null 2>&1; then
    echo "Yarn already installed."
else
    # The subshell keeps apt-key's stderr quiet unless it actually fails.
    curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | (OUT=$(apt-key add - 2>&1) || echo $OUT)
    echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
    apt-get update
    apt-get -y install --no-install-recommends yarn
fi
# Install the specified node version if NVM directory already exists, then exit
if [ -d "${NVM_DIR}" ]; then
    echo "NVM already installed."
    if [ "${NODE_VERSION}" != "" ]; then
        su ${USERNAME} -c ". $NVM_DIR/nvm.sh && nvm install ${NODE_VERSION} && nvm clear-cache"
    fi
    exit 0
fi
# Create nvm group, nvm dir, and set sticky bit
if ! cat /etc/group | grep -e "^nvm:" > /dev/null 2>&1; then
    groupadd -r nvm
fi
umask 0002
usermod -a -G nvm ${USERNAME}
mkdir -p ${NVM_DIR}
chown :nvm ${NVM_DIR}
chmod g+s ${NVM_DIR}
# Run the nvm installer as the target user; PROFILE=/dev/null suppresses the
# installer's own rc-file edits (we append our own snippet below).
su ${USERNAME} -c "$(cat << EOF
    set -e
    umask 0002
    # Do not update profile - we'll do this manually
    export PROFILE=/dev/null
    curl -so- https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash
    source ${NVM_DIR}/nvm.sh
    if [ "${NODE_VERSION}" != "" ]; then
        nvm alias default ${NODE_VERSION}
    fi
    nvm clear-cache
EOF
)" 2>&1
# Update rc files
if [ "${UPDATE_RC}" = "true" ]; then
    updaterc "$(cat <<EOF
export NVM_DIR="${NVM_DIR}"
[ -s "\$NVM_DIR/nvm.sh" ] && . "\$NVM_DIR/nvm.sh"
[ -s "\$NVM_DIR/bash_completion" ] && . "\$NVM_DIR/bash_completion"
EOF
)"
fi
echo "Done!"
|
#!/bin/bash
# LICENSE UPL 1.0
#
# Copyright (c) 1982-2019 Oracle and/or its affiliates. All rights reserved.
#
# Since: January, 2019
# Author: paramdeep.saini@oracle.com
# Description: Cleanup the $GRID_HOME and ORACLE_BASE after Grid confguration in the image
#
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
#
# Image Cleanup Script
source /home/grid/.bashrc

# Abort if GRID_HOME did not get set by the sourced profile: every path below
# is anchored at $GRID_HOME, and an empty value would expand to destructive
# root-level deletes such as 'rm -rf /log'.
if [ -z "$GRID_HOME" ]; then
    echo "GRID_HOME is not set; refusing to run image cleanup." >&2
    exit 1
fi

rm -rf /u01/app/grid/*
rm -rf "$GRID_HOME"/log
rm -rf "$GRID_HOME"/logs
rm -rf "$GRID_HOME"/crs/init
rm -rf "$GRID_HOME"/crs/install/rhpdata
rm -rf "$GRID_HOME"/crs/log
rm -rf "$GRID_HOME"/racg/dump
rm -rf "$GRID_HOME"/srvm/log
rm -rf "$GRID_HOME"/cv/log
rm -rf "$GRID_HOME"/cdata
rm -rf "$GRID_HOME"/bin/core*
rm -rf "$GRID_HOME"/bin/diagsnap.pl
rm -rf "$GRID_HOME"/cfgtoollogs/*
rm -rf "$GRID_HOME"/network/admin/listener.ora
rm -rf "$GRID_HOME"/crf
rm -rf "$GRID_HOME"/ologgerd/init
rm -rf "$GRID_HOME"/osysmond/init
rm -rf "$GRID_HOME"/ohasd/init
rm -rf "$GRID_HOME"/ctss/init
rm -rf "$GRID_HOME"/dbs/.*.dat
rm -rf "$GRID_HOME"/oc4j/j2ee/home/log
rm -rf "$GRID_HOME"/inventory/Scripts/ext/bin/log
rm -rf "$GRID_HOME"/inventory/backup/*
rm -rf "$GRID_HOME"/mdns/init
rm -rf "$GRID_HOME"/gnsd/init
rm -rf "$GRID_HOME"/evm/init
rm -rf "$GRID_HOME"/gipc/init
rm -rf "$GRID_HOME"/gpnp/gpnp_bcp.*
rm -rf "$GRID_HOME"/gpnp/init
rm -rf "$GRID_HOME"/auth
rm -rf "$GRID_HOME"/tfa
rm -rf "$GRID_HOME"/suptools/tfa/release/diag
rm -rf "$GRID_HOME"/rdbms/audit/*
rm -rf "$GRID_HOME"/rdbms/log/*
rm -rf "$GRID_HOME"/network/log/*
rm -rf "$GRID_HOME"/inventory/Scripts/comps.xml.*
rm -rf "$GRID_HOME"/inventory/Scripts/oraclehomeproperties.xml.*
rm -rf "$GRID_HOME"/inventory/Scripts/oraInst.loc.*
rm -rf "$GRID_HOME"/inventory/Scripts/inventory.xml.*
rm -rf "$GRID_HOME"/log_file_client.log
|
#!/usr/bin/env bash
readonly BASEDIR=$(readlink -f $(dirname $0))/../../../
PRIORITY=normal

# Initialize voltdb and relaunch it with the SMDK cxlmalloc allocator
# preloaded, using the exmem/normal priority selected via the CLI flags.
function run_app(){
    cd $BASEDIR/src/app/voltdb/voltdb_src/bin
    ./voltdb init
    # Run 'voltdb init' without the allocator, then preload it for 'start'.
    unset LD_PRELOAD
    CXLMALLOC=$BASEDIR/lib/smdk_allocator/lib/libcxlmalloc.so
    export LD_PRELOAD=$CXLMALLOC
    CXLMALLOC_CONF=use_exmem:true,exmem_zone_size:16384,normal_zone_size:16384,maxmemory_policy:remain
    if [ "$PRIORITY" == 'exmem' ]; then
        CXLMALLOC_CONF+=,priority:exmem,:
    elif [ "$PRIORITY" == 'normal' ]; then
        CXLMALLOC_CONF+=,priority:normal,:
    fi
    export CXLMALLOC_CONF
    echo $CXLMALLOC_CONF
    ./voltdb start
}

# -e/-n select the allocator priority; -a launches the app. Flag order
# matters: the priority flag must precede -a.
while getopts ":ena" opt; do
    case "$opt" in
        e)
            PRIORITY='exmem'
            ;;
        n)
            PRIORITY='normal'
            ;;
        a)
            run_app
            ;;
        # '?' is set for an unrecognized option and ':' for a missing option
        # argument; the original handled only ':' (which can never occur for
        # ":ena") and silently ignored bad flags. Show usage for both.
        \?|:)
            echo "Usage: $0 [-e | -n] -a"
            ;;
    esac
done
#!/bin/bash
# The original shebang was malformed ('#/bin/bash/': missing '!' and a
# trailing slash), so the kernel could not use it as an interpreter line.
# To use particles2grid, it has to be compiled by running:
python setup.py build_ext
# Check first that the file name particle2grid is correct in setup.py.
package bootcamp.mercado.config.exception;
import org.springframework.context.MessageSource;
import org.springframework.validation.FieldError;
import java.util.List;
import java.util.stream.Collectors;
/**
 * Response payload wrapping a list of validation field errors resolved
 * against a {@link MessageSource} for localization.
 */
public class FieldErrorListResponse {

    // Converted errors; populated once in the constructor.
    List<FieldErrorResponse> errors;

    /**
     * Converts each Spring {@link FieldError} into a {@link FieldErrorResponse}
     * using the given message source.
     *
     * @param errors        raw binding errors from validation
     * @param messageSource source used to resolve localized messages
     */
    public FieldErrorListResponse(List<FieldError> errors,
                                  MessageSource messageSource) {
        // Expression lambda instead of a block lambda with an explicit return.
        this.errors = errors.stream()
                .map(i -> new FieldErrorResponse(i, messageSource))
                .collect(Collectors.toList());
    }

    /** @return the converted field errors */
    public List<FieldErrorResponse> getErrors() {
        return errors;
    }
}
|
import sys
from pyspark import SparkConf
from collections import namedtuple
from pyspark.sql import SparkSession#, SparkContext
from lib.logger import Log4j
# create schema. Can also define a class and use it to define the schema. However name tuple is more convenient
SurveyRecord = namedtuple("SurveyRecord", ["Age", "Gender", "Country", "State"])
if __name__ == "__main__":
    # Local 3-thread master; suitable for running the demo without a cluster.
    conf = SparkConf()\
        .setMaster('local[3]')\
        .setAppName('MyFirstRDD')
    #spark context to create the RDD, without SparkSession
    #context = SparkContext(conf=conf)
    # better use SparkContext, which is a higher level object, from which we
    spark = SparkSession.builder.config(conf=conf).getOrCreate()
    context = spark.sparkContext
    # set up logger
    logger = Log4j(spark)
    # check cmd argument: exactly one argument, the input file path.
    if len(sys.argv) != 2:
        logger.error("Usage: my_RDD <filename>")
        sys.exit(-1)
    #create RDD
    linesRDD = context.textFile(sys.argv[1])
    # RDD basic transformation
    partitionedRDD = linesRDD.repartition(2)
    # Strip quotes and split on commas.
    # NOTE(review): naive CSV parsing -- quoted fields containing commas would
    # break; also assumes the file has no header row (int(cols[1]) would raise
    # on a header). Confirm against the actual survey file.
    colsRDD = partitionedRDD.map(lambda line: line.replace('"','').split(",")) # separate col from each line and map to a list
    # Columns 1-4 are assumed to be Age, Gender, Country, State -- TODO confirm.
    selectRDD = colsRDD.map(lambda cols: SurveyRecord(int(cols[1]), cols[2], cols[3],cols[4]))
    filteredRDD = selectRDD.filter(lambda r: r.Age<40)
    # Count respondents under 40 per country.
    KeyValueRDD = filteredRDD.map(lambda r: (r.Country, 1)) #map
    countRDD = KeyValueRDD.reduceByKey(lambda v1, v2: v1 +v2) #reduce
    colsList = countRDD.collect()
    for x in colsList:
        logger.info(x)
module.exports = async (d) => {
const data = d.util.aoiFunc(d);
const [shardId = 0] = data.inside.splits;
if (isNaN(shardId))
return d.aoiError.fnError(
d,
"custom",
{ inside: data.inside },
"Invalid ShardId Provided In",
);
data.result = await d.client.shard.broadcastEval((c) => c.ws.ping, {
shard: Number(shardId),
});
return {
code: d.util.setCode(data),
};
}; |
#!/bin/sh
###############################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# If unspecified, the hostname of the container is taken as the JobManager address
JOB_MANAGER_RPC_ADDRESS=${JOB_MANAGER_RPC_ADDRESS:-$(hostname -f)}
CONF_FILE="${FLINK_HOME}/conf/flink-conf.yaml"

# Emit the command prefix that drops root privileges to the 'flink' user
# (su-exec on Alpine, gosu elsewhere); empty when already non-root.
drop_privs_cmd() {
    if [ $(id -u) != 0 ]; then
        # Don't need to drop privs if EUID != 0
        return
    elif [ -x /sbin/su-exec ]; then
        # Alpine
        echo su-exec flink
    else
        # Others
        echo gosu flink
    fi
}

# Each config key below is either rewritten in place (when a line for it
# already exists in flink-conf.yaml) or appended.
if [ "$1" = "help" ]; then
    echo "Usage: $(basename "$0") (jobmanager|taskmanager|help)"
    exit 0
elif [ "$1" = "jobmanager" ]; then
    shift 1
    echo "Starting Job Manager"
    if grep -E "^jobmanager\.rpc\.address:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/jobmanager\.rpc\.address:.*/jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}/g" "${CONF_FILE}"
    else
        echo "jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}" >> "${CONF_FILE}"
    fi
    if grep -E "^blob\.server\.port:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/blob\.server\.port:.*/blob.server.port: 6124/g" "${CONF_FILE}"
    else
        echo "blob.server.port: 6124" >> "${CONF_FILE}"
    fi
    if grep -E "^query\.server\.port:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/query\.server\.port:.*/query.server.port: 6125/g" "${CONF_FILE}"
    else
        echo "query.server.port: 6125" >> "${CONF_FILE}"
    fi
    # Extra properties supplied via the environment are appended verbatim
    # (later YAML entries win), then env vars in the file are expanded.
    if [ -n "${FLINK_PROPERTIES}" ]; then
        echo "${FLINK_PROPERTIES}" >> "${CONF_FILE}"
    fi
    envsubst < "${CONF_FILE}" > "${CONF_FILE}.tmp" && mv "${CONF_FILE}.tmp" "${CONF_FILE}"
    echo "config file: " && grep '^[^\n#]' "${CONF_FILE}"
    exec $(drop_privs_cmd) "$FLINK_HOME/bin/jobmanager.sh" start-foreground "$@"
elif [ "$1" = "taskmanager" ]; then
    shift 1
    echo "Starting Task Manager"
    # Default the slot count to the number of CPUs.
    TASK_MANAGER_NUMBER_OF_TASK_SLOTS=${TASK_MANAGER_NUMBER_OF_TASK_SLOTS:-$(grep -c ^processor /proc/cpuinfo)}
    if grep -E "^jobmanager\.rpc\.address:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/jobmanager\.rpc\.address:.*/jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}/g" "${CONF_FILE}"
    else
        echo "jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}" >> "${CONF_FILE}"
    fi
    if grep -E "^taskmanager\.numberOfTaskSlots:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/taskmanager\.numberOfTaskSlots:.*/taskmanager.numberOfTaskSlots: ${TASK_MANAGER_NUMBER_OF_TASK_SLOTS}/g" "${CONF_FILE}"
    else
        echo "taskmanager.numberOfTaskSlots: ${TASK_MANAGER_NUMBER_OF_TASK_SLOTS}" >> "${CONF_FILE}"
    fi
    if grep -E "^blob\.server\.port:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/blob\.server\.port:.*/blob.server.port: 6124/g" "${CONF_FILE}"
    else
        echo "blob.server.port: 6124" >> "${CONF_FILE}"
    fi
    if grep -E "^query\.server\.port:.*" "${CONF_FILE}" > /dev/null; then
        sed -i -e "s/query\.server\.port:.*/query.server.port: 6125/g" "${CONF_FILE}"
    else
        echo "query.server.port: 6125" >> "${CONF_FILE}"
    fi
    if [ -n "${FLINK_PROPERTIES}" ]; then
        echo "${FLINK_PROPERTIES}" >> "${CONF_FILE}"
    fi
    envsubst < "${CONF_FILE}" > "${CONF_FILE}.tmp" && mv "${CONF_FILE}.tmp" "${CONF_FILE}"
    echo "config file: " && grep '^[^\n#]' "${CONF_FILE}"
    exec $(drop_privs_cmd) "$FLINK_HOME/bin/taskmanager.sh" start-foreground "$@"
fi

# Any other command is executed as-is (e.g. an interactive shell).
exec "$@"
|
<reponame>pertsenga/shards3d
require "shards3d/version"
require "stl"
require "geo3d"
class Shards3d
  attr_reader :stl, :max_x, :max_y, :max_z

  def initialize(stl_file)
    # Store the parsed STL under @stl so that `attr_reader :stl` and
    # #max_dimensions actually see the data -- the original assigned to
    # @stl_faces, leaving @stl permanently nil.
    @stl = STL.read(stl_file) # gem doesn't work exactly as stated on its doc
    @max_x, @max_y, @max_z = [800, 800, 800]
  end

  # NOTE(review): returns the faces collection, not dimension values;
  # confirm the intended contract of this accessor.
  def max_dimensions
    @stl.faces
  end

  def slice(increment = 0.1) # milimeters
    slicer = Slicer.new(max_x, max_y, max_z, increment)
    # slice here
  end
end
|
def get_divisors(n):
    """Return the proper divisors of n, excluding 1 and n itself.

    For n <= 2 (and primes) the result is an empty list.
    """
    return [candidate for candidate in range(2, n) if n % candidate == 0]
if __name__ == '__main__':
    print(get_divisors(6))
    # Output:
    # [2, 3]
    # (The expected output was originally written as a bare `[2, 3]`
    # expression statement -- a no-op that clearly belonged in a comment.)
<reponame>yiminghe/babel-loose-runtime
var slice = Array.prototype.slice;
module.exports = function _extends(to) {
var from = slice.call(arguments, 1);
from.forEach(function t(f) {
if (f && typeof (f) === 'object') {
Object.keys(f).forEach(function tt(k) {
to[k] = f[k];
});
}
});
return to;
};
|
<reponame>vadi2/codeql
class ElemIterator implements Iterator<MyElem>, Iterable<MyElem> {
private MyElem[] data;
private idx = 0;
public boolean hasNext() {
return idx < data.length;
}
public MyElem next() {
return data[idx++];
}
public Iterator<MyElem> iterator() {
return this;
}
// ...
}
// Traverses the same Iterable twice. If `s` is an ElemIterator-style object
// whose iterator() returns itself, the second loop silently iterates over
// nothing because the shared cursor is already exhausted.
void useMySequence(Iterable<MyElem> s) {
    // do some work by traversing the sequence
    for (MyElem e : s) {
        // ...
    }
    // do some more work by traversing it again
    for (MyElem e : s) {
        // ...
    }
}
|
<filename>example/pages/setOptions.tsx
import type { NextPage } from 'next'
import { Box } from '@fower/react'
import { Form, useForm } from 'fomir-react'
import { request } from '@peajs/request'
// Demo page: a fomir form whose Select options are fetched asynchronously
// after the field mounts, via form.setFieldState.
const Home: NextPage = () => {
  const form = useForm({
    onSubmit(values) {
      console.log('values', values)
    },
    children: [
      {
        component: 'Select',
        label: 'Todos',
        name: 'todo',
        value: '',
        // Populate the select options lazily once the field initializes.
        // NOTE(review): assumes request() resolves directly to the parsed
        // JSON array of todos ({ id, title, ... }) -- confirm against the
        // @peajs/request API.
        async onFieldInit() {
          const todos = await request('https://jsonplaceholder.typicode.com/todos')
          form.setFieldState('todo', {
            options: todos.map((i) => ({
              label: i.title,
              value: i.id,
            })),
          })
        },
      },
      {
        component: 'Submit',
        text: 'submit',
      },
    ],
  })
  return (
    <Box p-100>
      <Form form={form} />
    </Box>
  )
}
export default Home
|
mv /etc/resolv.conf /etc/resolv.conf.backup
echo "search ${vcnFQDN} ${privateBSubnetsFQDN} ${privateSubnetsFQDN} ${privateProtocolSubnetFQDN}" > /etc/resolv.conf
echo "nameserver 169.254.169.254" >> /etc/resolv.conf
# Create the file when it does not exist, otherwise comment out any existing
# PRESERVE_HOSTINFO line and append the new value. The original used
# '[ -z /etc/oci-hostname.conf ]', which tests whether the literal path
# string is empty and is therefore ALWAYS false -- the create branch never
# ran. '-f' (file exists) is the intended test.
if [ ! -f /etc/oci-hostname.conf ]; then
    echo "PRESERVE_HOSTINFO=2" > /etc/oci-hostname.conf
else
    # https://docs.cloud.oracle.com/iaas/Content/Network/Tasks/managingDHCP.htm#notes
    sed -i "s/^PRESERVE_HOSTINFO/#PRESERVE_HOSTINFO/g" /etc/oci-hostname.conf
    echo "PRESERVE_HOSTINFO=2" >> /etc/oci-hostname.conf
fi
# not be overwritten by dhclient
chattr +i /etc/resolv.conf
|
import * as yup from 'yup';
import HelpOrder from '../models/HelpOrder';
import Student from '../models/Student';
// Controller for a student's help orders (questions and answers).
class HelpOrderController {
  // Lists a student's help orders, newest first, 10 per page (?page=N).
  // NOTE(review): 401 (Unauthorized) for a missing student looks like it
  // should be 404 (Not Found); changing it would affect API clients, so
  // flagging instead of fixing.
  async index(req, res) {
    const { page } = req.query;
    const { student_id } = req.params;

    const student = await Student.findByPk(student_id);
    if (!student) {
      return res.status(401).json({ error: 'Aluno não encontrado!' });
    }

    const helpOrders = await HelpOrder.findAll({
      where: {
        student_id
      },
      attributes: ['id', 'question', 'answer', 'answer_at', 'created_at'],
      order: [['created_at', 'desc']],
      limit: 10,
      // Clamp the page to >= 1 before computing the offset.
      offset: ((page && page > 0 ? page : 1) - 1) * 10
    });
    return res.json(helpOrders);
  }

  // Creates a new help order (question) for the given student.
  async store(req, res) {
    const { student_id } = req.params;

    const student = await Student.findByPk(student_id);
    if (!student) {
      return res.status(401).json({ error: 'Aluno não encontrado!' });
    }

    // Validate the request body: a non-empty question is required.
    const schema = yup.object().shape({
      question: yup.string().required('Informe sua dúvida')
    });
    try {
      await schema.validate(req.body);
    } catch (err) {
      return res.status(400).json({ error: err.message });
    }

    // Coerce the route param to a number before persisting.
    req.body.student_id = +student_id;
    const { id, question, created_at } = await HelpOrder.create(req.body);
    return res.json({
      id,
      question,
      created_at
    });
  }
}
export default new HelpOrderController();
|
#!/bin/sh

api_base="https://api.github.com/repos"

# Function to take 2 git tags/commits and get any lines from commit messages
# that contain something that looks like a PR reference: e.g., (#1234)
sanitised_git_logs(){
    git --no-pager log --pretty=format:"%s" "$1...$2" |
        # Only find messages referencing a PR
        grep -E '\(#[0-9]+\)' |
        # Strip any asterisks
        sed 's/^* //g' |
        # And add them all back
        sed 's/^/* /g'
}

# Returns the last published release on github
# Note: we can't just use /latest because that ignores prereleases
# repo: 'organization/repo'
# Usage: last_github_release "$repo"
# NOTE(review): the unconditional 'echo "$out"' inside the loop emits the raw
# release JSON on stdout in addition to the final tag name -- callers that
# capture this function's output receive both. Looks like a leftover debug
# line; confirm before relying on the output.
last_github_release(){
    i=0
    # Iterate over releases until we find the last release that's not just a draft
    while [ $i -lt 29 ]; do
        out=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$api_base/$1/releases" | jq ".[$i]")
        echo "$out"
        # Ugh when echoing to jq, we need to translate newlines into spaces :/
        if [ "$(echo "$out" | tr '\r\n' ' ' | jq '.draft')" = "false" ]; then
            echo "$out" | tr '\r\n' ' ' | jq '.tag_name'
            return
        else
            i=$((i + 1))
        fi
    done
}

# Checks whether a tag on github has been verified
# repo: 'organization/repo'
# tagver: 'v1.2.3'
# Usage: check_tag $repo $tagver
# Returns 0 when verified, 1 when unverified, 2 when the tag does not exist.
check_tag () {
    repo=$1
    tagver=$2
    tag_out=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$api_base/$repo/git/refs/tags/$tagver")
    tag_sha=$(echo "$tag_out" | jq -r .object.sha)
    object_url=$(echo "$tag_out" | jq -r .object.url)
    if [ "$tag_sha" = "null" ]; then
        return 2
    fi
    verified_str=$(curl -H "Authorization: token $GITHUB_RELEASE_TOKEN" -s "$object_url" | jq -r .verification.verified)
    if [ "$verified_str" = "true" ]; then
        # Verified, everything is good
        return 0
    else
        # Not verified. Bad juju.
        return 1
    fi
}

# Checks whether a given PR has a given label.
# repo: 'organization/repo'
# pr_id: 12345
# label: B1-silent
# Usage: has_label $repo $pr_id $label
has_label(){
    repo="$1"
    pr_id="$2"
    label="$3"

    # These will exist if the function is called in Gitlab.
    # If the function's called in Github, we should have GITHUB_ACCESS_TOKEN set
    # already.
    if [ -n "$GITHUB_RELEASE_TOKEN" ]; then
        GITHUB_TOKEN="$GITHUB_RELEASE_TOKEN"
    elif [ -n "$GITHUB_PR_TOKEN" ]; then
        GITHUB_TOKEN="$GITHUB_PR_TOKEN"
    fi

    out=$(curl -H "Authorization: token $GITHUB_TOKEN" -s "$api_base/$repo/pulls/$pr_id")
    # The bracket test's exit status (label found / not found) is the
    # function's return value.
    [ -n "$(echo "$out" | tr -d '\r\n' | jq ".labels | .[] | select(.name==\"$label\")")" ]
}

# Formats a message into a JSON string for posting to Matrix
# message: 'any plaintext message'
# formatted_message: '<strong>optional message formatted in <em>html</em></strong>'
# Usage: structure_message $content $formatted_content (optional)
structure_message() {
    if [ -z "$2" ]; then
        body=$(jq -Rs --arg body "$1" '{"msgtype": "m.text", $body}' < /dev/null)
    else
        body=$(jq -Rs --arg body "$1" --arg formatted_body "$2" '{"msgtype": "m.text", $body, "format": "org.matrix.custom.html", $formatted_body}' < /dev/null)
    fi
    echo "$body"
}

# Post a message to a matrix room
# body: '{body: "JSON string produced by structure_message"}'
# room_id: !fsfSRjgjBWEWffws:matrix.parity.io
# access_token: see https://matrix.org/docs/guides/client-server-api/
# Usage: send_message $body (json formatted) $room_id $access_token
send_message() {
    curl -XPOST -d "$1" "https://matrix.parity.io/_matrix/client/r0/rooms/$2/send/m.room.message?access_token=$3"
}
|
class AddProducts < ActiveRecord::Migration
  # Seed the products table with the three initial pizzas. Each hash holds
  # the attributes of one product; records are created in listed order.
  def change
    [
      {
        title: "Margarita",
        description: "This is Margarit's pizza",
        price: 120,
        size: 30,
        is_spicy: false,
        is_veg: false,
        is_best_offer: true,
        path_to_image: "/images/margarita.jpeg"
      },
      {
        title: "Assorti",
        description: "This is Assorti pizza",
        price: 420,
        size: 30,
        is_spicy: true,
        is_veg: false,
        is_best_offer: false,
        path_to_image: "/images/assorti.jpg"
      },
      {
        title: "Vegetarian",
        description: "Amazing Vegetarian pizza",
        price: 320,
        size: 30,
        is_spicy: true,
        is_veg: true,
        is_best_offer: false,
        path_to_image: "/images/veg.jpeg"
      }
    ].each { |attributes| Product.create!(attributes) }
  end
end
|
#!/bin/bash
# Test Pip install/uninstall works okay
sudo pip install -i https://testpypi.python.org/pypi pyresttest

# Test installed
if [ -f '/usr/local/bin/resttest.py' ];
then
    echo "Runnable script installed okay"
else
    echo "ERROR: Runnable script DID NOT install okay"
fi
if [ -d '/usr/local/lib/python2.7/dist-packages/pyresttest/' ];
then
    echo "Library install okay"
else
    echo "ERROR: Library install DID NOT install okay"
fi

# Test script runs
resttest.py https://github.com ../simple_test.yaml
# The original wrote 'if [$? -ne 0]' -- without spaces, '[' is not
# recognized as the test command and the check always errored out.
if [ $? -ne 0 ]; then
    echo 'ERROR: Runnable script failed to execute okay testing GitHub query'
fi

# Test uninstall is clean
sudo pip uninstall -y pyresttest
if [ -f '/usr/local/bin/resttest.py' ];
then
    echo "ERROR: Runnable script for resttest.py did non uninstall"
else
    echo "Runnable script uninstalled okay"
fi
if [ -d '/usr/local/lib/python2.7/dist-packages/pyresttest/' ];
then
    echo "ERROR: Library install DID NOT uninstall okay"
else
    echo "Library uninstall okay"
fi
import java.util.HashSet;
public class UniqueGUIComponentsCounter {

    /**
     * Counts distinct Swing component variable names in a snippet of
     * generated GUI field declarations.
     *
     * <p>Each declaration is expected to look like
     * {@code "<modifier> javax.swing.<Type> <name>"} and declarations are
     * separated by semicolons. The variable name (third token) is collected
     * into a set, so repeated names count once.
     *
     * <p>Bug fix: the original compared {@code parts[1]} (the full type,
     * e.g. {@code javax.swing.JPanel}) with the literal {@code "javax.swing"},
     * which never matched, so the method always returned 0.
     *
     * @param guiCode semicolon-separated field declarations
     * @return number of unique Swing component variable names
     */
    public static int countUniqueGUIComponents(String guiCode) {
        String[] components = guiCode.split(";");
        HashSet<String> uniqueComponents = new HashSet<>();
        for (String component : components) {
            String[] parts = component.trim().split("\\s+");
            // parts = [modifier, fully-qualified type, variable name].
            if (parts.length > 2 && parts[1].startsWith("javax.swing.")) {
                uniqueComponents.add(parts[2]);
            }
        }
        return uniqueComponents.size();
    }

    public static void main(String[] args) {
        String guiCode = "private javax.swing.JPanel jPanel1; private javax.swing.JButton jButton1; private javax.swing.JTextField jTextField1; private javax.swing.JLabel jLabel1;";
        System.out.println(countUniqueGUIComponents(guiCode)); // Output: 4
    }
}
<reponame>pomali/priznanie-digital<gh_stars>1-10
import { validate } from '../src/pages/hypoteka'
import { testValidation } from './utils/testValidation'
// Validation tests for the mortgage-interest (hypoteka) page: each case maps
// an input object to the list of field names expected to fail validation.
describe('hypoteka', () => {
  describe('#validate', () => {
    testValidation(validate, [
      // The yes/no question itself is required.
      {
        input: { r037_uplatnuje_uroky: undefined },
        expected: ['r037_uplatnuje_uroky'],
      },
      // Answering "no" requires nothing further.
      { input: { r037_uplatnuje_uroky: false }, expected: [] },
      // Answering "yes" makes the amount and month-count required.
      {
        input: { r037_uplatnuje_uroky: true },
        expected: ['r037_zaplatene_uroky', 'r037_pocetMesiacov'],
      },
      // Non-numeric values fail both dependent fields.
      {
        input: {
          r037_uplatnuje_uroky: true,
          r037_zaplatene_uroky: 'a',
          r037_pocetMesiacov: 'b',
        },
        expected: ['r037_zaplatene_uroky', 'r037_pocetMesiacov'],
      },
      // A month count above 12 is rejected.
      {
        input: {
          r037_uplatnuje_uroky: true,
          r037_zaplatene_uroky: '10',
          r037_pocetMesiacov: '20',
        },
        expected: ['r037_pocetMesiacov'],
      },
      // Fully valid input produces no errors.
      {
        input: {
          r037_uplatnuje_uroky: true,
          r037_zaplatene_uroky: '10',
          r037_pocetMesiacov: '12',
        },
        expected: [],
      },
    ])
  })
})
|
#!/bin/bash
# SLURM single-core job running one activation-function experiment.
#SBATCH -J Act_sigmoid_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=2000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# Positional args are hyperparameters for meta.py (activation, seed/index,
# optimizer, depth, dropout(?), hidden size, learning rate, init scheme,
# embedding type) -- presumably; confirm against meta.py's argument parser.
python3 /home/se55gyhe/Act_func/progs/meta.py sigmoid 1 RMSprop 2 0.5394407940012951 293 0.0005872644578229275 varscaling PE-infersent
# Given a file name, this program creates 5 figures using xmgrace:
# 1. A plot of the NRIXS and resolution raw data
# 2. A plot of the peak subtraction
# 3. A plot of the phonon density of states (PDOS)
# 4. A plot of the PDOS integrated over energy
# 5. A zoomed-in plot of the PDOS integral
# Each section follows the same recipe: patch the data-file and output-file
# paths into an xmgrace batch file with awk, render to PostScript, convert
# to PDF, crop, and delete the intermediate .ps file.

# INPUT parameter
FILE_NAME="Fe_Murphy_P10"

# PLOT 1: NRIXS
# -------------
INPUTFILE_DAT="../../$FILE_NAME.dat"
INPUTFILE_RES="../../$FILE_NAME.res"
OUTPUTFILE_NRIXS="../NRIXS_$FILE_NAME.ps"
PDFFILE_NRIXS="../NRIXS_$FILE_NAME.pdf"
BATCHFILE_NRIXS="NRIXS.bfile"
# Change NRIXS batchfile
# awk (use the awk program)
# -v VAR1=$INPUTFILE_DAT (allows shell variable VAR1 to be passed to awk)
# '/READ XYDY/ {$3 = VAR1} {print $0}' (Find the line containing READ XYDY and replace
# the third word on the line with VAR1)
# Then do a funny dance to properly save the new file
# NOTE(review): /READ XY/ is an unanchored regex, so it ALSO matches
# "READ XYDY" lines; the second awk then overrides those lines with the
# .dat file. The net result is correct but depends on this ordering.
awk -v VAR1=\"$INPUTFILE_RES\" '/READ XY/ {$3 = VAR1} {print $0}' $BATCHFILE_NRIXS > input_file.tmp && mv input_file.tmp $BATCHFILE_NRIXS
awk -v VAR1=\"$INPUTFILE_DAT\" '/READ XYDY/ {$3 = VAR1} {print $0}' $BATCHFILE_NRIXS > input_file.tmp && mv input_file.tmp $BATCHFILE_NRIXS
awk -v VAR1=\"$OUTPUTFILE_NRIXS\" '/PRINT TO/ {$3 = VAR1} {print $0}' $BATCHFILE_NRIXS > input_file.tmp && mv input_file.tmp $BATCHFILE_NRIXS
# Create the postscript file
xmgrace -batch NRIXS.bfile -nosafe -hardcopy
# Convert postscript file to pdf
pstopdf $OUTPUTFILE_NRIXS
# Eliminate whitespace around pdf (pdfcrop input.pdf output.pdf, overrides file)
pdfcrop $PDFFILE_NRIXS $PDFFILE_NRIXS
# Delete postscript file to keep clutter down
rm $OUTPUTFILE_NRIXS

# PLOT 2: Resolution Peak Subtraction
# -----------------------------------
INPUTFILE_PSN="../../Output/$FILE_NAME\_psn.dat"
OUTPUTFILE_PSN="../PeakSub_$FILE_NAME.ps"
PDFFILE_PSN="../PeakSub_$FILE_NAME.pdf"
BATCHFILE_PSN="PeakSub.bfile"
# Change PeakSub batchfile
awk -v VAR1=\"$INPUTFILE_PSN\" '/READ XYDY/ {$3 = VAR1} {print $0}' $BATCHFILE_PSN > input_file.tmp && mv input_file.tmp $BATCHFILE_PSN
awk -v VAR1=\"$OUTPUTFILE_PSN\" '/PRINT TO/ {$3 = VAR1} {print $0}' $BATCHFILE_PSN > input_file.tmp && mv input_file.tmp $BATCHFILE_PSN
# Create the postscript file
xmgrace -batch PeakSub.bfile -nosafe -hardcopy
# Convert postscript file to pdf
pstopdf $OUTPUTFILE_PSN
# Eliminate whitespace around pdf (pdfcrop input.pdf output.pdf, overrides file)
pdfcrop $PDFFILE_PSN $PDFFILE_PSN
# Delete postscript file to keep clutter down
rm $OUTPUTFILE_PSN

# PLOT 3: Phonon Density of States
# -----------------------------------
INPUTFILE_PDOS="../../Output/$FILE_NAME\_dos.dat"
OUTPUTFILE_PDOS="../PDOS_$FILE_NAME.ps"
PDFFILE_PDOS="../PDOS_$FILE_NAME.pdf"
BATCHFILE_PDOS="PDOS.bfile"
# Change PeakSub batchfile
awk -v VAR1=\"$INPUTFILE_PDOS\" '/READ XYDY/ {$3 = VAR1} {print $0}' $BATCHFILE_PDOS > input_file.tmp && mv input_file.tmp $BATCHFILE_PDOS
awk -v VAR1=\"$OUTPUTFILE_PDOS\" '/PRINT TO/ {$3 = VAR1} {print $0}' $BATCHFILE_PDOS > input_file.tmp && mv input_file.tmp $BATCHFILE_PDOS
# Create the postscript file
xmgrace -batch PDOS.bfile -nosafe -hardcopy
# Convert postscript file to pdf
pstopdf $OUTPUTFILE_PDOS
# Eliminate whitespace around pdf (pdfcrop input.pdf output.pdf, overrides file)
pdfcrop $PDFFILE_PDOS $PDFFILE_PDOS
# Delete postscript file to keep clutter down
rm $OUTPUTFILE_PDOS

# PLOT 4: Integrated PDOS wrt Energy
# ----------------------------------
INPUTFILE_INTPDOS="../../Output/$FILE_NAME\_dos.dat"
OUTPUTFILE_INTPDOS="../IntPDOS_$FILE_NAME.ps"
PDFFILE_INTPDOS="../IntPDOS_$FILE_NAME.pdf"
BATCHFILE_INTPDOS="IntPDOS.bfile"
# Change PeakSub batchfile
awk -v VAR1=\"$INPUTFILE_INTPDOS\" '/READ XY/ {$3 = VAR1} {print $0}' $BATCHFILE_INTPDOS > input_file.tmp && mv input_file.tmp $BATCHFILE_INTPDOS
awk -v VAR1=\"$OUTPUTFILE_INTPDOS\" '/PRINT TO/ {$3 = VAR1} {print $0}' $BATCHFILE_INTPDOS > input_file.tmp && mv input_file.tmp $BATCHFILE_INTPDOS
# Create the postscript file
xmgrace -batch IntPDOS.bfile -nosafe -hardcopy
# Convert postscript file to pdf
pstopdf $OUTPUTFILE_INTPDOS
# Eliminate whitespace around pdf (pdfcrop input.pdf output.pdf, overrides file)
pdfcrop $PDFFILE_INTPDOS $PDFFILE_INTPDOS
# Delete postscript file to keep clutter down
rm $OUTPUTFILE_INTPDOS

# PLOT 5: Zoomed integrated PDOS wrt Energy
# -----------------------------------------
INPUTFILE_INTPDOSZOOM="../../Output/$FILE_NAME\_dos.dat"
OUTPUTFILE_INTPDOSZOOM="../IntPDOSZoom_$FILE_NAME.ps"
PDFFILE_INTPDOSZOOM="../IntPDOSZoom_$FILE_NAME.pdf"
BATCHFILE_INTPDOSZOOM="IntPDOSZoom.bfile"
# Change PeakSub batchfile
awk -v VAR1=\"$INPUTFILE_INTPDOSZOOM\" '/READ XY/ {$3 = VAR1} {print $0}' $BATCHFILE_INTPDOSZOOM > input_file.tmp && mv input_file.tmp $BATCHFILE_INTPDOSZOOM
awk -v VAR1=\"$OUTPUTFILE_INTPDOSZOOM\" '/PRINT TO/ {$3 = VAR1} {print $0}' $BATCHFILE_INTPDOSZOOM > input_file.tmp && mv input_file.tmp $BATCHFILE_INTPDOSZOOM
# Create the postscript file
xmgrace -batch IntPDOSZoom.bfile -nosafe -hardcopy
# Convert postscript file to pdf
pstopdf $OUTPUTFILE_INTPDOSZOOM
# Eliminate whitespace around pdf (pdfcrop input.pdf output.pdf, overrides file)
pdfcrop $PDFFILE_INTPDOSZOOM $PDFFILE_INTPDOSZOOM
# Delete postscript file to keep clutter down
rm $OUTPUTFILE_INTPDOSZOOM
#!/bin/bash -ev
#
# Installation Script
# Written by: Tommy Lincoln <pajamapants3000@gmail.com>
# Github: https://github.com/pajamapants3000
# Legal: See LICENSE in parent directory
#
#
# Dependencies
#**************
# Begin Required
#gtk+-3.16.6
# End Required
# Begin Recommended
#gobject_introspection-1.44.0
# End Recommended
# Begin Optional
#vala-0.28.1
#glade
#gtk_doc-1.24
# End Optional
# Begin Kernel
# End Kernel
#
# Installation
#**************
# Check for previous installation:
PROCEED="yes"
REINSTALL=0
# If the package is already in this machine's installed list, ask before
# reinstalling; otherwise the chain short-circuits and we proceed directly.
grep gtksourceview-3.16.1 /list-$CHRISTENED"-"$SURNAME > /dev/null && ((\!$?)) &&\
        REINSTALL=1 && echo "Previous installation detected, proceed?" && read PROCEED
[ $PROCEED = "yes" ] || [ $PROCEED = "y" ] || exit 0
# Download:
wget http://ftp.gnome.org/pub/gnome/sources/gtksourceview/3.16/gtksourceview-3.16.1.tar.xz
# FTP/alt Download:
#wget ftp://ftp.gnome.org/pub/gnome/sources/gtksourceview/3.16/gtksourceview-3.16.1.tar.xz
#
# md5sum: verify the tarball; the subshell propagates the pipeline's
# first (md5sum -c) exit status under 'bash -e'.
echo "e727db8202d23a54b54b69ebc66f5331 gtksourceview-3.16.1.tar.xz" | md5sum -c ;\
        ( exit ${PIPESTATUS[0]} )
#
tar -xvf gtksourceview-3.16.1.tar.xz
cd gtksourceview-3.16.1
./configure --prefix=/usr
make
# Test (must be in graphical environment):
make check
#
as_root make install
cd ..
as_root rm -rf gtksourceview-3.16.1
#
# Add to installed list for this computer:
echo "gtksourceview-3.16.1" >> /list-$CHRISTENED"-"$SURNAME
#
###################################################
package zdebug
import (
"fmt"
"testing"
)
// TestPrintStack is a smoke test: it only checks that PrintStack runs
// without panicking. The anonymous function is deliberate -- it adds an
// extra closure frame to the printed trace (presumably to exercise
// frame formatting; confirm against PrintStack's implementation).
func TestPrintStack(t *testing.T) {
	func() {
		PrintStack()
	}()
}
// TestLoc prints Loc at three skip depths from inside a closure, so the
// output walks outward: the closure, this test function, and the testing
// framework caller. Output is inspected manually; nothing is asserted.
func TestLoc(t *testing.T) {
	func() {
		fmt.Println(Loc(0))
		fmt.Println(Loc(1))
		fmt.Println(Loc(2))
	}()
}
|
<reponame>JasonLiu798/javautil
package com.atjl.dbservice.util;
import com.atjl.common.constant.CommonConstant;
import com.atjl.dbservice.api.domain.DataCpConfig;
import com.atjl.util.character.StringCheckUtil;
import com.atjl.util.character.StringUtil;
import com.atjl.util.collection.CollectionUtil;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/** Helpers for de-duplicating and comparing raw vs. target data rows. */
public class DataFilterUtil {
    /**
     * In-memory de-duplication of rows by primary-key value.
     *
     * @param config copy-job configuration supplying the PK columns and the
     *               duplicate-resolution callback
     * @param l      rows to filter; returned as-is when empty
     * @return one row per primary key, in first-seen order
     */
    public static List<Map> filterDuplicate(DataCpConfig config, List<Map> l) {
        if (CollectionUtil.isEmpty(l)) {
            return l;
        }
        // LinkedHashMap keeps first-seen order, making the result
        // deterministic (the original HashMap order was unspecified).
        Map<String, Map> noDuplicateMap = new LinkedHashMap<>();
        for (Map item : l) {
            String pk = DataFieldUtil.getPkValues(item, config);
            Map existItem = noDuplicateMap.get(pk);
            // keepWhich(...) == true means the already-stored row wins.
            if (existItem != null && config.getRawDataDuplicateCheck().keepWhich(existItem, item)) {
                item = existItem;
            }
            noDuplicateMap.put(pk, item);
        }
        return new ArrayList<>(noDuplicateMap.values());
    }

    /**
     * Default load_tm checker: update only when the raw row's load_tm is
     * strictly greater (string comparison) than the target's LOAD_TM.
     */
    public static boolean loadTmDftChecker(Map raw, Map tgt) {
        String rawStr = StringUtil.getEmptyString(raw.get("load_tm"));
        String tgtStr = StringUtil.getEmptyString(tgt.get("LOAD_TM"));
        // raw <= tgt: target is at least as new, do not update.
        return rawStr.compareTo(tgtStr) > 0;
    }

    /** @return false when the target row is flagged IS_MODIFY == YES (skip it). */
    public static boolean isModifyChecker(Map tgt) {
        String modify = StringUtil.getEmptyString(tgt.get("IS_MODIFY"));
        return !StringCheckUtil.equal(modify, CommonConstant.YES);
    }

    /**
     * @return true only when every mapped primary-key column matches between
     *         the raw and target rows (values compared via String.valueOf).
     */
    public static boolean isAllEqual(Map<String, String> pkMapping, Map raw, Map tgt) {
        for (Map.Entry<String, String> entry : pkMapping.entrySet()) {
            // Short-circuit on the first mismatch instead of collecting all
            // per-column results as the original did.
            if (!StringCheckUtil.equal(String.valueOf(raw.get(entry.getKey())),
                    String.valueOf(tgt.get(entry.getValue())))) {
                return false;
            }
        }
        return true;
    }

    /**
     * 过滤 load_tm 小于等于的
     *
    public static boolean canUpdate(Map rawData, Map tgtData, DbTableTransferConfig config) {
        Map<String, String> cols = config.getNoUpdateCheckMapping();
        if (CollectionUtil.isEmpty(cols)) {
            return true;
        }
        for (Map.Entry<String, String> fc : cols.entrySet()) {
            String rawCol = fc.getKey();
            String tgtCol = fc.getValue();
            String rawStr = StringUtil.getEmptyString(rawData.get(rawCol));
            String tgtStr = StringUtil.getEmptyString(tgtData.get(tgtCol));
            if (rawStr.compareTo(tgtStr) <= 0) {//原始表值 小于 目标表值,不更新
                return false;
            }
        }
        return true;
    }*/
}
|
#!/bin/bash
failed_any=0
# Compare actual ($1) against expected ($2) output for test $3; report
# PASSED/FAILED and record any failure in failed_any.
diff_output_and_report() {
    if ! diff "$1" "$2" >/dev/null; then
        printf "\t\x1b[31m%s\x1b[0m\n" "FAILED test $3!"
        failed_any=1
    else
        printf "\tPASSED test $3!\n"
    fi
}
# Compare two actual/expected pairs ($1 vs $3 and $2 vs $4) for test $5.
# Both pairs must match for the test to pass.
diff_output_and_report2() {
    if ! diff "$1" "$3" >/dev/null; then
        printf "\t\x1b[31m%s\x1b[0m\n" "FAILED test $5!"
        # Bug fix: the original printed FAILED here but never set
        # failed_any, so a first-pair mismatch did not fail the suite.
        failed_any=1
    elif ! diff "$2" "$4" >/dev/null; then
        printf "\t\x1b[31m%s\x1b[0m\n" "FAILED test $5!"
        failed_any=1
    else
        printf "\tPASSED test $5!\n"
    fi
}
# Announce the test group named by $1.
print_test_header() {
    printf 'Testing... %s\n' "$1"
}
print_test_header "search for sequences using the different regex engines"
../fxtract CAAAGGGATTGAGACGCCACTT 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 1a
../fxtract -G CAAAGGGATTGAGACGCCACTT 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 1b
../fxtract -E CAAAGGGATTGAGACGCCACTT 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 1c
../fxtract -P CAAAGGGATTGAGACGCCACTT 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 1d
print_test_header "search for headers"
../fxtract -H "HISEQ2000:55:C0JRTACXX:2:1101:11128:12710_1:N:0:CTTGTAAT" 1.fa > 2.output.fa
diff_output_and_report 2.output.fa 2.expected.fa 2a
../fxtract -HG "HISEQ2000:55:C0JRTACXX:2:1101:11128:12710_1:N:0:CTTGTAAT" 1.fa > 2.output.fa
diff_output_and_report 2.output.fa 2.expected.fa 2b
../fxtract -HE "HISEQ2000:55:C0JRTACXX:2:1101:11128:12710_1:N:0:CTTGTAAT" 1.fa > 2.output.fa
diff_output_and_report 2.output.fa 2.expected.fa 2c
../fxtract -HP "HISEQ2000:55:C0JRTACXX:2:1101:11128:12710_1:N:0:CTTGTAAT" 1.fa > 2.output.fa
diff_output_and_report 2.output.fa 2.expected.fa 2d
../fxtract -HX "HISEQ2000:55:C0JRTACXX:2:1101:11128:12710_1:N:0:CTTGTAAT" 1.fa > 2.output.fa
diff_output_and_report 2.output.fa 2.expected.fa 2e
../fxtract -HX 1660334 12.fa >2f.output.fa
diff_output_and_report 2f.output.fa 12.expected.fa 2f
../fxtract -rHX 1660334 12.fa >2g.output.fa
diff_output_and_report 2g.output.fa 2g.expected.fa 2g
print_test_header "multipattern search"
../fxtract -Hf headers.txt 1.fa > 3.output.fa
diff_output_and_report 3.output.fa 3.expected.fa 3
print_test_header "paired reads"
../fxtract -H "HSQ868392H08B7ADXX:2:1112:8977:35114" 4_1.fa 4_2.fa > 4.output.fa
diff_output_and_report 4.output.fa 4.expected.fa 4
print_test_header "search in comment strings"
../fxtract -C "Accumulibacter" 5.fa > 5.output.fa
diff_output_and_report 5.output.fa 5.expected.fa 5
print_test_header "inverted match"
../fxtract -Hv 647449011 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6a
../fxtract -HvX 647449011 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6b
../fxtract -HvP 647449011 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6c
../fxtract -HvG 647449011 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6d
../fxtract -HvE 647449011 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6e
../fxtract -Hvf <(echo 647449011) 11.fa > 11.output.fa
diff_output_and_report 11.output.fa 11.expected.fa 6f
print_test_header "count the matches"
../fxtract -Hc HISEQ2000 1.fa > 7.output.fa
diff_output_and_report 7.output.fa 7.expected.fa 7
print_test_header "test out different fasta file styles"
../fxtract -H 1101:11128:12710 8.fa >8.output.fa
diff_output_and_report 8.output.fa 8.expected.fa 8
../fxtract -H 1101:11128:12710 9.fa >9.output.fa
diff_output_and_report 9.output.fa 8.expected.fa 9
print_test_header "compressed files"
gzip 8.fa
../fxtract -zH 1101:11128:12710 8.fa.gz >8.output.fa
diff_output_and_report 8.output.fa 8.expected.fa 10a
gunzip 8.fa.gz
gzip 4_1.fa 4_2.fa
../fxtract -Hz "HSQ868392H08B7ADXX:2:1112:8977:35114" 4_1.fa.gz 4_2.fa.gz > 4.output.fa
diff_output_and_report 4.output.fa 4.expected.fa 10c
gunzip 4_1.fa.gz 4_2.fa.gz
print_test_header "multiple files"
../fxtract -CS Accumulibacter 1.fa 11.fa 5.fa > 12.output.fa
diff_output_and_report 12.output.fa 5.expected.fa 11
print_test_header "multiple outputs"
../fxtract -HXf headers2.txt 1.fa
diff_output_and_report2 14_out_1.fasta 14_out_2.fasta 14_1.expected 14_2.expected 12a
../fxtract -Hf headers2.txt 1.fa
diff_output_and_report2 14_out_1.fasta 14_out_2.fasta 14_1.expected 14_2.expected 12b
print_test_header "proper comment printing"
../fxtract CAAAGGGATTGAGACGCCACTT 13.fa > 13.output.fa
# Bug fix: test 13a used to diff the stale 1.output.fa (left over from the
# engine tests above) against 1.expected.fa, so a regression in comment
# printing could never fail it. Compare the file this run just produced,
# the same way tests 13b-13d do.
diff_output_and_report 13.output.fa 13.expected.fa 13a
../fxtract -G CAAAGGGATTGAGACGCCACTT 13.fa > 13.output.fa
diff_output_and_report 13.output.fa 13.expected.fa 13b
../fxtract -E CAAAGGGATTGAGACGCCACTT 13.fa > 13.output.fa
diff_output_and_report 13.output.fa 13.expected.fa 13c
../fxtract -P CAAAGGGATTGAGACGCCACTT 13.fa > 13.output.fa
diff_output_and_report 13.output.fa 13.expected.fa 13d
../fxtract -HX HWI-ST1243:175:C29BRACXX:4:1101:15034:30425 15.fq > 15.output.fq
diff_output_and_report 15.output.fq 15.expected.fq 13e
print_test_header "search for sequences using the reverse complement with different regex engines"
../fxtract -r AAGTGGCGTCTCAATCCCTTTG 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 14a
../fxtract -rX TCGAACGTCGCAAAGACTCGCACCCTCGCTGCGAACGACACGTCTCAATCCCTTTGAATTCAGGGCATCAGTTCGAACTGGAGCAGTACGACCACGTTGATCTGAAGTGGCGTCTCAATCCCTTTGAATTCAGGGCATCAGTTCGAACGG 1.fa > 13b.output.fa
diff_output_and_report 13b.output.fa 13b.expected.fa 14b
../fxtract -rf <(echo AAGTGGCGTCTCAATCCCTTTG) 1.fa > 1.output.fa
diff_output_and_report 1.output.fa 1.expected.fa 14c
../fxtract -X TCGAACGTCGCAAAGACTCGCACCCTCGCTGCGAACGACACGTCTCAATCCCTTTGAATTCAGGGCATCAGTTCGAACTGGAGCAGTACGACCACGTTGATCTGAAGTGGCGTCTCAATCCCTTTGAATTCAGGGCATCAGTTCGAACGG 1.fa > 13b.output.fa
diff_output_and_report 13b.output.fa empty.fa 14d
../fxtract -f <(echo AAGTGGCGTCTCAATCCCTTTG) 1.fa > 13e.output.fa
diff_output_and_report 13e.output.fa empty.fa 14e
exit $failed_any
|
// Doubles every element of aList in place (no value is returned).
FUNCTION multiplyBy2 (LIST aList)
FOR every element in aList
// NOTE(review): "element" is used as an index into aList here.
aList[element] *= 2
END FOR
END FUNCTION
import React from 'react'
import { FilePond, File, registerPlugin } from 'react-filepond'
import FilePondPluginFileValidateType from 'filepond-plugin-file-validate-type'
import FilePondPluginImageExifOrientation from 'filepond-plugin-image-exif-orientation'
import FilePondPluginImagePreview from 'filepond-plugin-image-preview'
import 'filepond-plugin-image-preview/dist/filepond-plugin-image-preview.css'
import 'filepond/dist/filepond.css'
// Register FilePond plugins once at module load: EXIF-based image
// orientation, inline image previews, and file-type validation.
registerPlugin(
  FilePondPluginImageExifOrientation,
  FilePondPluginImagePreview,
  FilePondPluginFileValidateType
)
/**
 * Video upload widget backed by FilePond.
 *
 * State:
 *   files - the FilePond file items currently shown in the pond
 *   media - video records fetched from the API on mount
 *   id    - id of the media item targeted by handleRevert
 *           NOTE(review): `id` is initialised to '' and never updated
 *           anywhere in this file -- confirm something external sets it
 *           before handleRevert can work.
 */
class FileUploader extends React.Component {
  constructor (props) {
    super(props)
    this.state = {
      files: [],
      media: [],
      id: ''
    }
    this.handleRevert = this.handleRevert.bind(this)
  }
  // DELETEs the video identified by state.id, then drops it from state.media.
  handleRevert () {
    fetch('https://video.laaksonen.me/api/videos/' + this.state.id, {
      method: 'delete'
    }).then(() => {
      this.setState(prevState => ({
        media: prevState.media.filter(value => value._id !== this.state.id)
      }))
    })
  }
  // Load the existing video list once on mount.
  componentDidMount () {
    fetch('https://video.laaksonen.me/api/videos')
      .then(response => response.json())
      .then(json => {
        this.setState({ media: json })
      })
  }
  render () {
    return (
      <div style={{ width: '50%', margin: 'auto', marginTop: '2em' }}>
        <FilePond
          ref={ref => {
            this.pond = ref
          }}
          allowMultiple={true}
          acceptedFileTypes={['video/mp4', 'video/webm']}
          server={{
            url: 'https://video.laaksonen.me/api/videos/'
          }}
          onupdatefiles={fileItems => {
            this.setState({
              files: fileItems.map(fileItem => fileItem.file)
            })
          }}
        >
          {this.state.files.map(file => (
            <File key={file} src={file} origin="local" />
          ))}
        </FilePond>
      </div>
    )
  }
}
export default FileUploader
|
<reponame>Ciip1996/OsxJugueteria
//
// AdministradorVC.h
// Jugueteria_OSX
//
// Created by <NAME> on 14/06/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "ManejadorSQLite.h"
// Administrator view controller: form for creating a new user account.
@interface AdministradorVC : NSViewController{
    // SQLite persistence helper used to store the new user.
    ManejadorSQLite *msqlite;
    // NOTE(review): AppDelegate is not imported in this header -- presumably
    // it comes in via ManejadorSQLite.h; confirm, or forward-declare it.
    AppDelegate *appdelegate;
}
// Personal-data fields of the form.
@property (weak) IBOutlet NSDatePicker *FechaNacimiento;
@property (weak) IBOutlet NSTextField *txtNombre;
@property (weak) IBOutlet NSTextField *txtPaterno;
@property (weak) IBOutlet NSTextField *txtMaterno;
@property (weak) IBOutlet NSTextField *txtCurp;
@property (weak) IBOutlet NSTextField *txtRFC;
@property (weak) IBOutlet NSPopUpButton *popUpBtnGenero;
// Account fields: password, confirmation, salary and role selector.
@property (weak) IBOutlet NSTextField *txtClave;
@property (weak) IBOutlet NSTextField *txtConfirmarClave;
@property (weak) IBOutlet NSTextField *txtSalario;
@property (weak) IBOutlet NSPopUpButton *popUpBtnRol;
// Creates the user from the form fields.
- (IBAction)OnCrearUsuarioNuevo:(id)sender;
// Dismisses this view controller.
- (IBAction)CerrarVC:(id)sender;
@end
|
<filename>ajax/endpoints.py<gh_stars>1-10
from django.core import serializers
from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson as json
from django.utils.encoding import smart_str
from django.utils.translation import ugettext_lazy as _
from django.db.models.fields import FieldDoesNotExist
from ajax.decorators import require_pk
from ajax.exceptions import AJAXError, AlreadyRegistered, NotRegistered, \
PrimaryKeyMissing
from ajax.encoders import encoder
from taggit.utils import parse_tags
class ModelEndpoint(object):
    """AJAX CRUD endpoint bound to a single Django model.

    Instantiated per-request (see ``Endpoints.load``) with the application
    label, model class, method name and any URL kwargs such as ``pk``.
    Python 2 code: note ``iteritems`` and ``except X, e`` syntax throughout.
    """
    # POST string values coerced to their Python equivalents by _extract_value.
    _value_map = {
        'false': False,
        'true': True,
        'null': None
    }
    immutable_fields = [] # List of model fields that are not writable.
    def __init__(self, application, model, method, **kwargs):
        self.application = application
        self.model = model
        # Names of the model's concrete fields; POST keys outside this list
        # are ignored by _extract_data.
        self.fields = [f.name for f in self.model._meta.fields]
        self.method = method
        self.pk = kwargs.get('pk', None)
        self.options = kwargs
    def create(self, request):
        """Create a record from POST data and return it encoded.

        django-taggit tags are applied after the save when a 'tags' key is
        present. Raises AJAXError(403) when ``can_create`` denies access.
        """
        record = self.model(**self._extract_data(request))
        if self.can_create(request.user, record):
            record = self._save(record)
            try:
                tags = self._extract_tags(request)
                record.tags.set(*tags)
            except KeyError:
                pass  # No 'tags' key in the request at all; nothing to set.
            return encoder.encode(record)
        else:
            raise AJAXError(403, _("Access to endpoint is forbidden"))
    def tags(self, request):
        """Run a taggit command (e.g. set/add/remove/similar) on a record.

        The command name comes from the 'taggit_command' URL option;
        'similar' returns related objects, anything else is dispatched onto
        ``record.tags`` and the full tag list is returned.
        """
        cmd = self.options.get('taggit_command', None)
        if not cmd:
            raise AJAXError(400, _("Invalid or missing taggit command."))
        record = self._get_record()
        if cmd == 'similar':
            result = record.tags.similar_objects()
        else:
            try:
                tags = self._extract_tags(request)
                getattr(record.tags, cmd)(*tags)
            except KeyError:
                pass # No tags to set/manipulate in this request.
            result = record.tags.all()
        return encoder.encode(result)
    def _save(self, record):
        """Validate and save; converts ValidationError into AJAXError(400)."""
        try:
            record.full_clean()
            record.save()
            return record
        except ValidationError, e:
            raise AJAXError(400, _("Could not save model."),
                errors=e.message_dict)
    @require_pk
    def update(self, request):
        """Apply POST data to the record identified by ``self.pk``.

        Tag semantics: a present-but-empty 'tags' value clears all tags; a
        missing 'tags' key leaves tags untouched.
        """
        record = self._get_record()
        if self.can_update(request.user, record):
            for key, val in self._extract_data(request).iteritems():
                setattr(record, key, val)
            self._save(record)
            try:
                tags = self._extract_tags(request)
                if tags:
                    record.tags.set(*tags)
                else:
                    # If tags were in the request and set to nothing, we will
                    # clear them all out.
                    record.tags.clear()
            except KeyError:
                pass  # No 'tags' key at all; leave existing tags alone.
            return encoder.encode(record)
        else:
            raise AJAXError(403, _("Access to endpoint is forbidden"))
    @require_pk
    def delete(self, request):
        """Delete the record identified by ``self.pk``; returns its pk."""
        record = self._get_record()
        if self.can_delete(request.user, record):
            record.delete()
            return {'pk': int(self.pk)}
        else:
            raise AJAXError(403, _("Access to endpoint is forbidden"))
    @require_pk
    def get(self, request):
        """Fetch and encode the record identified by ``self.pk``."""
        record = self._get_record()
        if self.can_get(request.user, record):
            return encoder.encode(record)
        else:
            raise AJAXError(403, _("Access to endpoint is forbidden"))
    def _extract_tags(self, request):
        """Parse the 'tags' POST field into a list of non-empty tag strings."""
        # We let this throw a KeyError so that calling functions will know if
        # there were NO tags in the request or if there were, but that the
        # call had an empty tags list in it.
        raw_tags = request.POST['tags']
        tags = []
        if raw_tags:
            try:
                tags = [t for t in parse_tags(raw_tags) if len(t)]
            except Exception, e:
                pass  # Unparseable tag string: treated as "no tags".
        return tags
    def _extract_data(self, request):
        """Extract data from POST.
        Handles extracting a vanilla Python dict of values that are present
        in the given model. This also handles instances of ``ForeignKey`` and
        will convert those to the appropriate object instances from the
        database. In other words, it will see that user is a ``ForeignKey`` to
        Django's ``User`` class, assume the value is an appropriate pk, and
        load up that record.
        """
        data = {}
        for field, val in request.POST.iteritems():
            if field in self.immutable_fields:
                continue # Ignore immutable fields silently.
            if field in self.fields:
                f = self.model._meta.get_field(field)
                val = self._extract_value(val)
                if val and isinstance(f, models.ForeignKey):
                    data[smart_str(field)] = f.rel.to.objects.get(pk=val)
                else:
                    data[smart_str(field)] = val
        return data
    def _extract_value(self, value):
        """If the value is true/false/null replace with Python equivalent."""
        return ModelEndpoint._value_map.get(smart_str(value).lower(), value)
    def _get_record(self):
        """Fetch a given record.
        Handles fetching a record from the database along with throwing an
        appropriate instance of ``AJAXError``.
        """
        if not self.pk:
            raise AJAXError(400, _('Invalid request for record.'))
        try:
            return self.model.objects.get(pk=self.pk)
        except self.model.DoesNotExist:
            raise AJAXError(404, _('%s with id of "%s" not found.') % (
                self.model.__name__, self.pk))
    def can_get(self, user, record):
        # Reads are allowed for everyone by default; override to restrict.
        return True
    def _user_is_active_or_staff(self, user, record):
        # Default write-permission check shared by create/update/delete.
        return ((user.is_authenticated() and user.is_active) or user.is_staff)
    can_create = _user_is_active_or_staff
    can_update = _user_is_active_or_staff
    can_delete = _user_is_active_or_staff
    def authenticate(self, request, application, method):
        """Authenticate the AJAX request.
        By default any request to fetch a model is allowed for any user,
        including anonymous users. All other methods minimally require that
        the user is already logged in.
        Most likely you will want to lock down who can edit and delete various
        models. To do this, just override this method in your child class.
        """
        if request.user.is_authenticated():
            return True
        return False
class FormEndpoint(object):
    """AJAX endpoint for processing Django forms.
    The models and forms are processed in pretty much the same manner, only a
    form class is used rather than a model class.
    """
    def create(self, request):
        # ``self.model`` here is a form class; bind it to the POST data.
        form = self.model(request.POST)
        if form.is_valid():
            model = form.save()
            if hasattr(model, 'save'):
                # This is a model form so we save it and return the model.
                # NOTE(review): form.save() above already saved for a plain
                # ModelForm, so this is a second save -- confirm intended.
                model.save()
                return encoder.encode(model)
            else:
                return model # Assume this is a dict to encode.
        else:
            return encoder.encode(form.errors)
    def update(self, request):
        # Forms only support create; every other verb is a 404.
        raise AJAXError(404, _("Endpoint does not exist."))
    delete = update
    get = update
class Endpoints(object):
    """Registry mapping model classes to their AJAX endpoint classes."""
    def __init__(self):
        self._registry = {}
    def register(self, model, endpoint):
        """Associate ``endpoint`` with ``model``; error if already present."""
        if model in self._registry:
            raise AlreadyRegistered()
        self._registry[model] = endpoint
    def unregister(self, model):
        """Forget ``model``; error if it was never registered."""
        if model not in self._registry:
            raise NotRegistered()
        self._registry.pop(model)
    def load(self, model_name, application, method, **kwargs):
        """Instantiate the endpoint whose model's lowercase name matches."""
        for model, endpoint_cls in self._registry.items():
            if model.__name__.lower() == model_name:
                return endpoint_cls(application, model, method, **kwargs)
        raise NotRegistered()
|
import numpy as np
# Build 1..8 and view it as a 2-row matrix; -1 lets numpy infer the
# column count (4), giving shape (2, 4).
A = np.arange(1, 9)
B = A.reshape((2, -1))
print(B)
let Stack = function() {
  // Exercise note (kept from the original): rewrite in the new style
  // without referencing the old implementation.
  // Build a bare instance and mix the shared stack methods into it;
  // _.extend mutates and returns its first argument.
  return _.extend({ length: 0, storage: {} }, stackMethods);
};
// Shared method set mixed into every Stack instance.
let stackMethods = {
  // Store value at the current top index and grow the stack.
  push: function(value) {
    this.storage[this.length] = value;
    this.length++;
  },
  // Remove and return the top value; undefined on an empty stack.
  pop: function() {
    if (this.length === 0) {
      // Bug fix: the original fell through to `storage[0]` here and could
      // return a stale, previously-popped value.
      return undefined;
    }
    this.length--;
    var value = this.storage[this.length];
    // Drop the slot so popped values can be garbage-collected and can
    // never be observed again.
    delete this.storage[this.length];
    return value;
  },
  // Number of elements currently on the stack.
  size: function() {
    return this.length;
  }
};
|
#!/bin/sh
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# This simple script checks for commits beginning with: scripted-diff:
# If found, looks for a script between the lines -BEGIN VERIFY SCRIPT- and
# -END VERIFY SCRIPT-. If no ending is found, it reads until the end of the
# commit message.
# The resulting script should exactly transform the previous commit into the current
# one. Any remaining diff signals an error.
export LC_ALL=C
if test "x$1" = "x"; then
    echo "Usage: $0 <commit>..."
    exit 1
fi
RET=0
# Remember where HEAD was so the working tree can be restored afterwards.
PREV_BRANCH=$(git name-rev --name-only HEAD)
PREV_HEAD=$(git rev-parse HEAD)
for commit in $(git rev-list --reverse $1); do
    if git rev-list -n 1 --pretty="%s" $commit | grep -q "^scripted-diff:"; then
        # Check out the parent, replay the embedded script, and require the
        # result to be byte-identical to the scripted-diff commit itself.
        git checkout --quiet $commit^ || exit
        # sed keeps only the body lines strictly between the BEGIN/END
        # markers of the verify-script block.
        SCRIPT="$(git rev-list --format=%b -n1 $commit | sed '/^-BEGIN VERIFY SCRIPT-$/,/^-END VERIFY SCRIPT-$/{//!b};d')"
        if test "x$SCRIPT" = "x"; then
            echo "Error: missing script for: $commit"
            echo "Failed"
            RET=1
        else
            echo "Running script for: $commit"
            echo "$SCRIPT"
            # Run in a subshell so the script cannot alter this shell's state.
            (eval "$SCRIPT")
            git --no-pager diff --exit-code $commit && echo "OK" || (echo "Failed"; false) || RET=1
        fi
        git reset --quiet --hard HEAD
    else
        # Non-scripted commits must not carry verify-script markers.
        if git rev-list "--format=%b" -n1 $commit | grep -q '^-\(BEGIN\|END\)[ a-zA-Z]*-$'; then
            echo "Error: script block marker but no scripted-diff in title"
            echo "Failed"
            RET=1
        fi
    fi
done
# Restore the original branch (fall back to the detached HEAD commit).
git checkout --quiet $PREV_BRANCH 2>/dev/null || git checkout --quiet $PREV_HEAD
exit $RET
|
import kivy
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.screenmanager import ScreenManager
class Calculator(ScreenManager):
    """Root widget: applies the pressed operator button to the two inputs."""

    # Button caption -> arithmetic operation.
    _OPERATIONS = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }

    def do_calculation(self, instance):
        """Compute input1 <op> input2 and write it to the result label.

        Unknown captions leave the result at 0, matching the original
        if/elif chain's fall-through behaviour.
        """
        first = self.ids.input1.text
        second = self.ids.input2.text
        operation = self._OPERATIONS.get(instance.text)
        value = 0
        if operation is not None:
            value = operation(float(first), float(second))
        self.ids.result.text = str(value)
class CalculatorApp(App):
    def build(self):
        # Root widget; its layout presumably comes from calculator.kv via
        # Kivy's App naming convention -- confirm the .kv file exists.
        return Calculator()
# Start the Kivy event loop only when executed as a script.
if __name__ == '__main__':
    CalculatorApp().run()
<reponame>jrfaller/maracas<gh_stars>1-10
package main.test.classRemoved;
/**
 * Test fixture: a public class whose removal between library versions is
 * exercised by the class-removed compatibility scenario.
 */
public class ClassRemoved {
    /** Public field kept as part of the fixture's API surface. */
    public int field;

    /** @return the fixed value 90 */
    public int method() {
        final int result = 90;
        return result;
    }
}
|
import '@babel/polyfill';
import dotenv from 'dotenv';
import 'isomorphic-fetch';
import createShopifyAuth, { verifyRequest } from '@shopify/koa-shopify-auth';
import graphQLProxy, { ApiVersion } from '@shopify/koa-shopify-graphql-proxy';
import Koa from 'koa';
import next from 'next';
import Router from 'koa-router';
import session from 'koa-session';
import {
DeliveryMethod,
Options,
receiveWebhook
} from '@shopify/koa-shopify-webhooks';
import addWebhook from 'src/server/utils/addWebhook';
dotenv.config();
// Server port: honour $PORT, default to 8081.
const port = parseInt(process.env.PORT, 10) || 8081;
const dev = process.env.NODE_ENV !== 'production';
const app = next({
  dev,
});
const handle = app.getRequestHandler();
// Shopify credentials and app host, all supplied via environment variables.
const {
  SHOPIFY_API_SECRET,
  SHOPIFY_API_KEY,
  SCOPES,
  HOST,
} = process.env;
// Boot the Next.js app, then wire up the Koa server around it.
app.prepare().then(() => {
  const server = new Koa();
  const router = new Router();
  // Cookie session; SameSite=None + Secure so it survives inside the
  // embedded-app iframe.
  server.use(
    session(
      {
        sameSite: 'none',
        secure: true,
      },
      server
    )
  );
  server.keys = [SHOPIFY_API_SECRET];
  // Shopify OAuth handshake; afterAuth runs once a shop has authorized.
  server.use(
    createShopifyAuth({
      apiKey: SHOPIFY_API_KEY,
      secret: SHOPIFY_API_SECRET,
      scopes: [SCOPES],
      async afterAuth(ctx) {
        let { shop, accessToken, scopes } = ctx.session;
        // Access token and shop available in ctx.state.shopify also??
        if (!shop) {
          // NOTE(review): URLSearchParams over the full request URL only
          // finds 'shop' when the string starts at the query portion --
          // confirm this fallback works for the URLs seen here.
          shop = ctx.state.shopify.shop || new URLSearchParams(ctx.request.url).get('shop');
        }
        // Expose the shop origin to the client (App Bridge reads it).
        ctx.cookies.set('shopOrigin', shop, {
          httpOnly: false,
          secure: true,
          sameSite: 'none'
        });
        console.log('accessToken', accessToken);
        console.log('scopes', scopes);
        // Subscribe to PRODUCTS_CREATE webhooks delivered over HTTP.
        const productsCreateWebhookOptions: Options = {
          address: `${HOST}/webhooks/products/create`,
          topic: 'PRODUCTS_CREATE',
          accessToken,
          shop,
          apiVersion: ApiVersion.July20,
          deliveryMethod: DeliveryMethod.Http,
        }
        await addWebhook(productsCreateWebhookOptions);
        // Redirect to app with shop parameter upon auth
        ctx.redirect(`/?shop=${shop}`);
      },
    })
  );
  // HMAC-verifying middleware for incoming webhook payloads.
  const webhook = receiveWebhook({ secret: SHOPIFY_API_SECRET });
  router.post('/webhooks/products/create', webhook, (ctx) => {
    console.log('received webhook: ', ctx.state.webhook);
  });
  router.post('/webhooks/carrier_services', webhook, (ctx) => {
    console.log('received webhook: ', ctx.state.webhook);
  });
  // Proxy authenticated GraphQL calls to the Shopify Admin API.
  server.use(
    graphQLProxy({
      version: ApiVersion.October19,
    })
  );
  // Everything else: require a valid session, then hand off to Next.js.
  router.get('(.*)', verifyRequest(), async (ctx) => {
    await handle(ctx.req, ctx.res);
    ctx.respond = false;
    ctx.res.statusCode = 200;
  });
  // NOTE(review): koa-router conventionally mounts routes() before
  // allowedMethods(); confirm the reversed order here is intentional.
  server.use(router.allowedMethods());
  server.use(router.routes());
  server.listen(port, () => {
    console.log(`> Ready on http://localhost:${port}`);
  });
});
|
<gh_stars>0
import React from "react";
import { useFormContext } from "react-hook-form";
import { ErrorMessage } from "@hookform/error-message";
import {
StyledContainer,
StyledFlex,
StyledLabel,
StyledAsterisk,
StyledAlertText
} from "./styles";
/**
 * HOC that wraps a form control with its label, required-field asterisk and
 * react-hook-form validation message. Must render inside a FormProvider
 * (useFormContext supplies `errors`).
 */
const withInputWrapper = WrappedComponent => props => {
  const {
    id,
    name,
    readOnly,
    required,
    alertText,     // overrides the validation message when provided
    errorName,     // lookup key in `errors` when it differs from `name`
    fullWidth,
    alertStyle,
    label = "",
    containerStyle = {}
  } = props;
  const { errors } = useFormContext();
  return (
    <StyledContainer fullWidth={fullWidth} style={containerStyle}>
      <StyledFlex>
        <StyledLabel htmlFor={id}>
          {label}
          {/* Asterisk only for editable required fields. */}
          {!readOnly && required && (
            <StyledAsterisk aria-label="required">*</StyledAsterisk>
          )}
        </StyledLabel>
        <ErrorMessage
          errors={errors}
          name={errorName || name}
          render={({ message }) => (
            <StyledAlertText style={alertStyle}>
              {alertText || message}
            </StyledAlertText>
          )}
        />
      </StyledFlex>
      {/* All props are forwarded so the wrapped input registers itself. */}
      <WrappedComponent {...props} />
    </StyledContainer>
  );
};
export default withInputWrapper;
|
'use strict';
const {app, clipboard, dialog, shell} = require('electron');
const os = require('os');
const {activate} = require('./win');
const {release} = require('./url');
const file = require('./file');
const settings = require('./settings');
/**
 * Wrapper around Electron message boxes for the Ao app (all user-facing
 * strings are Chinese and are preserved verbatim).
 *
 * NOTE(review): every confirm* method compares the dialog result with a
 * number synchronously; dialog.showMessageBox returns a Promise in
 * Electron >= 6, so this presumably targets the older sync API (or should
 * use showMessageBoxSync) -- confirm against the pinned Electron version.
 */
class Dialog {
  // App version plus OS details, shown/copied from the About dialog.
  get _systemInfo() {
    return [
      `版本: ${app.getVersion()}`,
      `系統: ${os.type()} ${os.arch()} ${os.release()}`
    ].join('\n');
  }
  // About box; button index 1 is "copy system info".
  _about() {
    return this._create({
      buttons: ['好嘞!', '复制'],
      detail: `Developed by <NAME>.\n由Kevin Wang维护.\n联系方式:<EMAIL>.\n本软件永久免费\n\n${this._systemInfo}`,
      message: `Ao ${app.getVersion()} (${os.arch()})`,
      title: '关于 Ao'
    });
  }
  // Shared builder: default button 0, cancel button 1, app icon.
  _create(options) {
    return dialog.showMessageBox(
      Object.assign({
        cancelId: 1,
        defaultId: 0,
        icon: file.icon
      }, options)
    );
  }
  // Quit confirmation.
  _exit() {
    return this._create({
      buttons: ['退出', '取消'],
      detail: '你确定你想要退出码?',
      message: '退出',
      title: 'Ao - 退出确认'
    });
  }
  // Sign-out confirmation.
  _signOut() {
    return this._create({
      buttons: ['注销', '取消'],
      detail: '你确定要登出你的微软账户登录吗?',
      message: '登出',
      title: 'Ao - 登出确认'
    });
  }
  // Restart-to-apply-setting confirmation.
  _restart() {
    return this._create({
      buttons: ['重新启动', '取消'],
      detail: '你确定你想要重启软件以更新设置码?',
      message: '重启以更新选定的设置',
      title: 'Ao - 需要登出'
    });
  }
  // Update-available prompt; button 0 opens the download page.
  _update(version) {
    return this._create({
      buttons: ['下载', '忽略'],
      detail: `发现新版本!!!!`,
      message: `${version} 可以更新啦`,
      title: `Ao - 发现新版本:${version}`
    });
  }
  // Show About; copy system info to the clipboard when "复制" is chosen.
  confirmAbout() {
    if (this._about() === 1) {
      clipboard.writeText(this._systemInfo);
    }
  }
  // Quit, optionally asking first (per the requestExitConfirmation setting).
  confirmExit() {
    if (settings.get('requestExitConfirmation')) {
      if (this._exit() === 0) {
        app.quit();
      }
    } else {
      app.quit();
    }
  }
  // Persist a setting that needs a restart, then relaunch the app.
  confirmActivationRestart(option, state) {
    if (this._restart() === 0) {
      settings.set(option, state);
      app.quit();
      app.relaunch();
    }
  }
  // Trigger the renderer's sign-out flow after confirmation.
  confirmSignOut() {
    if (this._signOut() === 0) {
      activate('sign-out');
    }
  }
  // Error box shown when the update check itself fails.
  updateError(content) {
    return dialog.showErrorBox('获取最新版本失败!', content);
  }
  // Informational "no update available" box.
  noUpdate() {
    return this._create({
      buttons: ['好嘞!'],
      detail: `软件暂无新版本`,
      message: '软件暂无新版本',
      title: 'Ao - 无更新可用'
    });
  }
  // Offer the new version; open the release page when accepted.
  getUpdate(version) {
    if (this._update(version) === 0) {
      shell.openExternal(release);
    }
  }
}
module.exports = new Dialog();
|
<gh_stars>1-10
import { CommandArgs, SuiteStats, TestStats } from '@wdio/reporter'
import AllureReporter from '../src'
import { linkPlaceholder } from '../src/constants'
// Saved reference to the real process.on, restored in afterAll.
let processOn: any
beforeAll(() => {
    processOn = process.on.bind(process)
    // Stub process.on so the reporter cannot attach real signal handlers
    // during these tests.
    process.on = jest.fn()
})
afterAll(() => {
    // Put the real process.on back once the suite is done.
    process.on = processOn
})
describe('reporter runtime implementation', () => {
    // Each test below stubs the reporter's private _allure handle so both
    // getCurrentSuite and getCurrentTest yield an object exposing addLabel,
    // then checks the reporter forwards the right (name, value) pair.
    it('should correct add custom label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addLabel({ name: 'customLabel', value: 'Label' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('customLabel', 'Label')
    })
    // addStory maps to the fixed allure label name 'story'.
    it('should correct add story label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addStory({ storyName: 'foo' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('story', 'foo')
    })
    // addFeature maps to the fixed allure label name 'feature'.
    it('should correct add feature label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addFeature({ featureName: 'foo' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('feature', 'foo')
    })
    // addSeverity maps to the fixed allure label name 'severity'.
    it('should correct add severity label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addSeverity({ severity: 'foo' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('severity', 'foo')
    })
    // Without an issueLinkTemplate the raw issue id is used as-is.
    it('should correctly add issue label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addIssue({ issue: '1' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('issue', '1')
    })
    // With issueLinkTemplate the id is substituted into the link placeholder.
    it('should correctly add issue label with link', () => {
        const reporter = new AllureReporter({ issueLinkTemplate: `http://example.com/${linkPlaceholder}` })
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addIssue({ issue: '1' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('issue', 'http://example.com/1')
    })
    // addTestId maps to the fixed allure label name 'testId' (raw id).
    it('should correctly add test id label', () => {
        const reporter = new AllureReporter()
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addTestId({ testId: '2' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('testId', '2')
    })
    // With tmsLinkTemplate the id is substituted into the link placeholder.
    it('should correctly add test id label with link', () => {
        const reporter = new AllureReporter({ tmsLinkTemplate: `https://webdriver.io/${linkPlaceholder}` })
        const addLabel = jest.fn()
        const mock = jest.fn(() => {
            return { addLabel }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addTestId({ testId: '2' })
        expect(addLabel).toHaveBeenCalledTimes(1)
        expect(addLabel).toHaveBeenCalledWith('testId', 'https://webdriver.io/2')
    })
    // addEnvironment goes through addParameter as an environment-variable.
    it('should correct add environment', () => {
        const reporter = new AllureReporter()
        const addParameter = jest.fn()
        const mock = jest.fn(() => {
            return { addParameter }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addEnvironment({ name: 'foo', value: 'bar' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('environment-variable', 'foo', 'bar')
    })
    // addDescription forwards both the text and the description type.
    it('should correct add description', () => {
        const reporter = new AllureReporter()
        const setDescription = jest.fn()
        const mock = jest.fn(() => {
            return { setDescription }
        })
        reporter['_allure'] = {
            getCurrentSuite: mock,
            getCurrentTest: mock,
        } as any
        reporter.addDescription({ description: 'foo', descriptionType: 'bar' })
        expect(setDescription).toHaveBeenCalledTimes(1)
        expect(setDescription).toHaveBeenCalledWith('foo', 'bar')
    })
    // Generic attachments are buffered and passed straight to allure.
    it('should correct add attachment', () => {
        const reporter = new AllureReporter()
        const addAttachment = jest.fn()
        reporter['_allure'] = {
            getCurrentSuite: jest.fn(() => true),
            getCurrentTest: jest.fn(() => true),
            addAttachment
        } as any
        reporter.addAttachment({ name: 'foo', content: 'bar', type: 'baz' })
        expect(addAttachment).toHaveBeenCalledTimes(1)
        expect(addAttachment).toHaveBeenCalledWith('foo', Buffer.from('bar'), 'baz')
    })
    // JSON attachments take the dumpJSON path instead of addAttachment.
    it('should correct add "application/json" attachment', () => {
        const reporter = new AllureReporter()
        const dumpJSON = jest.fn()
        reporter.dumpJSON = dumpJSON
        reporter['_allure'] = {
            getCurrentSuite: jest.fn(() => true),
            getCurrentTest: jest.fn(() => true),
        } as any
        reporter.addAttachment({ name: 'foo', content: 'bar', type: 'application/json' })
        expect(dumpJSON).toHaveBeenCalledWith('foo', 'bar')
    })
    // startStep/endStep delegate directly to the allure runtime.
    it('should allow to start end step', () => {
        const reporter = new AllureReporter()
        const startStep = jest.fn()
        const endStep = jest.fn()
        reporter['_allure'] = {
            getCurrentSuite: jest.fn(() => true),
            getCurrentTest: jest.fn(() => true),
            startStep,
            endStep
        } as any
        reporter.startStep('bar')
        reporter.endStep('failed')
        expect(startStep).toHaveBeenCalledTimes(1)
        expect(endStep).toHaveBeenCalledTimes(1)
        expect(startStep).toHaveBeenCalledWith('bar')
        expect(endStep).toHaveBeenCalledWith('failed')
    })
it('should correct add step with attachment', () => {
const reporter = new AllureReporter()
const startStep = jest.fn()
const endStep = jest.fn()
const addAttachment = jest.fn()
reporter.addAttachment = addAttachment
reporter['_allure'] = {
getCurrentSuite: jest.fn(() => true),
getCurrentTest: jest.fn(() => true),
startStep,
endStep
} as any
const step = {
'step': {
'attachment': { 'content': 'baz', 'name': 'attachment' },
'status': 'passed',
'title': 'foo'
}
}
reporter.addStep(step)
expect(startStep).toHaveBeenCalledTimes(1)
expect(endStep).toHaveBeenCalledTimes(1)
expect(addAttachment).toHaveBeenCalledTimes(1)
expect(startStep).toHaveBeenCalledWith(step.step.title)
expect(addAttachment).toHaveBeenCalledWith(step.step.attachment)
expect(endStep).toHaveBeenCalledWith(step.step.status)
})
it('should correct add step without attachment', () => {
const reporter = new AllureReporter()
const startStep = jest.fn()
const endStep = jest.fn()
const addAttachment = jest.fn()
reporter.addAttachment = addAttachment
reporter['_allure'] = {
getCurrentSuite: jest.fn(() => true),
getCurrentTest: jest.fn(() => true),
startStep,
endStep
} as any
const step = { 'step': { 'status': 'passed', 'title': 'foo' } }
reporter.addStep(step)
expect(startStep).toHaveBeenCalledTimes(1)
expect(endStep).toHaveBeenCalledTimes(1)
expect(addAttachment).toHaveBeenCalledTimes(0)
expect(startStep).toHaveBeenCalledWith(step.step.title)
expect(endStep).toHaveBeenCalledWith(step.step.status)
})
it('should correctly add argument', () => {
const reporter = new AllureReporter()
const addParameter = jest.fn()
const mock = jest.fn(() => {
return { addParameter }
})
reporter['_allure'] = {
getCurrentSuite: mock,
getCurrentTest: mock,
} as any
reporter.addArgument({ name: 'os', value: 'osx' })
expect(addParameter).toHaveBeenCalledTimes(1)
expect(addParameter).toHaveBeenCalledWith('argument', 'os', 'osx')
})
it('should do nothing if no tests run', () => {
const reporter = new AllureReporter()
expect(reporter.addLabel({})).toEqual(false)
expect(reporter.addStory({})).toEqual(false)
expect(reporter.addFeature({})).toEqual(false)
expect(reporter.addSeverity({})).toEqual(false)
expect(reporter.addIssue({})).toEqual(false)
expect(reporter.addTestId({})).toEqual(false)
expect(reporter.addEnvironment({})).toEqual(false)
expect(reporter.addDescription({})).toEqual(false)
expect(reporter.addAttachment({})).toEqual(false)
expect(reporter.startStep('test')).toEqual(false)
expect(reporter.endStep('passed')).toEqual(false)
expect(reporter.addStep({})).toEqual(false)
expect(reporter.addArgument({})).toEqual(false)
})
// Verifies the browser/device 'argument' parameter derived from the runner
// capabilities when a test case starts.
describe('add argument', () => {
    let reporter: any, addParameter: any, addLabel: any, mock
    beforeEach(() => {
        reporter = new AllureReporter()
        addParameter = jest.fn()
        addLabel = jest.fn()
        // The same stub serves as startCase/getCurrentSuite/getCurrentTest.
        mock = jest.fn(() => {
            return { addParameter, addLabel }
        })
        reporter['_allure'] = {
            startCase: mock,
            getCurrentSuite: mock,
            getCurrentTest: mock,
        }
    })
    it('should correctly add argument for selenium', () => {
        reporter.onRunnerStart({ config: {}, capabilities: { browserName: 'firefox', version: '1.2.3' } })
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('argument', 'browser', 'firefox-1.2.3')
    })
    it('should correctly set proper browser version for chrome headless in devtools', () => {
        // devtools reports browserVersion instead of version.
        reporter.onRunnerStart({ config: {}, capabilities: { browserName: 'Chrome Headless', browserVersion: '85.0.4183.84' } })
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('argument', 'browser', 'Chrome Headless-85.0.4183.84')
    })
    it('should correctly add argument for appium', () => {
        reporter.onRunnerStart({ config: {}, capabilities: { deviceName: 'Android Emulator', platformVersion: '8.0' } })
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('argument', 'device', 'Android Emulator-8.0')
    })
    it('should correctly add device name when run on BrowserStack', () => {
        // BrowserStack uses 'device' rather than 'deviceName'.
        reporter.onRunnerStart({ config: {}, capabilities: { device: 'Google Pixel 3', platformVersion: '9.0' } })
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('argument', 'device', 'Google Pixel 3-9.0')
    })
    it('should correctly add argument for multiremote', () => {
        reporter.onRunnerStart({ isMultiremote: true, config: { capabilities: { myBrowser: { browserName: 'chrome' } } } })
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' })
        expect(addParameter).toHaveBeenCalledTimes(1)
        expect(addParameter).toHaveBeenCalledWith('argument', 'isMultiremote', 'true')
    })
})
})
// Covers internal helpers: screenshot-command detection, JSON attachment
// dumping, and device-name resolution from nested 'desired' capabilities.
describe('auxiliary methods', () => {
    it('isScreenshotCommand', () => {
        const reporter = new AllureReporter()
        // Matches both WebDriver endpoints and the DevTools command name.
        expect(reporter.isScreenshotCommand({ endpoint: '/session/id/screenshot' } as CommandArgs)).toEqual(true)
        expect(reporter.isScreenshotCommand({ endpoint: '/wdu/hub/session/id/screenshot' } as CommandArgs)).toEqual(true)
        expect(reporter.isScreenshotCommand({ endpoint: '/session/id/click' } as CommandArgs)).toEqual(false)
        expect(reporter.isScreenshotCommand({ command: 'takeScreenshot' } as CommandArgs)).toEqual(true)
        expect(reporter.isScreenshotCommand({ command: 'elementClick' } as CommandArgs)).toEqual(false)
        expect(reporter.isScreenshotCommand({ endpoint: '/session/id/element/id/screenshot' } as CommandArgs)).toEqual(true)
    })
    it('dumpJSON', () => {
        const reporter = new AllureReporter()
        const addAttachment = jest.fn()
        reporter['_allure'] = {
            addAttachment
        } as any
        const json = { bar: 'baz' }
        reporter.dumpJSON('foo', json)
        expect(addAttachment).toHaveBeenCalledTimes(1)
        // Pretty-printed with a 2-space indent.
        expect(addAttachment).toHaveBeenCalledWith('foo', JSON.stringify(json, null, 2), 'application/json')
    })
    it('should populate the correct deviceName', () => {
        // The 'desired' sub-capabilities should win over the top-level deviceName.
        const capabilities = {
            deviceName: 'emulator',
            desired: {
                platformName: 'Android',
                automationName: 'UiAutomator2',
                deviceName: 'Android GoogleAPI Emulator',
                platformVersion: '6.0',
                noReset: true,
            }
        }
        const reporter = new AllureReporter()
        const currentTestMock = { addParameter: jest.fn(), addLabel: jest.fn() }
        reporter['_allure'].getCurrentTest = jest.fn().mockReturnValue(currentTestMock)
        reporter['_allure'].startCase = jest.fn()
        reporter['_isMultiremote'] = false
        reporter['_capabilities'] = capabilities
        reporter.onTestStart({ cid: '0-0', title: 'SomeTest' } as TestStats)
        expect(reporter['_allure'].getCurrentTest).toBeCalledTimes(1)
        expect(currentTestMock.addParameter).toHaveBeenCalledWith('argument', 'device', 'Android GoogleAPI Emulator 6.0')
    })
})
// With disableMochaHooks=true: custom hooks become test cases, "each" hooks
// become steps, and "all"/global hooks are ignored unless they fail.
describe('hooks handling disabled Mocha Hooks', () => {
    let reporter: any, startCase: any, endCase: any, startStep: any, endStep: any
    // Builds a minimal Allure stub; suite/test can be overridden per test.
    const allureInstance = ({ suite = {}, test = { steps: [1] } }: any = {}) => ({
        getCurrentSuite: jest.fn(() => suite),
        getCurrentTest: jest.fn(() => { return test }),
        startCase,
        endCase,
        startStep,
        endStep
    })
    beforeEach(() => {
        reporter = new AllureReporter({ disableMochaHooks: true })
        reporter.onTestStart = jest.fn(test => startCase(test.title))
        startCase = jest.fn()
        // endCase/endStep echo their status so mock.results can be asserted.
        endCase = jest.fn(result => result)
        startStep = jest.fn()
        endStep = jest.fn(result => result)
    })
    it('should add test on custom hook', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookStart({ title: 'foo', parent: 'bar' })
        expect(startCase).toHaveBeenCalledTimes(1)
        expect(startCase).toHaveBeenCalledWith('foo')
        expect(startStep).toHaveBeenCalledTimes(0)
    })
    it('should not add test if no suite', () => {
        reporter['_allure'] = allureInstance({ suite: false })
        reporter.onHookStart({ title: 'foo', parent: 'bar' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(0)
    })
    it('should ignore global mocha hooks', () => {
        // A hook with an empty parent is treated as a global mocha hook.
        reporter['_allure'] = allureInstance()
        reporter.onHookStart({ title: '"after all" hook', parent: '' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(0)
    })
    it('should capture mocha each hooks', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookStart({ title: '"before each" hook', parent: 'foo' })
        expect(startStep).toHaveBeenCalledTimes(1)
        expect(startCase).toHaveBeenCalledTimes(0)
    })
    it('should ignore mocha each hooks if no test', () => {
        reporter['_allure'] = allureInstance({ test: null })
        reporter.onHookStart({ title: '"after each" hook', parent: 'foo' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(0)
    })
    it('should not end test onHookEnd if no suite', () => {
        reporter['_allure'] = allureInstance({ suite: false })
        reporter.onHookEnd({ title: 'foo', parent: 'bar' })
        expect(endCase).toHaveBeenCalledTimes(0)
    })
    it('should ignore mocha hook end if no test', () => {
        reporter['_allure'] = allureInstance({ test: null })
        reporter.onHookEnd({ title: 'foo', parent: 'bar' })
        expect(endCase).toHaveBeenCalledTimes(0)
        expect(endStep).toHaveBeenCalledTimes(0)
    })
    it('should ignore global mocha end hooks', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookEnd({ title: 'foo' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(0)
    })
    it('should not pop test case if no steps and before hook', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [] } })
        reporter.onHookEnd({ title: '"before all" hook', parent: 'foo' })
        expect(endCase).toHaveBeenCalledTimes(0)
        expect(testcases).toHaveLength(1)
    })
    it('should pop test case if no steps and custom hook', () => {
        // An empty custom-hook test case is removed from the suite again.
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [] } })
        reporter.onHookEnd({ title: 'bar', parent: 'foo' })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(testcases).toHaveLength(0)
    })
    it('should keep passed hooks if there are some steps', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [1] } })
        reporter.onHookEnd({ title: 'foo', parent: 'bar' })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(endCase.mock.results[0].value).toBe('passed')
        expect(testcases).toHaveLength(1)
    })
    it('should keep failed hooks if there no some steps', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [1] } })
        reporter.onHookEnd({ title: '"after all" hook', parent: 'foo', error: { message: '', stack: '' } })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(endCase.mock.results[0].value).toBe('broken')
        expect(testcases).toHaveLength(1)
    })
    // NOTE(review): this test is an exact duplicate of the previous one
    // (same fixture, same hook, same assertions) — likely one of them was
    // meant to use steps: [] to match its title; confirm and deduplicate.
    it('should keep failed hooks if there are some steps', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [1] } })
        reporter.onHookEnd({ title: '"after all" hook', parent: 'foo', error: { message: '', stack: '' } })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(endCase.mock.results[0].value).toBe('broken')
        expect(testcases).toHaveLength(1)
    })
    it('should capture mocha each hooks end - passed', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookEnd({ title: '"after each" hook', parent: 'foo' })
        expect(endCase).toHaveBeenCalledTimes(0)
        expect(endStep).toHaveBeenCalledTimes(1)
        expect(endStep.mock.results[0].value).toBe('passed')
    })
    it('should capture mocha each hooks end - failed', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookEnd({ title: '"before each" hook', parent: 'foo', error: { message: '', stack: '' } })
        expect(endCase).toHaveBeenCalledTimes(0)
        expect(endStep).toHaveBeenCalledTimes(1)
        expect(endStep.mock.results[0].value).toBe('failed')
    })
    it('should ignore mocha all hooks if hook passes', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookStart({ title: '"after all" hook', parent: 'foo' })
        expect(startCase).toHaveBeenCalledTimes(0)
        expect(endCase).toHaveBeenCalledTimes(0)
    })
    it('should treat mocha all hooks as tests if hook throws', () => {
        // Failing "all" hooks must surface in the report even when ignored on start.
        reporter['_allure'] = allureInstance()
        reporter.onHookEnd({ title: '"before all" hook', parent: 'foo', error: { message: '', stack: '' } })
        expect(startCase).toHaveBeenCalledTimes(1)
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(endCase.mock.results[0].value).toBe('broken')
    })
})
// With disableMochaHooks=false (default): every hook, including "each"
// hooks, is reported as its own test case.
describe('hooks handling default', () => {
    let reporter: any, startCase: any, endCase: any, startStep: any, endStep: any
    // Minimal Allure stub; suite/test overridable per test.
    const allureInstance = ({ suite = {}, test = { steps: [1] } }: any = {}) => ({
        getCurrentSuite: jest.fn(() => suite),
        getCurrentTest: jest.fn(() => { return test }),
        startCase,
        endCase,
        startStep,
        endStep
    })
    beforeEach(() => {
        reporter = new AllureReporter({ disableMochaHooks: false })
        reporter.onTestStart = jest.fn(test => startCase(test.title))
        startCase = jest.fn()
        endCase = jest.fn(result => result)
        startStep = jest.fn()
        endStep = jest.fn(result => result)
    })
    it('should capture mocha each hooks', () => {
        reporter['_allure'] = allureInstance()
        reporter.onHookStart({ title: '"before each" hook', parent: 'foo' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(1)
    })
    it('should not ignore mocha each hooks if no test', () => {
        reporter['_allure'] = allureInstance({ test: null })
        reporter.onHookStart({ title: '"after each" hook', parent: 'foo' })
        expect(startStep).toHaveBeenCalledTimes(0)
        expect(startCase).toHaveBeenCalledTimes(1)
    })
    it('should keep passed hooks if there are no steps (before/after)', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [] } })
        reporter.onHookEnd({ title: '"before all" hook', parent: 'foo' })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(testcases).toHaveLength(1)
    })
    it('should keep passed hooks if there are some steps', () => {
        const testcases = [1]
        reporter['_allure'] = allureInstance({ suite: { testcases }, test: { steps: [1] } })
        reporter.onHookEnd({ title: 'foo', parent: 'bar' })
        expect(endCase).toHaveBeenCalledTimes(1)
        expect(testcases).toHaveLength(1)
    })
})
// Nested suites are named "<parent>: <child>" when a suite is already open.
describe('nested suite naming', () => {
    // NOTE(review): the it-title below looks copy-pasted from the hook tests
    // and does not match what is asserted (suite name concatenation) — confirm
    // and rename.
    it('should not end test if no hook ignored', () => {
        const reporter = new AllureReporter()
        const startSuite = jest.fn()
        reporter['_allure'] = {
            getCurrentSuite: jest.fn(() => { return { name: 'foo' } }),
            startSuite
        } as any
        reporter.onSuiteStart({ title: 'bar' } as SuiteStats)
        expect(startSuite).toHaveBeenCalledTimes(1)
        expect(startSuite).toHaveBeenCalledWith('foo: bar')
    })
})
|
package javafx.scene.transform;
import com.sun.javafx.geom.Point2D;
import javafx.beans.property.*;
import javafx.geometry.GeometryUtil;
import javafx.geometry.Point3D;
import dev.webfx.kit.mapper.peers.javafxgraphics.markers.HasAngleProperty;
/**
* @author <NAME>
*/
public class Rotate extends PivotTransform implements
        HasAngleProperty {

    /**
     * Specifies the X-axis as the axis of rotation.
     */
    public static final Point3D X_AXIS = new Point3D(1,0,0);

    /**
     * Specifies the Y-axis as the axis of rotation.
     */
    public static final Point3D Y_AXIS = new Point3D(0,1,0);

    /**
     * Specifies the Z-axis as the axis of rotation.
     */
    public static final Point3D Z_AXIS = new Point3D(0,0,1);

    /**
     * Creates a default Rotate transform (identity).
     */
    public Rotate() {
    }

    /**
     * Creates a two-dimensional Rotate transform.
     * The pivot point is set to (0,0)
     * @param angle the angle of rotation measured in degrees
     */
    public Rotate(double angle) {
        setAngle(angle);
    }

    /**
     * Creates a three-dimensional Rotate transform.
     * The pivot point is set to (0,0,0)
     * @param angle the angle of rotation measured in degrees
     * @param axis the axis of rotation
     */
    public Rotate(double angle, Point3D axis) {
        setAngle(angle);
        setAxis(axis);
    }

    /**
     * Creates a two-dimensional Rotate transform with pivot.
     * @param angle the angle of rotation measured in degrees
     * @param pivotX the X coordinate of the rotation pivot point
     * @param pivotY the Y coordinate of the rotation pivot point
     */
    public Rotate(double angle, double pivotX, double pivotY) {
        setAngle(angle);
        setPivotX(pivotX);
        setPivotY(pivotY);
    }

    // Angle of rotation in degrees, 0 by default (identity).
    private final DoubleProperty angleProperty = new SimpleDoubleProperty(0d);

    @Override
    public DoubleProperty angleProperty() {
        return angleProperty;
    }

    /**
     * Defines the axis of rotation at the pivot point.
     * Lazily instantiated; {@link #Z_AXIS} is reported while unset.
     */
    private ObjectProperty<Point3D> axis;

    public final void setAxis(Point3D value) {
        axisProperty().set(value);
    }

    public final Point3D getAxis() {
        return axis == null ? Z_AXIS : axis.get();
    }

    public final ObjectProperty<Point3D> axisProperty() {
        if (axis == null) {
            axis = new ObjectPropertyBase<Point3D>(Z_AXIS) {
                /*
                @Override
                public void invalidated() {
                    transformChanged();
                }
                */
                @Override
                public Object getBean() {
                    return Rotate.this;
                }

                @Override
                public String getName() {
                    return "axis";
                }
            };
        }
        return axis;
    }

    /**
     * Rotates (x, y) around the pivot point by the current angle.
     * Only Z-axis rotations are applied; any other axis currently maps the
     * point to itself (3D transforms are not supported yet).
     */
    @Override
    public Point2D transform(double x, double y) {
        if (Z_AXIS.equals(getAxis())) // Ignoring 3D transforms for now
            return GeometryUtil.rotate(getPivotX(), getPivotY(), x, y, getAngle());
        return new Point2D((float) x, (float) y);
    }

    @Override
    public Transform createInverse() {
        // Fix: carry the axis over to the inverse. Previously the axis was
        // dropped, so the "inverse" of a (currently identity) X/Y-axis
        // rotation was a real Z-axis rotation by -angle — inconsistent with
        // transform().
        Rotate inverse = new Rotate(-getAngle(), getPivotX(), getPivotY());
        inverse.setAxis(getAxis());
        return inverse;
    }

    @Override
    public Property[] propertiesInvalidatingCache() {
        // Fix: include the axis property — transform() reads the axis, so a
        // change of axis must also invalidate any cached transform state.
        return new Property[]{angleProperty, pivotXProperty, pivotYProperty, axisProperty()};
    }

    /**
     * Returns the equivalent 2D affine matrix for a rotation of `angle`
     * degrees about the pivot (px, py):
     *   [ cos  -sin  px(1-cos)+py*sin ]
     *   [ sin   cos  py(1-cos)-px*sin ]
     */
    @Override
    public Affine toAffine() {
        double rads = Math.toRadians(getAngle());
        double px = getPivotX();
        double py = getPivotY();
        double sin = Math.sin(rads);
        double cos = Math.cos(rads);
        double mxx = cos;
        double mxy = -sin;
        double tx = px * (1 - cos) + py * sin;
        double myx = sin;
        double myy = cos;
        double ty = py * (1 - cos) - px * sin;
        return new Affine(mxx, mxy, myx, myy, tx, ty);
    }
}
|
// Barrel re-export so consumers can import the hook from this directory.
export { default as useEditAchievementSelector } from './useEditAchievementSelector';
|
<gh_stars>0
package com.example.googleplay.ui.holder;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.example.googleplay.R;
import com.example.googleplay.domain.SubjectInfo;
import com.example.googleplay.http.HttpHelper;
import com.example.googleplay.utils.BitmapHelper;
import com.example.googleplay.utils.UIUtils;
import com.lidroid.xutils.BitmapUtils;
/**
 * List-item holder for a "subject" entry: a picture loaded over HTTP plus a
 * one-line description.
 */
public class SubjectHolder extends BaseHolder<SubjectInfo> {

    private ImageView ivPic;
    private TextView tvTitle;
    private BitmapUtils mBitmapUtils;

    /** Inflates the row layout and caches its child views. */
    @Override
    public View initView() {
        final View root = UIUtils.inflate(R.layout.list_item_subject);
        ivPic = (ImageView) root.findViewById(R.id.iv_pic);
        tvTitle = (TextView) root.findViewById(R.id.tv_title);
        mBitmapUtils = BitmapHelper.getBitmapUtils();
        return root;
    }

    /** Binds one subject: description text plus its remote image. */
    @Override
    public void refreshView(SubjectInfo data) {
        tvTitle.setText(data.des);
        final String imageUrl = HttpHelper.URL + "image?name=" + data.url;
        mBitmapUtils.display(ivPic, imageUrl);
    }
}
|
from typing import List
def process_array(arr: List[int]) -> List[int]:
    """Map each integer to a power of itself based on parity.

    Even numbers are squared, odd numbers are cubed (Python's ``%`` makes
    negative evens map to 0, so e.g. -2 -> 4 and -3 -> -27).

    Args:
        arr: Input integers; may be empty.

    Returns:
        A new list with one transformed value per input element
        (an empty list for empty input).
    """
    # A comprehension handles the empty case naturally, removing the
    # redundant explicit empty-list branch of the original.
    return [num ** 2 if num % 2 == 0 else num ** 3 for num in arr]
class ASCIIFormatter:
    """Renders selected parameter values as one space-separated ASCII line."""

    def __init__(self, param_names, result, formats):
        # param_names: full ordered list of parameter names.
        # result: tuple whose second element holds the parameter values.
        # formats: per-name printf-style format overrides.
        self.param_names = param_names
        self.result = result
        self.formats = formats

    def get_ascii(self, names=None, params=None):
        """Format `params` for the given `names` (defaults: all names, the
        stored result values). Unknown names fall back to '%11.4e'."""
        if names is None:
            names = self.param_names
        if params is None:
            params = self.result[1]
        pieces = []
        for name in names:
            position = self.param_names.index(name)
            fmt = self.formats.get(name, '%11.4e')
            pieces.append(fmt % params[position])
        return ' '.join(pieces)
# Example usage
param_names = ['x', 'y', 'z']
result_values = [3.14159, 2.71828, 1.41421]
# Explicit formats for 'x' and 'y'; 'z' would use the '%11.4e' default.
formats_dict = {'x': '%.2f', 'y': '%.3e'}
# The first tuple slot is unused by get_ascii; only result[1] is read.
formatter = ASCIIFormatter(param_names, (None, result_values), formats_dict)
formatted_text = formatter.get_ascii(['x', 'y'])  # Output: '3.14 2.718e+00'
<filename>utils/index.ts
import * as util from "util";
import { wasm_modules_amount } from "../index";
import { log } from "../utils/log";
import { event } from "../rpc/parser";
import { getContract, runContract } from "../contract";
import { getWasmExport } from "../storage";
export const setValue = (moduleName: string, value: string) => {
const wasm_exports = getWasmExport(moduleName);
const textEncoder = new util.TextEncoder();
const typedArray = textEncoder.encode(value);
const ptr = wasm_exports._wasm_malloc(typedArray.length);
const Uint8Memory = new Uint8Array(wasm_exports.memory.buffer);
Uint8Memory.subarray(ptr, ptr + typedArray.length).set(typedArray);
return {ptr, length: typedArray.length};
};
export const getValue = (moduleName: string, ptr: number, length: number) => {
const wasm_exports = getWasmExport(moduleName);
const value = wasm_exports.memory.buffer.slice(ptr, ptr + length);
const utf8decoder = new util.TextDecoder();
return utf8decoder.decode(value);
};
export const setValueByBytes = (moduleName: string, bytes: any) => {
const wasm_exports = getWasmExport(moduleName);
const typedArray = new Uint8Array(bytes);
const ptr = wasm_exports._wasm_malloc(typedArray.length);
const Uint8Memory = new Uint8Array(wasm_exports.memory.buffer);
Uint8Memory.subarray(ptr, ptr + typedArray.length).set(typedArray);
return {ptr, length: typedArray.length };
};
export const getValueByBytes = (moduleName: string, ptr: number, length: number) => {
const wasm_exports = getWasmExport(moduleName);
const buffer = wasm_exports.memory.buffer.slice(ptr, ptr + length);
return buffer;
};
/** Host import: current wall-clock time in milliseconds. */
export const _get_timestamp = () => Date.now();
// Intentional no-op stub: the random-generation host callback is not implemented.
export const _gen_rand32_callback = (fn: number, addr: number) => {};
export const _load_callback = (moduleName: string) => {
return async function _load_callback (ptr: number, size: number, cb: number, user_data: number) {
const wasm_exports = getWasmExport(moduleName);
log().info(ptr, size, cb, user_data, "from load callback");
const index = await getContract(moduleName, ptr, size);
wasm_exports.call_loader_callback_fn(index, cb, user_data);
};
};
/** Returns a host function that runs the contract at `index` on (ptr, size). */
export const _load_run = () => {
    return function _load_run (index: number, ptr: number, size: number) {
        return runContract(index, ptr, size);
    };
};
// Index of the next wasm module to initialize (module 0 bootstraps first).
let wasm_init_next = 1;

// When the previous wasm init is completed, this method will
// be notified to call the next wasm init
export const _callback_number = (index: number, num: number) => {
    // Guard clause: every module is initialized — nothing left to schedule.
    if (wasm_init_next >= wasm_modules_amount) {
        // Fix: typo in the log message ("complate" -> "complete").
        log().info("wasm modules init complete");
        return;
    }
    log().info(`wasm entry callback, begin init module ${wasm_init_next}`);
    event.emit("next_wasm_init", wasm_init_next);
    wasm_init_next++;
};
|
// Dependencies
// =============================================================
const express = require("express");
const router = express.Router();
// Import the model to use its database functions.
const blogs = require("../models/blogs");
// Routes
// =============================================================
module.exports = function (app) {
// Create all our routes and set up logic within those routes where required.
router.get("/", function (req, res) {
blogs.all(function (data) {
const hbsObject = {
blogs: data
};
console.log(hbsObject);
res.render("index", hbsObject);
});
});
router.post("/api/blogs", function (req, res) {
blogs.create([
"title", "link", "summary"
], [
req.body.title, req.body.link
], function (result) {
// Send back the ID of the new article
res.json({ id: result.insertId });
});
});
router.put("/api/blogs/:id", function (req, res) {
const condition = "id = " + req.params.id;
console.log("condition", condition);
blogs.update({
link: req.body.link
}, condition, function (result) {
if (result.changedRows == 0) {
// If no rows were changed, then the ID must not exist, so 404
return res.status(404).end();
} else {
res.status(200).end();
}
});
});
router.delete("/api/blogs/:id", function (req, res) {
const condition = "id = " + req.params.id;
blogs.delete(condition, function (result) {
if (result.affectedRows == 0) {
// If no rows were changed, then the ID must not exist, so 404
return res.status(404).end();
} else {
res.status(200).end();
}
});
});
}
|
-- Count customers grouped by city.
-- Fix: single-quoted column aliases are nonstandard SQL (string literal, not
-- identifier); a bare identifier keeps the same result column name portably.
SELECT city, COUNT(*) AS NumOfCustomers
FROM Customers
GROUP BY city;
"""
Created on Feb 5, 2010
@author: barthelemy
"""
from __future__ import unicode_literals, absolute_import
import unittest
from py4j.java_gateway import JavaGateway, GatewayParameters
from py4j.tests.java_gateway_test import (
start_example_app_process, safe_shutdown, sleep)
def get_map():
    """Return the small heterogeneous sample dict used by the map tests."""
    sample = dict(a=1, b=2.0, c="z")
    return sample
class AutoConvertTest(unittest.TestCase):
    """Checks that auto_convert lets a Python dict compare equal to a Java map."""

    def setUp(self):
        # Spawn the example Java gateway and connect with auto conversion on.
        self.p = start_example_app_process()
        self.gateway = JavaGateway(
            gateway_parameters=GatewayParameters(auto_convert=True))

    def tearDown(self):
        safe_shutdown(self)
        self.p.join()
        sleep()

    def testAutoConvert(self):
        java_map = self.gateway.jvm.java.util.HashMap()
        java_map["b"] = 2
        java_map["a"] = 1
        python_map = {"a": 1, "b": 2}
        # equals() runs on the Java side; auto_convert converts the Python
        # dict into a Java map before the call.
        self.assertTrue(java_map.equals(python_map))
class MapTest(unittest.TestCase):
    """Exercises the mapping protocol on a remote java.util.HashMap."""

    def setUp(self):
        self.p = start_example_app_process()
        self.gateway = JavaGateway()

    def tearDown(self):
        safe_shutdown(self)
        self.p.join()
        sleep()

    def equal_maps(self, m1, m2):
        """Return True when both maps have the same length and key/value pairs."""
        if len(m1) == len(m2):
            equal = True
            for k in m1:
                equal = m1[k] == m2[k]
                if not equal:
                    break
            return equal
        else:
            return False

    def testMap(self):
        dp0 = {}
        dp = get_map()
        dj = self.gateway.jvm.java.util.HashMap()
        # Fix: the equal_maps results were previously computed but never
        # asserted, so this test could not fail on a mismatch.
        self.assertTrue(self.equal_maps(dj, dp0))
        dj["a"] = 1
        dj["b"] = 2.0
        dj["c"] = "z"
        self.assertTrue(self.equal_maps(dj, dp))
        # Idiom fix: `del` is a statement, not a function call.
        del dj["a"]
        del dp["a"]
        dj2 = self.gateway.jvm.java.util.HashMap()
        dj2["b"] = 2.0
        dj2["c"] = "z"
        dj3 = self.gateway.jvm.java.util.HashMap()
        dj3["a"] = 1
        dj3["b"] = 2.0
        dj3["c"] = "z"
        self.assertTrue(self.equal_maps(dj, dp))
        self.assertEqual(dj, dj)
        self.assertEqual(dj, dj2)
        # Does not always work for some reason...
        # Probably not worth supporting for now...
        # self.assertLess(dj, dj3)
        # A JavaMap never compares equal to a plain dict without auto_convert.
        self.assertNotEqual(dj, dp)
        dps = {1: 1, 2: 2}
        djs = self.gateway.jvm.java.util.HashMap()
        djs[1] = 1
        djs[2] = 2
        self.assertEqual(str(djs), str(dps))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
#!/usr/bin/env bash
# try find nginx conf
# Takes the "resume" site offline: renames its nginx config so nginx stops
# serving it, then reloads nginx. Exits 1 when the config file is missing.
conf=resume.conf
if [[ ! -f ${ZEUS_NGINX_CONF}/${conf} ]];then
    # The echoed message means "service configuration file does not exist".
    echo "服务配置文件不存在"
    exit 1
else
    # Renaming to *.stop keeps the config around for a later re-enable.
    mv ${ZEUS_NGINX_CONF}/${conf} ${ZEUS_NGINX_CONF}/${conf}.stop
    nginx -s reload
    exit 0
fi
(function() {
    'use strict';

    /**
     * Routing configuration for the bubbleApp module. Any unknown URL falls
     * back to '/', which renders the tweet list.
     */
    angular
        .module('bubbleApp')
        .config(bubbleAppRoutes);

    // Explicit DI annotation so minification cannot break injection.
    bubbleAppRoutes.$inject = ['$stateProvider', '$urlRouterProvider'];

    function bubbleAppRoutes($stateProvider, $urlRouterProvider) {
        // Default/fallback route.
        $urlRouterProvider.otherwise('/');

        $stateProvider.state('home', {
            url: '/',
            controller: 'TweetListCtrl as vm',
            templateUrl: 'app/tweetlist/tweetlist.html'
        });
    }
})();
//@ts-check
const func = require('../solves/9');
const { testVal } = require('./helpers');

// Table-driven cases for solve #9: [input string, expected result].
const cases = [
    ['1', true],
    ['-1', false],
    ['132333231', true],
    ['1323332310', false],
];

describe('#9', () => {
    cases.forEach(([input, expected]) => {
        it(input, () => {
            testVal(func, input, expected);
        });
    });
});
#!/bin/sh
# Fail fast: without this, a failing emcc/cat/uglifyjs step would be
# ignored and the script would happily ship a broken (or stale) bundle.
set -e
# Build the .wasm Module first
# Since we're compiling a side module here, so that we can load it without the
# runtime cruft, we have to explicitly compile in support for malloc and
# friends.
# Note memcpy, memmove and memset are explicitly exported, otherwise they will
# be eliminated by the SIDE_MODULE=2 setting - not sure why that happens.
emcc \
	src/wasm/mpeg1.c \
	src/wasm/mp2.c \
	src/wasm/buffer.c \
	"$EMSCRIPTEN/system/lib/emmalloc.cpp" \
	"$EMSCRIPTEN/system/lib/libc/musl/src/string/memcpy.c" \
	"$EMSCRIPTEN/system/lib/libc/musl/src/string/memmove.c" \
	"$EMSCRIPTEN/system/lib/libc/musl/src/string/memset.c" \
	-s WASM=1 \
	-s SIDE_MODULE=2 \
	-s TOTAL_STACK=5242880\
	-s USE_PTHREADS=0 \
	-s LEGALIZE_JS_FFI=0\
	-s NO_FILESYSTEM=1 \
	-s DEFAULT_LIBRARY_FUNCS_TO_INCLUDE="[]" \
	-s "EXPORTED_FUNCTIONS=[
		'_memcpy',
		'_memmove',
		'_memset',
		'_mpeg1_decoder_create',
		'_mpeg1_decoder_destroy',
		'_mpeg1_decoder_get_write_ptr',
		'_mpeg1_decoder_get_index',
		'_mpeg1_decoder_set_index',
		'_mpeg1_decoder_did_write',
		'_mpeg1_decoder_has_sequence_header',
		'_mpeg1_decoder_get_frame_rate',
		'_mpeg1_decoder_get_coded_size',
		'_mpeg1_decoder_get_width',
		'_mpeg1_decoder_get_height',
		'_mpeg1_decoder_get_y_ptr',
		'_mpeg1_decoder_get_cr_ptr',
		'_mpeg1_decoder_get_cb_ptr',
		'_mpeg1_decoder_decode',
		'_mp2_decoder_create',
		'_mp2_decoder_destroy',
		'_mp2_decoder_get_write_ptr',
		'_mp2_decoder_get_index',
		'_mp2_decoder_set_index',
		'_mp2_decoder_did_write',
		'_mp2_decoder_get_left_channel_ptr',
		'_mp2_decoder_get_right_channel_ptr',
		'_mp2_decoder_get_sample_rate',
		'_mp2_decoder_decode']" \
	-O3 \
	-o jsmpeg.wasm

# Concat all .js sources
cat \
	src/jsmpeg.js \
	src/video-element.js \
	src/player.js \
	src/buffer.js \
	src/ajax.js \
	src/ajax-progressive.js \
	src/websocket.js \
	src/stream.js \
	src/ts.js \
	src/decoder.js \
	src/mpeg1.js \
	src/mpeg1-wasm.js \
	src/mp2.js \
	src/mp2-wasm.js \
	src/webgl.js \
	src/canvas2d.js \
	src/webaudio.js \
	src/wasm-module.js \
	> jsmpeg.js

# Append the .wasm module to the .js source as base64 string
# NOTE(review): 'base64 -w 0' is GNU coreutils; on macOS/BSD use
# 'base64' (no wrap flag) or 'base64 -b 0' — confirm target build hosts.
echo "JSMpeg.WASM_BINARY_INLINED='$(base64 -w 0 jsmpeg.wasm)';" \
	>> jsmpeg.js

# Minify
uglifyjs jsmpeg.js -o jsmpeg.min.js

# Cleanup
rm jsmpeg.js
rm jsmpeg.wasm
|
/******************************************************************************
Course videos: https://www.red-gate.com/hub/university/courses/t-sql/tsql-for-beginners
Course scripts: https://litknd.github.io/TSQLBeginners
Introducing SELECTs and Aliasing
This is your HOMEWORK file
For best results, work through this homework and test running the queries (learn by "doing" when you can)
Need some help?
Join the SQL Community Slack group for discussion: https://t.co/w5LWUuDrqG
Click the + next to 'Channels' and join #tsqlbeginners
*****************************************************************************/
/* ✋🏻 Doorstop ✋🏻 */
-- Severity 20 terminates the session, so pressing "Execute" on the whole
-- file stops here instead of running every homework query below.
-- WITH LOG is required for severities >= 19 (and needs ALTER TRACE/sysadmin).
RAISERROR(N'Did you mean to run the whole thing?', 20, 1) WITH LOG;
GO
/* 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌
Homework documentation:
🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌 🚌*/
-- All homework below targets the WideWorldImporters sample database.
USE WideWorldImporters;
GO
--Not sure how to start? Get stuck? These pages will un-stick you!
--WHERE: https://docs.microsoft.com/en-us/sql/t-sql/queries/where-transact-sql
--LIKE: https://docs.microsoft.com/en-us/sql/t-sql/language-elements/like-transact-sql
--ORDER BY: https://docs.microsoft.com/en-us/sql/t-sql/queries/select-order-by-clause-transact-sql
/* 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮
Homework
🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 🌮 */
-- Re-select the database here too, in case only this section is executed.
USE WideWorldImporters;
GO
/*
Q1
Write a query that SELECTS all the rows from Application.People
Return all columns in the table
Use a "worst practice" to SELECT every column in the table
GO
*/
/*
Q2
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
GO
*/
/*
Q3
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where Email has not been entered (NULL)
GO
*/
/*
Q4
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName is Agrita
GO
*/
/*
Q5
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName starts with the letter A
GO
*/
/*
Q6
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName starts with the LOWERCASE letter 'a'
GO
*/
/*
Q7
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY three columns:
FullName
PreferredName
EmailAddress - alias as: Email
Return ONLY rows where PreferredName contains 'y' or 'Y' anywhere in the string
AND the email address contains a space
Order the results by EmailAddress Ascending
GO
*/
/*
Q8
Write a query that SELECTS all the rows from Application.People
Return rows for ONLY two columns:
FullName
The length (number of characters in) the FullName column,
as calculated by the LEN() SQL Server function
https://docs.microsoft.com/en-us/sql/t-sql/functions/len-transact-sql?view=sql-server-2017
alias as: Len Full Name
Order the results by the length of FullName, Descending
Return only 10 rows
Do NOT use SET ROWCOUNT -- instead do everything in a single TSQL statement
GO
*/
/*
Q9
Write a query that SELECTS all the rows from Application.People
Just like Q8...
Return rows for ONLY two columns:
FullName
The length (number of characters in) the FullName column,
as calculated by the LEN() SQL Server function
https://docs.microsoft.com/en-us/sql/t-sql/functions/len-transact-sql?view=sql-server-2017
alias as: Len Full Name
Order the results by the length of FullName, Descending
Return only 10 rows
EXCEPT this time...
Return rows ONLY #11 - 20 (as ordered by description above)
Do NOT use the TOP keyword, do not use ROW_NUMBER(), and do not use SET ROWCOUNT
GO
*/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.