text stringlengths 1 1.05M |
|---|
#!/bin/sh
PYTHONPATH=../../src_py python main.py
|
package week1.view;
import week1.comparators.BillComparatorForSorting;
import week1.exeptions.InvalidLoginException;
import week1.interfaces.ITerminalController;
import week1.models.Bill;
import week1.models.Salesman;
import java.util.List;
import java.util.Scanner;
/**
* Created by ENIAC on 20.11.2017.
*/
public class View {
public void run(ITerminalController terminal) throws InvalidLoginException {
System.out.println("\nHello. write login/pass to sign in");
Scanner scanner = new Scanner(System.in);
String login = scanner.nextLine();
String pass = scanner.nextLine();
try {
terminal.login(login, pass);
} catch (InvalidLoginException e) {
e.getMessage();
}
if (terminal.getCurrentSalesmanIndex() != -1) {
menu();
do {
String choice = scanner.next();
switch (choice) {
case "1":
menuCreateBill(terminal);
break;
case "2":
menuAddProduct(scanner, terminal);
break;
case "3":
menuCloseAndSaveBill(scanner, terminal);
break;
case "4":
menuFindBillById(scanner, terminal);
break;
case "5":
menuFindSellerByLogin(scanner, terminal);
break;
case "6":
menuGetTopSalesman(terminal);
break;
case "7":
menuFilterForBills(scanner, terminal);
break;
case "8":
menuLogOut(scanner, terminal);
break;
case "q":
return;
default:
System.out.println("Wrong command! Try again!");
break;
}
} while (true);
}
}
// Methods
private void menuCreateBill(ITerminalController terminal) {
terminal.createBill();
int index = terminal.getAllBills().size() - 1;
System.out.println("bill was created with id " + index);
}
private void menuAddProduct(Scanner scanner, ITerminalController terminal) {
System.out.println("write bill id to add product");
int billId;
if (scanner.hasNextInt()) {
billId = scanner.nextInt();
} else {
System.out.println("you wrote non integer number");
return;
}
System.out.println("write index of product you want to add");
int productId;
if (scanner.hasNextInt()) {
productId = scanner.nextInt();
} else {
System.out.println("you wrote non integer number");
return;
}
if (billId > terminal.getAllBills().size() || (billId < 0)) {
System.out.println("There no bill with this id");
} else if (productId > terminal.getAllProducts().size() || (productId < 0)) {
System.out.println("There no product with this id");
} else {
terminal.addProduct(billId, terminal.getAllProducts().get(productId));
}
}
private void menuCloseAndSaveBill(Scanner scanner, ITerminalController terminal) {
System.out.println("write bill id to close");
int billId;
if (scanner.hasNextInt()) {
billId = scanner.nextInt();
} else {
System.out.println("you wrote non integer number");
return;
}
terminal.closeBill(billId);
}
private void menuFindBillById(Scanner scanner, ITerminalController terminal) {
System.out.println("write id of search bill");
int billId;
if (scanner.hasNextInt()) {
billId = scanner.nextInt();
} else {
System.out.println("you wrote non integer number");
return;
}
if (billId > terminal.getAllBills().size() || (billId < 0)) {
System.out.println("There no bill with this id");
return;
}
Bill bill = terminal.findBillById(billId);
if (bill == null) {
return;
}
System.out.println(bill.toString());
}
private void menuFindSellerByLogin(Scanner scanner, ITerminalController terminal) {
System.out.println("Write login of salesman, which you want to find");
String login = scanner.next();
Salesman salesman = terminal.findSalesmanByLogin(login);
if (salesman == null) {
return;
}
System.out.println(salesman.toString());
}
private void menuGetTopSalesman(ITerminalController terminal) {
Salesman salesman = terminal.getTopOfSalesmans();
if (salesman == null) {
return;
}
System.out.println("top saller is " + salesman.getName());
}
private void menuFilterForBills(Scanner scanner, ITerminalController terminal) {
System.out.println("write left limit of time in case like *Time: 9:15:09 Date: 2017.11.24*");
scanner.nextLine(); //this is needed to take empty string(enter from println)
String start = scanner.nextLine();
System.out.println("write right limit of time in case like *Time: 9:15:09 Date: 2017.11.24*");
String end = scanner.nextLine();
List<Bill> billList = terminal.filterForBills(start, end, new BillComparatorForSorting());
if (billList == null || billList.isEmpty()) {
System.out.println("something gone wrong. Maybe you wrote wrong input data");
return;
}
System.out.println(billList.toString());
}
private void menuLogOut(Scanner scanner, ITerminalController terminal) throws InvalidLoginException {
terminal.setCurrentSalesmanIndex(-1);
System.out.println("write login");
String login1 = scanner.next();
System.out.println("write pass");
String pass1 = scanner.next();
terminal.login(login1, pass1);
if (terminal.getCurrentSalesmanIndex() != -1) {
menu();
}
}
private void menu() {
System.out.println(
"1: Create bill.\n" +
"2: Add product. \n" +
"3: Close and Save bill. \n" +
"4: Find bill by id. \n" +
"5: Find salesman by login.\n" +
"6: Get top of Salesman.\n" +
"7: Filter Bills by time.\n" +
"8: log out.\n" +
"q: Exit from terminal.");
}
} |
<reponame>Pavelrst/targeted_dropout_pytorch<filename>conv_layer_with_td.py<gh_stars>0
import torch.nn as nn
from torch.functional import F
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
from torch.nn.modules.utils import _pair
from torch.nn.modules.conv import _ConvNd
from utils.mixup import MixUp
# Targeted dropout imports
from targetedDropout import targeted_unit_dropout
from targetedDropout import targeted_weight_dropout
from targetedDropout import ramping_targeted_unit_dropout
from targetedDropout import ramping_targeted_weight_dropout
# end imports
class Conv2d_with_td(_ConvNd):
"""
"""
def __init__(self, in_channels, out_channels, kernel_size, stride=1,
padding=0, dilation=1, groups=1,
bias=True, padding_mode='zeros', dropout_fn=None):
kernel_size = _pair(kernel_size)
stride = _pair(stride)
padding = _pair(padding)
dilation = _pair(dilation)
super(Conv2d_with_td, self).__init__(
in_channels, out_channels, kernel_size, stride, padding, dilation,
False, _pair(0), groups, bias, padding_mode)
self.dropout_fn = dropout_fn
def forward(self, input):
if self.dropout_fn is not None:
dropped_w = self.dropout_fn.forward(self.weight, self.training)
else:
dropped_w = self.weight
if self.padding_mode == 'circular':
expanded_padding = ((self.padding[1] + 1) // 2, self.padding[1] // 2,
(self.padding[0] + 1) // 2, self.padding[0] // 2)
return F.conv2d(F.pad(input, expanded_padding, mode='circular'),
dropped_w, self.bias, self.stride,
_pair(0), self.dilation, self.groups)
return F.conv2d(input, dropped_w, self.bias, self.stride,
self.padding, self.dilation, self.groups) |
#!/bin/bash
set -o pipefail
# this is run_tdnn_discriminative.sh
# This script does discriminative training on top of CE nnet3 system.
# note: this relies on having a cluster that has plenty of CPUs as well as GPUs,
# since the lattice generation runs in about real-time, so takes of the order of
# 1000 hours of CPU time.
#
. ./cmd.sh
stage=0
train_stage=-10 # can be used to start training in the middle.
get_egs_stage=-10
use_gpu=true # for training
cleanup=false # run with --cleanup true --stage 6 to clean up (remove large things like denlats,
# alignments and degs).
. ./cmd.sh
. ./path.sh
. ./utils/parse_options.sh
srcdir=exp/nnet3/tdnn
train_data_dir=data/train_nodup_sp_hires
online_ivector_dir=exp/nnet3/ivectors_train_nodup_sp
degs_dir= # If provided, will skip the degs directory creation
lats_dir= # If provided, will skip denlats creation
## Objective options
criterion=smbr
one_silence_class=true
dir=${srcdir}_${criterion}
## Egs options
frames_per_eg=150
frames_overlap_per_eg=30
truncate_deriv_weights=10
## Nnet training options
effective_learning_rate=0.00000125
max_param_change=1
num_jobs_nnet=4
num_epochs=2
regularization_opts= # Applicable for providing --xent-regularize and --l2-regularize options
minibatch_size=64
adjust_priors=true # May need to be set to false
# because it does not help in some setups
modify_learning_rates=true
last_layer_factor=0.1
## Decode options
decode_start_epoch=1 # can be used to avoid decoding all epochs, e.g. if we decided to run more.
if $use_gpu; then
if ! cuda-compiled; then
cat <<EOF && exit 1
This script is intended to be used with GPUs but you have not compiled Kaldi with CUDA
If you want to use GPUs (and have them), go to src/, and configure and make on a machine
where "nvcc" is installed. Otherwise, call this script with --use-gpu false
EOF
fi
num_threads=1
else
# Use 4 nnet jobs just like run_4d_gpu.sh so the results should be
# almost the same, but this may be a little bit slow.
num_threads=16
fi
if [ ! -f ${srcdir}/final.mdl ]; then
echo "$0: expected ${srcdir}/final.mdl to exist; first run run_tdnn.sh or run_lstm.sh"
exit 1;
fi
if [ $stage -le 1 ]; then
# hardcode no-GPU for alignment, although you could use GPU [you wouldn't
# get excellent GPU utilization though.]
nj=100 # have a high number of jobs because this could take a while, and we might
# have some stragglers.
steps/nnet3/align.sh --cmd "$decode_cmd" --use-gpu false \
--online-ivector-dir $online_ivector_dir \
--nj $nj $train_data_dir data/lang $srcdir ${srcdir}_ali ;
fi
if [ -z "$lats_dir" ]; then
lats_dir=${srcdir}_denlats
if [ $stage -le 2 ]; then
nj=100
# this doesn't really affect anything strongly, except the num-jobs for one of
# the phases of get_egs_discriminative.sh below.
num_threads_denlats=6
subsplit=40 # number of jobs that run per job (but 2 run at a time, so total jobs is 80, giving
# total slots = 80 * 6 = 480.
steps/nnet3/make_denlats.sh --cmd "$decode_cmd" --determinize true \
--online-ivector-dir $online_ivector_dir \
--nj $nj --sub-split $subsplit --num-threads "$num_threads_denlats" --config conf/decode.config \
$train_data_dir data/lang $srcdir ${lats_dir} ;
fi
fi
model_left_context=`nnet3-am-info $srcdir/final.mdl | grep "left-context:" | awk '{print $2}'`
model_right_context=`nnet3-am-info $srcdir/final.mdl | grep "right-context:" | awk '{print $2}'`
left_context=$[model_left_context + extra_left_context]
right_context=$[model_right_context + extra_right_context]
frame_subsampling_opt=
if [ -f $srcdir/frame_subsampling_factor ]; then
frame_subsampling_opt="--frame-subsampling-factor $(cat $srcdir/frame_subsampling_factor)"
fi
cmvn_opts=`cat $srcdir/cmvn_opts`
if [ -z "$degs_dir" ]; then
degs_dir=${srcdir}_degs
if [ $stage -le 3 ]; then
if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d ${srcdir}_degs/storage ]; then
utils/create_split_dir.pl \
/export/b0{1,2,12,13}/$USER/kaldi-data/egs/fisher_swbd-$(date +'%m_%d_%H_%M')/s5/${srcdir}_degs/storage ${srcdir}_degs/storage
fi
# have a higher maximum num-jobs if
if [ -d ${srcdir}_degs/storage ]; then max_jobs=10; else max_jobs=5; fi
degs_opts="--determinize true --minimize true --remove-output-symbols true --remove-epsilons true --collapse-transition-ids true"
steps/nnet3/get_egs_discriminative.sh \
--cmd "$decode_cmd --max-jobs-run $max_jobs --mem 20G" --stage $get_egs_stage --cmvn-opts "$cmvn_opts" \
--adjust-priors $adjust_priors \
--online-ivector-dir $online_ivector_dir \
--left-context $left_context --right-context $right_context \
$frame_subsampling_opt \
--frames-per-eg $frames_per_eg --frames-overlap-per-eg $frames_overlap_per_eg ${degs_opts} \
$train_data_dir data/lang ${srcdir}_ali $lats_dir $srcdir/final.mdl $degs_dir ;
fi
fi
if [ $stage -le 4 ]; then
steps/nnet3/train_discriminative.sh --cmd "$decode_cmd" \
--stage $train_stage \
--effective-lrate $effective_learning_rate --max-param-change $max_param_change \
--criterion $criterion --drop-frames true \
--num-epochs $num_epochs --one-silence-class $one_silence_class --minibatch-size $minibatch_size \
--num-jobs-nnet $num_jobs_nnet --num-threads $num_threads \
--regularization-opts "$regularization_opts" \
--truncate-deriv-weights $truncate_deriv_weights --adjust-priors $adjust_priors \
--modify-learning-rates $modify_learning_rates --last-layer-factor $last_layer_factor \
${degs_dir} $dir
fi
graph_dir=exp/tri5a/graph_fsh_sw1_tg
if [ $stage -le 5 ]; then
for x in `seq $decode_start_epoch $num_epochs`; do
for decode_set in eval2000 rt03; do
(
num_jobs=`cat data/${decode_set}_hires/utt2spk|cut -d' ' -f2|sort -u|wc -l`
iter=epoch$x.adj
steps/nnet3/decode.sh --nj $num_jobs --cmd "$decode_cmd" --iter $iter \
--online-ivector-dir exp/nnet3/ivectors_${decode_set} \
$graph_dir data/${decode_set}_hires $dir/decode_${decode_set}_fsh_sw1_tg_$iter ;
steps/lmrescore_const_arpa.sh --cmd "$decode_cmd" \
data/lang_fsh_sw1_{tg,fg} data/${decode_set}_hires \
$dir/decode_${decode_set}_fsh_sw1_{tg,fg}_$iter ;
) &
done
done
fi
wait;
if [ $stage -le 6 ] && $cleanup; then
# if you run with "--cleanup true --stage 6" you can clean up.
rm ${lats_dir}/lat.*.gz || true
rm ${srcdir}_ali/ali.*.gz || true
steps/nnet2/remove_egs.sh ${srcdir}_degs || true
fi
exit 0;
|
<reponame>cane4044/fast-dna
import { DOM } from "../dom";
import { SyntheticViewTemplate, CaptureType } from "../template";
import { SyntheticView } from "../view";
import { Expression } from "../interfaces";
import { Behavior } from "./behavior";
import {
Observable,
GetterInspector,
inspectAndEvaluate,
} from "../observation/observable";
import { Subscriber } from "../observation/subscriber-collection";
import { Directive } from "./directive";
/**
 * Directive that conditionally renders a template based on the value of a
 * binding expression (backs the `when` template helper below).
 */
export class WhenDirective extends Directive {
    // Marks the DOM location where the conditional view is inserted/removed.
    createPlaceholder = DOM.createBlockPlaceholder;

    /**
     * @param expression Binding evaluated to decide whether to show the template.
     * @param template The template rendered while the expression is truthy.
     */
    constructor(public expression: Expression, public template: SyntheticViewTemplate) {
        super();
    }

    /** Creates the runtime behavior attached to the placeholder node. */
    public createBehavior(target: any) {
        return new WhenBehavior(target, this.expression, this.template);
    }
}
/**
 * Runtime behavior for `when`: inserts the template's view while the bound
 * expression is truthy and removes (but caches) it while falsy.
 */
export class WhenBehavior implements Behavior, GetterInspector, Subscriber {
    // Currently inserted view, or null while the condition is false.
    private view: SyntheticView | null = null;
    // Cached so the view is reused instead of recreated when toggling back on.
    private cachedView?: SyntheticView;
    // Binding source captured at bind() time; used for later re-evaluations.
    private source: unknown;

    constructor(
        private location: Node,
        private expression: Expression,
        private template: SyntheticViewTemplate
    ) {}

    bind(source: unknown) {
        this.source = source;
        // Evaluate the expression while recording which observable properties
        // it reads (each read triggers inspect() below).
        this.updateTarget(
            inspectAndEvaluate<boolean>(this.expression, source, null as any, this)
        );
    }

    unbind() {
        if (this.view !== null) {
            this.view.unbind();
        }
        // NOTE(review): subscriptions made in inspect() are not explicitly
        // unsubscribed here — confirm the notifier releases them elsewhere.
        this.source = null;
    }

    // Called for each observable property read during evaluation; subscribes
    // so handleChange() fires when any of those properties mutates.
    inspect(source: any, propertyName: string) {
        Observable.getNotifier(source).subscribe(this, propertyName);
    }

    handleChange(source: any, propertyName: string): void {
        // Defer re-evaluation to the next DOM update cycle.
        DOM.queueUpdate(this);
    }

    // Invoked by DOM.queueUpdate: re-evaluates the expression and syncs the view.
    public call() {
        this.updateTarget(this.expression(this.source, null as any));
    }

    // Inserts or removes the view to match the (truthy) state of `show`.
    updateTarget(show: boolean) {
        if (show && this.view == null) {
            this.view = this.cachedView || (this.cachedView = this.template.create());
            this.view.bind(this.source);
            this.view.insertBefore(this.location);
        } else if (!show && this.view !== null) {
            // do not dispose, since we may want to use the view again
            this.view.remove();
            this.view.unbind();
            this.view = null;
        }
    }
}
export function when<T = any, K = any>(
expression: Expression<T, K>,
template: SyntheticViewTemplate
): CaptureType<T> {
return new WhenDirective(expression, template);
}
|
angular.module('hnotes.auth', ['hnotes.config', 'auth0.lock', 'angular-jwt'])
.config(function ($stateProvider, lockProvider) {
lockProvider.init({
clientID: 'RqwHCef6ycF9tjPCUxFIDuujpzmHjcfE',
domain: 'anhminh.eu.auth0.com',
options: {
container: 'lock-container',
language: 'fr',
auth: {
scope: 'openid email'
}
}
});
$stateProvider
.state('auth', {
url: '/auth',
templateUrl: 'templates/auth.html',
controller: 'AuthCtrl'
})
.state('logout', {
url: '/logout',
controller: 'LogoutCtrl'
})
})
.run(function(lock) {
// Intercept the hash that comes back from authentication
// to ensure the `authenticated` event fires
lock.interceptHash()
})
.factory('Auth', function($http, $window, $rootScope, SERVER_URL) {
var loggedIn = function(response) {
console.log(JSON.stringify(response))
if(response.data.status == 'OK') {
$window.localStorage.token = response.data.token
$rootScope.user = response.data.user
$window.localStorage.user = JSON.stringify(response.data.user)
}
return response.data
}
var errLogin = function(response) {
return {status: 'KO', cause: response.data.cause || "couldn't connect to server"}
}
return {
login: function(loginData) {
return $http.post(SERVER_URL + '/login', loginData).then(loggedIn, errLogin)
},
signup: function(signupData) {
return $http.post(SERVER_URL + '/signup', signupData).then(loggedIn, errLogin)
},
logout: function() {
$window.localStorage.removeItem('token')
$window.localStorage.removeItem('user')
},
auth0Login: function(idToken) {
console.log("auth0 login")
return $http.post(SERVER_URL + '/auth0Login', {token: idToken}).then(loggedIn, errLogin)
}
}
})
.controller('AuthCtrl', function($scope, $state, Auth, authService, $rootScope) {
// Put the authService on $scope to access
// the login method in the view
$scope.authService = authService;
$scope.doLogin = function(loginData) {
Auth.login(loginData).then(function(result) {
if(result.status == 'KO') {
$scope.loginError = result.cause;
} else {
$state.go('app.notes');
}
})
}
$scope.doSignup = function(signupData) {
Auth.signup({email: signupData.email, password: signupData.password}).then(function(result) {
if(result.status == 'KO') {
$scope.signupError = result.cause;
} else {
$state.go('app.notes');
}
})
}
})
.controller('LogoutCtrl', function($scope, $state, Auth, authService) {
$scope.$on('$stateChangeSuccess', function(event, toState) {
if(toState.name == 'logout') {
console.log("logging out")
Auth.logout()
authService.logout()
$state.go('auth')
}
})
})
.directive("compareTo", function() {
return {
require: "ngModel",
scope: {
otherModelValue: "=compareTo"
},
link: function(scope, element, attributes, ngModel) {
ngModel.$validators.compareTo = function(modelValue, viewValue) {
return modelValue == scope.otherModelValue;
};
scope.$watch("otherModelValue", function() {
ngModel.$validate();
});
}
};
})
.service('authService', authService)
.run(function($rootScope, authService, Auth) {
// Put the authService on $rootScope so its methods
// can be accessed from the nav bar
$rootScope.authService = authService
authService.registerAuthenticationListener();
$rootScope.$on('idTokenSet', function(event, token) {
console.log("id token set received")
Auth.auth0Login(token)
})
});;
// AngularJS service wrapping Auth0 Lock: shows the login widget, clears local
// credentials on logout and listens for Lock's 'authenticated' event.
authService.$inject = ['$rootScope', 'lock', 'authManager'];
function authService($rootScope, lock, authManager) {
    // Profile restored from a previous session, or an empty object.
    var userProfile = JSON.parse(localStorage.getItem('profile')) || {};

    // Open the Auth0 Lock login dialog.
    function login() {
        console.log("authservice login")
        lock.show();
    }

    // Logging out just requires removing the user's
    // id_token and profile
    function logout() {
        localStorage.removeItem('id_token');
        localStorage.removeItem('profile');
        authManager.unauthenticate();
        userProfile = {};
    }

    // Set up the logic for when a user authenticates
    // This method is called from app.run.js
    function registerAuthenticationListener() {
        lock.on('authenticated', function(authResult) {
            localStorage.setItem('id_token', authResult.idToken);
            // NOTE(review): logging a raw id token is a security risk in
            // production — consider removing this console.log.
            console.log(authResult.idToken)
            // Notify the app (the 'idTokenSet' handler in the module's run
            // block) so it can exchange the token with the backend.
            $rootScope.$broadcast('idTokenSet', authResult.idToken)
            console.log("broadcast id token set")
            /*
            authManager.authenticate();
            lock.getProfile(authResult.idToken, function(error, profile) {
                if (error) {
                    console.log(error);
                }
                localStorage.setItem('profile', JSON.stringify(profile));
                $rootScope.$broadcast('userProfileSet', profile);
            });
            */
        });
    }

    return {
        userProfile: userProfile,
        login: login,
        logout: logout,
        registerAuthenticationListener: registerAuthenticationListener
    }
}
|
package operatingsystem // import "github.com/docker/docker/pkg/parsers/operatingsystem"
import (
"fmt"
"golang.org/x/sys/windows/registry"
)
// GetOperatingSystem gets the name of the current operating system.
//
// It reads ProductName, ReleaseId, CurrentBuildNumber and UBR from the
// Windows registry and formats them as e.g.
// "Windows 10 Pro Version 1809 (OS Build 17763.379)". On any registry
// error the value assembled so far is returned along with the error.
func GetOperatingSystem() (string, error) {
	// Default return value
	ret := "Unknown Operating System"

	// Fix: registry key previously misspelled `WIndows NT`; lookups are
	// case-insensitive so behavior is unchanged, but use the canonical path.
	k, err := registry.OpenKey(registry.LOCAL_MACHINE, `SOFTWARE\Microsoft\Windows NT\CurrentVersion`, registry.QUERY_VALUE)
	if err != nil {
		return ret, err
	}
	defer k.Close()

	pn, _, err := k.GetStringValue("ProductName")
	if err != nil {
		return ret, err
	}
	ret = pn

	ri, _, err := k.GetStringValue("ReleaseId")
	if err != nil {
		return ret, err
	}
	ret = fmt.Sprintf("%s Version %s", ret, ri)

	cbn, _, err := k.GetStringValue("CurrentBuildNumber")
	if err != nil {
		return ret, err
	}

	// UBR is the fourth version component shown after the build number.
	ubr, _, err := k.GetIntegerValue("UBR")
	if err != nil {
		return ret, err
	}
	ret = fmt.Sprintf("%s (OS Build %s.%d)", ret, cbn, ubr)

	return ret, nil
}
// IsContainerized returns true if we are running inside a container.
// No-op on Windows, always returns (false, nil); callers must not rely on
// it to detect Windows containers.
func IsContainerized() (bool, error) {
	return false, nil
}
|
var classarmnn_1_1_ref_depth_to_space_workload =
[
[ "Execute", "classarmnn_1_1_ref_depth_to_space_workload.xhtml#ae071e8822437c78baea75c3aef3a263a", null ]
]; |
// Navigate to the per-user graph page when the lookup form is submitted.
$("#userForm").submit(function (e) {
    // Stop the browser's default form submission (no page reload).
    e.preventDefault();
    let username = $("#userInput").val();
    // Fix: encode the username so characters like '#', '/', '?' or spaces
    // don't break the URL hash route.
    document.location.href = `graph.html#/user/${encodeURIComponent(username)}`;
});
|
#!/bin/bash
# Search /usr/local for an installed cmake binary (taking the
# lexicographically first .../bin/cmake found) and print its path,
# or an empty string when none exists.
function findCmake {
	local res=$(find /usr/local/ -name "cmake" | grep "/bin" | sort | head -1)
	echo "${res}"
}
# Print "1" if the cmake binary given as $1 reports a major version >= 3,
# otherwise print "0" (also "0" when $1 is empty).
function checkCmakeVers {
	if [ "$1" == "" ]
	then
		echo "0"
		return
	fi
	# Extract the major component from output like "cmake version 3.16.3".
	local version_major=$($1 --version | grep version | awk '{print $3}' | awk -F '.' '{print $1}')
	if (( version_major < 3 )); then
		echo "0"
	else
		echo "1"
	fi
}
# trying to use default version first
CMAKE_BIN=$(which cmake | tr '\n' ' ')
CMAKE_VALID=$(checkCmakeVers ${CMAKE_BIN})
if [ "${CMAKE_VALID}" -eq 0 ]
then
echo "CMake version must be >= 3.0.0. Trying to search in /usr/local"
CMAKE_BIN=$(findCmake)
CMAKE_VALID=$(checkCmakeVers ${CMAKE_BIN})
if [ "${CMAKE_BIN}" == "" ] || [ "${CMAKE_VALID}" -eq 0 ]
then
echo "Didn't found correct version of CMake. Exiting..."
exit 1
else
echo "Found correct cmake: ${CMAKE_BIN}"
fi
fi
#git clone --recursive https://github.com/edwardstock/toolboxpp.git toolbox
#mkdir -p toolbox/build
#cd toolbox/build
#sudo ${CMAKE_BIN} .. -DCMAKE_BUILD_TYPE=Release
#${CMAKE_BIN} --build . --target toolboxpp
#sudo ${CMAKE_BIN} --build . --target install
#cd ../../
mkdir -p build && cd build
sudo ${CMAKE_BIN} .. -DCMAKE_BUILD_TYPE=Release
${CMAKE_BIN} --build . --target wsserver
sudo ${CMAKE_BIN} --build . --target install |
<filename>src/GetNrmHint.c
/***********************************************************
Copyright 1988 by Wyse Technology, Inc., San Jose, Ca,
Copyright 1987 by Digital Equipment Corporation, Maynard, Massachusetts,
All Rights Reserved
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name Digital not be
used in advertising or publicity pertaining to distribution of the
software without specific, written prior permission.
DIGITAL AND WYSE DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
EVENT SHALL DIGITAL OR WYSE BE LIABLE FOR ANY SPECIAL, INDIRECT OR
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF
USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
******************************************************************/
/*
Copyright 1987, 1988, 1998 The Open Group
Permission to use, copy, modify, distribute, and sell this software and its
documentation for any purpose is hereby granted without fee, provided that
the above copyright notice appear in all copies and that both that
copyright notice and this permission notice appear in supporting
documentation.
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE OPEN GROUP BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of The Open Group shall
not be used in advertising or otherwise to promote the sale, use or
other dealings in this Software without prior written authorization
from The Open Group.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <X11/Xlibint.h>
#include <X11/Xatom.h>
#include "Xatomtype.h"
#include <X11/Xutil.h>
#include <stdio.h>
/*
 * XGetWMSizeHints - read an XSizeHints structure stored on window `w`
 * under `property` (a property of type WM_SIZE_HINTS, format 32).
 *
 * On success fills *hints, sets *supplied to the mask of fields actually
 * present in the property, and returns True. Returns False when the
 * property is missing, of the wrong type/format, or shorter than the
 * pre-ICCCM layout.
 */
Status XGetWMSizeHints (
    Display *dpy,
    Window w,
    XSizeHints *hints,
    long *supplied,
    Atom property)
{
    xPropSizeHints *prop = NULL;
    Atom actual_type;
    int actual_format;
    unsigned long leftover;
    unsigned long nitems;

    if (XGetWindowProperty (dpy, w, property, 0L,
			    (long)NumPropSizeElements,
			    False, XA_WM_SIZE_HINTS, &actual_type,
			    &actual_format, &nitems, &leftover,
			    (unsigned char **)&prop)
	!= Success)
	return False;

    /* Reject wrong type/format and properties shorter than the old
       (pre-base-size/gravity) layout. */
    if ((actual_type != XA_WM_SIZE_HINTS) ||
	(nitems < OldNumPropSizeElements) || (actual_format != 32)) {
	if (prop != NULL) Xfree ((char *)prop);
	return False;
    }

    hints->flags = prop->flags;
    /* XSizeHints misdeclares these as int instead of long */
    hints->x = cvtINT32toInt (prop->x);
    hints->y = cvtINT32toInt (prop->y);
    hints->width = cvtINT32toInt (prop->width);
    hints->height = cvtINT32toInt (prop->height);
    hints->min_width = cvtINT32toInt (prop->minWidth);
    hints->min_height = cvtINT32toInt (prop->minHeight);
    hints->max_width = cvtINT32toInt (prop->maxWidth);
    hints->max_height = cvtINT32toInt (prop->maxHeight);
    hints->width_inc = cvtINT32toInt (prop->widthInc);
    hints->height_inc = cvtINT32toInt (prop->heightInc);
    hints->min_aspect.x = cvtINT32toInt (prop->minAspectX);
    hints->min_aspect.y = cvtINT32toInt (prop->minAspectY);
    hints->max_aspect.x = cvtINT32toInt (prop->maxAspectX);
    hints->max_aspect.y = cvtINT32toInt (prop->maxAspectY);
    *supplied = (USPosition | USSize | PAllHints);
    /* Newer, longer properties additionally carry base size and gravity. */
    if (nitems >= NumPropSizeElements) {
	hints->base_width= cvtINT32toInt (prop->baseWidth);
	hints->base_height= cvtINT32toInt (prop->baseHeight);
	hints->win_gravity= cvtINT32toInt (prop->winGravity);
	*supplied |= (PBaseSize | PWinGravity);
    }
    hints->flags &= (*supplied); /* get rid of unwanted bits */
    Xfree((char *)prop);
    return True;
}
/*
 * XGetWMNormalHints - convenience wrapper: fetch the standard
 * WM_NORMAL_HINTS property of `w` via XGetWMSizeHints.
 */
Status XGetWMNormalHints (
    Display *dpy,
    Window w,
    XSizeHints *hints,
    long *supplied)
{
    return (XGetWMSizeHints (dpy, w, hints, supplied, XA_WM_NORMAL_HINTS));
}
|
import { setStyle } from '@/helpers/utils';
import { key } from '../config';
export default {
  /**
   * Apply the current color (this[key]) as the background of every
   * foreground <td>, then clear the pending-change flag for this key.
   *
   * NOTE(review): assumes `this` provides `foreTds` (iterable of cells) and
   * `changeList` — supplied by whatever object mixes this in; confirm.
   */
  value() {
    this.foreTds.forEach((td) => {
      setStyle(td, {
        backgroundColor: this[key],
      });
    });
    this.changeList[key].changed = false;
  },
};
|
import tensorflow as tf
def process_and_visualize_image(inputs):
    """Reshape flat inputs to NHWC 28x28 grayscale images and register a
    TensorFlow (v1 API) image summary for one of them.

    NOTE(review): `FLAGS` is not defined in this file — presumably provided
    by the surrounding script (e.g. tf.app.flags); confirm before reuse.
    The created `sess` and `summary` are not returned or used here.
    """
    # Reshape the inputs tensor: -1 infers the batch dimension.
    shaped_inputs = tf.reshape(inputs, [-1, 28, 28, 1])
    # Create image summary using TensorFlow's summary functionality
    summary = tf.summary.image('input', shaped_inputs, 1)
    # Initialize a TensorFlow session
    sess = tf.Session()
    # Create a directory for storing sample data
    tf.gfile.MakeDirs(FLAGS.sample_dir)
<gh_stars>1-10
'use strict';
/**
 * egg-dubbo-tracer default config (currently no options).
 * @member Config#dubboTracer
 */
exports.dubboTracer = {
};

// Register the RPC carrier so the opentracing plugin can inject/extract
// tracing context on dubbo RPC calls.
exports.opentracing = {
  carrier: {
    RPC: require('../lib/carrier/rpc_carrier'),
  },
};
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
JoiFrontMatter as Joi, // Custom instance for frontmatter
URISchema,
validateFrontMatter,
FrontMatterTagsSchema,
FrontMatterTOCHeadingLevels,
} from '@docusaurus/utils-validation';
import type {BlogPostFrontMatter} from '@docusaurus/plugin-content-blog';
const BlogPostFrontMatterAuthorSchema = Joi.object({
key: Joi.string(),
name: Joi.string(),
title: Joi.string(),
url: URISchema,
imageURL: Joi.string(),
})
.or('key', 'name')
.rename('image_url', 'imageURL', {alias: true});
const FrontMatterAuthorErrorMessage =
'{{#label}} does not look like a valid blog post author. Please use an author key or an author object (with a key and/or name).';
const BlogFrontMatterSchema = Joi.object<BlogPostFrontMatter>({
id: Joi.string(),
title: Joi.string().allow(''),
description: Joi.string().allow(''),
tags: FrontMatterTagsSchema,
draft: Joi.boolean(),
date: Joi.date().raw(),
// New multi-authors frontmatter:
authors: Joi.alternatives()
.try(
Joi.string(),
BlogPostFrontMatterAuthorSchema,
Joi.array()
.items(Joi.string(), BlogPostFrontMatterAuthorSchema)
.messages({
'array.sparse': FrontMatterAuthorErrorMessage,
'array.includes': FrontMatterAuthorErrorMessage,
}),
)
.messages({
'alternatives.match': FrontMatterAuthorErrorMessage,
}),
// Legacy author frontmatter
author: Joi.string(),
author_title: Joi.string(),
author_url: URISchema,
author_image_url: URISchema,
// TODO enable deprecation warnings later
authorURL: URISchema,
// .warning('deprecate.error', { alternative: '"author_url"'}),
authorTitle: Joi.string(),
// .warning('deprecate.error', { alternative: '"author_title"'}),
authorImageURL: URISchema,
// .warning('deprecate.error', { alternative: '"author_image_url"'}),
slug: Joi.string(),
image: URISchema,
keywords: Joi.array().items(Joi.string().required()),
hide_table_of_contents: Joi.boolean(),
...FrontMatterTOCHeadingLevels,
}).messages({
'deprecate.error':
'{#label} blog frontMatter field is deprecated. Please use {#alternative} instead.',
});
export function validateBlogPostFrontMatter(
frontMatter: Record<string, unknown>,
): BlogPostFrontMatter {
return validateFrontMatter(frontMatter, BlogFrontMatterSchema);
}
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
set -ex
# Clone openwhisk repo at specified tag to get test suite
git clone -q https://github.com/apache/incubator-openwhisk openwhisk
cd /openwhisk
git checkout $OW_GIT_TAG_OPENWHISK
# compile test suite
./gradlew --console=plain compileTestsScala
# run tests:testSystemBasic
./gradlew --console=plain :tests:testSystemBasic -Dwhisk.auth="$WSK_AUTH" -Dwhisk.server=$WSK_API_HOST_URL -Dopenwhisk.home=/openwhisk
echo "PASSED! Successfully executed tests:testSystemBasic"
|
#!/usr/bin/env bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
#=================================================
# System Required: CentOS/Debian/Ubuntu
# Description: Lightsocks
# Version: 1.0.1
# Author: Toyo
# Blog: https://doub.io/lightsocks-jc1/
#=================================================
sh_ver="1.0.1"
filepath=$(cd "$(dirname "$0")"; pwd)
file_1=$(echo -e "${filepath}"|awk -F "$0" '{print $1}')
file="/usr/local/lightsocks"
lightsocks_file="/usr/local/lightsocks/lightsocks"
lightsocks_conf=$(echo ${HOME})"/.lightsocks.json"
lightsocks_log="/usr/local/lightsocks/lightsocks.log"
Crontab_file="/usr/bin/crontab"
Green_font_prefix="\033[32m" && Red_font_prefix="\033[31m" && Green_background_prefix="\033[42;37m" && Red_background_prefix="\033[41;37m" && Font_color_suffix="\033[0m"
Info="${Green_font_prefix}[信息]${Font_color_suffix}"
Error="${Red_font_prefix}[错误]${Font_color_suffix}"
Tip="${Green_font_prefix}[注意]${Font_color_suffix}"
check_root(){
[[ $EUID != 0 ]] && echo -e "${Error} 当前非ROOT账号(或没有ROOT权限),无法继续操作,请更换ROOT账号或使用 ${Green_background_prefix}sudo su${Font_color_suffix} 命令获取临时ROOT权限(执行后可能会提示输入当前账号的密码)。" && exit 1
}
#检查系统
check_sys(){
if [[ -f /etc/redhat-release ]]; then
release="centos"
elif cat /etc/issue | grep -q -E -i "debian"; then
release="debian"
elif cat /etc/issue | grep -q -E -i "ubuntu"; then
release="ubuntu"
elif cat /etc/issue | grep -q -E -i "centos|red hat|redhat"; then
release="centos"
elif cat /proc/version | grep -q -E -i "debian"; then
release="debian"
elif cat /proc/version | grep -q -E -i "ubuntu"; then
release="ubuntu"
elif cat /proc/version | grep -q -E -i "centos|red hat|redhat"; then
release="centos"
fi
bit=`uname -m`
}
check_installed_status(){
# Bail out unless the lightsocks server binary has been installed.
[[ ! -e ${lightsocks_file} ]] && echo -e "${Error} Lightsocks 没有安装,请检查 !" && exit 1
}
check_crontab_installed_status(){
	# Ensure the crontab binary exists; if missing, install cron and verify.
	# Bug fix: the original appended "&& exit 1" to the "开始安装..." message,
	# which exited before the install branch could run, making it dead code.
	if [[ ! -e ${Crontab_file} ]]; then
		echo -e "${Error} Crontab 没有安装,开始安装..."
		if [[ ${release} == "centos" ]]; then
			yum install crond -y
		else
			apt-get install cron -y
		fi
		if [[ ! -e ${Crontab_file} ]]; then
			echo -e "${Error} Crontab 安装失败,请检查!" && exit 1
		else
			echo -e "${Info} Crontab 安装成功!"
		fi
	fi
}
check_pid(){
# Set the global PID to the running lightsocks server's process id (empty if
# not running). The grep -v chain excludes grep itself, this script, and the
# init.d/service wrappers from the match.
PID=`ps -ef| grep "lightsocks"| grep -v "grep" | grep -v "lightsocks.sh"| grep -v "init.d" |grep -v "service" |awk '{print $2}'`
}
check_new_ver(){
lightsocks_new_ver=$(wget --no-check-certificate -qO- https://github.com/gwuhaolin/lightsocks/releases/latest | grep "<title>" | sed -r 's/.*Release (.+) · gwuhaolin.*/\1/')
if [[ -z ${lightsocks_new_ver} ]]; then
echo -e "${Error} Lightsocks 最新版本获取失败,请手动获取最新版本号[ https://github.com/gwuhaolin/lightsocks/releases/latest ]"
read -e -p "请输入版本号 [ 格式是日期 , 如 1.0.6 ] :" lightsocks_new_ver
[[ -z "${lightsocks_new_ver}" ]] && echo "取消..." && exit 1
else
echo -e "${Info} 检测到 Lightsocks 最新版本为 [ ${lightsocks_new_ver} ]"
fi
}
check_ver_comparison(){
check_pid
[[ ! -z $PID ]] && kill -9 ${PID}
rm -rf ${lightsocks_file}
Download_lightsocks
Start_lightsocks
}
Download_lightsocks(){
cd ${file}
if [ ${bit} == "x86_64" ]; then
wget --no-check-certificate -N "https://github.com/gwuhaolin/lightsocks/releases/download/${lightsocks_new_ver}/lightsocks_${lightsocks_new_ver}_linux_amd64.tar.gz"
mv "lightsocks_${lightsocks_new_ver}_linux_amd64.tar.gz" "lightsocks_linux.tar.gz"
else
wget --no-check-certificate -N "https://github.com/gwuhaolin/lightsocks/releases/download/${lightsocks_new_ver}/lightsocks_${lightsocks_new_ver}_linux_386.tar.gz"
mv "lightsocks_${lightsocks_new_ver}_linux_386.tar.gz" "lightsocks_linux.tar.gz"
fi
[[ ! -s "lightsocks_linux.tar.gz" ]] && echo -e "${Error} Lightsocks 压缩包下载失败 !" && rm -rf "${file}" && exit 1
tar -xzf "lightsocks_linux.tar.gz"
rm -rf lightsocks_linux.tar.gz
[[ ! -e "lightsocks-server" ]] && echo -e "${Error} Lightsocks 压缩包解压失败 !" && rm -rf "${file}" && exit 1
mv lightsocks-server lightsocks
chmod +x lightsocks
rm -rf lightsocks-local
rm -rf LICENSE
rm -rf readme.md
}
Service_lightsocks(){
if [[ ${release} = "centos" ]]; then
if ! wget --no-check-certificate "https://raw.githubusercontent.com/ToyoDAdoubi/doubi/master/service/lightsocks_centos" -O /etc/init.d/lightsocks; then
echo -e "${Error} Lightsocks服务 管理脚本下载失败 !" && rm -rf "${file}" && exit 1
fi
chmod +x "/etc/init.d/lightsocks"
chkconfig --add lightsocks
chkconfig lightsocks on
else
if ! wget --no-check-certificate "https://raw.githubusercontent.com/ToyoDAdoubi/doubi/master/service/lightsocks_debian" -O /etc/init.d/lightsocks; then
echo -e "${Error} Lightsocks服务 管理脚本下载失败 !" && rm -rf "${file}" && exit 1
fi
chmod +x "/etc/init.d/lightsocks"
update-rc.d -f lightsocks defaults
fi
echo -e "${Info} Lightsocks服务 管理脚本下载完成 !"
}
Installation_dependency(){
if [[ ${release} == "centos" ]]; then
Centos_yum
else
Debian_apt
fi
cp -f /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
mkdir ${file}
}
Centos_yum(){
cat /etc/redhat-release |grep 7\..*|grep -i centos>/dev/null
if [[ $? = 0 ]]; then
yum update
yum install -y net-tools
fi
}
Debian_apt(){
cat /etc/issue |grep 9\..*>/dev/null
if [[ $? = 0 ]]; then
apt-get update
apt-get install -y net-tools
fi
}
Generate_the_port(){
	# Print a pseudo-random port in the inclusive range [$1, $2], seeded from
	# the current nanosecond clock (no dependency on $RANDOM).
	local lower=$1
	local span=$(( $2 - $1 + 1 ))
	local seed=$(date +%s%N)
	echo $(( seed % span + lower ))
}
Write_config(){
cat > ${lightsocks_conf}<<-EOF
{
"listen": ":${ls_port}",
"remote": ""
}
EOF
}
Read_config(){
[[ ! -e ${lightsocks_conf} ]] && echo -e "${Error} Lightsocks 配置文件不存在 !" && exit 1
user_all=$(cat ${lightsocks_conf}|sed "1d;$d")
[[ -z ${user_all} ]] && echo -e "${Error} Lightsocks 配置文件中用户配置为空 !" && exit 1
port=$(echo "${user_all}"|grep "listen"|awk -F ': ' '{print $NF}'|sed 's/\"//g;s/://g;s/,//g')
password=$(echo "${user_all}"|grep "password"|awk -F ': ' '{print $NF}'|sed 's/\"//g')
}
Set_port(){
while true
do
echo -e "请输入 Lightsocks 端口 [1-65535](端口不能重复,避免冲突)"
read -e -p "(默认: 随机端口):" ls_port
[[ -z "${ls_port}" ]] && ls_port=$(Generate_the_port 443 65500)
echo $((${ls_port}+0)) &>/dev/null
if [[ $? -eq 0 ]]; then
if [[ ${ls_port} -ge 1 ]] && [[ ${ls_port} -le 65535 ]]; then
echo && echo "========================"
echo -e " 端口 : ${Red_background_prefix} ${ls_port} ${Font_color_suffix}"
echo "========================" && echo
break
else
echo "输入错误, 请输入正确的端口。"
fi
else
echo "输入错误, 请输入正确的端口。"
fi
done
}
Set_lightsocks(){
	check_installed_status
	# Interactive config menu: 1 = port, 2 = password, 3 = process monitor.
	echo && echo -e "你要做什么?
${Green_font_prefix}1.${Font_color_suffix} 修改 端口配置
${Green_font_prefix}2.${Font_color_suffix} 修改 密码配置
————————————————
${Green_font_prefix}3.${Font_color_suffix} 监控 运行状态
${Tip} 因为 Lightsocks 限制,所以密码只能自动生成 !" && echo
	read -e -p "(默认: 取消):" ls_modify
	[[ -z "${ls_modify}" ]] && echo "已取消..." && exit 1
	if [[ ${ls_modify} == "1" ]]; then
		Modify_user "port"
	elif [[ ${ls_modify} == "2" ]]; then
		Modify_user "password"
	elif [[ ${ls_modify} == "3" ]]; then
		Set_crontab_monitor_lightsocks
	else
		# Bug fix: the menu offers three options but the old message said (1-2).
		echo -e "${Error} 请输入正确的数字(1-3)" && exit 1
	fi
}
Modify_user(){
Read_config
Modify_user_type=$1
if [[ ${Modify_user_type} == "port" ]]; then
Set_port
Modify_config_port
Del_iptables
Add_iptables
Save_iptables
else
Modify_config_password
fi
Restart_lightsocks
}
Modify_config_port(){
sed -i 's/"listen": ":'"$(echo ${port})"'"/"listen": ":'"$(echo ${ls_port})"'"/g' ${lightsocks_conf}
}
Modify_config_password(){
Read_config
password_num=$(cat "${lightsocks_conf}"|grep -n '"password":'|awk -F ':' '{print $1}')
if [[ ${password_num} -gt 0 ]];then
sed -i "${password_num}d" ${lightsocks_conf}
password_num_1=$(echo $((${password_num}-1)))
sed -i "${password_num_1}s/,//g" ${lightsocks_conf}
else
echo -e "${Error} 配置文件修改错误!"
fi
}
Install_lightsocks(){
check_root
[[ -e ${lightsocks_file} ]] && echo -e "${Error} 检测到 Lightsocks 已安装 !" && exit 1
echo -e "${Info} 开始设置 用户配置..."
Set_port
echo -e "${Info} 开始安装/配置 依赖..."
Installation_dependency
echo -e "${Info} 开始检测最新版本..."
check_new_ver
echo -e "${Info} 开始下载/安装..."
Download_lightsocks
echo -e "${Info} 开始下载/安装 服务脚本(init)..."
Service_lightsocks
echo -e "${Info} 开始写入 配置文件..."
Write_config
echo -e "${Info} 开始设置 iptables防火墙..."
Set_iptables
echo -e "${Info} 开始添加 iptables防火墙规则..."
Add_iptables
echo -e "${Info} 开始保存 iptables防火墙规则..."
Save_iptables
echo -e "${Info} 所有步骤 安装完毕,开始启动..."
Start_lightsocks
}
Start_lightsocks(){
check_installed_status
check_pid
[[ ! -z ${PID} ]] && echo -e "${Error} Lightsocks 正在运行,请检查 !" && exit 1
/etc/init.d/lightsocks start
sleep 1s
check_pid
[[ ! -z ${PID} ]] && View_lightsocks
}
Stop_lightsocks(){
check_installed_status
check_pid
[[ -z ${PID} ]] && echo -e "${Error} Lightsocks 没有运行,请检查 !" && exit 1
/etc/init.d/lightsocks stop
}
Restart_lightsocks(){
check_installed_status
check_pid
[[ ! -z ${PID} ]] && /etc/init.d/lightsocks stop
/etc/init.d/lightsocks start
sleep 1s
check_pid
[[ ! -z ${PID} ]] && View_lightsocks
}
Update_lightsocks(){
check_installed_status
check_new_ver
check_ver_comparison
}
Uninstall_lightsocks(){
check_installed_status
echo "确定要卸载 Lightsocks ? (y/N)"
echo
read -e -p "(默认: n):" unyn
[[ -z ${unyn} ]] && unyn="n"
if [[ ${unyn} == [Yy] ]]; then
check_pid
[[ ! -z $PID ]] && kill -9 ${PID}
if [[ -e ${lightsocks_conf} ]]; then
Read_config
Del_iptables
Save_iptables
rm -rf "${lightsocks_conf}"
fi
rm -rf "${file}"
if [[ ${release} = "centos" ]]; then
chkconfig --del lightsocks
else
update-rc.d -f lightsocks remove
fi
rm -rf "/etc/init.d/lightsocks"
echo && echo "Lightsocks 卸载完成 !" && echo
else
echo && echo "卸载已取消..." && echo
fi
}
View_lightsocks(){
check_installed_status
Read_config
ip=$(wget -qO- -t1 -T2 ipinfo.io/ip)
if [[ -z "${ip}" ]]; then
ip=$(wget -qO- -t1 -T2 api.ip.sb/ip)
if [[ -z "${ip}" ]]; then
ip=$(wget -qO- -t1 -T2 members.3322.org/dyndns/getip)
if [[ -z "${ip}" ]]; then
ip="VPS_IP"
fi
fi
fi
clear && echo
echo -e "Lightsocks 用户配置:"
lightsocks_link
echo -e "————————————————"
echo -e " 地址\t: ${Green_font_prefix}${ip}${Font_color_suffix}"
echo -e " 端口\t: ${Green_font_prefix}${port}${Font_color_suffix}"
echo -e " 密码\t: ${Green_font_prefix}${password}${Font_color_suffix}"
echo -e "${Lightsocks_link_1}"
echo
echo -e "${Tip} Lightsocks链接 仅适用于Windows系统的 Lightsocks Tools客户端[https://doub.io/dbrj-12/]。"
echo
}
urlsafe_base64(){
date=$(echo -n "$1"|base64|sed ':a;N;s/\n//g;s/=//g;s/+/-/g;s/\//_/g;ta')
echo -e "${date}"
}
lightsocks_link(){
Lightsocks_URL_1=$(urlsafe_base64 "${ip}:${port}:${password}")
Lightsocks_URL="lightsocks://${Lightsocks_URL_1}"
Lightsocks_QRcode="http://doub.pw/qr/qr.php?text=${Lightsocks_URL}"
Lightsocks_link_1=" 链接\t: ${Red_font_prefix}${Lightsocks_URL}${Font_color_suffix} \n 二维码 : ${Red_font_prefix}${Lightsocks_QRcode}${Font_color_suffix} \n "
}
View_Log(){
check_installed_status
[[ ! -e ${lightsocks_log} ]] && echo -e "${Error} Lightsocks 日志文件不存在 !" && exit 1
echo && echo -e "${Tip} 按 ${Red_font_prefix}Ctrl+C${Font_color_suffix} 终止查看日志(正常情况下是没有多少日志输出的)" && echo -e "如果需要查看完整日志内容,请用 ${Red_font_prefix}cat ${lightsocks_log}${Font_color_suffix} 命令。" && echo
tail -f ${lightsocks_log}
}
# 显示 连接信息
debian_View_user_connection_info(){
format_1=$1
Read_config
IP_total=`netstat -anp |grep 'ESTABLISHED' |grep 'lightsocks' |grep 'tcp6' |awk '{print $5}' |awk -F ":" '{print $1}' |sort -u |grep -E -o "([0-9]{1,3}[\.]){3}[0-9]{1,3}" |wc -l`
echo -e "链接IP总数: ${Green_background_prefix} "${IP_total}" ${Font_color_suffix} "
user_IP_1=`netstat -anp |grep 'ESTABLISHED' |grep 'lightsocks' |grep 'tcp6' |grep ":${port} " |awk '{print $5}' |awk -F ":" '{print $1}' |sort -u |grep -E -o "([0-9]{1,3}[\.]){3}[0-9]{1,3}"`
if [[ -z ${user_IP_1} ]]; then
user_IP_total="0"
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: "
else
user_IP_total=`echo -e "${user_IP_1}"|wc -l`
if [[ ${format_1} == "IP_address" ]]; then
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: "
get_IP_address
echo
else
user_IP=$(echo -e "\n${user_IP_1}")
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: ${Green_font_prefix}${user_IP}${Font_color_suffix}\n"
fi
fi
}
centos_View_user_connection_info(){
format_1=$1
Read_config
IP_total=`netstat -anp |grep 'ESTABLISHED' |grep 'lightsocks' |grep 'tcp' | grep '::ffff:' |awk '{print $5}' |awk -F ":" '{print $4}' |sort -u |grep -E -o "([0-9]{1,3}[\.]){3}[0-9]{1,3}" |wc -l`
echo -e "链接IP总数: ${Green_background_prefix} "${IP_total}" ${Font_color_suffix} "
user_IP_1=`netstat -anp |grep 'ESTABLISHED' |grep 'lightsocks' |grep 'tcp' |grep ":${port} "|grep '::ffff:' |awk '{print $5}' |awk -F ":" '{print $4}' |sort -u |grep -E -o "([0-9]{1,3}[\.]){3}[0-9]{1,3}"`
if [[ -z ${user_IP_1} ]]; then
user_IP_total="0"
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: "
else
user_IP_total=`echo -e "${user_IP_1}"|wc -l`
if [[ ${format_1} == "IP_address" ]]; then
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: "
get_IP_address
echo
else
user_IP=$(echo -e "\n${user_IP_1}")
echo -e "端口: ${Green_font_prefix}"${port}"${Font_color_suffix}\t 链接IP总数: ${Green_font_prefix}"${user_IP_total}"${Font_color_suffix}\t 当前链接IP: ${Green_font_prefix}${user_IP}${Font_color_suffix}\n"
fi
fi
}
View_user_connection_info(){
check_installed_status
echo && echo -e "请选择要显示的格式:
${Green_font_prefix}1.${Font_color_suffix} 显示 IP 格式
${Green_font_prefix}2.${Font_color_suffix} 显示 IP+IP归属地 格式" && echo
read -e -p "(默认: 1):" lightsocks_connection_info
[[ -z "${lightsocks_connection_info}" ]] && lightsocks_connection_info="1"
if [[ "${lightsocks_connection_info}" == "1" ]]; then
View_user_connection_info_1 ""
elif [[ "${lightsocks_connection_info}" == "2" ]]; then
echo -e "${Tip} 检测IP归属地(ipip.net),如果IP较多,可能时间会比较长..."
View_user_connection_info_1 "IP_address"
else
echo -e "${Error} 请输入正确的数字(1-2)" && exit 1
fi
}
View_user_connection_info_1(){
format=$1
if [[ ${release} = "centos" ]]; then
cat /etc/redhat-release |grep 7\..*|grep -i centos>/dev/null
if [[ $? = 0 ]]; then
debian_View_user_connection_info "$format"
else
centos_View_user_connection_info "$format"
fi
else
debian_View_user_connection_info "$format"
fi
}
get_IP_address(){
#echo "user_IP_1=${user_IP_1}"
if [[ ! -z ${user_IP_1} ]]; then
#echo "user_IP_total=${user_IP_total}"
for((integer_1 = ${user_IP_total}; integer_1 >= 1; integer_1--))
do
IP=$(echo "${user_IP_1}" |sed -n "$integer_1"p)
#echo "IP=${IP}"
IP_address=$(wget -qO- -t1 -T2 http://freeapi.ipip.net/${IP}|sed 's/\"//g;s/,//g;s/\[//g;s/\]//g')
#echo "IP_address=${IP_address}"
#user_IP="${user_IP}\n${IP}(${IP_address})"
echo -e "${Green_font_prefix}${IP}${Font_color_suffix} (${IP_address})"
#echo "user_IP=${user_IP}"
sleep 1s
done
fi
}
Set_crontab_monitor_lightsocks(){
check_crontab_installed_status
crontab_monitor_lightsocks_status=$(crontab -l|grep "lightsocks.sh monitor")
if [[ -z "${crontab_monitor_lightsocks_status}" ]]; then
echo && echo -e "当前监控模式: ${Green_font_prefix}未开启${Font_color_suffix}" && echo
echo -e "确定要开启 ${Green_font_prefix}Lightsocks 服务端运行状态监控${Font_color_suffix} 功能吗?(当进程关闭则自动启动SSR服务端)[Y/n]"
read -e -p "(默认: y):" crontab_monitor_lightsocks_status_ny
[[ -z "${crontab_monitor_lightsocks_status_ny}" ]] && crontab_monitor_lightsocks_status_ny="y"
if [[ ${crontab_monitor_lightsocks_status_ny} == [Yy] ]]; then
crontab_monitor_lightsocks_cron_start
else
echo && echo " 已取消..." && echo
fi
else
echo && echo -e "当前监控模式: ${Green_font_prefix}已开启${Font_color_suffix}" && echo
echo -e "确定要关闭 ${Green_font_prefix}Lightsocks 服务端运行状态监控${Font_color_suffix} 功能吗?(当进程关闭则自动启动SSR服务端)[y/N]"
read -e -p "(默认: n):" crontab_monitor_lightsocks_status_ny
[[ -z "${crontab_monitor_lightsocks_status_ny}" ]] && crontab_monitor_lightsocks_status_ny="n"
if [[ ${crontab_monitor_lightsocks_status_ny} == [Yy] ]]; then
crontab_monitor_lightsocks_cron_stop
else
echo && echo " 已取消..." && echo
fi
fi
}
crontab_monitor_lightsocks_cron_start(){
crontab -l > "$file_1/crontab.bak"
sed -i "/lightsocks.sh monitor/d" "$file_1/crontab.bak"
echo -e "\n* * * * * /bin/bash $file_1/lightsocks.sh monitor" >> "$file_1/crontab.bak"
crontab "$file_1/crontab.bak"
rm -r "$file_1/crontab.bak"
cron_config=$(crontab -l | grep "lightsocks.sh monitor")
if [[ -z ${cron_config} ]]; then
echo -e "${Error} Lightsocks 服务端运行状态监控功能 启动失败 !" && exit 1
else
echo -e "${Info} Lightsocks 服务端运行状态监控功能 启动成功 !"
fi
}
crontab_monitor_lightsocks_cron_stop(){
crontab -l > "$file_1/crontab.bak"
sed -i "/lightsocks.sh monitor/d" "$file_1/crontab.bak"
crontab "$file_1/crontab.bak"
rm -r "$file_1/crontab.bak"
cron_config=$(crontab -l | grep "lightsocks.sh monitor")
if [[ ! -z ${cron_config} ]]; then
echo -e "${Error} Lightsocks 服务端运行状态监控功能 停止失败 !" && exit 1
else
echo -e "${Info} Lightsocks 服务端运行状态监控功能 停止成功 !"
fi
}
crontab_monitor_lightsocks(){
check_installed_status
check_pid
echo "${PID}"
if [[ -z ${PID} ]]; then
echo -e "${Error} [$(date "+%Y-%m-%d %H:%M:%S %u %Z")] 检测到 Lightsocks服务端 未运行 , 开始启动..." | tee -a ${lightsocks_log}
/etc/init.d/lightsocks start
sleep 1s
check_pid
if [[ -z ${PID} ]]; then
echo -e "${Error} [$(date "+%Y-%m-%d %H:%M:%S %u %Z")] Lightsocks服务端 启动失败..." | tee -a ${lightsocks_log}
else
echo -e "${Info} [$(date "+%Y-%m-%d %H:%M:%S %u %Z")] Lightsocks服务端 启动成功..." | tee -a ${lightsocks_log}
fi
else
echo -e "${Info} [$(date "+%Y-%m-%d %H:%M:%S %u %Z")] Lightsocks服务端 进程运行正常..." | tee -a ${lightsocks_log}
fi
}
Add_iptables(){
# Allow new inbound TCP and UDP connections on the freshly chosen ${ls_port}.
iptables -I INPUT -m state --state NEW -m tcp -p tcp --dport ${ls_port} -j ACCEPT
iptables -I INPUT -m state --state NEW -m udp -p udp --dport ${ls_port} -j ACCEPT
}
Del_iptables(){
# Remove the rules for the previous port (${port}, as read from the config).
iptables -D INPUT -m state --state NEW -m tcp -p tcp --dport ${port} -j ACCEPT
iptables -D INPUT -m state --state NEW -m udp -p udp --dport ${port} -j ACCEPT
}
Save_iptables(){
# Persist current firewall rules: via the iptables service on CentOS, or a
# rules file on Debian/Ubuntu (restored at boot by the hook in Set_iptables).
if [[ ${release} == "centos" ]]; then
service iptables save
else
iptables-save > /etc/iptables.up.rules
fi
}
Set_iptables(){
if [[ ${release} == "centos" ]]; then
service iptables save
chkconfig --level 2345 iptables on
else
iptables-save > /etc/iptables.up.rules
echo -e '#!/bin/bash\n/sbin/iptables-restore < /etc/iptables.up.rules' > /etc/network/if-pre-up.d/iptables
chmod +x /etc/network/if-pre-up.d/iptables
fi
}
Update_Shell(){
sh_new_ver=$(wget --no-check-certificate -qO- -t1 -T3 "https://raw.githubusercontent.com/ToyoDAdoubi/doubi/master/lightsocks.sh"|grep 'sh_ver="'|awk -F "=" '{print $NF}'|sed 's/\"//g'|head -1) && sh_new_type="github"
[[ -z ${sh_new_ver} ]] && echo -e "${Error} 无法链接到 Github !" && exit 0
if [[ -e "/etc/init.d/lightsocks" ]]; then
rm -rf /etc/init.d/lightsocks
Service_lightsocks
fi
wget -N --no-check-certificate "https://raw.githubusercontent.com/ToyoDAdoubi/doubi/master/lightsocks.sh" && chmod +x lightsocks.sh
echo -e "脚本已更新为最新版本[ ${sh_new_ver} ] !(注意:因为更新方式为直接覆盖当前运行的脚本,所以可能下面会提示一些报错,无视即可)" && exit 0
}
check_sys
action=$1
if [[ "${action}" == "monitor" ]]; then
crontab_monitor_lightsocks
else
echo && echo -e " Lightsocks 一键管理脚本 ${Red_font_prefix}[v${sh_ver}]${Font_color_suffix}
---- Toyo | doub.io/lightsocks-jc1 ----
${Green_font_prefix} 0.${Font_color_suffix} 升级脚本
————————————
${Green_font_prefix} 1.${Font_color_suffix} 安装 Lightsocks
${Green_font_prefix} 2.${Font_color_suffix} 升级 Lightsocks
${Green_font_prefix} 3.${Font_color_suffix} 卸载 Lightsocks
————————————
${Green_font_prefix} 4.${Font_color_suffix} 启动 Lightsocks
${Green_font_prefix} 5.${Font_color_suffix} 停止 Lightsocks
${Green_font_prefix} 6.${Font_color_suffix} 重启 Lightsocks
————————————
${Green_font_prefix} 7.${Font_color_suffix} 设置 账号配置
${Green_font_prefix} 8.${Font_color_suffix} 查看 账号信息
${Green_font_prefix} 9.${Font_color_suffix} 查看 日志信息
${Green_font_prefix}10.${Font_color_suffix} 查看 链接信息
————————————" && echo
if [[ -e ${lightsocks_file} ]]; then
check_pid
if [[ ! -z "${PID}" ]]; then
echo -e " 当前状态: ${Green_font_prefix}已安装${Font_color_suffix} 并 ${Green_font_prefix}已启动${Font_color_suffix}"
else
echo -e " 当前状态: ${Green_font_prefix}已安装${Font_color_suffix} 但 ${Red_font_prefix}未启动${Font_color_suffix}"
fi
else
echo -e " 当前状态: ${Red_font_prefix}未安装${Font_color_suffix}"
fi
echo
read -e -p " 请输入数字 [0-10]:" num
case "$num" in
0)
Update_Shell
;;
1)
Install_lightsocks
;;
2)
Update_lightsocks
;;
3)
Uninstall_lightsocks
;;
4)
Start_lightsocks
;;
5)
Stop_lightsocks
;;
6)
Restart_lightsocks
;;
7)
Set_lightsocks
;;
8)
View_lightsocks
;;
9)
View_Log
;;
10)
View_user_connection_info
;;
*)
echo "请输入正确数字 [0-10]"
;;
esac
fi |
#!/bin/bash
# Copyright (c) 2021 Intel Corporation.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Load the OpenVINO environment (PATH/PYTHONPATH/LD_LIBRARY_PATH) so the
# inference-engine bindings resolve, then run the application.
source /opt/intel/openvino/bin/setupvars.sh
python3 main.py
|
package br.com.controle.financeiro.controller.api.linkbuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.UUID;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.hateoas.EntityModel;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import br.com.controle.financeiro.model.dto.CardDTO;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { CardDTOResourceAssembler.class })
@AutoConfigureMockMvc
@ActiveProfiles(profiles = "test")
public class CardDTOResourceAssemblerTest {

	/** Assembler under test; instantiated by Mockito (no mocks are injected). */
	@InjectMocks
	private CardDTOResourceAssembler resourceAssembler;

	/**
	 * The assembler must wrap a CardDTO in an EntityModel exposing "self"
	 * and "cards" links while leaving the content untouched.
	 */
	@Test
	public void testToResource() {
		CardDTO cardMock = new CardDTO();
		cardMock.setId(UUID.randomUUID());
		cardMock.setName("mock");
		EntityModel<CardDTO> response = resourceAssembler.toModel(cardMock);
		assertTrue(response.hasLinks());
		// Bug fix: getLink(String) returns Optional<Link>, so the previous
		// assertNotNull could never fail; assert presence explicitly.
		assertTrue(response.getLink("self").isPresent());
		assertTrue(response.getLink("cards").isPresent());
		assertEquals(cardMock, response.getContent());
	}
}
|
#!/usr/bin/env bash
#
# Copyright (C) 2011-2018 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
# Removing the SDK folder
# NOTE(review): rm -f suppresses missing-file errors, so a non-zero exit here
# is interpreted as a permission failure; stderr is discarded either way.
rm -fr /opt/intel/sgxsdk 2> /dev/null
if [ $? -ne 0 ]; then
echo "Superuser privilege is required."
exit 1
fi
echo "Intel(R) SGX SDK uninstalled."
|
<reponame>pyohei/rirakkuma-crawller
#!/usr/local/bin/python
# -*- coding: utf-8 -*-
""" Tweet rirrakuma 4kuma submit.
"""
import tweepy
TWEET_CONTENT = (
"リラックマの4クマ漫画が更新されました!\n"
"http://www.shufu.co.jp/contents/4kuma/"
)
consumer_key = ""
consumer_secret = ""
access_key = ""
access_secret = ""
def main():
    """Authenticate against the Twitter API and post the update notice."""
    handler = tweepy.OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_key, access_secret)
    client = tweepy.API(auth_handler=handler)
    client.update_status(status=TWEET_CONTENT)


if __name__ == '__main__':
    main()
|
<reponame>emegson/financasapi
package br.com.controle.financeiro.controller.api.linkbuilder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.UUID;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.hateoas.EntityModel;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import br.com.controle.financeiro.model.dto.InstitutionDTO;
@RunWith(SpringRunner.class)
@SpringBootTest(classes = { InstitutionDTOResourceAssembler.class })
@AutoConfigureMockMvc
@ActiveProfiles(profiles = "test")
public class InstitutionDTOResourceAssemblerTest {

	/** Assembler under test; instantiated by Mockito (no mocks are injected). */
	@InjectMocks
	private InstitutionDTOResourceAssembler resourceAssembler;

	/**
	 * The assembler must wrap an InstitutionDTO in an EntityModel exposing
	 * "self" and "institutions" links while leaving the content untouched.
	 */
	@Test
	public void testToResource() {
		InstitutionDTO institutionMock = new InstitutionDTO();
		institutionMock.setId(UUID.randomUUID());
		institutionMock.setName("mock");
		EntityModel<InstitutionDTO> response = resourceAssembler.toModel(institutionMock);
		assertTrue(response.hasLinks());
		// Bug fix: getLink(String) returns Optional<Link>, so the previous
		// assertNotNull could never fail; assert presence explicitly.
		assertTrue(response.getLink("self").isPresent());
		assertTrue(response.getLink("institutions").isPresent());
		assertEquals(institutionMock, response.getContent());
	}
}
|
import requests
from bs4 import BeautifulSoup
def get_page_content(url):
    """Fetch ``url`` over HTTP and return it parsed as a BeautifulSoup tree.

    NOTE(review): no timeout and no status check -- error pages are parsed
    like any other response; consider ``response.raise_for_status()``.
    """
    response = requests.get(url)
    soup = BeautifulSoup(response.text, 'html.parser')
    return soup
# Get the title of the page
def get_page_title(soup):
    """Return the text content of the document's <title> element."""
    title_tag = soup.find('title')
    return title_tag.text
# Get all the text of the page
def get_page_text(soup):
    """Concatenate the text of every <p> element, in document order.

    Same result as the original ``+=`` loop, but ``str.join`` avoids the
    O(n^2) cost of repeated string concatenation.
    """
    return ''.join(p.text for p in soup.find_all('p'))
# Get the contents of the div with id 'content'
def get_content(soup):
    """Return the text of the <div id="content"> element."""
    content_div = soup.find('div', {'id': 'content'})
    return content_div.text
# Demo driver: fetch example.com and print its title, paragraph text, and
# the contents of the 'content' div.
# NOTE(review): example.com has no <div id="content">, so get_content() will
# raise AttributeError on a live run -- confirm the intended target URL.
url = 'https://example.com'
soup = get_page_content(url)
title = get_page_title(soup)
text = get_page_text(soup)
content = get_content(soup)
print(title)
print(text)
print(content)
<reponame>z184924/zjyRDF-RPC<filename>system-facade/src/main/java/cn/zhangjingyao/util/toekn/TokenPool.java
package cn.zhangjingyao.util.toekn;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Set;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * Process-wide cache of authentication tokens, keyed by token string.
 * Extends ConcurrentHashMap for lock-free reads; a background task evicts
 * expired tokens once per day.
 *
 * @author
 */
public class TokenPool extends ConcurrentHashMap<String,Token> {
	private static final long serialVersionUID = 1L;
	/** Lazily created singleton; creation is guarded by getInstance(). */
	private static TokenPool tokenPool;
	private TokenPool() {}
	/**
	 * Returns the singleton pool, creating it and scheduling the cleanup
	 * task on first use. Synchronized because the original unsynchronized
	 * null-check could create two pools under concurrent first calls.
	 */
	public static synchronized TokenPool getInstance() {
		if(tokenPool==null) {
			tokenPool=new TokenPool();
			cleanPoolTask();
		}
		return tokenPool;
	}
	/**
	 * Inserts (or replaces) a token, keyed by its own token string.
	 * ConcurrentHashMap.put already replaces atomically, so the original's
	 * remove-before-put was redundant.
	 */
	public boolean addToken(Token token) {
		tokenPool.put(token.getToken(), token);
		return true;
	}
	/** Looks up a token by its string; null when absent or already evicted. */
	public Token getToken(String tokenStr) {
		return tokenPool.get(tokenStr);
	}
	/**
	 * Extends the expiry of an existing token and re-registers it under its
	 * current token string.
	 * NOTE(review): throws NullPointerException when tokenStr is unknown --
	 * confirm callers always pass a live token.
	 */
	public boolean flushToken(String tokenStr) {
		Token token = tokenPool.getToken(tokenStr);
		token.flushExpiryTime();
		tokenPool.remove(tokenStr);
		tokenPool.addToken(token);
		return true;
	}
	/**
	 * Schedules cleanPool() to run daily. Bug fix: the original passed the
	 * absolute epoch timestamp of 2017-12-01 as the *relative* initial delay
	 * to scheduleAtFixedRate, deferring the first cleanup by roughly 48
	 * years; the delay is now the time remaining until the next UTC midnight.
	 */
	private static boolean cleanPoolTask() {
		ScheduledExecutorService scheduledExecutorService = new ScheduledThreadPoolExecutor(1);
		TimerTask task = new TimerTask() {
			@Override
			public void run() {
				cleanPool();
			}
		};
		try {
			long period = TimeUnit.DAYS.toMillis(1);
			long initialDelay = period - (System.currentTimeMillis() % period);
			scheduledExecutorService.scheduleAtFixedRate(task, initialDelay, period, TimeUnit.MILLISECONDS);
		}catch (Exception e) {
			System.out.println("TokenPool定时清理任务设置出现异常");
			return false;
		}
		return true;
	}
	/** Removes every expired token; ConcurrentHashMap tolerates removal while iterating. */
	public static boolean cleanPool() {
		if(tokenPool!=null) {
			System.out.println("TokenPool Clean Start");
			Set<String> keySet = tokenPool.keySet();
			for (String tokenStr : keySet) {
				Token token = tokenPool.get(tokenStr);
				if(token.isExpiry()) {
					tokenPool.remove(tokenStr);
				}
			}
		}
		return true;
	}
}
|
class Api::V1::GetProviderSubscriptionsController < ApplicationController
  # JSON API clients do not send CSRF tokens, so skip verification for them.
  skip_before_filter :verify_authenticity_token,
    :if => Proc.new { |c| c.request.format == 'application/json' }
  # Just skip the authentication for now
  # before_filter :authenticate_user!
  respond_to :json

  # Returns every SubscriptionType belonging to the given :provider_id.
  def create
    provider_subscriptions = SubscriptionType.where(:provider_id => params[:provider_id])
    render :status => 200,
           :json => { :success => true,
                      # Bug fix: the message previously read "Prvider".
                      :info => "Provider Subscriptions",
                      :data => {
                        "provider subscriptions" => provider_subscriptions }
                    }
  end
end
const responseStatus = require("../config/responseStatusConfig");

/**
 * Express error-handling middleware: maps known status codes from
 * responseStatusConfig to a JSON error payload.
 *
 * Bug fixes:
 *  - `badCredentials` and `forbiddenAccess` were missing `break`, so a 401
 *    fell through to the 403 case and then the default case, attempting to
 *    send multiple responses for a single request.
 *  - `next()` was called after the response had been sent; an error handler
 *    that has responded must end the request/response cycle instead.
 */
function errorHandler(error, req, res, next) {
  switch (error) {
    case responseStatus.badRequest:
      res.status(responseStatus.badRequest).json({
        statusCode: error,
        message: "Required fields cannot be blank.",
      });
      break;
    case responseStatus.notFound:
      res
        .status(responseStatus.notFound)
        .json({ statusCode: error, message: "This resource does not exist." });
      break;
    case responseStatus.serverError:
      res.status(responseStatus.serverError).json({
        statusCode: error,
        message: `The request could not be completed. Please try again.`,
      });
      break;
    case responseStatus.badCredentials:
      res.status(responseStatus.badCredentials).json({
        statusCode: error,
        message: "Incorrect credentials. Please try again.",
      });
      break;
    case responseStatus.forbiddenAccess:
      res.status(responseStatus.forbiddenAccess).json({
        statusCode: error,
        message: "You are not authorized to view this content.",
      });
      break;
    default:
      res.json({ statusCode: error, message: error.message });
  }
}

module.exports = errorHandler;
|
import React, { useState } from "react";
const App = () => {
const [inputValue, setInputValue] = useState("");
const [outputValue, setOutputValue] = useState("");
const handleChange = (e) => {
setInputValue(e.target.value);
};
const handleClick = () => {
const numbers = inputValue.split(",").map(Number);
const multiplied = numbers.reduce((acc, curr) => acc * curr, 1);
setOutputValue(multiplied);
};
return (
<div>
<input value={inputValue} onChange={handleChange} />
<button onClick={handleClick}>Multiply</button>
<div>Result: {outputValue}</div>
</div>
);
};
export default App; |
-- | True iff the two lists contain the same elements in the same order.
sameList :: [Int] -> [Int] -> Bool
sameList = (==)
-- Compare two identical example lists and print the outcome (True).
main :: IO ()
main = print (sameList [1, 2, 3] [1, 2, 3])
<gh_stars>0
package com.example.videly.authentication;
import com.example.videly.video.Video;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.hibernate.annotations.GenericGenerator;
import javax.persistence.*;
import java.util.Set;
// JPA entity for an application user ("users" table) carrying Spring
// Security-style account state flags plus role and video associations.
// NOTE(review): Lombok @Data generates equals/hashCode/toString over all
// fields, including the ManyToMany collections -- verify this cannot trigger
// unwanted lazy loading or Role/Video recursion.
@Data
@Entity
@Table(name = "users")
@NoArgsConstructor
public class User {
// Surrogate primary key generated with the provider's native strategy.
@Id
@GeneratedValue(strategy = GenerationType.AUTO, generator = "native")
@GenericGenerator(name = "native", strategy = "native")
@Column(name = "id", nullable = false, updatable = false)
private Long id;
@Column(name = "username", nullable = false)
private String username;
@Column(name = "password", nullable = false)
private String password;
@Column(name = "email", nullable = false)
private String email;
// Account state flags, mapped to the is_* columns.
@Column(name = "is_account_non_expired", nullable = false)
private boolean isAccountNonExpired;
@Column(name = "is_account_non_locked", nullable = false)
private boolean isAccountNonLocked;
@Column(name = "is_credentials_non_expired", nullable = false)
private boolean isCredentialsNonExpired;
@Column(name = "is_enabled", nullable = false)
private boolean isEnabled;
// Roles granted to this user, via the users_roles join table.
@ManyToMany
@JoinTable(name = "users_roles", joinColumns = @JoinColumn(name = "user_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "role_id", referencedColumnName = "id"))
private Set<Role> roles;
// Videos associated with this user, via the users_videos join table.
@ManyToMany
@JoinTable(name = "users_videos", joinColumns = @JoinColumn(name = "user_id", referencedColumnName = "id"),
inverseJoinColumns = @JoinColumn(name = "video_id", referencedColumnName = "id")
)
private Set<Video> videos;
// Minimal credentials-only constructor; other fields keep Java defaults.
public User(String username, String password) {
this.username = username;
this.password = password;
}
// Full constructor including the database id (e.g. rehydrating a row).
public User(Long id,
String username,
String password,
String email,
boolean isAccountNonExpired,
boolean isAccountNonLocked,
boolean isCredentialsNonExpired,
boolean isEnabled) {
this.email = email;
this.id = id;
this.username = username;
this.password = password;
this.isAccountNonExpired = isAccountNonExpired;
this.isAccountNonLocked = isAccountNonLocked;
this.isCredentialsNonExpired = isCredentialsNonExpired;
this.isEnabled = isEnabled;
}
// Constructor for a new user with an explicit role set (id assigned by JPA).
public User(String username,
String password,
String email,
boolean isAccountNonExpired,
boolean isAccountNonLocked,
boolean isCredentialsNonExpired,
boolean isEnabled,
Set<Role> roles) {
this.email = email;
this.username = username;
this.password = password;
this.isAccountNonExpired = isAccountNonExpired;
this.isAccountNonLocked = isAccountNonLocked;
this.isCredentialsNonExpired = isCredentialsNonExpired;
this.isEnabled = isEnabled;
this.roles = roles;
}
// Convenience overload: same as above with no roles (delegates with null).
public User(String username,
String password,
String email,
boolean isAccountNonExpired,
boolean isAccountNonLocked,
boolean isCredentialsNonExpired,
boolean isEnabled) {
this(
username,
password,
email,
isAccountNonExpired,
isAccountNonLocked,
isCredentialsNonExpired,
isEnabled,
null);
}
}
|
#!/bin/sh
#
# Build and run one regression-test case for a WSDL file.
#
# arg 1 : WSDL file
# arg 2 : language (c/c++)
#
# Relies on the environment: OUTPUT_DIR, SERVICE_CONFIG (optional),
# AXISCPP_HOME_BIN, LIB_XERCES_BIN.
mkdir -p $OUTPUT_DIR
# Build grep patterns like "<name>:host=" / "<name>:port=" for this WSDL.
SERVICE_HOST=$(echo $(basename $1 .wsdl):host=)
SERVICE_PORT=$(echo $(basename $1 .wsdl):port=)
# Written to files because grep -f below reads its patterns from a file.
echo $SERVICE_HOST > service_host
echo $SERVICE_PORT > service_port
URI=
# If a config file exists then alter the endpoint to use the config data
if [ -f "$SERVICE_CONFIG" ]
then
    # Extract the soap address from the WSDL so we can alter the endpoint for the tests, i.e.
    # point to a different server and/or port
    URI=$(grep -F soap:address $1 | cut -d\" -f2)
    CONTEXT=$(echo $URI | cut -d\" -f2 | cut -d/ -f4-)
    # NOTE(review): grep -f treats each line of service_host/service_port as a
    # pattern, so this picks config lines containing "<name>:host=" etc.
    # Verify the config actually uses that per-service key format.
    URI_HOST=$(grep -f "service_host" $SERVICE_CONFIG | cut -d= -f2)
    URI_PORT=$(grep -f "service_port" $SERVICE_CONFIG | cut -d= -f2)
    #URI_HOST=$(grep -E "^host=" $SERVICE_CONFIG | cut -d= -f2)
    #URI_PORT=$(grep -E "^port=" $SERVICE_CONFIG | cut -d= -f2)
    # Only override the endpoint when both host and port were found.
    if [ -n "$URI_HOST" -a -n "$URI_PORT" ]
    then
        #echo "WSDL URI = $URI"
        NEW_URI="http://$URI_HOST:$URI_PORT/$CONTEXT"
        URI=$NEW_URI
        #echo "modified = $URI"
    else
        URI=
    fi
fi
# Client binary directory name, e.g. foo.cpp_client ('+' -> 'p' for C++).
TARGET=$(echo $(basename $1 .wsdl).$2 | tr '+' 'p')_client
OUTPUT=$(echo $(basename $1 .wsdl).out)
TIME=$(date "+%d/%m/%Y %H:%M:%S")
echo "${TIME}: Running tests in $TARGET"
echo "       Endpoint <${URI:-default}>"
# Fall back to a per-language expected-output file when the generic one is missing.
if [ ! -f output/$OUTPUT ]
then
    OUTPUT=$(echo $(basename $1 .wsdl).$2.out | tr '+' 'p')
fi
rm -rf $OUTPUT_DIR/$TARGET
# Set the exit status. Assume the worst
status=1
# Build the test client, including the generation of the stubs
buildTestCase.sh $* >>$OUTPUT_DIR/buildTestCase.log 2>&1
if [ ! -f $OUTPUT_DIR/$TARGET/client ]
then
    TIME=$(date "+%d/%m/%Y %H:%M:%S")
    echo "${TIME}: Regression test on $TARGET: BUILD FAILED" |tee -a $OUTPUT_DIR/runTestCase.log
else
    TIME=$(date "+%d/%m/%Y %H:%M:%S")
    echo "${TIME}: Regression test on $TARGET: BUILD SUCCESS" |tee -a $OUTPUT_DIR/buildTestCase.log
    export LD_LIBRARY_PATH=$AXISCPP_HOME_BIN:$LIB_XERCES_BIN:$LD_LIBRARY_PATH
    # Pass in the URI if it has been set.
    $OUTPUT_DIR/$TARGET/client $URI > $OUTPUT_DIR/$TARGET/$OUTPUT
    if [ -f $OUTPUT_DIR/$TARGET/$OUTPUT ]
    then
        # diff against a directory compares with the same-named file inside it.
        diff -q testcases/output/$OUTPUT $OUTPUT_DIR/$TARGET 2>/dev/null
        if [ $? -eq 0 ]
        then
            TIME=$(date "+%d/%m/%Y %H:%M:%S")
            echo "${TIME}: Regression test on $TARGET: RUN SUCCESS" |tee -a $OUTPUT_DIR/runTestCase.log
            status=0
        else
            TIME=$(date "+%d/%m/%Y %H:%M:%S")
            echo "${TIME}: Regression test on $TARGET: RUN FAILED" |tee -a $OUTPUT_DIR/runTestCase.log
        fi
    else
        TIME=$(date "+%d/%m/%Y %H:%M:%S")
        echo "${TIME}: Regression test on $TARGET: RUN FAILED" |tee -a $OUTPUT_DIR/runTestCase.log
    fi
fi
exit $status
# The End
##########
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Parse the command line parameters that runCust will give out.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments.
# FIX: use -gt (numeric) instead of > (lexicographic string comparison inside [[ ]]).
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    CONFIG_DIR="$2"
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift # pass argument (FIX: was missing, so the log-dir value was re-parsed as an unknown option)
    ;;
    *)
    echo Unknown option $key
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly.
# Add the root folder of the code to the PYTHONPATH.
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job.
python $CONFIG_DIR/examples/AnytimeNetwork/densenet-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--ds_name=svhn \
-f=2 \
--opt_at=23 \
-n=32 \
-g=32 \
-s=3 \
--dense_select_method=3 \
--batch_size=64 \
--samloss=0
|
# Build a source distribution, install it into the current environment,
# then publish to PyPI with twine.
python setup.py sdist
python setup.py install
# NOTE(review): this uploads every archive under dist/, including stale
# builds from earlier runs — consider cleaning dist/ first.
twine upload dist/*
module KubeDSL::DSL::V1
  # DSL wrapper for the Kubernetes v1 FlexPersistentVolumeSource object:
  # a FlexVolume-driver-backed persistent volume (driver name, filesystem
  # type, driver options, read-only flag, and an optional secret reference).
  class FlexPersistentVolumeSource < ::KubeDSL::DSLObject
    value_field :driver
    value_field :fs_type
    key_value_field(:options, format: :string)
    value_field :read_only
    object_field(:secret_ref) { KubeDSL::DSL::V1::SecretReference.new }

    validates :driver, field: { format: :string }, presence: false
    validates :fs_type, field: { format: :string }, presence: false
    validates :options, kv: { value_format: :string }, presence: false
    validates :read_only, field: { format: :boolean }, presence: false
    validates :secret_ref, object: { kind_of: KubeDSL::DSL::V1::SecretReference }

    # Render the object as a hash with Kubernetes' camelCase field names,
    # suitable for YAML/JSON serialization.
    def serialize
      {}.tap do |result|
        result[:driver] = driver
        result[:fsType] = fs_type
        result[:options] = options.serialize
        result[:readOnly] = read_only
        result[:secretRef] = secret_ref.serialize
      end
    end

    # Symbolic kind tag used by the DSL framework.
    def kind_sym
      :flex_persistent_volume_source
    end
  end
end
|
<reponame>melkishengue/cpachecker<gh_stars>1-10
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.cpa.cache;
import java.util.HashMap;
import java.util.Map;
import org.sosy_lab.cpachecker.core.interfaces.AbstractState;
import org.sosy_lab.cpachecker.core.interfaces.MergeOperator;
import org.sosy_lab.cpachecker.core.interfaces.Precision;
import org.sosy_lab.cpachecker.exceptions.CPAException;
/**
 * A {@link MergeOperator} decorator that memoizes merge results.
 *
 * <p>Results are cached per precision, then per second element, then per
 * first element, so merging the same pair under the same precision again
 * is answered from the cache instead of re-running the wrapped operator.
 */
public class CacheMergeOperator implements MergeOperator {

  /** The operator whose results are being cached. */
  private final MergeOperator mCachedMergeOperator;

  /** Cache keyed by precision -> element2 -> element1 -> merged result. */
  private final Map<Precision, Map<AbstractState, Map<AbstractState, AbstractState>>> mCache;

  public CacheMergeOperator(MergeOperator pCachedMergeOperator) {
    mCachedMergeOperator = pCachedMergeOperator;
    mCache = new HashMap<>();
  }

  @Override
  public AbstractState merge(AbstractState pElement1,
      AbstractState pElement2, Precision pPrecision) throws CPAException, InterruptedException {

    // Walk/extend the two outer cache levels in one go.
    Map<AbstractState, AbstractState> cacheForPair = mCache
        .computeIfAbsent(pPrecision, precision -> new HashMap<>())
        .computeIfAbsent(pElement2, element -> new HashMap<>());

    AbstractState mergedElement = cacheForPair.get(pElement1);
    if (mergedElement == null) {
      // Cache miss: delegate to the wrapped operator and remember the result.
      mergedElement = mCachedMergeOperator.merge(pElement1, pElement2, pPrecision);
      cacheForPair.put(pElement1, mergedElement);
    }

    return mergedElement;
  }
}
|
#!/bin/bash
# Generate build output for deploy environment: transpile, stage files into
# ./dist, and zip the result for upload.
echo Starting Alexa Skill Build
# get rid of the old dist folder
rm -rf ./dist
# install dependencies (yarn equivalent of npm install)
yarn
# transpile typescript to js (into ./dist subfolder)
yarn build
# copy package.json to dist
cp package.json ./dist
# copy node_modules to dist
# NOTE(review): with the trailing slash the destination directory may not
# exist yet; behavior differs between cp implementations — verify this does
# not end up as dist/node_modules/node_modules on your platform.
cp -R node_modules dist/node_modules/
# zip the staged files
echo Creating
cd ./dist
zip -r dist.zip .
echo "Build Finished. Look for dist.zip in the dist folder if you are wanting to push it out live!"
import numpy as np
import matplotlib.pyplot as plt
def plot_confusion_matrix(cm, label_list, title='Confusion matrix', cmap=None):
    """Render a row-normalized confusion matrix with matplotlib.

    Each row is divided by its total so cells show the fraction of the true
    class predicted as each label.

    Args:
        cm: 2-D array-like of raw counts, shape (num_classes, num_classes);
            rows are true labels, columns are predicted labels.
        label_list: sequence of class names for both axes' tick labels;
            its length must match the matrix dimensions.
        title: figure title.
        cmap: matplotlib colormap; None falls back to matplotlib's default.
    """
    cm = np.asarray(cm, dtype=np.float32)
    # Vectorized row normalization. Guard rows that sum to zero (a class
    # with no samples): the original per-row division produced NaNs (0/0).
    row_totals = cm.sum(axis=1, keepdims=True)
    cm = cm / np.where(row_totals == 0, 1, row_totals)

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    num_classes = len(label_list)
    plt.xticks(np.arange(num_classes), label_list, rotation=45)
    plt.yticks(np.arange(num_classes), label_list)
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()
<gh_stars>0
// Grafana datasource plugin entry module: re-export the plugin's classes
// under the fixed names (Datasource, QueryCtrl, ConfigCtrl) that Grafana's
// plugin loader looks up.
import {ExtendQueriesDatasource} from './datasource';
import {ExtendQueriesQueryCtrl} from './query_ctrl';
import {ExtendQueriesConfigCtrl} from './config_ctrl';

export {
  ExtendQueriesDatasource as Datasource,
  ExtendQueriesQueryCtrl as QueryCtrl,
  ExtendQueriesConfigCtrl as ConfigCtrl
};
|
#!/bin/bash
# Few-shot XVNLI fine-tuning (machine-translated dev set): sweeps three
# learning rates over a pretrained checkpoint using the volta train_task.py.
TASK=19
SHOT=25
LANG=ar
MODEL=ctrl_muniter
MODEL_CONFIG=ctrl_muniter_base
TASKS_CONFIG=iglue_fewshot_tasks_boxes36.dtu
TRTASK=XVNLI${LANG}_${SHOT}
# Few-shot training annotations and machine-translated dev annotations.
TEXT_TR=/home/projects/ku_00062/data/XVNLI/annotations/${LANG}/train_${SHOT}.jsonl
TEXT_TE=/home/projects/ku_00062/data/XVNLI/annotations_machine-translate/${LANG}/dev_gmt.jsonl
# Zero-shot checkpoint used as the starting point for few-shot tuning.
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xvnli/${MODEL}/XVNLI_${MODEL_CONFIG}/pytorch_model_best.bin

# Remember the launch directory so per-lr logs land next to this script.
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate

cd ../../../../../../volta
# Learning-rate sweep; each run gets its own output/log directories.
for lr in 1e-4 5e-5 1e-5; do
  OUTPUT_DIR=/home/projects/ku_00062/checkpoints/iglue/few_shot.mt/xvnli/${TRTASK}/${MODEL}/${lr}
  LOGGING_DIR=/home/projects/ku_00062/logs/iglue/few_shot.mt/xvnli/${TRTASK}/${lr}/${MODEL_CONFIG}
  python train_task.py \
    --bert_model /home/projects/ku_00062/huggingface/bert-base-multilingual-cased --config_file config/${MODEL_CONFIG}.json \
    --from_pretrained ${PRETRAINED} \
    --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --num_epoch 20 \
    --train_split train --train_annotations_jsonpath $TEXT_TR \
    --val_split dev --val_annotations_jsonpath $TEXT_TE \
    --lr $lr --batch_size 64 --gradient_accumulation_steps 1 --num_workers 0 --save_every_num_epochs 5 --eval_batch_size 64 \
    --adam_epsilon 1e-6 --adam_betas 0.9 0.999 --adam_correct_bias --weight_decay 0.0001 --warmup_proportion 0.1 --clip_grad_norm 1.0 \
    --output_dir ${OUTPUT_DIR} \
    --logdir ${LOGGING_DIR} \
    &> ${here}/train.${lr}.log
done

deactivate
|
cd ./script

# Defaults for the training configuration; overridden by the flags below.
MODEL="test"
DATASET="test"
project_path=""
GPU="0"
config=""
verobose=""

# -m model, -d dataset, -p project path, -g GPU ids, -c config, -v verbosity.
# FIX: the optstring used to be "m:d:p:g:c:v" (no colon after v), so getopts
# never consumed an argument for -v and OPTARG was empty — the verbosity
# value was silently dropped. "v:" makes -v take its argument.
while getopts "m:d:p:g:c:v:" opt
do
    case $opt in
        m)
            MODEL=$OPTARG
            echo "model:"$MODEL
            ;;
        d)
            DATASET=$OPTARG
            echo "dataset":$DATASET
            ;;
        p)
            project_path=$OPTARG
            echo "project_path:"$project_path
            ;;
        g)
            GPU=$OPTARG
            echo "GPUS:"$GPU
            ;;
        c)
            config=$OPTARG
            echo "config:"$config
            ;;
        v)
            verobose=$OPTARG
            echo "verbose:"$verobose
            ;;
        ?)
            # Unknown flag: abort rather than run the wrong script.
            exit 1
            ;;
    esac
done

# Dispatch to the per-model/per-dataset training script.
SCRIPT_NAME="train_"$MODEL"_"$DATASET".sh"
echo "Using the training script: $SCRIPT_NAME"
sh $SCRIPT_NAME $GPU $project_path $config $verobose
|
# Best-fit memory allocation demo: for each process, pick the smallest
# memory slot that still fits it, then mark that slot as used.
echo "put the number of processes and memory-slot"
read n
for ((i = 1; i <= n; i++))
do
    echo "put the size of process and memory slot"
    read p[$i]
    read m[$i]
done
for ((i = 1; i <= n; i++))
do
    # Track the tightest fit found so far; index=0 means "no slot fits".
    # FIX: the original never reset index, so a process that fit nowhere
    # was "allocated" to whatever slot the previous iteration chose.
    lowest=10000
    index=0
    for ((j = 1; j <= n; j++))
    do
        # Slot j must be unused (size 0 marks used) and large enough.
        if (( m[j] != 0 )) && (( p[i] <= m[j] ))
        then
            differ=$(( m[j] - p[i] ))
            if (( lowest > differ ))
            then
                lowest=$differ
                index=$j
            fi
        fi
    done
    if (( index > 0 ))
    then
        # Mark the chosen slot as consumed.
        m[$index]=0
        echo "process $i is allocated to memory location $index"
    else
        echo "process $i could not be allocated"
    fi
done
|
/*
* Copyright (c) 2021 Huawei Device Co., Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "os/library_loader.h"
#include <dlfcn.h>
namespace panda::os::library_loader {

// Open a shared library with lazy symbol binding; on failure return the
// dlerror() message (or a placeholder when none is available).
Expected<LibraryHandle, Error> Load(std::string_view filename)
{
    void *handle = dlopen(filename.data(), RTLD_LAZY);
    if (handle == nullptr) {
        char *msg = dlerror();
        if (msg == nullptr) {
            return Unexpected(Error("no error message"));
        }
        return Unexpected(Error(msg));
    }
    return LibraryHandle(handle);
}

// Look up a symbol in a previously loaded library; a null result is
// reported as an error with the dlerror() message when present.
Expected<void *, Error> ResolveSymbol(const LibraryHandle &handle, std::string_view name)
{
    void *symbol = dlsym(handle.GetNativeHandle(), name.data());
    if (symbol == nullptr) {
        char *msg = dlerror();
        if (msg == nullptr) {
            return Unexpected(Error("no error message"));
        }
        return Unexpected(Error(msg));
    }
    return symbol;
}

}  // namespace panda::os::library_loader
|
package com.lambdaschool.shoppingcart.models;
import java.io.Serializable;
import java.util.Objects;

import javax.persistence.Embeddable;
/**
 * Composite primary key for a cart item: the pair (user id, product id).
 * Marked {@link Embeddable} so JPA can embed it into the cart-item entity.
 */
@Embeddable
public class CartItemId implements Serializable
{
    // id of the user who owns the cart entry
    private long user;

    // id of the product placed in the cart
    private long product;

    /** No-arg constructor required by JPA. */
    public CartItemId()
    {
    }

    /**
     * Build a key from its two components.
     *
     * @param user    id of the owning user
     * @param product id of the product
     */
    public CartItemId(
        long user,
        long product)
    {
        this.user = user;
        this.product = product;
    }

    public long getUser()
    {
        return user;
    }

    public void setUser(long user)
    {
        this.user = user;
    }

    public long getProduct()
    {
        return product;
    }

    public void setProduct(long product)
    {
        this.product = product;
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
        {
            return true;
        }
        if (o == null || getClass() != o.getClass())
        {
            return false;
        }
        CartItemId that = (CartItemId) o;
        return user == that.user &&
            product == that.product;
    }

    @Override
    public int hashCode()
    {
        // Derive the hash from both key components so equal keys hash
        // equally and distinct keys spread across buckets. The previous
        // constant (37) was legal but turned every hash-based lookup on
        // this key into a linear scan.
        return Objects.hash(user, product);
    }
}
|
"use strict";

// Auto-generated icon definition (Material "arrow back"): an SVG viewBox
// plus its path children, exported as a CommonJS module for icon renderers.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_arrow_back = void 0;
var ic_arrow_back = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M20 11H7.83l5.59-5.59L12 4l-8 8 8 8 1.41-1.41L7.83 13H20v-2z"
    },
    "children": []
  }]
};
exports.ic_arrow_back = ic_arrow_back;
#!/bin/bash
# Storm container entrypoint: derive -c configuration overrides from the
# environment and the requested daemon, then exec "bin/storm <args>".
# FIX: removed a duplicated "#!/bin/bash" shebang line.

# storm.zookeeper.servers
ZOOKEEPER_SERVERS_ESCAPED=
if ! [ -z "$STORM_ZOOKEEPER_SERVERS" ]; then
    # All ZooKeeper server IPs in an array
    IFS=', ' read -r -a ZOOKEEPER_SERVERS_ARRAY <<< "$STORM_ZOOKEEPER_SERVERS"
    # Build an escaped JSON-ish list: ["ip1","ip2",...]
    for index in "${!ZOOKEEPER_SERVERS_ARRAY[@]}"
    do
        ZOOKEEPER_SERVERS_ESCAPED=$ZOOKEEPER_SERVERS_ESCAPED,"\\\"${ZOOKEEPER_SERVERS_ARRAY[index]}\\\""
    done
    ZOOKEEPER_SERVERS_ESCAPED=[${ZOOKEEPER_SERVERS_ESCAPED:1}]
    ZOOKEEPER_SERVERS_ESCAPED=" -c storm.zookeeper.servers=\"$ZOOKEEPER_SERVERS_ESCAPED\""
fi

# storm.local.hostname: default to the container's first IP.
HOST=" -c storm.local.hostname=$(hostname -i | awk '{print $1;}')"
# For the nimbus, apply "nimbus" as default hostname
for arg in "$@"
do
    if [[ $arg == "nimbus" ]] ; then
        HOST=" -c storm.local.hostname=\"nimbus\""
    fi
done

# supervisor.slots.ports
SUPERVISOR_SLOTS=
# For a supervisor, set worker slots
for arg in "$@"
do
    if [[ $arg == "supervisor" ]] ; then
        SUPERVISOR_SLOTS=" -c supervisor.slots.ports=\"[6700,6701,6702,6703]\""
    fi
done

# nimbus.seeds
# NOTE(review): after eval this expands to nimbus.seeds=[nimbus]; Storm's
# YAML parsing accepts the unquoted form — confirm before changing quoting.
NIMBUS_SEEDS=" -c nimbus.seeds=\"[\"nimbus\"]\""

# Make sure provided arguments are not overridden
for arg in "$@"
do
    if [[ $arg == *"storm.zookeeper.servers"* ]] ; then
        ZOOKEEPER_SERVERS_ESCAPED=
    fi
    if [[ $arg == *"storm.local.hostname"* ]] ; then
        HOST=
    fi
    if [[ $arg == *"supervisor.slots.ports"* ]] ; then
        SUPERVISOR_SLOTS=
    fi
    if [[ $arg == *"nimbus.seeds"* ]] ; then
        NIMBUS_SEEDS=
    fi
    if [[ $arg == *"nimbus.host"* ]] ; then
        NIMBUS_SEEDS=
    fi
done

# Optional startup delay (e.g. to wait for ZooKeeper to come up).
if ! [ -z "$initial_delay_seconds" ]; then
    echo "sleep: $initial_delay_seconds"
    sleep "$initial_delay_seconds"
fi

# Assemble and exec the final storm command (exec replaces this shell).
CMD="exec bin/storm $@$NIMBUS_SEEDS$SUPERVISOR_SLOTS$HOST$ZOOKEEPER_SERVERS_ESCAPED"
echo "$CMD"
eval "$CMD"
|
package io.opensphere.core.util;
import java.util.Arrays;
import java.util.function.Predicate;
import io.opensphere.core.util.ref.Reference;
import net.jcip.annotations.ThreadSafe;
/**
 * A property whose value changes are negotiated with three-phase change
 * listeners before taking effect.
 *
 * @param <S> the state type of the listener.
 *
 * @see ThreePhaseChangeSupport
 */
@ThreadSafe
public class ThreePhaseProperty<S>
{
    /** Handles listener registration and the three-phase update protocol. */
    private final ThreePhaseChangeSupport<S, ThreePhaseChangeListener<S>> mySupport = new StrongThreePhaseChangeSupport<>();

    /** The current value of the property. */
    private volatile S myCurrentValue;

    /**
     * Constructor.
     *
     * @param initialValue The initial value.
     */
    public ThreePhaseProperty(S initialValue)
    {
        myCurrentValue = initialValue;
    }

    /**
     * Register a listener for value changes.
     *
     * @param listener The listener.
     */
    public void addListener(ThreePhaseChangeListener<S> listener)
    {
        mySupport.addListener(listener);
    }

    /**
     * Get the references to this property's listeners.
     *
     * @return The listeners.
     */
    public Reference<ThreePhaseChangeListener<S>>[] getListeners()
    {
        return mySupport.getListeners();
    }

    /**
     * Get a service that handles adding and removing a listener. Calling
     * {@link Service#open()} adds the listener to this change support;
     * {@link Service#close()} removes it. The service holds a strong
     * reference to the listener, but no reference is held to the service.
     *
     * @param listener The listener.
     * @return The service.
     */
    public ReferenceService<ThreePhaseChangeListener<S>> getListenerService(ThreePhaseChangeListener<S> listener)
    {
        return mySupport.getListenerService(listener);
    }

    /**
     * Get the current value.
     *
     * @return The value.
     */
    public S getValue()
    {
        return myCurrentValue;
    }

    /**
     * Remove every listener that satisfies a predicate.
     *
     * @param predicate The predicate.
     */
    public synchronized void removeListener(Predicate<? super ThreePhaseChangeListener<?>> predicate)
    {
        for (Reference<ThreePhaseChangeListener<S>> ref : mySupport.getListeners())
        {
            ThreePhaseChangeListener<S> listener = ref.get();
            if (listener != null && predicate.test(listener))
            {
                removeListener(listener);
            }
        }
    }

    /**
     * Remove a single listener.
     *
     * @param listener The listener.
     */
    public void removeListener(ThreePhaseChangeListener<S> listener)
    {
        mySupport.removeListener(listener);
    }

    /**
     * Set the value of the property. This may timeout or a listener may
     * refuse the new value.
     *
     * @param value The new value.
     * @param perPhaseTimeoutMillis Wait timeout in milliseconds. Each phase
     *            gets its own timeout, so the actual time to complete
     *            processing may be up to three times the timeout.
     * @param failOnTimeout When {@code true}, the update will be aborted when
     *            the timeout is reached. When {@code false}, once the timeout
     *            is reached, processing will continue without waiting for
     *            listeners until the commit is performed.
     *
     * @return {@code true} if the value was changed successfully.
     * @throws PropertyChangeException If there is a problem attempting the
     *             state change.
     * @throws InterruptedException If the thread is interrupted.
     */
    public boolean setValue(S value, long perPhaseTimeoutMillis, boolean failOnTimeout)
        throws PropertyChangeException, InterruptedException
    {
        synchronized (this)
        {
            // Only commit the new value when all listeners accepted it.
            if (!mySupport.updateState(value, perPhaseTimeoutMillis, failOnTimeout))
            {
                return false;
            }
            myCurrentValue = value;
            return true;
        }
    }

    @Override
    public String toString()
    {
        return getClass().getSimpleName() + " [" + myCurrentValue + ']';
    }
}
|
import React from 'react';
import { connect } from 'react-redux';
import { BackToPage } from '../../chrome/link';
import { performCreateItem } from '../../actions/create-item';
import ItemDetails from './details';
const AdminItemNew = ({ performCreateItem, params: { code: storeCode } }) => (
<ItemDetails
onSubmit={details => performCreateItem({ details, storeCode })}
left={<BackToPage path={`/admin/listing/${storeCode}`} title="Listings" />}
/>
);
export default connect(props => props, { performCreateItem })(AdminItemNew);
|
package com.google.code.geocoder;
import com.google.code.geocoder.model.GeocodeResponse;
import com.google.code.geocoder.model.GeocoderRequest;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.junit.Ignore;
import org.junit.Test;
import java.io.FileInputStream;
import java.io.InputStream;
/**
 * Mass-geocoding integration test: reads addresses from an Excel fixture and
 * geocodes each one, printing the responses. {@code @Ignore}d by default
 * because it needs the spreadsheet fixture and live network access.
 */
@Ignore
public class MassGeocoderIT extends BaseGeocoderTest {

    @Test
    public void testGeocode() throws Exception {
        GeocodeResponse geocoderResponse;
        // FIX: the path contained a trailing space ("...xls "), which makes
        // FileInputStream fail on most filesystems.
        InputStream file = new FileInputStream("src/test/resources/ATM_partners.xls");
        try {
            HSSFWorkbook wb = new HSSFWorkbook(file);
            final HSSFSheet sheet = wb.getSheetAt(0);
            // FIX: start from the first ROW. getLeftCol() returns the
            // leftmost visible COLUMN and was wrongly used as a row index.
            // Capped at 2000 rows to bound the run.
            for (int i = sheet.getFirstRowNum(); i < Math.min(2000, sheet.getLastRowNum()); i++) {
                final HSSFRow row = sheet.getRow(i);
                final String street = row.getCell(3).getStringCellValue();
                final String city = row.getCell(1).getStringCellValue();
                final String region = row.getCell(2).getStringCellValue();
                String address = street + ", " + city;
                // A "м." prefix marks a city-level region; only append the
                // oblast suffix for non-city regions.
                if (!region.startsWith("м.")) {
                    address += ", " + region + " обл.";
                }
                System.out.println("address = " + address);
                geocoderResponse = geocoder.geocode(new GeocoderRequest(address, "uk"));
                System.out.println("geocoderResponse = " + geocoderResponse);
                // Throttle to roughly one request per second.
                Thread.sleep(1000);
            }
        } finally {
            file.close();
        }
    }
}
|
<gh_stars>0
enum DomainsEnum {
ADME_CORE = "ADME_CORE",
ADME_MONEY = "ADME_MONEY"
}
enum Model {
company = "company",
campaign = "campaign",
advertisement = "advertisement",
post = "post",
postExternal = "postExternal",
insight = "insight",
person = "person",
historic = "historic",
region = "region",
sex = "sex",
age = "age",
category = "category",
star = "star",
sponsorshipEffectiveness = "sponsorshipEffectiveness",
spotEffectiveness = "spotEffectiveness",
advertisingEffectiveness = "advertisingEffectiveness",
insightTypePrice = "insightTypePrice",
nationality = "nationality",
religion = "religion",
all = "*",
allModelAndAllField = ">",
permission_role = "permission_role",
permission = "permission",
person_company = "person_company",
role = "role",
person_credential = "person_credential",
people_relationship = "people_relationship",
referral = "referral",
trace = "trace",
webSession = "webSession",
socialMediaImplementation = "socialMediaImplementation",
fullStackWorkFlowState = "fullStackWorkFlowState",
termsAndConditions = "termsAndConditions",
payment = "payment",
transactionItem = "transactionItem",
transactionItemTransaction = "transactionItemTransaction",
transaction = "transaction"
}
enum transactionItemFields {
_id = "_id",
createdAt = "createdAt"
}
enum transactionItemTransactionFields {
_id = "_id",
createdAt = "createdAt"
}
enum transactionFields {
_id = "_id",
createdAt = "createdAt"
}
enum transactionItemTransaction {
_id = "_id",
}
enum paymentFields {
_id = "_id",
createdAt = "createdAt"
}
enum syncTradeTypeEnum {
PaymentToInfluencer = "PaymentToInfluencer",
PaymentToAdme = "PaymentToAdme",
PaymentToReferent = "PaymentToReferent",
PaymentFromReferred = "PaymentFromReferred",
DistributeToReferents = "DistributeToReferents",
CampaignPayed = "CampaignPayed",
TaxCollected = "TaxCollected",
TaxPayment = "TaxPayment",
WithdrawByShopping = "WithdrawByShopping"
}
enum syncFields {
syncDirection = "syncDirection",
syncFowardStatus = "syncFowardStatus",
syncForwardStatusDt = "syncForwardStatusDt",
syncParentDomain = "syncParentDomain",
syncParentModel = "syncParentModel",
syncParentEntityId = "syncParentEntityId",
originCampaignId = "originCampaignId",
originAdvertisementId = "originAdvertisementId",
originPostId = "originPostId",
originInsightId = "originInsightId",
tradeType = "tradeType",
amount = "amount"
}
enum syncFowardStatusEnum {
Pending = "Pending",
InProgress = "InProgress",
Failed = "Failed",
Done = "Done",
NotNeeded = "NotNeeded"
}
enum moneyDistributionStatusEnum {
WaitingForApproval = "WaitingForApproval",
Approved = "Approved",
Done = "Done",
Failed = "Failed"
}
enum syncDirectionEnum {
Up = "Up",
Injection = "Injection",
Ejection = "Ejection"
}
enum cronJobs {
SocialMedia_ReadInsights = "SocialMedia_ReadInsights",
SocialMedia_ReadRelationships = "SocialMedia_ReadRelationships",
Money_Ejection_From_Core_Advertisement_DistributeMoney = "Money_Ejection_From_Core_Advertisement_DistributeMoney",
Money_Ejection_From_Core_Insight_DistributeMoney = "Money_Ejection_From_Core_Insight_DistributeMoney",
Money_Ejection_From_Core_Payment_PayToInfluencer = "Money_Ejection_From_Core_Payment_PayToInfluencer",
Money_Ejection_From_Core_Payment_PayToAdme = "Money_Ejection_From_Core_Payment_PayToAdme",
Money_Ejection_From_Core_Payment_PayToReferent = "Money_Ejection_From_Core_Payment_PayToReferent",
Money_Ejection_From_Money_TransactionItem_PayTaxes = "Money_Ejection_From_Money_TransactionItem_PayTaxes",
Money_Injection_From_Core_Payment_CampaignPayed = "Money_Injection_From_Core_Payment_CampaignPayed",
Money_Injection_From_Money_TransactionItem_CampaignPayed = "Money_Injection_From_Money_TransactionItem_CampaignPayed",
Money_Injection_From_Money_TransactionItemTransaction_CampaignPayed = "Money_Injection_From_Money_TransactionItemTransaction_CampaignPayed",
Money_Injection_From_Money_TransactionItemTransaction_PayTaxes = "Money_Injection_From_Money_TransactionItemTransaction_PayTaxes",
Money_Injection_From_Money_TransactionItemTransaction_WithdrawByShopping = "Money_Injection_From_Money_TransactionItemTransaction_WithdrawByShopping",
Money_Up_From_Core_Payment_DistributeToReferents = "Money_Up_From_Core_Payment_DistributeToReferents"
}
enum messagingPayloadBaseFields {
_id = "_id",
eventUuid = 'eventUuid'
}
enum appTypes {
Web = "Web",
Mobile = "Mobile",
Feed = "Feed",
Others ="Others"
}
enum permission_roleFields {
_id = "_id",
roleId = "roleId",
permissionId = "permissionId",
creationDt = "creationDt"
}
enum permissionFields {
_id = "_id",
app = "app",
permission = "permission",
description = "description",
enabled = "enabled",
creationDt = "creationDt"
}
enum person_companyFields {
_id = "_id",
personId = "personId",
companyId = "companyId",
roleId = "roleId",
creationDt = "creationDt",
}
enum roleFields {
_id = "_id",
role = "role",
app = "app",
description = "description",
creationDt = "creationDt",
}
enum insightTypePriceFields {
_id = "_id",
name = "name",
thumbnail = "thumbnail",
price = "price"
}
enum regionFields {
_id = "_id",
country = "country",
state = "state",
city = "city",
iso2 = "iso2",
iso3 = "iso3",
personIds = "personIds",
thumbnail = "thumbnail",
geoPoint = "geoPoint"
}
enum nationalityFields {
_id = "_id",
name = "name",
personIds = "personIds",
geoPoint = "geoPoint"
}
enum religionFields {
_id = "_id",
name = "name",
personIds = "personIds"
}
enum sexFields {
_id = "_id",
name = "name",
personIds = "personIds"
}
enum ageFields {
_id = "_id",
age = "age",
personIds = "personIds"
}
enum categoryFields {
_id = "_id",
name = "name",
personIds = "personIds",
updatedDt = "updatedDt"
}
enum starFields {
_id = "_id",
stars = "stars",
customerPersonIds = "customerPersonIds",
platformPersonIds = "platformPersonIds"
}
enum insightTypeFields {
_id = "_id",
name = "name",
thumbnail = "thumbnail",
price = "price"
}
enum sponsorshipEffectivenessFields {
_id = "_id",
from = "from",
to = "to",
AdPrice = "AdPrice",
framePrice = "framePrice",
hashtagPrice = "hashtagPrice",
webSitePrice = "webSitePrice",
tagMentionMePrice = "tagMentionMePrice",
sealPrice = "sealPrice",
tagMentionPeoplePrice = "tagMentionPeoplePrice",
productUsagePrice = "productUsagePrice",
insightAveragePrice = "insightAveragePrice",
freezedBudget = "freezedBudget",
insightPriceFactor = "insightPriceFactor",
totalPrice = "totalPrice",
personIds = "personIds"
}
enum spotEffectivenessFields {
_id = "_id",
from = "from",
to = "to",
AdPrice = "AdPrice",
insightAveragePrice = "insightAveragePrice",
freezedBudget = "freezedBudget",
insightPriceFactor = "insightPriceFactor",
totalPrice = "totalPrice",
personIds = "personIds"
}
enum advertisingEffectivenessFields {
_id = "_id",
from = "from",
to = "to",
AdPrice = "AdPrice",
framePrice = "framePrice",
hashtagPrice = "hashtagPrice",
webSitePrice = "webSitePrice",
tagMentionMePrice = "tagMentionMePrice",
sealPrice = "sealPrice",
tagMentionPeoplePrice = "tagMentionPeoplePrice",
productUsagePrice = "productUsagePrice",
insightAveragePrice = "insightAveragePrice",
freezedBudget = "freezedBudget",
insightPriceFactor = "insightPriceFactor",
totalPrice = "totalPrice",
personIds = "personIds"
}
enum advertisementFields {
_id = "_id",
campaignId = "campaignId",
campaignName = "campaignName",
campaignType = "campaignType",
companyId = "companyId",
companyName = "companyName",
companyLogo = "companyLogo",
personId = "personId",
address_geoPoint = "address_geoPoint",
personGenre = "personGenre",
active = "active",
multimediaUri = "multimediaUri",
sponsored = "sponsored",
caption = "caption",
taggedPeople = "taggedPeople",
status = "status",
rejectionReason = "rejectionReason",
platformScore = "platformScore",
custumerScore = "custumerScore",
creationDt = "creationDt",
bannerIncluded = "bannerIncluded",
watermarkIncluded = "watermarkIncluded",
linkIncluded = "linkIncluded",
mentionToCompanyIncluded = "mentionIncluded",
hashtagIncluded = "hashtagIncluded",
mentionToOtherIncluded = "mentionToOtherIncluded",
productUsageIncluded = "productUsageIncluded",
productUsageOficialIncluded = "productUsageOficialIncluded",
engagementVelocityExpected = "engagementVelocityExpected",
engagementVelocityReal = "engagementVelocityReal",
budgetFreezed = "budgetFreezed",
moneyAvailable = "moneyAvailable",
moneyEarned = "moneyEarned",
zeroBudgetDt = "zeroBudgetDt",
advertisementPrice = "advertisementPrice",
resourceFramePrice = "resourceFramePrice",
resourceHashtagPrice = "resourceHashtagPrice",
resourceWebSitePrice = "resourceWebSitePrice",
resourceTagMentionMePrice = "resourceTagMentionMePrice",
resourceSealPrice = "resourceSealPrice",
resourceTagMentionPeoplePrice = "resourceTagMentionPeoplePrice",
resourceUsagePrice = "resourceProductUsagePrice",
resourceUsageOfficialPrice = "resourceUsageOfficialPrice",
socialMediaTarget = "socialMediaTarget",
facebookStatus = "facebookStatus",
facebookStatusDt = "facebookStatusDt",
instagramStatus = "instagramStatus",
instagramStatusDt = "instagramStatusDt",
twitterStatus = "twitterStatus",
twitterStatusDt = "twitterStatusDt",
tagPrice = "tagPrice",
tagCount = "tagCount",
likePrice = "likePrice",
likeCount = "likeCount",
sharedPrice = "sharedPrice",
sharedCount = "sharedCount",
linkPrice = "linkPrice",
linkCount = "linkCount",
printPrice = "printPrice",
printCount = "printCount",
mentionPrice = "mentionPrice",
mentionCount = "mentionCount",
hashtagPrice = "hashtagPrice",
hashtagCount = "hashtagCount",
commentPrice = "commentPrice",
commentCount = "commentCount",
notSponsoredTagCount = "notSponsoredTagCount",
notSponsoredLikeCount = "notSponsoredLikeCount",
notSponsoredSharedCount = "notSponsoredSharedCount",
notSponsoredLinkCount = "notSponsoredLinkCount",
notSponsoredPrintCount = "notSponsoredPrintCount",
notSponsoredMentionCount = "notSponsoredMentionCount",
notSponsoredHashtagCount = "notSponsoredHashtagCount",
notSponsoredCommentCount = "notSponsoredCommentCount",
facebookLikeCount = "facebookLikeCount",
facebookLinkCount = "facebookLinkCount",
facebookTagCount = "facebookTagCount",
facebookSharedCount = "facebookSharedCount",
facebookMentionCount = "facebookMentionCount",
facebookHashtagCount = "facebookHashtagCount",
facebookPrintCount = "facebookPrintCount",
facebookCommentCount = "facebookCommentCount",
twitterLikeCount = "twitterLikeCount",
twitterLinkCount = "twitterLinkCount",
twitterTagCount = "twitterTagCount",
twitterSharedCount = "twitterSharedCount",
twitterMentionCount = "twitterMentionCount",
twitterHashtagCount = "twitterHashtagCount",
twitterPrintCount = "twitterPrintCount",
twitterCommentCount = "twitterCommentCount",
instagramLikeCount = "instagramLikeCount",
instagramLinkCount = "instagramLinkCount",
instagramTagCount = "instagramTagCount",
instagramSharedCount = "instagramSharedCount",
instagramMentionCount = "instagramMentionCount",
instagramHashtagCount = "instagramHashtagCount",
instagramPrintCount = "instagramPrintCount",
instagramCommentCount = "instagramCommentCount",
resources = "resources",
moneyDistributionStatus = "moneyDistributionStatus"
}
// Lifecycle states of an advertisement's audit/approval flow.
enum advertisementStatusEnum {
    NotSponsored = "NotSponsored",
    WaitingForPlatformAudit = "WaitingForPlatformAudit",
    WaitingForCustomerAudit = "WaitingForCustomerAudit",
    RejectedByPlatform = "RejectedByPlatform",
    RejectedByCustomer = "RejectedByCustomer",
    Approved = "Approved"
}
// Fields identifying a post on an external social platform.
enum postExternalFields {
    advertisementId = "advertisementId",
    platform = "platform",
    platformObjectIdentity = "platformObjectIdentity"
}
// Field names of the "post" document: owning ids, feed state, and
// per-platform / per-interaction engagement counters.
enum postFields {
    _id = "_id",
    advertisementId = "advertisementId",
    personId = "personId",
    campaignId = "campaignId",
    companyId = "companyId",
    platform = "platform",
    postPlatformId = "postPlatformId",
    creationDt = "creationDt",
    feedDt = "feedDt",
    feedStatus = "feedStatus",
    status = "status",
    advertisementBudgetFreezed = "advertisementBudgetFreezed",
    engagementVelocity = "engagementVelocity",
    tagCount = "tagCount",
    likeCount = "likeCount",
    sharedCount = "sharedCount",
    linkCount = "linkCount",
    printCount = "printCount",
    mentionCount = "mentionCount",
    hashtagCount = "hashtagCount",
    commentCount = "commentCount",
    notSponsoredTagCount = "notSponsoredTagCount",
    notSponsoredLikeCount = "notSponsoredLikeCount",
    notSponsoredSharedCount = "notSponsoredSharedCount",
    notSponsoredLinkCount = "notSponsoredLinkCount",
    notSponsoredPrintCount = "notSponsoredPrintCount",
    notSponsoredMentionCount = "notSponsoredMentionCount",
    notSponsoredHashtagCount = "notSponsoredHashtagCount",
    notSponsoredCommentCount = "notSponsoredCommentCount",
    facebookLikeCount = "facebookLikeCount",
    facebookLinkCount = "facebookLinkCount",
    facebookTagCount = "facebookTagCount",
    facebookSharedCount = "facebookSharedCount",
    facebookMentionCount = "facebookMentionCount",
    facebookHashtagCount = "facebookHashtagCount",
    facebookPrintCount = "facebookPrintCount",
    facebookCommentCount = "facebookCommentCount",
    twitterLikeCount = "twitterLikeCount",
    twitterLinkCount = "twitterLinkCount",
    twitterTagCount = "twitterTagCount",
    twitterSharedCount = "twitterSharedCount",
    twitterMentionCount = "twitterMentionCount",
    twitterHashtagCount = "twitterHashtagCount",
    twitterPrintCount = "twitterPrintCount",
    twitterCommentCount = "twitterCommentCount",
    instagramLikeCount = "instagramLikeCount",
    instagramLinkCount = "instagramLinkCount",
    instagramTagCount = "instagramTagCount",
    instagramSharedCount = "instagramSharedCount",
    instagramMentionCount = "instagramMentionCount",
    instagramHashtagCount = "instagramHashtagCount",
    instagramPrintCount = "instagramPrintCount",
    instagramCommentCount = "instagramCommentCount",
}
// Publication state of a post on social media.
enum socialMediaStatusEnum {
    None = "None",
    PostRequired = "PostRequired",
    Posting = "Posting",
    Posted = "Posted",
    Failed = "Failed",
    Removed = "Removed",
}
// State of the feed-fetching job for a post.
enum feedStatusEnum {
    Idle = "Idle",
    Fetching = "Fetching",
    Failed = "Failed"
}
// Field names of the "historic" document: a timestamped value of one
// field of one entity (a simple time series record).
enum historicFields {
    _id = "_id",
    model = "model",
    field = "field",
    entityId = "entityId",
    value = "value",
    creationDt = "creationDt"
}
// Field names of the "insight" document: one engagement event on a post,
// with demographic attributes of its author.
enum insightFields {
    _id = "_id",
    postId = "postId",
    platform = "platform",
    advertisementId = "advertisementId",
    campaignId = "campaignId",
    platformObjectIdentity = "platformObjectIdentity",
    companyId = "companyId",
    address_geoPoint = "address_geoPoint",
    genre = "genre",
    genderId = "genderId",
    birthDateYear = "birthDateYear",
    birthDateMonth = "birthDateMonth",
    birthDateDay = "birthDateDay",
    type = "type",
    moneyDistributionStatus = "moneyDistributionStatus",
    creationDt = "creationDt",
}
// Classification of an insight author.
// NOTE(review): "genre" appears to be used for "gender" throughout this file — confirm.
enum insightGenreEnum {
    Male = "Male",
    Female = "Female",
    Unknown = "Unknown"
}
// Kinds of engagement events ("insights") captured from social platforms.
// The last six entries are presumably reaction types — verify against the feed ingester.
// Fix: normalized the single-quoted members (Amazesme..Saddensme) to double
// quotes for consistency with every other string literal in this file; the
// runtime values are unchanged.
enum insightTypeEnum {
    Like = "Like",
    Link = "Link",
    Tag = "Tag",
    Mention = "Mention",
    Hashtag = "Hashtag",
    Print = "Print",
    Shared = "Shared",
    Comment = "Comment",
    Amazesme = "Amazesme",
    Iloveit = "Iloveit",
    Ienjoy = "Ienjoy",
    Angersme = "Angersme",
    Icare = "Icare",
    Saddensme = "Saddensme"
}
// Types of creative resources a campaign can attach to or require in a post.
enum resourceTypeEnum {
    frame = "frame",
    hashtag = "hashtag",
    webSite = "webSite",
    tagMentionMe = "tagMentionMe",
    seal = "seal",
    tagMentionPeople = "tagMentionPeople",
    productUsage = "productUsage",
    spot = "spot"
}
// Field names of the "person_credential" document: a person's linked
// social-media account and its authentication data.
enum person_credentialFields {
    _id = "_id",
    personId = "personId",
    platform = "platform",
    friendsFeedDt = "friendsFeedDt",
    friendsFeedStatus = "friendsFeedStatus",
    platformObjectIdentity = "platformObjectIdentity",
    status = "status",
    displayName = "displayName",
    access_token = "access_token",
    expires = "expires",
    userName = "userName",
    password = "password",
    picture = "picture",
    birthday = "birthday",
    firstName = "firstName",
    lastName = "lastName",
    email = "email",
    verTermsAndConditions = "verTermsAndConditions",
    creationDt = "creationDt",
    enabled = "enabled"
}
// Link state of a person's social-media credential.
enum person_credential_statusEnum {
    NOT_LINKED = "NOT_LINKED",
    LINKING = "LINKING",
    LINKED = "LINKED",
    FAILED = "FAILED",
    EXPIRED = "EXPIRED",
    MANUAL_ACTION_REQUIRED = "MANUAL_ACTION_REQUIRED",
    TERMS_SIGNATURE_REQUIRED = "TERMS_SIGNATURE_REQUIRED"
}
// Human-readable descriptions for person_credential_statusEnum; keys mirror
// that enum one-to-one so a status can be mapped directly to display text.
// Fix: removed the stray trailing ";" after the closing brace — this was the
// only enum in the file terminated with a semicolon.
enum person_credential_statusEnumDescription {
    NOT_LINKED = "Not linked yet!",
    LINKING = "Linking...",
    LINKED = "Linked",
    FAILED = "Process has failed",
    EXPIRED = "Expired",
    MANUAL_ACTION_REQUIRED = "Manual action is required",
    TERMS_SIGNATURE_REQUIRED = "Terms signature is required"
}
// Supported social-media platforms.
enum platformEnum {
    Facebook = "Facebook",
    Twitter = "Twitter",
    Instagram = "Instagram"
}
// Field names of the "company" document: identity, per-status campaign
// counters, and budget/rating aggregates.
enum companyFields {
    _id = "_id",
    name = "name",
    logo = "logo",
    active = "active",
    campaignsOnGoingCount = "campaignsOnGoingCount",
    campaignsWaitingForApprovalCount = "campaignsWaitingForApprovalCount",
    campaignsPendingForApprovalCount = "campaignsPendingForApprovalCount",
    campaignsFinishedCount = "campaignsFinishedCount",
    campaignsWaitingForPaymentCount = "campaignsWaitingForPaymentCount",
    campaignsStoppedCount = "campaignsStoppedCount",
    campaignsDraftCount = "campaignsDraftCount",
    moneyPerEngagementExpected = "moneyPerEngagementExpected",
    moneyPerEngagementReal = "moneyPerEngagementReal",
    investment = "investment",
    budgetAvailable = "budgetAvailable",
    budgetFreezed = "budgetFreezed",
    budgetSpent = "budgetSpent",
    platformStars = "platformStars",
    customerStars = "customerStars"
}
// Field names of the "people_relationship" document: one directed social
// link between a person and a platform identity.
enum people_relationshipFields {
    _id = "_id",
    personId = "personId",
    platformObjectIdentity = "platformObjectIdentity",
    platform = "platform",
    relationship = "relationship",
    invitationStatus = "invitationStatus",
    creationDt = "creationDt"
}
// Kind of social link stored in people_relationship.relationship.
enum people_relationshipEnum {
    FRIEND_OF = "FRIEND_OF",
    FOLLOWS_TO = "FOLLOWS_TO",
    FOLLOWED_BY = "FOLLOWED_BY"
}
// Role a campaign influences people as.
// NOTE(review): "IR"/"ID" presumably abbreviate influencer/influenced — confirm.
enum campaignInfluencePeopleAsEnum {
    IR = "IR",
    ID = "ID"
}
// Field names of the campaign view shown to a person as "available":
// a subset of campaignFields plus denormalized company data.
enum personAvailableCampaignFields {
    _id = "_id",
    companyId = "companyId",
    companyName = "companyName",
    companyLogo = "companyLogo",
    companyPlatformStars = "companyPlatformStars",
    companyCustomerStars = "companyCustomerStars",
    name = "name",
    type = "type",
    slogan = "slogan",
    brief = "brief",
    resources = "resources",
    paymentType = "paymentType",
    specificTarget = "specificTarget",
    categoryCriterias = "categoryCriterias"
}
// Field names of the "campaign" document: identity and denormalized company
// data, targeting criteria (with their update timestamps), budget/engagement
// aggregates, and per-platform / per-demographic engagement counters.
enum campaignFields {
    _id = "_id",
    companyId = "companyId",
    companyName = "companyName",
    companyLogo = "companyLogo",
    companyPlatformStars = "companyPlatformStars",
    companyCustomerStars = "companyCustomerStars",
    name = "name",
    slogan = "slogan",
    brief = "brief",
    specificTarget = "specificTarget",
    influencePeopleAs = "influencePeopleAs",
    influencerPerEachOfThem = "influencerPerEachOfThem",
    followers = "followers",
    type = "type",
    paymentType = "paymentType",
    paymentStatus = "paymentStatus",
    productPaymentDescription = "productPaymentDescription",
    paymentPerEach = "paymentPerEach",
    regionCriterias = "regionCriterias",
    regionScope = "regionScope",
    nationalityCriterias = "nationalityCriterias",
    nationalityScope = "nationalityScope",
    religionCriterias = "religionCriterias",
    religionScope = "religionScope",
    sexCriterias = "sexCriterias",
    sexScope = "sexScope",
    adsPerIR = "adsPerIR",
    resources = "resources",
    nextStatuses = "nextStatuses",
    customerStarCriterias = "customerStarCriterias",
    platformStarCriterias = "platformStarCriterias",
    customerStarScope = "customerStarScope",
    platformStarScope = "platformStarScope",
    sponsorshipEffectivenessCriterias = "sponsorshipEffectivenessCriterias",
    sponsorshipEffectivenessScope = "sponsorshipEffectivenessScope",
    sponsorshipEffectivenessScopeAll = "sponsorshipEffectivenessScopeAll",
    spotEffectivenessCriterias = "spotEffectivenessCriterias",
    spotEffectivenessScope = "spotEffectivenessScope",
    spotEffectivenessScopeAll = "spotEffectivenessScopeAll",
    advertisingEffectivenessCriterias = "advertisingEffectivenessCriterias",
    advertisingEffectivenessScope = "advertisingEffectivenessScope",
    advertisingEffectivenessScopeAll = "advertisingEffectivenessScopeAll",
    categoryCriterias = "categoryCriterias",
    categoryScope = "categoryScope",
    ageCriterias = "ageCriterias",
    ageScope = "ageScope",
    specificTargetScope = "specificTargetScope",
    regionCriteriaUpdatedDt = "regionCriteriaUpdatedDt",
    nationalityCriteriaUpdatedDt = "nationalityCriteriaUpdatedDt",
    religionCriteriaUpdatedDt = "religionCriteriaUpdatedDt",
    ageCriteriaUpdatedDt = "ageCriteriaUpdatedDt",
    sexCriteriaUpdatedDt = "sexCriteriaUpdatedDt",
    customerStarCriteriaUpdatedDt = "customerStarCriteriaUpdatedDt",
    platformStarCriteriaUpdatedDt = "platformStarCriteriaUpdatedDt",
    sponsorshipEffectivenessCriteriaUpdatedDt = "sponsorshipEffectivenessCriteriaUpdatedDt",
    spotEffectivenessCriteriaUpdatedDt = "spotEffectivenessCriteriaUpdatedDt",
    advertisingEffectivenessCriteriaUpdatedDt = "advertisingEffectivenessCriteriaUpdatedDt",
    categoryCriteriaUpdatedDt = "categoryCriteriaUpdatedDt",
    endDt = "endDt",
    startDt = "startDt",
    active = "active",
    status = "status",
    investment = "investment",
    budgetAvailable = "budgetAvailable",
    budgetFreezed = "budgetFreezed",
    budgetSpent = "budgetSpent",
    moneyPerEngagementExpected = "moneyPerEngagementExpected",
    moneyPerEngagementReal = "moneyPerEngagementReal",
    engagementReal = "engagementReal",
    engagementExpected = "engagementExpected",
    influencersExpected = "influencersExpected",
    engagementVelocityReal = "engagementVelocityReal",
    engagementVelocityExpected = "engagementVelocityExpected",
    forecastDays = "forecastDays",
    influencersScope = "influencersScope",
    influencedScopeCount = "influencedScopeCount",
    influencedExtraScopeCount = "influencedExtraScopeCount",
    influencersScopeCount = "influencersScopeCount",
    engagementScopeCount = "engagementScopeCount",
    influencersScopeAll = "influencersScopeAll",
    peopleCollectionUpdateDt = "peopleCollectionUpdateDt",
    creationDt = "creationDt",
    engagementFacebook = "engagementFacebook",
    engagementInstagram = "engagementInstagram",
    engagementTwitter = "engagementTwitter",
    tagCount = "tagCount",
    likeCount = "likeCount",
    sharedCount = "sharedCount",
    linkCount = "linkCount",
    printCount = "printCount",
    mentionCount = "mentionCount",
    hashtagCount = "hashtagCount",
    commentCount = "commentCount",
    engagementNotSponsored = "engagementNotSponsored",
    engagementMaleInfluencer = "engagementMaleInfluencer",
    engagementFemaleInfluencer = "engagementFemaleInfluencer",
    engagementMaleInfluenced = "engagementMaleInfluenced",
    engagementFemaleInfluenced = "engagementFemaleInfluenced",
    engagementAnonymousInfluenced = "engagementAnonymousInfluenced",
    investmentRequired = "investmentRequired",
    influencersTotal = "influencersTotal",
    influencersTotalHistoric = "influencersTotalHistoric",
    engagementRealHistoric = "engagementRealHistoric",
    engagementNotSponsoredTotalHistory = "engagementNotSponsoredTotalHistory",
    facebookLikeCount = "facebookLikeCount",
    facebookLinkCount = "facebookLinkCount",
    facebookTagCount = "facebookTagCount",
    facebookSharedCount = "facebookSharedCount",
    facebookMentionCount = "facebookMentionCount",
    facebookHashtagCount = "facebookHashtagCount",
    facebookPrintCount = "facebookPrintCount",
    facebookCommentCount = "facebookCommentCount",
    twitterLikeCount = "twitterLikeCount",
    twitterLinkCount = "twitterLinkCount",
    twitterTagCount = "twitterTagCount",
    twitterSharedCount = "twitterSharedCount",
    twitterMentionCount = "twitterMentionCount",
    twitterHashtagCount = "twitterHashtagCount",
    twitterPrintCount = "twitterPrintCount",
    twitterCommentCount = "twitterCommentCount",
    instagramLikeCount = "instagramLikeCount",
    instagramLinkCount = "instagramLinkCount",
    instagramTagCount = "instagramTagCount",
    instagramSharedCount = "instagramSharedCount",
    instagramMentionCount = "instagramMentionCount",
    instagramHashtagCount = "instagramHashtagCount",
    instagramPrintCount = "instagramPrintCount",
    instagramCommentCount = "instagramCommentCount",
    facebookMaleInfluencerCount = "facebookMaleInfluencerCount",
    facebookFemaleInfluencerCount = "facebookFemaleInfluencerCount",
    facebookMaleInfluencedCount = "facebookMaleInfluencedCount",
    facebookAnonymousInfluencedCount = "facebookAnonymousInfluencedCount",
    facebookFemaleInfluencedCount = "facebookFemaleInfluencedCount",
    twitterMaleInfluencerCount = "twitterMaleInfluencerCount",
    twitterFemaleInfluencerCount = "twitterFemaleInfluencerCount",
    twitterMaleInfluencedCount = "twitterMaleInfluencedCount",
    twitterAnonymousInfluencedCount = "twitterAnonymousInfluencedCount",
    twitterFemaleInfluencedCount = "twitterFemaleInfluencedCount",
    instagramMaleInfluencerCount = "instagramMaleInfluencerCount",
    instagramFemaleInfluencerCount = "instagramFemaleInfluencerCount",
    instagramMaleInfluencedCount = "instagramMaleInfluencedCount",
    instagramAnonymousInfluencedCount = "instagramAnonymousInfluencedCount",
    instagramFemaleInfluencedCount = "instagramFemaleInfluencedCount",
    facebookMaleInfluencerCardinal = "facebookMaleInfluencerCardinal",
    facebookFemaleInfluencerCardinal = "facebookFemaleInfluencerCardinal",
    twitterMaleInfluencerCardinal = "twitterMaleInfluencerCardinal",
    twitterFemaleInfluencerCardinal = "twitterFemaleInfluencerCardinal",
    instagramMaleInfluencerCardinal = "instagramMaleInfluencerCardinal",
    instagramFemaleInfluencerCardinal = "instagramFemaleInfluencerCardinal",
    facebookMaleInfluencedCardinal = "facebookMaleInfluencedCardinal",
    facebookFemaleInfluencedCardinal = "facebookFemaleInfluencedCardinal",
    twitterMaleInfluencedCardinal = "twitterMaleInfluencedCardinal",
    twitterFemaleInfluencedCardinal = "twitterFemaleInfluencedCardinal",
    instagramMaleInfluencedCardinal = "instagramMaleInfluencedCardinal",
    instagramFemaleInfluencedCardinal = "instagramFemaleInfluencedCardinal",
    facebookInfluencedsCardinal = "facebookInfluencedsCardinal",
    facebookInfluencersCardinal = "facebookInfluencersCardinal",
    instagramInfluencedsCardinal = "instagramInfluencedsCardinal",
    instagramInfluencersCardinal = "instagramInfluencersCardinal",
    twitterInfluencedsCardinal = "twitterInfluencedsCardinal",
    twitterInfluencersCardinal = "twitterInfluencersCardinal"
}
// Payment state of a campaign.
enum campaignPaymentStatusEnum {
    Done = "Done",
    Pending = "Pending",
    Failed = "Failed"
}
// Field names of one entry in a campaign's specific-target scope
// (an explicitly targeted person and their effectiveness metrics).
enum campaignSpecificTargetScopeFields {
    personId = "personId",
    firstName = "firstName",
    lastName = "lastName",
    key = "key",
    sponsorshipEffectiveness = "sponsorshipEffectiveness",
    spotEffectiveness = "spotEffectiveness",
    advertisingEffectiveness = "advertisingEffectiveness"
}
// Field names of a time-bucketed value item.
enum itemMoment {
    timespan = "timespan",
    value = "value"
}
// Binary gender classification used by some aggregates (no Unknown,
// unlike insightGenreEnum).
enum genreType {
    Male = "Male",
    Female = "Female"
}
// Field name of a coordinate wrapper.
enum coordinate {
    value = "value"
}
// Lifecycle states of a campaign.
enum campaignStatusEnum {
    PreDraft = "PreDraft",
    Draft = "Draft",
    OnGoing = "OnGoing",
    WaitingForPayment = "WaitingForPayment",
    WaitingForApproval = "WaitingForApproval",
    Stopped = "Stopped",
    Finished = "Finished"
}
// Kinds of campaigns.
enum campaignTypeEnum {
    Spot = "Spot",
    Sponsorship = "Sponsorship",
    Advertising = "Advertising"
}
// How influencers are compensated in a campaign.
enum campaignPaymentTypeEnum {
    Money = "Money",
    Product = "Product"
}
// Field names of the "person" document: identity/profile, linked platform
// ids, effectiveness metrics, address, per-campaign-type aggregates, and
// referral statistics.
enum personFields {
    _id = "_id",
    username = "username",
    password = "password",
    referentCode = "referentCode",
    signUpReferentPersonId = "signUpReferentPersonId",
    roleIds = "roleIds",
    firstName = "firstName",
    lastName = "lastName",
    email = "email",
    nationality = "nationality",
    religion = "religion",
    thumbnail = "thumbnail",
    genre = "genre",
    genderId = "genderId",
    birthDateYear = "birthDateYear",
    birthDateMonth = "birthDateMonth",
    birthDateDay = "birthDateDay",
    age = "age",
    facebookId = "facebookId",
    instagramId = "instagramId",
    twitterId = "twitterId",
    key = "key",
    categories = "categories",
    customerStars = "customerStars",
    platformStars = "platformStars",
    sponsorshipEffectiveness = "sponsorshipEffectiveness",
    sponsorshipFreezedBudgetReference = "sponsorshipFreezedBudgetReference",
    sponsorshipEffectivenessReference = "sponsorshipEffectivenessReference",
    spotEffectiveness = "spotEffectiveness",
    spotFreezedBudgetReference = "spotFreezedBudgetReference",
    spotEffectivenessReference = "spotEffectivenessReference",
    advertisingEffectiveness = "advertisingEffectiveness",
    advertisingFreezedBudgetReference = "advertisingFreezedBudgetReference",
    advertisingEffectivenessReference = "advertisingEffectivenessReference",
    countryId = "countryId",
    stateId = "stateId",
    address_regionId = "address_regionId",
    address_street = "address_street",
    address_postalCode = "address_postalCode",
    address_geoPoint = "address_geoPoint",
    sponsorshipCampaignsUpdatedDt = "sponsorshipCampaignsUpdatedDt",
    advertisingCampaignsUpdatedDt = "advertisingCampaignsUpdatedDt",
    spotCampaignsUpdatedDt = "spotCampaignsUpdatedDt",
    sponsorshipCampaigns = "sponsorshipCampaigns",
    spotCampaigns = "spotCampaigns",
    advertisingCampaigns = "advertisingCampaigns",
    sponsorshipCampaignsCount = "sponsorshipCampaignsCount",
    advertisingCampaignsCount = "advertisingCampaignsCount",
    spotCampaignsCount = "spotCampaignsCount",
    referralsUpdateDt = "referralsUpdateDt",
    referralsCount = "referralsCount",
    referralClanCount = "referralClanCount",
    referralTitheTotal = "referralTitheTotal",
    nonSponsoredAdsCount = "nonSponsoredAdsCount",
    sponsorshipAdsCount = "sponsorshipAdsCount",
    sponsorshipEarnedMoneyTotal = "sponsorshipEarnedMoneyTotal",
    spotAdsCount = "spotAdsCount",
    spotEarnedMoneyTotal = "spotEarnedMoneyTotal",
    advertisingAdsCount = "advertisingAdsCount",
    advertisingEarnedMoneyTotal = "advertisingEarnedMoneyTotal",
    influencerCategory = "influencerCategory",
    sponsorshipCampaignCategories = "sponsorshipCampaignCategories",
    spotCampaignCategories = "spotCampaignCategories",
    advertisingCampaignCategories = "advertisingCampaignCategories",
    potentialReferralsUpdatedDt = "potentialReferralsUpdatedDt",
    potentialReferrals = "potentialReferrals",
    socialMediaAccountsReadyToUse = "socialMediaAccountsReadyToUse"
}
// Field names of one entry in a person's potential-referrals list.
enum personPotentialReferralsFields {
    status = "status",
    potentialReferral = "potentialReferral"
}
// State of a relationship invitation.
enum People_relationshipInvitationStatusEnum {
    DIDNOTSEND = "DIDNOTSEND",
    SENT = "SENT",
    ACCEPTED = "ACCEPTED",
    REJECTED = "REJECTED"
}
// Influencer tiers (royalty-themed ranking).
enum influencerCategoryEnum {
    KING = "KING",
    QUEEN = "QUEEN",
    PRINCE = "PRINCE",
    PRINCESS = "PRINCESS",
    DUKE = "DUKE",
    DUCHESS = "DUCHESS",
    PEASANT = "PEASANT"
}
// Supported nationalities.
enum nationalityEnum {
    Argentina = "Argentina",
    EstadosUnidos = "EstadosUnidos",
    Venezuela = "Venezuela",
    Colombia = "Colombia"
}
// Supported religions.
enum religionEnum {
    Cristiana = "Cristiana",
    Judia = "Judia",
    Indu = "Indu",
    Protestante = "Protestante"
}
// Field names of the "referral" document: a referent/referral pair plus
// denormalized stats about the referred person.
enum referralFields {
    _id = "_id",
    referentPersonId = "referentPersonId",
    referralPersonId = "referralPersonId",
    titheTotal = "titheTotal",
    creationDt = "creationDt",
    referralFirstName = "referralFirstName",
    referralLastName = "referralLastName",
    referralThumbnail = "referralThumbnail",
    referralCustomerStars = "referralCustomerStars",
    referralPlatformStars = "referralPlatformStars",
    referralReferralsCount = "referralReferralsCount",
    referralReferralClanCount = "referralReferralClanCount",
    referralInfluencerCategory = "referralInfluencerCategory",
    referralNonSponsoredAdsCount = "referralNonSponsoredAdsCount",
    referralSponsorshipAdsCount = "referralSponsorshipAdsCount",
    referralAdvertisingAdsCount = "referralAdvertisingAdsCount",
    referralSpotAdsCount = "referralSpotAdsCount"
}
// Kinds of client applications.
enum appTypeEnum {
    Web = "Web",
    Mobile = "Mobile",
    Feed = "Feed",
    Others = "Others"
}
// Field names of the "trace" document: one cause-and-effect record linking
// an origin model/field change to a handler and its destination change.
enum traceFields {
    _id = "_id",
    traceId = "traceId",
    traceUseCase = "traceUseCase",
    traceCauseOnOrigin = "traceCauseOnOrigin",
    traceOriginModel = "traceOriginModel",
    traceOriginModelId = "traceOriginModelId",
    traceOriginModelField = "traceOriginModelField",
    traceConsecuenceOnDestination = "traceConsecuenceOnDestination",
    traceHandler = "traceHandler",
    traceHandlerExecutionId = "traceHandlerExecutionId",
    traceDestinationModel = "traceDestinationModel",
    traceDestinationModelId = "traceDestinationModelId",
    traceDestinationModelField = "traceDestinationModelField",
    traceCreatedAt = "traceCreatedAt",
}
// Field names of the "webSession" document (a stored browser session).
enum webSessionFields {
    _id = "_id",
    browser = "browser",
    domain = "domain",
    personId = "personId",
    cookies = "cookies",
    createdAt = "createdAt"
}
// NOTE(review): values are browsers, not domains — the name may be a misnomer,
// or "domain" means browser family here; confirm against webSessionFields.browser usage.
enum webSessionDomainEnum {
    Chrome = "Chrome",
    Firefox = "Firefox",
    Edge = "Edge",
    Safari = "Safari"
}
// Supported UI languages.
enum languagesEnum {
    English = "English",
    Spanish = "Spanish"
}
// Field names of the "socialMediaImplementation" document: tracks which
// version of a platform method is in use and its failure count.
enum socialMediaImplementationFields {
    _id = "_id",
    platform = "platform",
    method = "method",
    failuresCount = "failuresCount",
    methodVersion = "methodVersion",
    createdAt = "createdAt"
}
// Field names of the "fullStackWorkFlowState" document: persisted state of a
// keyed workflow for a given user.
enum fullStackWorkFlowStateFields{
    _id = "_id",
    personUserName = "personUserName",
    key = "key",
    state = "state",
    payload = "payload",
    lastUpdateDt = "lastUpdateDt"
}
// States of the social-media account-linking workflow, including the
// challenge/response steps (email or SMS verification codes).
enum socialMediaAuthenticationValuesWorkFlowStateEnum{
    STARTED = "STARTED",
    CANCELED = "CANCELED",
    NONE = "NONE",
    EMAIL_SAME_CODE_NEEDED = "EMAIL_SAME_CODE_NEEDED",
    EMAIL_SAME_CODE_NEEDED_PROVIDED = "EMAIL_SAME_CODE_NEEDED_PROVIDED",
    EMAIL_NEW_CODE_NEEDED = "EMAIL_NEW_CODE_NEEDED",
    EMAIL_NEW_CODE_NEEDED_PROVIDED = "EMAIL_NEW_CODE_NEEDED_PROVIDED",
    SMS_NEW_CODE_NEEDED = "SMS_NEW_CODE_NEEDED",
    SMS_NEW_CODE_NEEDED_PROVIDED = "SMS_NEW_CODE_NEEDED_PROVIDED",
    SMS_SAME_CODE_NEEDED = "SMS_SAME_CODE_NEEDED",
    SMS_SAME_CODE_NEEDED_PROVIDED = "SMS_SAME_CODE_NEEDED_PROVIDED",
    INVALID_CREDENTIALS = "INVALID_CREDENTIALS",
    INVALID_CREDENTIALS_PROVIDED = "INVALID_CREDENTIALS_PROVIDED",
    SUCCESS = "SUCCESS",
    FAIL = "FAIL"
}
// Workflow-state keys used by the authentication workflow.
enum socialMediaAuthenticationKeysWorkFlowStateEnum{
    LINK_STATE = "LINK_STATE"
}
// Field names of the "termsAndConditions" document (a versioned T&C text
// per platform).
enum termsAndConditionsFields{
    _id = "_id",
    platform = "platform",
    version = "version",
    status = "status",
    lastUpdateDt = "lastUpdateDt"
}
// Lifecycle states of a terms-and-conditions version.
enum termsAndConditionsStatusEnum{
    DEVELOPING = "DEVELOPING",
    INREVIEW = "INREVIEW",
    PENDINGAPPROVAL = "PENDINGAPPROVAL",
    APPROVED = "APPROVED",
    DISMISSED = "DISMISSED",
    EXPIRED = "EXPIRED",
    NONE = "NONE"
}
// Public surface of this module: re-exports the enums defined in this file
// (some names here, e.g. DomainsEnum and the sync*/transaction* enums, are
// defined earlier in the file, outside this excerpt).
export {
    DomainsEnum,
    Model,
    appTypes,
    permission_roleFields,
    permissionFields,
    person_companyFields,
    roleFields,
    insightTypePriceFields,
    regionFields,
    nationalityFields,
    religionFields,
    sexFields,
    ageFields,
    categoryFields,
    starFields,
    insightTypeFields,
    sponsorshipEffectivenessFields,
    spotEffectivenessFields,
    advertisingEffectivenessFields,
    advertisementFields,
    postExternalFields,
    postFields,
    feedStatusEnum,
    historicFields,
    insightFields,
    insightTypeEnum,
    resourceTypeEnum,
    companyFields,
    campaignFields,
    campaignSpecificTargetScopeFields,
    itemMoment,
    genreType,
    coordinate,
    campaignStatusEnum,
    campaignTypeEnum,
    campaignPaymentTypeEnum,
    personFields,
    appTypeEnum,
    person_credentialFields,
    people_relationshipFields,
    campaignInfluencePeopleAsEnum,
    socialMediaStatusEnum,
    person_credential_statusEnum,
    person_credential_statusEnumDescription,
    platformEnum,
    people_relationshipEnum,
    influencerCategoryEnum,
    referralFields,
    nationalityEnum,
    religionEnum,
    advertisementStatusEnum,
    personAvailableCampaignFields,
    insightGenreEnum,
    personPotentialReferralsFields,
    People_relationshipInvitationStatusEnum,
    traceFields,
    webSessionFields,
    webSessionDomainEnum,
    languagesEnum,
    socialMediaImplementationFields,
    socialMediaAuthenticationValuesWorkFlowStateEnum,
    socialMediaAuthenticationKeysWorkFlowStateEnum,
    fullStackWorkFlowStateFields,
    termsAndConditionsFields,
    termsAndConditionsStatusEnum,
    messagingPayloadBaseFields,
    syncFields,
    syncFowardStatusEnum,
    syncDirectionEnum,
    cronJobs,
    paymentFields,
    syncTradeTypeEnum,
    transactionItemFields,
    transactionItemTransaction,
    moneyDistributionStatusEnum,
    campaignPaymentStatusEnum,
    transactionItemTransactionFields,
    transactionFields
};
|
#!/bin/sh
# Baseline: resnet50 DeeplabV3+ 20000iter 0.5RandomFlip
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_config.py --work-dir /root/autodl-tmp/baseline --seed 210
# python ./tools/test.py /root/autodl-tmp/baseline/my_baseline_config.py /root/autodl-tmp/baseline/latest.pth --format-only --eval-options "imgfile_prefix=/root/autodl-tmp/baseline/results_random"
# python evaluate.py
# In this run, the second pass — i.e. the output directory WITHOUT "random" in its name — is the one that actually used random flip.
# python ./tools/test.py /root/autodl-tmp/baseline/my_baseline_config.py /root/autodl-tmp/baseline/latest.pth --format-only --eval-options "imgfile_prefix=/root/autodl-tmp/baseline/results"
# python evaluate.py
# Baseline_noflip: resnet50 DeeplabV3+ 20000iter
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_noflip --seed 210
# Baseline: resnet50 DeeplabV3+ 20000iter Focal:Dice 1:2
# dict(type='FocalLoss', loss_name='loss_focal',loss_weight=1.0, class_weight=[0.95, 0.95, 0.95, 1, 1, 1, 1.05, 1.05, 1.05, 1.05, 1, 1, 1, 0.95, 1.05, 1.5, 2, 1, 1.05]),
# dict(type='DiceLoss', loss_name='loss_dice', loss_weight=2.0)]),
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_F1D2 --seed 210
# Baseline: resnet50 DeeplabV3+ 20000iter CE:FL 1:1
# dict(type='CrossEntropyLoss', loss_name='loss_ce', loss_weight=1.0, class_weight=[0.95, 0.95, 0.95, 1, 1, 1, 1.05, 1.05, 1.05, 1.05, 1, 1, 1, 0.95, 1.05, 1.5, 2, 1, 1.05]),
# dict(type='FocalLoss', loss_name='loss_focal', loss_weight=1.0, gamma=3., alpha=0.25)]),
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_C1F1 --seed 210
# Baseline: resnet50 DeeplabV3+ 20000iter CE:FL 1:2
# dict(type='CrossEntropyLoss', loss_name='loss_ce', loss_weight=1.0, class_weight=[0.95, 0.95, 0.95, 1, 1, 1, 1.05, 1.05, 1.05, 1.05, 1, 1, 1, 0.95, 1.05, 1.5, 2.5, 1, 1.05]),
# dict(type='FocalLoss', loss_name='loss_focal', loss_weight=2.0, gamma=3., alpha=0.25)]
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_C1F2 --seed 210
# Baseline: resnet50 DeeplabV3+ 20000iter CE:FL 1:10
# dict(type='CrossEntropyLoss', loss_name='loss_ce', loss_weight=1.0, class_weight=[0.95, 0.95, 0.95, 1, 1, 1, 1.05, 1.05, 1.05, 1.05, 1, 1, 1, 0.95, 1.05, 1.5, 8, 1, 1.05]),
# dict(type='FocalLoss', loss_name='loss_focal', loss_weight=10.0, gamma=3., alpha=0.25)]),
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_C1F10 --seed 210 && shutdown
# Baseline: resnet50 DeeplabV3+ 20000iter CE:FL 1:0
# dict(type='CrossEntropyLoss', loss_name='loss_ce', loss_weight=1.0, class_weight=[0.95, 0.95, 0.95, 1, 1, 1, 1.05, 1.05, 1.05, 1.05, 1, 1, 1, 0.95, 1.05, 1.5, 2, 1, 1.05])
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_C1F0 --seed 210
# Baseline: largeset resnet50 DeeplabV3+ 40000iter CE:FL 1:0 Crop dict(type='Resize', img_scale=(620, 512), ratio_range=(0.8, 1.2)),
# dict(type='RandomCrop', crop_size=img_scale, cat_max_ratio=0.75),
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_Crop_620-512 --seed 210
# Baseline: oriset resnet50 DeeplabV3+ 40000iter CE:FL 1:0 Crop dict(type='Resize', img_scale=(620, 512), ratio_range=(0.8, 1.2)),
# dict(type='RandomCrop', crop_size=img_scale, cat_max_ratio=0.75),
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_Crop_620-512_oriset --seed 210
# ce_w 1, 0.4,
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/baseline_cew_oriset --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_CFD.py --work-dir /root/autodl-tmp/base_partJ_CFD --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_CF.py --work-dir /root/autodl-tmp/base_partJ_CF --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_C1L0.5.py --work-dir /root/autodl-tmp/base_partJ_C1L0.5 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_C1L1.py --work-dir /root/autodl-tmp/base_partJ_C1L1 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_C1L2.py --work-dir /root/autodl-tmp/base_partJ_C1L2 --seed 210 && shutdown
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_CWhigh.py --work-dir /root/autodl-tmp/base_partJ_CWhigh --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ.py --work-dir /root/autodl-tmp/base_partJ --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_noJ.py --work-dir /root/autodl-tmp/base_noJ --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_noW.py --work-dir /root/autodl-tmp/base_partJ_noW --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rC.py --work-dir /root/autodl-tmp/base_partJ_rC --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_large.py --work-dir /root/autodl-tmp/base_partJ_large --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate.py --work-dir /root/autodl-tmp/base_partJ_rotate --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_new --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_large.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_large --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_large_40k.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_large_40 --seed 210
# [N] python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_40k.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_40 --seed 210
# [N] python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_large_40k_crop.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_large_40_rc --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_35.py --work-dir /root/autodl-tmp/base_partJ_rotate_35 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_longer.py --work-dir /root/autodl-tmp/base_partJ_40k --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_r101-v3+.py --work-dir /root/autodl-tmp/r101_deeplab --seed 42
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_cutout.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_cutout --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_0.5.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_0.5 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_0.3.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_0.3 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_partJ_rotate_25_cutout_0.3.py --work-dir /root/autodl-tmp/base_partJ_rotate_25_cutout_0.3 --seed 210
# ===== ResNeSt =====
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_rs101-v3+.py --work-dir /root/autodl-tmp/rs101_deeplab --seed 42
# ===== PointRend
# r101
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_pointrend_r101.py --work-dir /root/autodl-tmp/point_r101_10k --seed 42
# ===== Swin =====
# swin - b - 5k test
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_swin_config.py --work-dir /root/autodl-tmp/swin_b_5k --seed 210
# swin - b - 5k test with weight
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_swin_config.py --work-dir /root/autodl-tmp/swin_b_5k_weight --seed 210
# swin - b - 5k test with weight, large
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_swin_config.py --work-dir /root/autodl-tmp/swin_b_5k_w_large --seed 210 && shutdown
# swin - b - 160k - large dataset - C1F1 - batch16
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_swin_config.py --work-dir /root/autodl-tmp/swin_b_160k_c1f1_b16 --seed 210
# swin - l - 160k - large - CE_W, PJ
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_swin_l_mmpara.py --work-dir /root/autodl-tmp/swin_l_160k_cw_pj_mmpara --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_swin_small.py --work-dir /root/autodl-tmp/swin_b_160k_c1f1_b16_small --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_swin_crop.py --work-dir /root/autodl-tmp/swin_b_160k_c1f1_b16_crop --seed 210
# ===== SegFormer ======
# python tools/model_converters/mit2mmseg.py /root/autodl-tmp/pretrain/mit_b5_ori.pth /root/autodl-tmp/pretrain/mit_b5.pth
# segformer - 160k - C1F1 - batch 4
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_segformer_config.py --work-dir /root/autodl-tmp/segformer_b_160k_c1f1_b4 --seed 210
# segformer - 160k - C1F1 - batch 16 [Not Finish!]
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_segformer_config.py --work-dir /root/autodl-tmp/segformer_b_160k_c1f1_b16 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_segformer_160k_cew_pJ_b4.py --work-dir /root/autodl-tmp/segformer_160k_cew_pJ_b4 --seed 210
# ===== ResNet 101 + DeepLabV3+ - 80k - batch 16
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_baseline_noflip_config.py --work-dir /root/autodl-tmp/r101_deeplab_80k_b16 --seed 210
# ===== Convnext - 160k - batch8
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/configs/my_convnext_config.py --work-dir /root/autodl-tmp/convext_160k_b8 --seed 210
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/my_convext_160k_pJ_b8.py --work-dir /root/autodl-tmp/convext_160k_pJ_b8 --seed 210
# ===== K-net ====
# python /root/mmsegmentation/tools/train.py /root/autodl-tmp/knet/my_knet.py --work-dir /root/autodl-tmp/knet --seed 210
# Final Train: ===============
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_swin_b.py --work-dir /root/autodl-tmp/final_swin_b --seed 42
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_swin_l.py --work-dir /root/autodl-tmp/final_swin_l --seed 42
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_segformer_b5.py --work-dir /root/autodl-tmp/final_segformer_b5 --seed 42
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_convnext.py --work-dir /root/autodl-tmp/final_convnext --seed 42
# python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_convnext_new.py --work-dir /root/autodl-tmp/final_convnext_new --seed 42
# Active run: resume ConvNeXt-L final training from the iter-144000 checkpoint (seed 42).
python /root/mmsegmentation/tools/train.py /root/mmsegmentation/final/final_convnext_l.py --work-dir /root/autodl-tmp/final_convnext_l --resume-from /root/autodl-tmp/final_convnext_l/iter_144000.pth --seed 42
|
class BraceMessage(object):
def __init__(self, fmt, *args, **kwargs):
self.fmt = fmt
self.args = args
self.kwargs = kwargs
def __str__(self):
return self.fmt.format(*self.args, **self.kwargs)
_F = BraceMessage
|
#!/bin/sh
# Setup script: provisions the framebot host (node.js, audio/speech tools,
# certbot, fail2ban). Must be run with sudo privileges available.
echo
echo "setup script..."
echo "OS release informations:"
cat /etc/os-release
# Create odi user
# Operation to be handled performed, or test if odi user already exists
# sudo adduser odi
# sudo adduser odi sudo
# sudo adduser odi audio
# sudo adduser odi gpio
#su odi
#sudo mkdir /home/odi/framebot
# TODO: add git clone last tag
# The /security and /media directories will still be missing!
#sudo mkdir /home/odi/framebot/tmp
cd /home/odi/framebot/tmp
# Run the following command to fix the $HOME directory permissions for the current $USER:
# sudo chown -R $USER:$USER $HOME/
# Uninstall npm & nodejs
sudo apt-get remove -y nodejs
sudo apt-get remove -y npm
# Install npm & nodejs
# BUG FIX: the NodeSource bootstrap script is "setup_lts.x" (long-term-support
# release line); the previous URL said "setup_tls.x", which does not exist.
curl -sL https://deb.nodesource.com/setup_lts.x | sudo bash -
sudo apt-get install -y nodejs
# Install mplayer & sound tools
# sudo apt-get install -y mplayer
sudo apt-get install -y omxplayer alsa-base alsa-utils alsa-tools pulseaudio mpg123 lame
# Set audio output to headphones
amixer cset numid=3 1
# Reset volume
# sudo amixer set PCM 100%
amixer sset 'Master' 100%
# Install espeak
sudo apt-get install -y espeak
# Install voices for mbrola
wget http://tcts.fpms.ac.be/synthesis/mbrola/dba/fr1/fr1-990204.zip
sudo unzip fr1-990204.zip -d /opt/mbrola
sudo mkdir -p /usr/share/mbrola/voices/
sudo cp -r /opt/mbrola/fr1/* /usr/share/mbrola/voices/
# wget http://tcts.fpms.ac.be/synthesis/mbrola/bin/raspberri_pi/mbrola.tgz
# tar xvzf mbrola.tgz
# sudo chmod 755 mbrola
# sudo mv ./mbrola /usr/local/bin/
wget http://steinerdatenbank.de/software/mbrola3.0.1h_armhf.deb
sudo dpkg -i mbrola3.0.1h_armhf.deb
sudo apt-get install -y mbrola mbrola-fr1 mbrola-fr4
# Install pico2wave
sudo apt-get install -y libttspico-utils
# Install fbi (framebuffer imageviewer: diapo)
sudo apt-get -y install fbi
# Install Certbot
sudo apt install -y certbot
# Install fail2ban
sudo apt-get -y install fail2ban
# DEPRECATED
# Give odi user's access to needed repositories
#sudo chown -R odi /root
#sudo chown -R odi /dev/ttyUSB0
#echo "odi user granted to needed repositories"
# DEPRECATED
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# gpio export _pin_ in/out
# Test
espeak -s 125 -v mb/mb-fr1 'installation terminée.'
exit 0
#################
## After npm i ##
#################
# NOTE: everything below the 'exit 0' above is intentionally unreachable from
# this script; it is a snippet to copy elsewhere.
# TODO put these lines in /etc/rc.local file (before 'exit 0')
sudo adduser odi audio
sudo adduser odi gpio
# gpio access
sudo chmod -R 777 /sys/class/gpio
# rfxcom gateway access
sudo chmod -R 777 /dev/ttyUSB0
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.cats.ui.custome.barbtn;
/**
 * Icon types for window bar buttons. Each constant carries a display name and
 * a distinct power-of-two flag value so types can be combined in a bitmask.
 */
public enum BarIconType
{
    TITLE("title", 1 << 0),
    BANNER("banner", 1 << 1),
    MENU("menu", 1 << 2),
    MIN("min", 1 << 3),
    MAX("max", 1 << 4),
    CLOSE("close", 1 << 5);

    // Display name of the icon.
    String name;
    // Power-of-two flag value; previously computed with (int)Math.pow(2, n),
    // which takes a needless detour through floating point — integer shifts
    // are exact and idiomatic for bit flags.
    int iType;

    BarIconType(String name, int iType)
    {
        this.name = name;
        this.iType = iType;
    }

    /** @return the display name of this icon type */
    public String getName()
    {
        return this.name;
    }

    /** @return the power-of-two flag value of this icon type */
    public int getType()
    {
        return this.iType;
    }
}
# Install PyCharm Community from the AUR.
# NOTE(review): echo_section and install_pkg_aur are helper functions defined
# elsewhere in the surrounding setup framework, not in this file.
echo_section "Installing PyCharm"
install_pkg_aur pycharm-community
def generate_migration_file(app_name, dependencies):
    """Write a skeleton Django migration file for ``app_name``.

    Args:
        app_name: Django app label; also the directory the file is written to
            (``<app_name>/migrations/`` must already exist).
        dependencies: non-empty list of migration names this migration depends
            on. The first entry also names the output file
            (``<first>_auto.py``).

    Raises:
        ValueError: if ``dependencies`` is empty.

    Previously only ``dependencies[0]`` was written into the dependencies
    list even though a whole list is accepted; now every entry is emitted.
    """
    if not dependencies:
        raise ValueError("dependencies must contain at least one migration name")
    # One "('app', 'migration')," line per dependency, matching Django's
    # generated-migration formatting.
    dep_lines = "".join(
        f"        ('{app_name}', '{dep}'),\n" for dep in dependencies
    )
    # f-prefix only where interpolation actually happens.
    migration_file_content = (
        "from django.db import migrations\n\n"
        "class Migration(migrations.Migration):\n"
        "    dependencies = [\n"
        f"{dep_lines}"
        "    ]\n"
        "    # Additional migration operations would be defined here"
    )
    with open(f"{app_name}/migrations/{dependencies[0]}_auto.py", "w") as file:
        file.write(migration_file_content)
# Example usage
# NOTE(review): this runs at import time and writes into 'apis/migrations/',
# which must already exist — TODO confirm this module-level side effect is
# intended rather than demo code to be removed.
generate_migration_file('apis', ['0001_initial'])
#!/usr/bin/env bash
#
# Copyright (c) 2009-2012 VMware, Inc.
# Writes stemcell metadata (infrastructure, OS, version, git SHA) into the
# chroot for the BOSH agent. Relies on $chroot and stemcell_* variables
# exported by prelude_apply.bash / the surrounding build environment.
set -e
base_dir=$(readlink -nf $(dirname $0)/../..)
source $base_dir/lib/prelude_apply.bash
# set the infrastructure for the agent to "vsphere" when building the vcloud stemcell
if [ "${stemcell_infrastructure}" == "vcloud" ]; then
stemcell_infrastructure=vsphere
fi
echo -n $stemcell_infrastructure > $chroot/var/vcap/bosh/etc/infrastructure
# Temporary workaround: if we are building a RHEL stemcell, tell the BOSH agent
# it's a CentOS machine. This is required because the current version of bosh-agent
# does not recognize the OS type "rhel".
#
# This workaround should be reverted once we can go back to the latest version of
# the bosh-agent submodule. See
# NOTE(review): the sentence above is truncated — the referenced link was lost.
os="${stemcell_operating_system}"
if [ "${os}" == "rhel" ]; then
os="centos"
fi
echo -n ${os} > $chroot/var/vcap/bosh/etc/operating_system
echo -n ${stemcell_version} > $chroot/var/vcap/bosh/etc/stemcell_version
# Record the bosh repo HEAD SHA; a trailing '+' marks uncommitted changes.
pushd /bosh
has_uncommitted_changes=""
if ! git diff --quiet --exit-code; then
has_uncommitted_changes="+"
fi
echo -n $(git rev-parse HEAD)${has_uncommitted_changes} > $chroot/var/vcap/bosh/etc/stemcell_git_sha1
popd
|
<filename>cloud-yblog-admin/cloud-yblog-admin91/src/main/java/com/boot/controller/pearAdmin/CatchDataController.java
package com.boot.controller.pearAdmin;
import com.alibaba.fastjson.JSON;
import com.boot.annotation.Operation;
import com.boot.annotation.Visitor;
import com.boot.data.ResponseData.layuiJSON;
import com.boot.feign.article.CatchDataFeign;
import com.wf.captcha.utils.CaptchaUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
/**
* @author 游政杰
* @Date 2021/8/19
*/
@Controller("pearCatchDataController")
@RequestMapping(path = "/pear")
public class CatchDataController {

    @Autowired
    private CatchDataFeign catchDataFeign;

    /**
     * Show the data-crawling admin page.
     */
    @Operation("进入爬取数据页面")
    @Visitor(desc = "爬取数据")
    @RequestMapping(path = "/toCatchData")
    public String toCatchData() {
        return "back/newback/article/catch_list";
    }

    /**
     * Crawl a single CSDN article identified by {@code url}.
     * The two crawl endpoints previously duplicated the whole
     * captcha-check + try/catch flow; it now lives in {@link #doCatch}.
     */
    @Operation("抓取文章数据")
    @RequestMapping(path = "/catch/Article")
    @ResponseBody
    public String catchArticle(String url, String code, HttpServletRequest request, Model model) {
        return doCatch(url, code, request, false, "抓取CSDN文章数据成功", "抓取CSDN文章数据失败");
    }

    /**
     * Crawl all articles of a CSDN module identified by {@code url}.
     */
    @Operation("抓取模块中的文章数据")
    @RequestMapping(path = "/catch/ModelArticle")
    @ResponseBody
    public String catchModelArticle(String url, String code, HttpServletRequest request, Model model) {
        return doCatch(url, code, request, true, "抓取CSDN模块数据成功", "抓取CSDN模块数据失败");
    }

    /**
     * Shared captcha-verification + crawl flow for both endpoints.
     *
     * @param url        target CSDN URL to crawl
     * @param code       captcha answer submitted by the user
     * @param request    current request (captcha is stored in its session)
     * @param byModel    true to crawl a whole module, false for one article
     * @param successMsg message placed in the JSON response on success
     * @param failureMsg message placed in the JSON response on crawl failure
     * @return layui-style JSON string with msg/success fields
     */
    private String doCatch(String url, String code, HttpServletRequest request,
                           boolean byModel, String successMsg, String failureMsg) {
        layuiJSON json = new layuiJSON();
        if (!CaptchaUtil.ver(code, request)) {
            // Wrong captcha: clear it so the next attempt gets a fresh one.
            CaptchaUtil.clear(request);
            json.setMsg("验证码不正确");
            json.setSuccess(false);
            return JSON.toJSONString(json);
        }
        // Captcha OK: clear it, then delegate the crawl to the feign client.
        CaptchaUtil.clear(request);
        try {
            if (byModel) {
                catchDataFeign.batchCatchArticleByModel_csdn(url);
            } else {
                catchDataFeign.catchData_csdn(url);
            }
            json.setMsg(successMsg);
            json.setSuccess(true);
        } catch (Exception e) {
            e.printStackTrace();
            json.setMsg(failureMsg);
            json.setSuccess(false);
        }
        return JSON.toJSONString(json);
    }
}
|
#! /bin/bash
# Does basic static checks on the code.
# to check compliance before a git commit: ln -s '../../check.sh' .git/hooks/pre-commit
# To fix the linting: ./check.sh --fix
# Requires standard js: npm install --global standard
if [ "$1" == '--fix' ]; then
standard --fix --env mocha --env browser --globals assert tests/**.js
standard --fix --env browser js/**
standard --fix --env serviceworker worker.js
exit 0
fi
set -e
echo checking…
# Fail if any console.log is left in the shipped js/ sources.
! grep -r 'console\.log' js/
standard --env mocha --env browser --globals assert tests/**.js
standard --env browser js/**
standard --env serviceworker worker.js
# Require a staged change to worker.js to touch the CACHE_KEY line so the
# service-worker cache is invalidated on deploy.
# NOTE(review): with --word-diff=plain, unchanged context lines also appear in
# the diff output, so this may pass when CACHE_KEY itself was not edited —
# TODO confirm the intended strictness.
git diff --cached --word-diff=plain --no-color -- worker.js \
| grep -qP '^const CACHE_KEY =' || (echo 'Please update CACHE_KEY in worker.js — Aborting' && false)
echo all good!
|
<filename>pkg/execution/plugins/atlassian/secretenvvar/kubecompute/podsecretenvvar_plugin_test.go
package kubecompute
import (
"testing"
smith_v1 "github.com/atlassian/smith/pkg/apis/smith/v1"
smith_plugin "github.com/atlassian/smith/pkg/plugin"
"github.com/atlassian/voyager/pkg/execution/plugins/atlassian/secretenvvar"
plugin_testing "github.com/atlassian/voyager/pkg/execution/plugins/testing"
sc_v1b1 "github.com/kubernetes-incubator/service-catalog/pkg/apis/servicecatalog/v1beta1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
core_v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/runtime"
)
const (
	// defaultNamespace is the namespace shared by all fixtures in these tests.
	defaultNamespace = "ns"
)
// testEnvVars is a convenience wrapper around testEnvVarsFull with no
// env-var renames and no ignore regex.
func testEnvVars(t *testing.T, dependencies map[smith_v1.ResourceName]smith_plugin.Dependency, expectedResult map[string]string) {
	testEnvVarsFull(t, map[string]string{}, "", dependencies, expectedResult)
}
// testEnvVarsFull runs the plugin against the given dependencies and asserts
// that the produced secret holds exactly expectedResult.
func testEnvVarsFull(t *testing.T, renameEnvVar map[string]string, ignoreKeyRegex string, dependencies map[smith_v1.ResourceName]smith_plugin.Dependency, expectedResult map[string]string) {
	plugin, err := New()
	require.NoError(t, err)

	// The plugin consumes its spec in unstructured form.
	podSpec := &secretenvvar.PodSpec{
		RenameEnvVar:   renameEnvVar,
		IgnoreKeyRegex: ignoreKeyRegex,
	}
	spec, err := runtime.DefaultUnstructuredConverter.ToUnstructured(podSpec)
	require.NoError(t, err)

	result, err := plugin.Process(spec, &smith_plugin.Context{
		Namespace:    defaultNamespace,
		Dependencies: dependencies,
	})
	require.NoError(t, err)

	secret := result.Object.(*core_v1.Secret)
	// Every expected key must be present with the expected value…
	for key, want := range expectedResult {
		got, ok := secret.Data[key]
		require.True(t, ok, "missing output secret key: %q", key)
		assert.Equal(t, want, string(got))
	}
	// …and nothing beyond the expected keys may be produced.
	assert.Equal(t, len(expectedResult), len(secret.Data))
}
func TestNoDependencies(t *testing.T) {
t.Parallel()
testEnvVars(t, map[smith_v1.ResourceName]smith_plugin.Dependency{}, map[string]string{})
}
// TestBasic covers the default naming scheme: binding credentials become
// <TYPE>_<INSTANCE>_<KEY> and plain secrets become <SECRETNAME>_<KEY>
// (as shown by the expected keys below).
func TestBasic(t *testing.T) {
	t.Parallel()

	queue1Credentials := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	queue2Credentials := map[string][]byte{
		"a-b-c": []byte("val2"),
	}

	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", queue1Credentials),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", queue2Credentials),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}

	testEnvVars(t, dependencies, map[string]string{
		"SECRET1_MYSECRET": "1",
		"SQS_QUEUE1_A_B_C": "val1",
		"SQS_QUEUE2_A_B_C": "val2",
	})
}
// TestDashReplacement: literal dashes and "0DASH0" placeholder sequences in
// credential keys both end up as '_' in the generated env-var names (both
// inputs below map to the same A_B_C suffix).
func TestDashReplacement(t *testing.T) {
	t.Parallel()

	deps := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", map[string][]byte{
			"a0DASH0b0DASH0c": []byte("val1"),
		}),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", map[string][]byte{
			"a-b0DASH0c": []byte("val2"),
		}),
	}

	testEnvVars(t, deps, map[string]string{
		"SQS_QUEUE1_A_B_C": "val1",
		"SQS_QUEUE2_A_B_C": "val2",
	})
}
// TestAnnotationPrefixes exercises the voyager.atl-paas.net/envResourcePrefix
// annotation, which replaces the default resource-type prefix ("SQS") in the
// generated env-var names.
func TestAnnotationPrefixes(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	expectedResult := map[string]string{
		"MYSQS_QUEUE1_A_B_C":    "val1",
		"OTHERSQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
	}
	// x: prefix comes from the ServiceInstance annotation ("MYSQS").
	dependencies["x"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "MYSQS",
	}
	// y: the instance also says "MYSQS"…
	dependencies["y"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "MYSQS",
	}
	// …but the binding's own annotation ("OTHERSQS") takes precedence, as the
	// expected OTHERSQS_QUEUE2_* key shows.
	dependencies["y"].Actual.(*sc_v1b1.ServiceBinding).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "OTHERSQS",
	}
	testEnvVars(t, dependencies, expectedResult)
}
// TestIgnoreKeyRegex: env vars whose name matches IgnoreKeyRegex are dropped.
// The regex below matches SECRET1_* and SQS_*2* names, so only queue1's
// variable survives.
func TestIgnoreKeyRegex(t *testing.T) {
	t.Parallel()

	deps := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", map[string][]byte{
			"a-b-c": []byte("val1"),
		}),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", map[string][]byte{
			"a-b-c": []byte("val2"),
		}),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}

	testEnvVarsFull(t, map[string]string{}, "^S(ECRET1|QS_.*2)", deps, map[string]string{
		"SQS_QUEUE1_A_B_C": "val1",
	})
}
// TestRenameEnvVars checks the RenameEnvVar map. The map below swaps the two
// names SQS_QUEUE1_A_B_C and SECRET1_MYSECRET, so each expected key carries
// the value the other name would normally hold.
func TestRenameEnvVars(t *testing.T) {
	t.Parallel()
	input1 := map[string][]byte{
		"a-b-c": []byte("val1"),
	}
	input2 := map[string][]byte{
		"a-b-c": []byte("val2"),
	}
	// Values for the first two keys are swapped relative to TestBasic.
	expectedResult := map[string]string{
		"SECRET1_MYSECRET": "val1",
		"SQS_QUEUE1_A_B_C": "1",
		"SQS_QUEUE2_A_B_C": "val2",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency("binding1", defaultNamespace, "secret1", "queue1", "sqs", input1),
		"y": plugin_testing.ConstructBindingDependency("binding2", defaultNamespace, "secret2", "queue2", "sqs", input2),
		"z": plugin_testing.ConstructSecretDependency("secret1", defaultNamespace, map[string][]byte{"MYSECRET": []byte("1")}),
	}
	testEnvVarsFull(t, map[string]string{
		"SQS_QUEUE1_A_B_C": "SECRET1_MYSECRET",
		"SECRET1_MYSECRET": "SQS_QUEUE1_A_B_C",
	}, "", dependencies, expectedResult)
}
// TestRenameAsapKey: an "asap"-type binding annotated with the "ASAPKey"
// prefix produces env vars under a plain "ASAP_" prefix with the instance
// name omitted (ASAP_AUDIENCE rather than ASAPKEY_MYASAP_AUDIENCE) —
// apparently special-cased handling for ASAP credentials.
func TestRenameAsapKey(t *testing.T) {
	t.Parallel()
	asapCredentials := map[string][]byte{
		"AUDIENCE":    []byte("audience"),
		"ISSUER":      []byte("issuer"),
		"KEY_ID":      []byte("keyId"),
		"PRIVATE_KEY": []byte("privateKey"),
	}
	expectedResult := map[string]string{
		"ASAP_AUDIENCE":    "audience",
		"ASAP_ISSUER":      "issuer",
		"ASAP_KEY_ID":      "keyId",
		"ASAP_PRIVATE_KEY": "privateKey",
	}
	dependencies := map[smith_v1.ResourceName]smith_plugin.Dependency{
		"x": plugin_testing.ConstructBindingDependency(
			"asap-binding",
			defaultNamespace,
			"asap-secret",
			"myasap",
			"asap",
			asapCredentials),
	}
	// Prefix annotation lives on the ServiceInstance for this dependency.
	dependencies["x"].Auxiliary[0].(*sc_v1b1.ServiceInstance).Annotations = map[string]string{
		"voyager.atl-paas.net/envResourcePrefix": "ASAPKey",
	}
	testEnvVarsFull(t, map[string]string{}, "", dependencies, expectedResult)
}
|
from typing import List
def count_ones(ls: List[int]) -> int:
    """Return the number of elements in ``ls`` equal to 1.

    Args:
        ls: list of ints (any iterable of ints also works).

    Returns:
        Count of elements equal to the integer 1.
    """
    # sum() over a generator replaces the manual counter loop, handles the
    # empty case naturally, and generalizes to any iterable.
    return sum(1 for item in ls if item == 1)
#!/bin/sh
# Regenerate the internal Doxygen documentation from scratch.
cd "`dirname "$0"`/doc"
# Remove stale output first so symbols deleted from the code do not linger.
rm -rf internal
doxygen DoxyInternal
|
<gh_stars>0
// write your javascript code here.
// feel free to change the preset attributes as you see fit
//
// REWRITE NOTE: the previous version mixed the d3 v4 callback API with the
// v5 Promise API, referenced undefined variables (`svg`, `groups`) and had
// unbalanced braces, so it could not run. It is restructured below on the
// d3 v5+ Promise API (`d3.csv(url, rowConverter).then(...)`).
let margin = {
    top: 60,
    left: 50,
    right: 30,
    bottom: 35
  },
  width = 500 - margin.left - margin.right,
  height = 500 - margin.top - margin.bottom

// first visualization: grouped bar chart of horsepower per car, split by origin
let svg1 = d3.select('#vis1')
  .append('svg')
  .attr('preserveAspectRatio', 'xMidYMid meet') // scale the visualization to its parent element and the page
  .attr('width', '100%') // required by Chrome to ensure the SVG shows up at all
  .style('background-color', '#ccc') // light gray background
  .attr('viewBox', [0, 0, width + margin.left + margin.right, height + margin.top + margin.bottom].join(' '))

d3.csv('data/cars.csv', function (row) {
  return {
    Car: row.Car,
    Origin: row.Origin,
    Horsepower: +row.Horsepower
  }
}).then(function (data) {
  // X-axis: one band per distinct car maker
  var groups = Array.from(new Set(data.map(function (d) { return d.Car })))
  // Subgroup within each band: country of origin
  var subgroups = Array.from(new Set(data.map(function (d) { return d.Origin })))

  var x = d3.scaleBand()
    .domain(groups)
    .range([0, width])
    .padding(0.2)
  svg1.append('g')
    .attr('transform', 'translate(0,' + height + ')')
    .call(d3.axisBottom(x).tickSize(0))

  // Y-axis: horsepower, scaled to the data rather than a hard-coded 40
  var y = d3.scaleLinear()
    .domain([0, d3.max(data, function (d) { return d.Horsepower })])
    .range([height, 0])
  svg1.append('g')
    .call(d3.axisLeft(y))

  // Scale for the subgroup (origin) inside each car's band
  var xSubgroup = d3.scaleBand()
    .domain(subgroups)
    .range([0, x.bandwidth()])
    .padding(0.05)

  // color palette = one color per subgroup
  var color = d3.scaleOrdinal()
    .domain(subgroups)
    .range(['#e41a1c', '#377eb8', '#4daf4a'])

  // Show the bars: one <g> per car, one rect per row belonging to that car
  svg1.append('g')
    .selectAll('g')
    .data(groups)
    .enter()
    .append('g')
    .attr('transform', function (group) { return 'translate(' + x(group) + ',0)' })
    .selectAll('rect')
    .data(function (group) {
      return data.filter(function (d) { return d.Car === group })
    })
    .enter().append('rect')
    .attr('x', function (d) { return xSubgroup(d.Origin) })
    .attr('y', function (d) { return y(d.Horsepower) })
    .attr('width', xSubgroup.bandwidth())
    .attr('height', function (d) { return height - y(d.Horsepower) })
    .attr('fill', function (d) { return color(d.Origin) })
})

// second visualization: line chart of graduation rates
let svg2 = d3.select('#vis2')
  .append('svg')
  .attr('preserveAspectRatio', 'xMidYMid meet') // scale the visualization to its parent element and the page
  .attr('width', '100%') // required by Chrome to ensure the SVG shows up at all
  .style('background-color', '#ccc') // light gray background
  .attr('viewBox', [0, 0, width + margin.left + margin.right, height + margin.top + margin.bottom].join(' '))

d3.csv('data/grad_rates.csv', function (row) {
  return {
    School_Name: row.School_Name,
    All_Subgroups_Percentage_Graduated: +row.All_Subgroups_Percentage_Graduated
  }
}).then(function (data) {
  // X-axis: one point per school.
  // NOTE(review): the original comment said "Year (2012-2020)", but the row
  // converter above only keeps School_Name — confirm the intended x dimension.
  var x = d3.scalePoint()
    .domain(data.map(function (d) { return d.School_Name }))
    .range([0, width])
  svg2.append('g')
    .attr('transform', 'translate(0,' + height + ')')
    .call(d3.axisBottom(x))

  // Y-axis: graduation rate (%) among all subgroups
  var y = d3.scaleLinear()
    .domain([0, d3.max(data, function (d) { return d.All_Subgroups_Percentage_Graduated })])
    .range([height, 0])
  svg2.append('g')
    .call(d3.axisLeft(y))

  // Physical addition of the line for the chart
  svg2.append('path')
    .datum(data)
    .attr('fill', 'none')
    .attr('stroke', 'steelblue')
    .attr('stroke-width', 1.5)
    .attr('d', d3.line()
      .x(function (d) { return x(d.School_Name) })
      .y(function (d) { return y(d.All_Subgroups_Percentage_Graduated) })
    )
})
|
<gh_stars>0
from numpy import array
from numpy.linalg import solve
default_input = "1, 1, 1, 6, -1, -2, 1, -2, 2, 1, 3, 13"
user_input = raw_input("Insert equations (or press enter for default): ").strip()
user_input = default_input if user_input == '' else user_input
user_input = [float(x) for x in user_input.split(', ')]
a = []
b = []
N = 3
for i in range(N):
eq = user_input[:N+1]
user_input = user_input[N+1:]
a.append(eq[:-1])
b.append(eq[-1])
result = solve(array(a), array(b))
print result
|
#!/bin/bash
# Copyright (c) 2017, 2020, Oracle Corporation and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
# This script starts a node manager for either a WebLogic Server pod,
# or for the WebLogic Operator introspector job.
#
# Requires the following to already be set:
#
# DOMAIN_UID = Domain UID
# JAVA_HOME = Existing java home
# DOMAIN_HOME = Existing WebLogic domain home directory
# NODEMGR_HOME = Target directory for NM setup files, this script
# will append this value with /$DOMAIN_UID/$SERVER_NAME
#
# Optionally set:
#
# SERVER_NAME = If not set, assumes this is introspector.
#
# ORACLE_HOME = Oracle Install Home - defaults via utils.sh/exportInstallHomes
# MW_HOME = MiddleWare Install Home - defaults to ${ORACLE_HOME}
# WL_HOME = WebLogic Install Home - defaults to ${ORACLE_HOME}/wlserver
#
# NODEMGR_LOG_HOME = Directory that will contain contain both
# ${DOMAIN_UID}/${SERVER_NAME}_nodemanager.log
# ${DOMAIN_UID}/${SERVER_NAME}_nodemanager.out
# Default:
# Use LOG_HOME. If LOG_HOME not set, use NODEMGR_HOME.
# NODEMGR_LOG_FILE_MAX = max NM .log and .out files to keep around (default=11)
#
# ADMIN_PORT_SECURE = "true" if the admin protocol is secure. Default is false
#
# FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR = "true" if WebLogic server should fail to
# boot if situational configuration related errors are
# found. Default to "true" if unspecified.
#
# NODEMGR_MEM_ARGS = JVM mem args for starting the Node Manager instance
# NODEMGR_JAVA_OPTIONS = Java options for starting the Node Manager instance
#
# If SERVER_NAME is set, then this NM is for a WL Server and these must also be set:
#
# SERVICE_NAME = Internal DNS name for WL Server SERVER_NAME
# ADMIN_NAME = Admin server name
# AS_SERVICE_NAME = Internal DNS name for Admin Server ADMIN_NAME
# USER_MEM_ARGS = JVM mem args for starting WL server
# JAVA_OPTIONS = Java options for starting WL server
#
###############################################################################
#
# Assert that expected global env vars are already set, pre-req files/dirs exist, etc.
#
SCRIPTPATH="$( cd "$(dirname "$0")" > /dev/null 2>&1 ; pwd -P )"
# utils.sh provides trace, checkEnv, exportInstallHomes, logFileRotate,
# getAdminServerUrl used throughout this script.
source ${SCRIPTPATH}/utils.sh
[ $? -ne 0 ] && echo "[SEVERE] Missing file ${SCRIPTPATH}/utils.sh" && exit 1
# Set ORACLE_HOME/WL_HOME/MW_HOME to defaults if needed
exportInstallHomes
stm_script=${WL_HOME}/server/bin/startNodeManager.sh
# No SERVER_NAME means this pod is the operator introspector job.
SERVER_NAME=${SERVER_NAME:-introspector}
ADMIN_PORT_SECURE=${ADMIN_PORT_SECURE:-false}
trace "Starting node manager for domain-uid='$DOMAIN_UID' and server='$SERVER_NAME'."
checkEnv JAVA_HOME NODEMGR_HOME DOMAIN_HOME DOMAIN_UID ORACLE_HOME MW_HOME WL_HOME || exit 1
if [ "${SERVER_NAME}" = "introspector" ]; then
SERVICE_NAME=localhost
trace "Contents of '${DOMAIN_HOME}/config/config.xml':"
cat ${DOMAIN_HOME}/config/config.xml
else
checkEnv SERVER_NAME ADMIN_NAME AS_SERVICE_NAME SERVICE_NAME USER_MEM_ARGS || exit 1
fi
[ ! -d "${JAVA_HOME}" ] && trace SEVERE "JAVA_HOME directory not found '${JAVA_HOME}'." && exit 1
[ ! -d "${DOMAIN_HOME}" ] && trace SEVERE "DOMAIN_HOME directory not found '${DOMAIN_HOME}'." && exit 1
[ ! -f "${DOMAIN_HOME}/config/config.xml" ] && trace SEVERE "'${DOMAIN_HOME}/config/config.xml' not found." && exit 1
[ ! -d "${WL_HOME}" ] && trace SEVERE "WL_HOME '${WL_HOME}' not found." && exit 1
[ ! -f "${stm_script}" ] && trace SEVERE "Missing script '${stm_script}' in WL_HOME '${WL_HOME}'." && exit 1
#
# Helper fn to create a folder
# Arg $1 - path of folder to create
#
function createFolder {
  # Create the directory (mode 750) with any missing parents, then verify it
  # really exists rather than trusting mkdir's exit status.
  mkdir -m 750 -p "$1"
  [ -d "$1" ] || { trace SEVERE "Unable to create folder '$1'."; exit 1; }
}
###############################################################################
#
# Determine WebLogic server log and out files locations
#
# -Dweblogic.Stdout system property is used to tell node manager to send server .out
# file to the configured location
#
if [ "${SERVER_NAME}" = "introspector" ]; then
# introspector pod doesn't start a WL server
serverOutOption=""
else
# setup ".out" location for a WL server
serverLogHome="${LOG_HOME:-${DOMAIN_HOME}/servers/${SERVER_NAME}/logs}"
export SERVER_OUT_FILE="${serverLogHome}/${SERVER_NAME}.out"
export SERVER_PID_FILE="${serverLogHome}/${SERVER_NAME}.pid"
export SHUTDOWN_MARKER_FILE="${serverLogHome}/${SERVER_NAME}.shutdown"
serverOutOption="-Dweblogic.Stdout=${SERVER_OUT_FILE}"
createFolder "${serverLogHome}"
# Clear any stale shutdown marker left over from a previous run.
rm -f ${SHUTDOWN_MARKER_FILE}
fi
###############################################################################
#
# Init/create nodemanager home and nodemanager log env vars and directory
#
export NODEMGR_HOME=${NODEMGR_HOME}/${DOMAIN_UID}/${SERVER_NAME}
createFolder ${NODEMGR_HOME}
NODEMGR_LOG_HOME=${NODEMGR_LOG_HOME:-${LOG_HOME:-${NODEMGR_HOME}/${DOMAIN_UID}}}
FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR=${FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR:-true}
trace "NODEMGR_HOME='${NODEMGR_HOME}'"
trace "LOG_HOME='${LOG_HOME}'"
trace "SERVER_NAME='${SERVER_NAME}'"
trace "DOMAIN_UID='${DOMAIN_UID}'"
trace "NODEMGR_LOG_HOME='${NODEMGR_LOG_HOME}'"
trace "FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR='${FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR}'"
createFolder ${NODEMGR_LOG_HOME}
nodemgr_log_file=${NODEMGR_LOG_HOME}/${SERVER_NAME}_nodemanager.log
nodemgr_out_file=${NODEMGR_LOG_HOME}/${SERVER_NAME}_nodemanager.out
nodemgr_lck_file=${NODEMGR_LOG_HOME}/${SERVER_NAME}_nodemanager.log.lck
checkEnv NODEMGR_LOG_HOME nodemgr_log_file nodemgr_out_file nodemgr_lck_file
trace "remove nodemanager .lck file"
rm -f ${nodemgr_lck_file}
###############################################################################
#
# Determine domain name by parsing ${DOMAIN_HOME}/config/config.xml
#
# We need the domain name to register the domain with the node manager
# but we only have the domain home.
#
# The 'right' way to find the domain name is to use offline wlst to
# read the domain then get it from the domain mbean, but that's slow
# and complicated. Instead, just get it by reading config.xml directly.
#
# Look for the 1st occurence of <name>somestring</name> and assume somestring
# is the domain name:
domain_name=`cat ${DOMAIN_HOME}/config/config.xml | sed 's/[[:space:]]//g' | grep '^<name>' | head -1 | awk -F'<|>' '{print $3}'`
if [ "$domain_name" = "" ]; then
trace SEVERE "Could not determine domain name"
exit 1
fi
###############################################################################
#
# Create nodemanager.properties and nodemanager.domains files in NM home
#
# NOTE: the heredoc bodies below are written verbatim into the generated
# files — do not add shell comments inside them.
#
nm_domains_file=${NODEMGR_HOME}/nodemanager.domains
cat <<EOF > ${nm_domains_file}
${domain_name}=${DOMAIN_HOME}
EOF
[ ! $? -eq 0 ] && trace SEVERE "Failed to create '${nm_domains_file}'." && exit 1
nm_props_file=${NODEMGR_HOME}/nodemanager.properties
cat <<EOF > ${nm_props_file}
#Node manager properties
NodeManagerHome=${NODEMGR_HOME}
JavaHome=${JAVA_HOME}
DomainsFile=${nm_domains_file}
DomainsFileEnabled=true
DomainsDirRemoteSharingEnabled=true
NativeVersionEnabled=true
PropertiesVersion=12.2.1
ListenAddress=127.0.0.1
ListenPort=5556
ListenBacklog=50
AuthenticationEnabled=false
SecureListener=false
weblogic.StartScriptEnabled=true
weblogic.StartScriptName=startWebLogic.sh
weblogic.StopScriptEnabled=false
QuitEnabled=false
StateCheckInterval=500
CrashRecoveryEnabled=false
LogFile=${nodemgr_log_file}
LogToStderr=true
LogFormatter=weblogic.nodemanager.server.LogFormatter
LogAppend=true
LogLimit=0
LogLevel=FINEST
LogCount=1
EOF
[ ! $? -eq 0 ] && trace SEVERE "Failed to create '${nm_props_file}'." && exit 1
###############################################################################
#
# If we're a WL Server pod, cleanup its old state file and
# create its NM startup.properties file.
#
if [ ! "${SERVER_NAME}" = "introspector" ]; then
wl_data_dir=${DOMAIN_HOME}/servers/${SERVER_NAME}/data/nodemanager
wl_state_file=${wl_data_dir}/${SERVER_NAME}.state
wl_props_file=${wl_data_dir}/startup.properties
createFolder ${wl_data_dir}
# Remove state file, because:
# 1 - The liveness probe checks this file
# 2 - It might have a stale value
# 3 - NM checks this file, and may auto-start the server if it's missing
if [ -f "$wl_state_file" ]; then
trace "Removing stale file '$wl_state_file'."
rm -f ${wl_state_file}
[ ! $? -eq 0 ] && trace SEVERE "Could not remove stale file '$wl_state_file'." && exit 1
fi
cat <<EOF > ${wl_props_file}
# Server startup properties
AutoRestart=true
RestartMax=2
RestartInterval=3600
NMHostName=${SERVICE_NAME}
Arguments=${USER_MEM_ARGS} -Dweblogic.SituationalConfig.failBootOnError=${FAIL_BOOT_ON_SITUATIONAL_CONFIG_ERROR} ${serverOutOption} ${JAVA_OPTIONS}
EOF
[ ! $? -eq 0 ] && trace SEVERE "Failed to create '${wl_props_file}'." && exit 1
# Managed servers need the admin server URL to register with it.
if [ ! "${ADMIN_NAME}" = "${SERVER_NAME}" ]; then
ADMIN_URL=$(getAdminServerUrl)
echo "AdminURL=$ADMIN_URL" >> ${wl_props_file}
fi
fi
###############################################################################
#
# Set additional env vars required to start NM
#
# Customized properties
export JAVA_PROPERTIES="-DLogFile=${nodemgr_log_file} -DNodeManagerHome=${NODEMGR_HOME}"
# Copied from ${DOMAIN_HOME}/bin/setNMJavaHome.sh
# (We assume a Oracle Sun Hotspot JVM since we're only using Linux VMs
# and only support other JVM types on non-Linux OS (HP-UX, IBM AIX, IBM zLinux)).
export BEA_JAVA_HOME=""
export DEFAULT_BEA_JAVA_HOME=""
export SUN_JAVA_HOME="${JAVA_HOME?}"
export DEFAULT_SUN_JAVA_HOME="${JAVA_HOME?}"
export JAVA_VENDOR="Oracle"
export VM_TYPE="HotSpot"
# Copied from ${DOMAIN_HOME}/bin/startNodeManager.sh
export NODEMGR_HOME="${NODEMGR_HOME?}"
export DOMAIN_HOME="${DOMAIN_HOME?}"
# Apply JAVA_OPTIONS to Node Manager if NODEMGR_JAVA_OPTIONS not specified
# BUG FIX: quote the operand of 'test -z' — unquoted, a multi-word
# NODEMGR_JAVA_OPTIONS expands into several arguments and the test errors out
# with "too many arguments" (the NODEMGR_MEM_ARGS check below already quoted).
if [ -z "${NODEMGR_JAVA_OPTIONS}" ]; then
  NODEMGR_JAVA_OPTIONS="${JAVA_OPTIONS}"
fi
if [ -z "${NODEMGR_MEM_ARGS}" ]; then
  # Default JVM memory arguments for Node Manager
  NODEMGR_MEM_ARGS="-Xms64m -Xmx100m -Djava.security.egd=file:/dev/./urandom "
fi
# We prevent USER_MEM_ARGS from being applied to the NM here and only pass
# USER_MEM_ARGS to WL Servers via the WL Server startup properties file above.
# This is so that WL Servers and NM can have different tuning. Use NODEMGR_MEM_ARGS or
# NODEMGR_JAVA_OPTIONS to specify JVM memory arguments for NMs.
# NOTE: Specifying USER_MEM_ARGS with ' ' (space, not empty string)
# prevents MEM_ARGS from being implicitly set by the WebLogic env
# scripts in the WebLogic installation and WLS from inserting default
# values for memory arguments. (See commBaseEnv.sh).
USER_MEM_ARGS=" "
export USER_MEM_ARGS
# NODEMGR_MEM_ARGS and NODEMGR_JAVA_OPTIONS are exported to Node Manager as JAVA_OPTIONS
# environment variable.
export JAVA_OPTIONS="${NODEMGR_MEM_ARGS} ${NODEMGR_JAVA_OPTIONS} -Dweblogic.RootDirectory=${DOMAIN_HOME}"
###############################################################################
#
# Start the NM
# 1) rotate old NM log file, and old NM out file, if they exist
# 2) start NM in background
# 3) wait up to ${NODE_MANAGER_MAX_WAIT:-60} seconds for NM by monitoring NM's .log file
# 4) log SEVERE, log INFO with 'exit 1' if wait more than ${NODE_MANAGER_MAX_WAIT:-60} seconds
#
trace "Start the nodemanager, node manager home is '${NODEMGR_HOME}', log file is '${nodemgr_log_file}', out file is '${nodemgr_out_file}'."
logFileRotate ${nodemgr_log_file} ${NODEMGR_LOG_FILE_MAX:-11}
logFileRotate ${nodemgr_out_file} ${NODEMGR_LOG_FILE_MAX:-11}
${stm_script} > ${nodemgr_out_file} 2>&1 &
start_secs=$SECONDS
max_wait_secs=${NODE_MANAGER_MAX_WAIT:-60}
# Poll once a second until the NM log reports its listener is up, or the
# timeout expires. ('while true' replaces 'while [ 1 -eq 1 ]'; the unused
# wait_count counter was removed.)
while true; do
  sleep 1
  if [ -e ${nodemgr_log_file} ] && [ `grep -c "Plain socket listener started" ${nodemgr_log_file}` -gt 0 ]; then
    break
  fi
  if [ $((SECONDS - $start_secs)) -ge $max_wait_secs ]; then
    # Timed out: dump diagnostics before failing the pod.
    trace INFO "Trying to put a node manager thread dump in '$nodemgr_out_file'."
    kill -3 `jps -l | grep weblogic.NodeManager | awk '{ print $1 }'`
    trace INFO "Contents of node manager log '$nodemgr_log_file':"
    cat ${nodemgr_log_file}
    trace INFO "Contents of node manager out '$nodemgr_out_file':"
    cat ${nodemgr_out_file}
    trace SEVERE "Node manager failed to start within $max_wait_secs seconds."
    exit 1
  fi
done
trace "Nodemanager started in $((SECONDS - start_secs)) seconds."
|
// Reads a newline-separated list of street names and writes them back out as a
// JavaScript array literal, e.g. `let streetNamesArr = ["Main St","Oak Ave"];`.
const fs = require( 'fs' );
let filename = './streetNames.txt';
let finalArr = [];
// Base name without the leading './' and the extension ('./streetNames.txt' -> 'streetNames').
// Used both for the output file name and the generated variable name.
// FIX: the output filename was previously the broken literal `$(unknown).js`.
const baseName = filename.slice(2).split('.').slice(0, -1).join('.');
fs.readFile( filename, (err, text) => {
    if ( err ) throw err;
    // Normalize Windows line endings before splitting into individual names.
    const arr = text.toString().replace( /\r\n/g, '\n' ).split( '\n' );
    for ( let i of arr ) {
        console.log( i );
        finalArr.push(`"${i}"`);
    }
    console.log( 'finalArr: ', finalArr );
    fs.writeFile( `${baseName}.js`, `let ${baseName}Arr = [${finalArr}];`, ( err ) => {
        // In case of a error throw err.
        if ( err ) throw err;
    } )
} );
|
#!/bin/sh
# Synology package lifecycle scripts for GateOne (preinst/postinst/...).
# Package
PACKAGE="gateone"
DNAME="GateOne"
# Others
# Installation target, start-stop-status helper, and the Python runtime paths.
INSTALL_DIR="/usr/local/${PACKAGE}"
SSS="/var/packages/${PACKAGE}/scripts/start-stop-status"
PYTHON_DIR="/usr/local/python"
PATH="${INSTALL_DIR}/bin:${INSTALL_DIR}/env/bin:${PYTHON_DIR}/bin:${PATH}"
# Dedicated service account the daemon runs as.
USER="gateone"
GROUP="nobody"
PYTHON="${INSTALL_DIR}/env/bin/python"
VIRTUALENV="${PYTHON_DIR}/bin/virtualenv"
# Scratch space used to preserve state across upgrades (see pre/postupgrade).
TMP_DIR="${SYNOPKG_PKGDEST}/../../@tmp"
# Pre-install hook: nothing to do; succeed unconditionally.
preinst ()
{
exit 0
}
# Post-install hook: link the package into place, create the service user,
# build a virtualenv, and install GateOne plus its bundled dependencies.
postinst ()
{
# Link
ln -s ${SYNOPKG_PKGDEST} ${INSTALL_DIR}
# Install busybox stuff
${INSTALL_DIR}/bin/busybox --install ${INSTALL_DIR}/bin
# Create user
adduser -h ${INSTALL_DIR}/var -g "${DNAME} User" -G ${GROUP} -s /bin/sh -S -D ${USER}
# Create a Python virtualenv
${VIRTUALENV} --system-site-packages ${INSTALL_DIR}/env > /dev/null
# Install the bundle
${INSTALL_DIR}/env/bin/pip install --no-index -U ${INSTALL_DIR}/share/requirements.pybundle > /dev/null
# Install GateOne
${PYTHON} ${INSTALL_DIR}/share/GateOne/setup.py install --prefix=${INSTALL_DIR} > /dev/null
# Correct the files ownership
chown -R ${USER}:root ${SYNOPKG_PKGDEST}
exit 0
}
# Pre-uninstall hook: stop the daemon and, on a real uninstall (not an
# upgrade), remove the dedicated service account.
preuninst ()
{
# Stop the package
${SSS} stop > /dev/null
# Remove the user
# FIX: use '=' instead of '==' — '==' inside '[ ]' is a bashism and this
# script runs under /bin/sh (POSIX test only defines '=').
if [ "${SYNOPKG_PKG_STATUS}" = "UNINSTALL" ]; then
delgroup ${USER} ${GROUP}
deluser ${USER}
fi
exit 0
}
# Post-uninstall hook: remove the symlink created by postinst.
postuninst ()
{
# Remove link
rm -f ${INSTALL_DIR}
exit 0
}
# Pre-upgrade hook: stop the daemon and stash its mutable state (var/)
# in TMP_DIR so postupgrade can restore it into the new package.
preupgrade ()
{
# Stop the package
${SSS} stop > /dev/null
# Save some stuff
rm -fr ${TMP_DIR}/${PACKAGE}
mkdir -p ${TMP_DIR}/${PACKAGE}
mv ${INSTALL_DIR}/var ${TMP_DIR}/${PACKAGE}/
exit 0
}
# Post-upgrade hook: restore the state saved by preupgrade and clean up.
postupgrade ()
{
# Restore some stuff
rm -fr ${INSTALL_DIR}/var
mv ${TMP_DIR}/${PACKAGE}/var ${INSTALL_DIR}/
rm -fr ${TMP_DIR}/${PACKAGE}
exit 0
}
|
#include <iostream>
#include <string>
// Tag describing which primitive kind an AtomicValue claims to hold.
enum class AtomicType {
INTEGER,
FLOAT,
STRING
};
// A value of type T paired with a runtime AtomicType tag.
// NOTE(review): the tag is not validated against T — e.g.
// AtomicValue<int>(1, AtomicType::STRING) compiles; confirm that is intended.
template<class T>
class AtomicValue {
public:
T value;          // the wrapped value
AtomicType type;  // runtime tag describing the value's kind
// Stores the value and its tag; no validation is performed.
AtomicValue(T new_value, AtomicType new_type) : value(new_value), type(new_type) {}
};
// Demonstrates constructing and printing AtomicValue instances for each tag.
int main() {
// Example usage
AtomicValue<int> int_value(10, AtomicType::INTEGER);
AtomicValue<float> float_value(3.14f, AtomicType::FLOAT);
// NOTE(review): std::string is used here but only <iostream> is included
// at the top of the file; this relies on a transitive include of <string>.
AtomicValue<std::string> string_value("Hello", AtomicType::STRING);
std::cout << "Integer value: " << int_value.value << std::endl;
std::cout << "Float value: " << float_value.value << std::endl;
std::cout << "String value: " << string_value.value << std::endl;
return 0;
}
#!/usr/bin/env bash
# Builds the Hugo site and force-pushes the compiled output to the 'master'
# branch of the same repository as a fresh orphan commit.
set -eu
# Refuse to publish from a dirty working tree.
if [[ ! -z $(git status -s) ]]; then
echo "git not clean, commit changes first"
exit 1
fi
echo "-------------------------------------------"
echo 'Compiling site'
export HUGO_USE_ANALYTICS=true
hugo
PUBLIC=public
# Reuse the source repo's remote; record the source commit in the message.
REMOTE_URL=$(git config --get remote.origin.url)
SOURCE_SHA=$(git rev-parse --short HEAD)
echo "-------------------------------------------"
echo 'Pushing to master'
# Re-initialize the output dir as a standalone repo with a single orphan commit.
rm -rf $PUBLIC/.git
git init $PUBLIC
git -C $PUBLIC checkout --orphan master
git -C $PUBLIC add .
git -C $PUBLIC commit --no-verify -m "Updated site to source @${SOURCE_SHA}"
git -C $PUBLIC remote add origin ${REMOTE_URL}
# History is rewritten on every deploy, so the push must be forced.
git -C $PUBLIC push --force origin master
|
<reponame>maxwellburson/42_ft_db<filename>src/vector/vec_realloc.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* corewar.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: zsmith <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2017/03/04 18:52:02 by mburson #+# #+# */
/* Updated: 2017/04/05 13:46:56 by zsmith ### ########.fr */
/* */
/* ************************************************************************** */
#include <vector.h>
#include <libft.h>
#include <stdlib.h>
#include <string.h>
/*
** Resizes the backing storage of `vec` to hold `new_size` elements.
** Existing elements are copied into the new buffer. If `new_size` is
** smaller than the current element count, only the first `new_size`
** elements are kept (previously this case copied the full old contents
** and overflowed the new, smaller buffer).
** Returns 0 on success, -1 on allocation failure (vec is left untouched).
*/
int vec_realloc(t_vec *vec, size_t new_size)
{
	uint8_t	*new_data;
	size_t	keep;

	if (NULL == (new_data = malloc(vec->elmnt_size * new_size)))
		return (-1);
	/* Clamp the copy so shrinking cannot write past the new buffer. */
	keep = vec->elmnt_count;
	if (keep > new_size)
		keep = new_size;
	memcpy(new_data, vec->data, vec->elmnt_size * keep);
	free(vec->data);
	vec->elmnt_count = keep;
	vec->elmnt_max = new_size;
	vec->data = new_data;
	vec->data_end = new_data + (vec->elmnt_size * keep);
	return (0);
}
|
# Import the relevant libraries
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.pipeline import Pipeline
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
from sklearn.naive_bayes import MultinomialNB
# FIX: accuracy_score was used below but never imported.
from sklearn.metrics import accuracy_score

# Preprocessing
# NOTE: CountVectorizer expects raw strings and performs its own tokenization;
# pre-splitting each tweet into a list of words (the previous
# `x.split(" ")` apply) made fit_transform fail when it tried to lowercase a
# list, so the split step has been removed.
# NOTE(review): tweets_data is assumed to be a DataFrame with "text" and
# "sentiment" columns, defined earlier in the notebook/script — confirm.
tweets_data = shuffle(tweets_data)

# Feature Extraction: bag-of-words counts followed by TF-IDF weighting.
text_clf = Pipeline([('vect', CountVectorizer()), ('tfidf', TfidfTransformer())])
X = text_clf.fit_transform(tweets_data["text"])
y = tweets_data["sentiment"]

# Split the data into training and testing sets (70/30, fixed seed).
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Train the model
clf = MultinomialNB().fit(X_train, y_train)

# Evaluate the accuracy of the model
predictions = clf.predict(X_test)
print("Accuracy:", accuracy_score(y_test, predictions))
<!DOCTYPE html>
<html lang="en">
<head>
<title>Color List</title>
<style>
/*
 * Two-column color swatch grid.
 * FIX: width: 50% plus 5px padding and 5px margin exceeded 100% per row,
 * so the second box in each row wrapped onto its own line. box-sizing
 * folds the padding into the width, and calc() subtracts the two 5px
 * side margins so two boxes fit side by side.
 */
.box {
float: left;
box-sizing: border-box;
width: calc(50% - 10px);
height: 100px;
padding: 5px;
margin: 5px;
text-align: center;
}
</style>
</head>
<body>
<h1>List of Colors</h1>
<!-- Each div row holds two swatches side by side. -->
<div>
<div class="box" style="background-color: red;">Red</div>
<div class="box" style="background-color: green;">Green</div>
</div>
<div>
<div class="box" style="background-color: blue;">Blue</div>
<div class="box" style="background-color: yellow;">Yellow</div>
</div>
</body>
</html>
<filename>src/calculators/Utils.ts
class Utils {
    /**
     * Formats `count` as a percentage of `total`, e.g. "25 %".
     * Values strictly between 0 and 1 percent keep one decimal place
     * (e.g. "0.5 %"); everything else is rounded to a whole number.
     */
    static renderPercentage(count: number, total: number): string {
        const pct = (100 * count) / total;
        const useOneDecimal = pct > 0 && pct < 1;
        return `${pct.toFixed(useOneDecimal ? 1 : 0)} %`;
    }
}
export = Utils;
|
#!/bin/sh
# Container entrypoint: optionally block until the database accepts TCP
# connections, then hand off to the requested command.
# FIX: '[' is a command and requires spaces around its arguments;
# '["$DATABASE" = "postgres"]' was a runtime syntax error ('[postgres: not found').
if [ "$DATABASE" = "postgres" ]
then
    echo "Waiting for postgres..."
    # Poll until the port accepts connections (nc -z: connect-scan only).
    while ! nc -z $SQL_HOST $SQL_PORT; do
        sleep 0.1
    done
    echo "PostgreSQL started"
fi
# Replace this shell with the container command so signals reach it directly.
exec "$@"
|
#!/bin/bash
# SLURM batch job: trains double-DDPG on discrete MountainCar-v0 (seed 3,
# action-noise exploration, hard target copy) on a CPU virtualenv.
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M # memory per node
#SBATCH --time=10:00:00 # time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/discrete_MountainCar-v0_doule_ddpg_hardcopy_action_noise_seed3_run3_%N-%j.out # %N for node name, %j for jobID
# Toolchain modules required by the TensorFlow/Gym environment below.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
python ./ddpg_discrete_action.py --env MountainCar-v0 --random-seed 3 --exploration-strategy action_noise --summary-dir ../Double_DDPG_Results_no_monitor/discrete/MountainCar-v0/doule_ddpg_hardcopy_action_noise_seed3_run3 --target-hard-copy-flag
|
package de.siphalor.tweed.config;
import de.siphalor.tweed.Tweed;
import de.siphalor.tweed.config.entry.AbstractBasicEntry;
import de.siphalor.tweed.config.entry.ConfigEntry;
import de.siphalor.tweed.data.DataContainer;
import de.siphalor.tweed.data.DataObject;
import de.siphalor.tweed.data.DataValue;
import net.minecraft.network.PacketByteBuf;
import net.minecraft.util.Identifier;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Stream;
/**
 * A config entry that groups child {@link ConfigEntry}s under named keys.
 * Categories forward environment/scope defaults to their children and
 * (de)serialize them to data objects and packet buffers.
 */
public class ConfigCategory extends AbstractBasicEntry<ConfigCategory> {
// Child entries keyed by data key; LinkedHashMap preserves registration order.
protected Map<String, ConfigEntry<?>> entries = new LinkedHashMap<>();
// Background texture for a possible GUI; may be null.
protected Identifier backgroundTexture;
// Optional callback fired after this category has been (re)read; may be null.
private Runnable reloadListener;
/**
* Adds a new entry to the category
* @param name the key used in the data architecture
* @param configEntry the entry to add
* @see ConfigFile#register(String, ConfigEntry)
*/
public <T extends ConfigEntry<?>> T register(String name, T configEntry) {
entries.put(name, configEntry);
// Children registered with DEFAULT inherit this category's environment/scope.
if(configEntry.getEnvironment() == ConfigEnvironment.DEFAULT) configEntry.setEnvironment(environment);
if(configEntry.getScope() == ConfigScope.DEFAULT) configEntry.setScope(scope);
return configEntry;
}
/**
* Looks up a child entry by its data key.
* @param name the key the entry was registered under
* @return the entry, or null if no entry uses that key
*/
public ConfigEntry<?> get(String name) {
return entries.get(name);
}
// Resets every child entry matching the given environment and scope.
@Override
public void reset(ConfigEnvironment environment, ConfigScope scope) {
entryStream(environment, scope).forEach(entry -> entry.getValue().reset(environment, scope));
}
// The category's description is its comment (set on the base entry).
@Override
public String getDescription() {
return comment;
}
/**
* Sets the background texture for a possible GUI
* @param backgroundTexture an identifier to that texture
* @return this category for chain calls
*/
public ConfigCategory setBackgroundTexture(Identifier backgroundTexture) {
this.backgroundTexture = backgroundTexture;
return this;
}
/**
* Gets the background texture identifier (<b>may be null!</b>)
* @return an identifier for the background texture or <b>null</b>
*/
public Identifier getBackgroundTexture() {
return backgroundTexture;
}
// Sets this category's environment and propagates it to children still at DEFAULT.
@Override
public ConfigCategory setEnvironment(ConfigEnvironment environment) {
super.setEnvironment(environment);
entries.values().stream().filter(configEntry -> configEntry.getEnvironment() == ConfigEnvironment.DEFAULT).forEach(configEntry -> configEntry.setEnvironment(environment));
return this;
}
/**
 * Computes the widest environment needed to cover all children by walking
 * up the parent chain until it contains each child's environment.
 * Falls back to this category's own environment when there are no children.
 * NOTE(review): assumes the ConfigEnvironment.parent chain terminates at an
 * environment containing all others — confirm against ConfigEnvironment.
 */
@Override
public ConfigEnvironment getEnvironment() {
if(entries.isEmpty()) return environment;
Iterator<ConfigEntry<?>> iterator = entries.values().iterator();
ConfigEnvironment environment = iterator.next().getEnvironment();
while(iterator.hasNext()) {
ConfigEnvironment itEnvironment = iterator.next().getEnvironment();
while(!environment.contains(itEnvironment))
environment = environment.parent;
}
return environment;
}
// Sets this category's scope and propagates it to children still at DEFAULT.
@Override
public ConfigCategory setScope(ConfigScope scope) {
super.setScope(scope);
entries.values().stream().filter(configEntry -> configEntry.getScope() == ConfigScope.DEFAULT).forEach(configEntry -> configEntry.setScope(scope));
return this;
}
// Returns the "minimal" child scope: the one that triggers the others
// (per ConfigScope.triggers); falls back to this category's scope when empty.
@Override
public ConfigScope getScope() {
if(entries.isEmpty()) return scope;
return entries.values().stream().map(ConfigEntry::getScope).min((o1, o2) -> o1 == o2 ? 0 : (o1.triggers(o2) ? -1 : 1)).get();
}
/**
 * Reads this category from a data value, which must be an object.
 * Each matching child key is read and its constraints applied; read errors
 * in individual children are logged and skipped rather than aborting.
 * Fires the reload listener afterwards.
 */
@Override
public void read(DataValue<?> dataValue, ConfigEnvironment environment, ConfigScope scope, ConfigOrigin origin) throws ConfigReadException {
if(!dataValue.isObject()) {
throw new ConfigReadException("The entry should be an object (category)");
}
DataObject<?> dataObject = dataValue.asObject();
entryStream(environment, scope).filter(entry -> dataObject.has(entry.getKey())).forEach(entry -> {
DataValue<?> value = dataObject.get(entry.getKey());
try {
entry.getValue().read(value, environment, scope, origin);
} catch (ConfigReadException e) {
Tweed.LOGGER.error("Error reading " + entry.getKey() + ":");
e.printStackTrace();
return;
}
try {
entry.getValue().applyConstraints();
} catch (ConfigReadException e) {
Tweed.LOGGER.error("Error reading " + entry.getKey() + " in post-constraints:");
e.printStackTrace();
}
});
onReload();
}
/**
 * Reads this category from a packet buffer as written by
 * {@link #write(PacketByteBuf, ConfigEnvironment, ConfigScope, ConfigOrigin)}:
 * a sequence of (true, key, payload) records terminated by false.
 * Fires the reload listener afterwards.
 */
@Override
public void read(PacketByteBuf buf, ConfigEnvironment environment, ConfigScope scope, ConfigOrigin origin) {
while(buf.readBoolean()) {
ConfigEntry<?> entry = entries.get(buf.readString(32767));
if(entry != null)
entry.read(buf, environment, scope, origin);
else
// An unknown key means the stream is misaligned — further reads would corrupt.
throw new RuntimeException("Attempt to sync unknown entry! Aborting.");
}
onReload();
}
// Writes each matching child as (true, key, payload); a trailing false marks the end.
@Override
public void write(PacketByteBuf buf, ConfigEnvironment environment, ConfigScope scope, ConfigOrigin origin) {
entryStream(environment, scope).forEach(entry -> {
buf.writeBoolean(true);
buf.writeString(entry.getKey());
entry.getValue().write(buf, environment, scope, origin);
});
buf.writeBoolean(false);
}
/**
 * Writes this category into a data container. An empty key writes into the
 * container itself; otherwise a child object is created (or reused) under
 * the key. The category comment is attached when present.
 */
@Override
public <Key> void write(DataContainer<?, Key> dataContainer, Key key, ConfigEnvironment environment, ConfigScope scope) {
DataContainer category;
if(key.equals("")) {
category = dataContainer;
} else if(!dataContainer.has(key)) {
category = dataContainer.addObject(key);
} else {
category = dataContainer.get(key).asObject();
}
if(!comment.equals(""))
category.setComment(getComment());
entryStream(environment, scope).forEach(entry -> entry.getValue().write(category, entry.getKey(), environment, scope));
}
// Streams all (key, entry) pairs in registration order.
public Stream<Map.Entry<String, ConfigEntry<?>>> entryStream() {
return entries.entrySet().stream();
}
// Streams only entries whose environment contains the given one and whose
// scope is triggered by the given scope.
public Stream<Map.Entry<String, ConfigEntry<?>>> entryStream(ConfigEnvironment environment, ConfigScope scope) {
return entryStream().filter(entry -> entry.getValue().getEnvironment().contains(environment) && scope.triggers(entry.getValue().getScope()));
}
// True if no entries have been registered.
public boolean isEmpty() {
return entries.isEmpty();
}
/**
 * Registers a callback fired after this category has been read/synced.
 * @param reloadListener the callback to run on reload
 * @return this category for chain calls
 */
public ConfigCategory setReloadListener(Runnable reloadListener) {
this.reloadListener = reloadListener;
return this;
}
// Fires the reload listener, if one is registered.
public void onReload() {
if (reloadListener != null)
reloadListener.run();
}
}
|
// This module was autogenerated. Please don't edit.
// Re-exports Material-UI's Backdrop component under an "unsafe" alias.
exports._UnsafeBackdrop = require("@material-ui/core/Backdrop").default;
/// Returns the largest value in `arr`.
///
/// # Panics
/// Panics with "Empty array provided" if `arr` is empty, matching the
/// original hand-rolled implementation.
fn find_max(arr: &[i32]) -> i32 {
    if arr.is_empty() {
        panic!("Empty array provided");
    }
    // Idiomatic form: Iterator::max performs the same linear scan the
    // manual loop did. unwrap() is safe — emptiness was ruled out above.
    *arr.iter().max().unwrap()
}
# CI script: refresh Homebrew formulae that PHP depends on and record which
# ones changed their libraries (part 1: setup and snapshot of current dylibs).
# Keep brew fast and non-interactive in CI.
export HOMEBREW_CHANGE_ARCH_TO_ARM=1
export HOMEBREW_NO_INSTALL_CLEANUP=1
export HOMEBREW_NO_AUTO_UPDATE=1
export HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK=1
core_repo="$(brew --repository homebrew/core)"
# Per-runner-image file listing formulae whose libraries changed.
deps_file="$GITHUB_WORKSPACE/.github/deps/${ImageOS:?}_${ImageVersion:?}"
# configure git
git config --local user.email 1589480+BrewTestBot@users.noreply.github.com
git config --local user.name BrewTestBot
git config --local pull.rebase true
# Install PHP if not found, this will install all the dependencies
brew install php
# Update dependency formulae
for formula in apr apr-util argon2 aspell autoconf curl freetds gd gettext glib gmp icu4c krb5 libffi libpq libsodium libzip oniguruma openldap openssl@1.1 pcre2 sqlite tidy-html5 unixodbc; do
formula_prefix="$(brew --prefix "$formula")"
# Skip formulae that ship no libraries.
if ! [ -d "$formula_prefix"/lib ]; then
continue
fi
# Snapshot the currently installed dylibs, then pull the latest formula
# definition from upstream homebrew-core.
mkdir -p /tmp/libs/"$formula"
curl -o "$core_repo/Formula/$formula.rb" -sL https://raw.githubusercontent.com/Homebrew/homebrew-core/master/Formula/"$formula".rb
find "$formula_prefix"/lib -maxdepth 1 -name \*.dylib -print0 | xargs -I{} -0 cp -a {} /tmp/libs/"$formula"/
done
# Get updated formulae
# (formula names are the changed files under Formula/, extension stripped)
(
cd "$core_repo" || exit
git diff --name-only | cut -d '/' -f 2 | sed -e 's/\.[^.]*$//' | sudo tee /tmp/deps_updated
)
# Check updated formulae for library changes (part 2): reinstall each updated
# formula and record it in $deps_file when its set of dylib names changed.
# FIX: use 'rm -f' — plain 'rm' fails when the deps file does not exist yet
# (e.g. first run for this image), which short-circuited the '&&' and left
# $deps_file uncreated when no formula ended up being appended below.
rm -f "$deps_file" && touch "$deps_file"
while read -r formula; do
formula_prefix="$(brew --prefix "$formula")"
if ! [ -d "$formula_prefix"/lib ]; then
continue
fi
printf "\n--- %s ---\n" "$formula"
# Reinstall from the just-updated formula definition; ignore failures.
brew reinstall "$formula" 2>/dev/null 1>&2 || true
formula_prefix="$(brew --prefix "$formula")"
# Compare the set of dylib basenames before/after the reinstall; a change
# means dependent builds must be refreshed.
old_libs_hash=$(find /tmp/libs/"$formula"/ -maxdepth 1 -name '*.dylib' -exec basename {} \; | openssl sha256)
new_libs_hash=$(find "$formula_prefix"/lib -maxdepth 1 -name '*.dylib' -exec basename {} \; | openssl sha256)
echo "old hash: $old_libs_hash"
echo "new hash: $new_libs_hash"
if [ "$old_libs_hash" != "$new_libs_hash" ]; then
echo "$formula" | sudo tee -a "$deps_file"
fi
done </tmp/deps_updated
# Push changes (part 3): commit the updated deps file back to master, if any.
ls ./.github/deps/*
# Only commit when the working tree actually changed.
if [ "$(git status --porcelain=v1 2>/dev/null | wc -l)" != "0" ]; then
# Stash, fast-forward onto upstream master, then reapply — avoids a
# non-fast-forward push when other jobs pushed in the meantime.
git stash
git pull -f https://"$GITHUB_REPOSITORY_OWNER":"$GITHUB_TOKEN"@github.com/"$GITHUB_REPOSITORY".git master
git stash apply
git add .
git commit -m "Update PHP dependencies on ${ImageOS:?} ${ImageVersion:?} runner"
# '|| true': a lost push race is tolerated; the next run will retry.
git push -f https://"$GITHUB_REPOSITORY_OWNER":"$GITHUB_TOKEN"@github.com/"$GITHUB_REPOSITORY".git master || true
fi
|
from typing import List, Dict, Union, Any
def process_screenshot_managers(screenshot_managers: List[Dict[str, Union[str, Any]]]) -> List[str]:
    """Build one status message per screenshot-manager config.

    Each manager dict may carry:
      - "status_format": a str.format template (default "Screenshot taken");
        it may reference {play_status}, {artist} and {title}.
      - "play_status": value substituted for {play_status} (default "").

    {artist} and {title} are always substituted with the empty string.
    Returns the messages in the same order as the input list.
    """
    return [
        manager.get("status_format", "Screenshot taken").format(
            play_status=manager.get("play_status", ""),
            artist="",
            title="",
        )
        for manager in screenshot_managers
    ]
<gh_stars>0
import { ConfirmationModel } from '../../lib/core/interfaces/IConfirmationStore';
import Config from '../../lib/core/models/Config';
import MongoDb from '../common/MongoDb';
import MongoDbConfirmationStore from '../../lib/core/MongoDbConfirmationStore';
/**
 * Creates a MongoDbConfirmationStore and initializes it.
 * @param ConfirmationStoreUri MongoDB connection string for the store
 * @param databaseName name of the database the store's collection lives in
 * @returns the store, ready for use (initialize() has completed)
 */
async function createConfirmationStore (ConfirmationStoreUri: string, databaseName: string): Promise<MongoDbConfirmationStore> {
const ConfirmationStore = new MongoDbConfirmationStore(ConfirmationStoreUri, databaseName);
await ConfirmationStore.initialize();
return ConfirmationStore;
}
// FIX: the describe callback was declared 'async' — jasmine ignores the
// promise a describe callback returns, so an async callback is misleading
// and can hide registration bugs. Spec registration here is synchronous,
// so the callback is now a plain function (async stays on beforeAll/
// beforeEach/it, where jasmine does await it).
describe('MongoDbConfirmationStore', () => {
const config: Config = require('../json/config-test.json');
const databaseName = 'sidetree-test';
// Undefined until beforeAll has probed the MongoDB server.
let mongoServiceAvailable: boolean | undefined;
let confirmationStore: MongoDbConfirmationStore;
beforeAll(async () => {
mongoServiceAvailable = await MongoDb.isServerAvailable(config.mongoDbConnectionString);
if (mongoServiceAvailable) {
confirmationStore = await createConfirmationStore(config.mongoDbConnectionString, databaseName);
}
});
beforeEach(async () => {
// Skip (not fail) every spec when no MongoDB server is reachable.
if (!mongoServiceAvailable) {
pending('MongoDB service not available');
}
// Each spec starts from an empty collection.
await confirmationStore.clearCollection();
});
describe('getLastSubmitted', () => {
it('should get the last submitted transaction', async () => {
// "Last" means greatest submittedAt value, not insertion order.
await confirmationStore.submit('anchor-string1', 103);
await confirmationStore.submit('anchor-string2', 104);
await confirmationStore.submit('anchor-string3', 105);
await confirmationStore.submit('anchor-string4', 102);
await confirmationStore.submit('anchor-string5', 101);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 105, anchorString: 'anchor-string3'
}));
});
it('should return undefined if nothing has been submitted yet', async () => {
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(undefined);
});
it('should return confirmed once confirmed', async () => {
await confirmationStore.submit('anchor-string1', 100);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, anchorString: 'anchor-string1'
}));
// Confirming attaches confirmedAt to the existing record.
await confirmationStore.confirm('anchor-string1', 101);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, confirmedAt: 101, anchorString: 'anchor-string1'
}));
// A newer submission supersedes the confirmed one as "last submitted".
await confirmationStore.submit('anchor-string2', 105);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 105, anchorString: 'anchor-string2'
}));
await confirmationStore.confirm('anchor-string2', 106);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 105, confirmedAt: 106, anchorString: 'anchor-string2'
}));
});
it('should clear the collections using afterReset with undefined args', async () => {
await confirmationStore.submit('anchor-string1', 100);
await confirmationStore.confirm('anchor-string1', 101);
await confirmationStore.submit('anchor-string2', 110);
// resetAfter(undefined) drops confirmations but keeps submissions.
await confirmationStore.resetAfter(undefined);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 110, anchorString: 'anchor-string2'
}));
});
it('should handle reorg correctly', async () => {
await confirmationStore.submit('anchor-string1', 100);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, anchorString: 'anchor-string1'
}));
await confirmationStore.confirm('anchor-string1', 101);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, confirmedAt: 101, anchorString: 'anchor-string1'
}));
// Resetting after the confirmation time keeps the confirmation...
await confirmationStore.resetAfter(101);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, confirmedAt: 101, anchorString: 'anchor-string1'
}));
// ...while resetting before it (simulating a chain reorg) clears it.
await confirmationStore.resetAfter(100);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, anchorString: 'anchor-string1'
}));
await confirmationStore.confirm('anchor-string1', 102);
await expectAsync(confirmationStore.getLastSubmitted()).toBeResolvedTo(jasmine.objectContaining<ConfirmationModel|undefined>({
submittedAt: 100, confirmedAt: 102, anchorString: 'anchor-string1'
}));
});
});
});
|
#!/usr/bin/env bash
# End-to-end test pipelines for the catalyst examples. Each numbered pipeline
# trains a small config via catalyst/dl/scripts/run.py in --check mode, then
# asserts on the metrics JSON written to ${LOGDIR}/checkpoints/_metrics.json.
pip install tifffile #TODO: check if really required
mkdir -p data
# gdrive
# gdrive_download 1N82zh0kzmnzqRvUyMgVOGsCoS1kHf3RP ./data/isbi.tar.gz
# aws
# Fetch the ISBI segmentation dataset used by pipeline 11.
wget https://catalyst-ai.s3-eu-west-1.amazonaws.com/isbi.tar.gz -O ./data/isbi.tar.gz
tar -xf ./data/isbi.tar.gz -C ./data/
# @TODO: fix macos fail with sed
# NOTE(review): 'set -e' is only enabled from this point on — the download
# steps above are allowed to fail silently; confirm that is intended.
set -e
# imports check
(set -e; for f in examples/_tests_scripts/*.py; do PYTHONPATH=./catalyst:${PYTHONPATH} python "$f"; done)
#(set -e; for f in examples/_tests_scripts/dl_*.py; do PYTHONPATH=./catalyst:${PYTHONPATH} python "$f"; done)
#(set -e; for f in examples/_tests_scripts/z_*.py; do PYTHONPATH=./catalyst:${PYTHONPATH} python "$f"; done)
################################ pipeline 00 ################################
rm -rf ./examples/logs
################################ pipeline 01 ################################
echo 'pipeline 01'
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages1
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config1.yml \
--logdir=${LOGDIR} \
--check
# The run must have produced a readable metrics file.
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
# Loss must decrease across the stage and end below an absolute bound.
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('stage1.3', 'loss') < 2.1
"""
# Also check that the trained model can be traced.
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/trace.py \
${LOGDIR}
rm -rf $LOGDIR
################################ pipeline 02 ################################
echo 'pipeline 02'
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages1
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config2.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('stage1.3', 'loss') < 2.1
"""
# Inference pass: resume from the best checkpoint and dump predictions.
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config3.yml \
--resume=${LOGDIR}/checkpoints/best.pth \
--out_dir=${LOGDIR}/:str \
--out_prefix="/predictions/":str
cat $LOGFILE
# MNIST test split: 10000 samples x 10 class logits.
python -c """
import numpy as np
data = np.load('${LOGDIR}/predictions/infer.logits.npy')
assert data.shape == (10000, 10)
"""
rm -rf $LOGDIR
################################ pipeline 03 ################################
echo 'pipeline 03'
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages1
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config4.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('stage1.3', 'loss') < 2.1
"""
rm -rf ${LOGDIR}
################################ pipeline 04 ################################
echo 'pipeline 04'
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages1
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config5.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
# config5 has two stages; the assertions target stage2.
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage2.3', 'loss') < metrics.get('stage2.1', 'loss')
assert metrics.get('stage2.3', 'loss') < 2.1
"""
rm -rf ${LOGDIR}
################################ pipeline 05 ################################
echo 'pipeline 05'
# LrFinder
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages1
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config6.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage2.3', 'loss') < metrics.get('stage2.1', 'loss')
assert metrics.get('stage2.3', 'loss') < 14.5
"""
rm -rf ${LOGDIR}
################################ pipeline 06 ################################
echo 'pipeline 06'
EXPDIR=./examples/_tests_mnist_stages
LOGDIR=./examples/logs/_tests_mnist_stages_finder
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
# Smoke test only: run the lr-finder config in the background for 30 seconds,
# then stop it via job control (%1 = the backgrounded run.py).
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config_finder.yml \
--logdir=${LOGDIR} &
sleep 30
kill %1
rm -rf ${LOGDIR}
################################ pipeline 07 ################################
echo 'pipeline 07'
EXPDIR=./examples/_tests_mnist_stages2
LOGDIR=./examples/logs/_tests_mnist_stages2
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config1.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
# assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('best', 'loss') < 2.35
"""
rm -rf ${LOGDIR}
################################ pipeline 08 ################################
echo 'pipeline 08'
EXPDIR=./examples/_tests_mnist_stages2
LOGDIR=./examples/logs/_tests_mnist_stages2
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config2.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('stage1.3', 'loss') < 2.35
"""
rm -rf ${LOGDIR}
################################ pipeline 09 ################################
echo 'pipeline 09'
EXPDIR=./examples/_tests_mnist_stages2
LOGDIR=./examples/logs/_tests_mnist_stages2
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config3.yml \
--logdir=${LOGDIR} \
--check
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
# assert metrics.get('stage1.3', 'loss') < metrics.get('stage1.1', 'loss')
assert metrics.get('stage1.3', 'loss') < 2.33
"""
rm -rf ${LOGDIR}
################################ pipeline 10 ################################
echo 'pipeline 10'
EXPDIR=./examples/_tests_mnist_stages2
LOGDIR=./examples/logs/_tests_mnist_stages_finder
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
# Background smoke test, same pattern as pipeline 06.
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config_finder.yml \
--logdir=${LOGDIR} &
sleep 30
kill %1
rm -rf ${LOGDIR}
################################ pipeline 11 ################################
# SEGMENTATION
echo 'pipeline 11 - SEGMENTATION'
EXPDIR=./examples/_test_segmentation
LOGDIR=./examples/logs/_test_segmentation
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
## load the data
# mkdir -p ./examples/_test_segmentation/data
# cd ./examples/_test_segmentation/data/
# download-gdrive 1iYaNijLmzsrMlAdMoUEhhJuo-5bkeAuj segmentation_data.zip
# extract-archive segmentation_data.zip
# cd ../../..
## train
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--configs ${EXPDIR}/config.yml ${EXPDIR}/transforms.yml \
--logdir=${LOGDIR} \
--stages/data_params/image_path=./examples/_test_segmentation/data/segmentation_data/train:str \
--stages/data_params/mask_path=./examples/_test_segmentation/data/segmentation_data/train_masks:str \
--check
## check metrics
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
python -c """
from safitty import Safict
metrics = Safict.load('$LOGFILE')
iou = metrics.get('last', 'iou')
loss = metrics.get('last', 'loss')
print('iou', iou)
print('loss', loss)
assert iou > 0.8, f'iou must be > 0.8, got {iou}'
assert loss < 0.2, f'loss must be < 0.2, got {loss}'
"""
## remove logs
rm -rf ./examples/logs/_test_segmentation
################################ pipeline 12 ################################
# GAN
echo 'pipeline 12 - GAN'
EXPDIR=./examples/mnist_gan
LOGDIR=./examples/logs/mnist_gan
LOGFILE=${LOGDIR}/checkpoints/_metrics.json
PYTHONPATH=./examples:./catalyst:${PYTHONPATH} \
python catalyst/dl/scripts/run.py \
--expdir=${EXPDIR} \
--config=${EXPDIR}/config.yml \
--logdir=${LOGDIR} \
--stages/state_params/num_epochs=11:int
if [[ ! (-f "$LOGFILE" && -r "$LOGFILE") ]]; then
echo "File $LOGFILE does not exist"
exit 1
fi
cat $LOGFILE
# Loose upper bounds only — GAN losses are too noisy for tight ranges
# (the commented-out tighter ranges were flaky).
python -c """
from safitty import Safict
metrics=Safict.load('$LOGFILE')
loss_g = metrics.get('last', 'loss_g')
loss_d_real = metrics.get('last', 'loss_d_real')
loss_d_fake = metrics.get('last', 'loss_d_fake')
loss_d = metrics.get('last', 'loss_d')
print('loss_g', loss_g)
print('loss_d_real', loss_d_real)
print('loss_d_fake', loss_d_fake)
print('loss_d', loss_d)
# assert 0.9 < loss_g < 1.5
# assert 0.3 < loss_d_real < 0.6
# assert 0.28 < loss_d_fake < 0.58
# assert 0.3 < loss_d < 0.6
assert loss_g < 2.0
assert loss_d_real < 0.9
assert loss_d_fake < 0.9
assert loss_d < 0.9
"""
rm -rf ${LOGDIR}
################################ pipeline 99 ################################
rm -rf ./examples/logs
|
#!/bin/bash
# SLURM batch job: runs a pyORBIT lattice-tracking simulation across 4 nodes
# and records job metadata plus total runtime into a per-job info file.
#SBATCH --job-name=LO_01
#SBATCH --output=slurm.%N.%j.out
#SBATCH --error=slurm.%N.%j.err
#SBATCH --nodes=4
#SBATCH --ntasks-per-node=20
#SBATCH --partition=inf-short
#SBATCH --time=120:00:00
#SBATCH --mem-per-cpu=3200M
#SBATCH --exclusive
#SBATCH --hint=nomultithread
BATCH_ROOT_DIR=/hpcscratch/user/harafiqu
RUN_DIR=/hpcscratch/user/harafiqu/PS_Lattice_SC_Optimisation/00_Initial_Simulation_Setup/01_New_Lattice_Tracking
OrigIwd=$(pwd)
# Make an output folder in the root directory to hold SLURM info file
cd ${BATCH_ROOT_DIR}
output_dir="output"
mkdir -p $output_dir
# Fill the SLURM info file.
# FIX: 'readlink -f' canonicalizes *paths*; wrapping plain values such as the
# job name, IDs and counters in it produced bogus "$PWD/<value>" strings in
# the info file. Only the two real filesystem paths keep readlink.
simulation_info_file="${BATCH_ROOT_DIR}/${output_dir}/simulation_info_${SLURM_JOB_ID}.${SLURM_NODEID}.${SLURM_PROCID}.txt"
echo "PyOrbit path: `readlink -f ${ORBIT_ROOT}`" >> ${simulation_info_file}
echo "Run path: `readlink -f ${RUN_DIR}`" >> ${simulation_info_file}
echo "Submit host: ${SLURM_SUBMIT_HOST}" >> ${simulation_info_file}
echo "SLURM Job name: ${SLURM_JOB_NAME}" >> ${simulation_info_file}
echo "SLURM Job ID: ${SLURM_JOB_ID}" >> ${simulation_info_file}
echo "SLURM Nodes allocated: ${SLURM_JOB_NUM_NODES}" >> ${simulation_info_file}
echo "SLURM CPUS per Node: ${SLURM_CPUS_ON_NODE}" >> ${simulation_info_file}
echo "SLURM Node ID: ${SLURM_NODEID}" >> ${simulation_info_file}
echo "SLURM total cores for job: ${SLURM_NTASKS}" >> ${simulation_info_file}
echo "SLURM process ID: ${SLURM_PROCID}" >> ${simulation_info_file}
echo "****************************************" >> ${simulation_info_file}
# Enter job directory, clean it, and setup environment -> SLURM info file
cd ${RUN_DIR}
./clean_all.sh
. setup_environment.sh >> ${simulation_info_file}
# Load correct MPI
module load mpi/mvapich2/2.2
tstart=$(date +%s)
# Run the job
srun --hint=nomultithread ${ORBIT_ROOT}/bin/pyORBIT ${RUN_DIR}/pyOrbit.py
tend=$(date +%s)
dt=$(($tend - $tstart))
echo "total simulation time (s): " $dt >> ${simulation_info_file}
// Package rules defines the rules for checking an AKS cluster.
package rules
|
#!/bin/sh
# Build and install a static (no-shared) OpenSSL for darwin64-x86_64 into
# the prefix exported by ../PREFIX (as $MYPREFIX).
. ../PREFIX
OPENSSL_ROOT="$MYPREFIX"
# Reset the source tree to a pristine checkout before configuring.
git clean -f
git clean -fd
git checkout -- .
# no-hw/no-shared: static library without hardware-engine support.
./Configure darwin64-x86_64-cc no-hw no-shared \
--prefix="$OPENSSL_ROOT" --openssldir=ssl
# NOTE(review): relies on `make install` building all targets first — confirm
# this holds for the OpenSSL version in use, otherwise add `make` before it.
make install
|
import graphene
from graphene_sqlalchemy import SQLAlchemyObjectType

# NOTE(review): UserModel is referenced below but never imported in this
# module — confirm where the SQLAlchemy model is defined and import it here.


class User(SQLAlchemyObjectType):
    """User information exposed over GraphQL, backed by the UserModel table."""

    class Meta:
        model = UserModel


class Query(graphene.ObjectType):
    """Root GraphQL query type."""

    users = graphene.List(User)

    def resolve_users(self, info):
        # Return the full, unfiltered user query set.
        query = User.get_query(info)
        return query


# Module-level schema object consumed by the GraphQL endpoint.
schema = graphene.Schema(query=Query)
#! /bin/bash
# Build the pgdm-ui-build image using the parent directory as the build context.
# (Removed a stray trailing "|" that piped the build into nothing and broke the script.)
docker build -t pgdm-ui-build ..
#!/bin/bash
# Prepare a local OpenShift release image and Ironic containers for a
# metal3 dev deployment: validate, optionally rebuild release entries from
# local images, mirror the release, and start the ironic pod services.
set -euxo pipefail
source logging.sh
source common.sh
source network.sh
source rhcos.sh
source ocp_install_env.sh
source utils.sh
source validation.sh
early_deploy_validation
# To replace an image entry in the openshift release image, set
# <ENTRYNAME>_LOCAL_IMAGE - where ENTRYNAME matches an uppercase version of the name in the release image
# with "-" converted to "_" e.g. to use a custom ironic-inspector
#export IRONIC_INSPECTOR_LOCAL_IMAGE=https://github.com/metal3-io/ironic-inspector-image
#export IRONIC_MACHINE_OS_DOWNLOADER_LOCAL_IMAGE=https://github.com/openshift-metal3/ironic-rhcos-downloader
#export BAREMETAL_OPERATOR_LOCAL_IMAGE=192.168.111.1:5000/localimages/bmo:latest
# Drop any stale local-registry assets from a previous run.
rm -f assets/templates/99_local-registry.yaml $OPENSHIFT_INSTALL_PATH/data/data/bootstrap/baremetal/files/etc/containers/registries.conf
write_pull_secret
# Dockerfile used to assemble the patched release image (see loop below).
DOCKERFILE=$(mktemp --tmpdir "release-update--XXXXXXXXXX")
_tmpfiles="$_tmpfiles $DOCKERFILE"
echo "FROM $OPENSHIFT_RELEASE_IMAGE" > $DOCKERFILE
# To build custom images is highly recommended to build a base image first.
# Build a base image if we set a custom repo file in the config file and
# the file exists
if [[ -n ${CUSTOM_REPO_FILE:-} ]]; then
BASE_IMAGE_DIR=${BASE_IMAGE_DIR:-base-image}
if [[ -f "${BASE_IMAGE_DIR}/${CUSTOM_REPO_FILE}" ]]; then
sudo podman build --tag ${BASE_IMAGE_DIR} --build-arg TestRepo="${CUSTOM_REPO_FILE}" -f "${BASE_IMAGE_DIR}/Dockerfile"
else
echo "${CUSTOM_REPO_FILE} does not exist!"
exit 1
fi
fi
# For every *_LOCAL_IMAGE env var: build/push the local image if it is a git
# URL, then append a sed line to $DOCKERFILE that rewrites the corresponding
# entry in the release manifests.
for IMAGE_VAR in $(env | grep "_LOCAL_IMAGE=" | grep -o "^[^=]*") ; do
IMAGE=${!IMAGE_VAR}
# NOTE(review): this pulls the release image (not $IMAGE) on every loop
# iteration — confirm whether it was meant to pull $IMAGE instead.
sudo -E podman pull --authfile $PULL_SECRET_FILE $OPENSHIFT_RELEASE_IMAGE
# Is it a git repo?
if [[ "$IMAGE" =~ "://" ]] ; then
REPOPATH=~/${IMAGE##*/}
# Clone to ~ if not there already
[ -e "$REPOPATH" ] || git clone $IMAGE $REPOPATH
cd $REPOPATH
# Rewrite the env var to point at the tag we are about to build/push.
export $IMAGE_VAR=${IMAGE##*/}:latest
export $IMAGE_VAR=$LOCAL_REGISTRY_DNS_NAME:$LOCAL_REGISTRY_PORT/localimages/${!IMAGE_VAR}
# Some repos need to build with a non-default Dockerfile name
IMAGE_DOCKERFILE_NAME=${IMAGE_VAR/_LOCAL_IMAGE}_DOCKERFILE
IMAGE_DOCKERFILE=${!IMAGE_DOCKERFILE_NAME:-}
if [[ -z "$IMAGE_DOCKERFILE" ]]; then
# Default preference: Dockerfile.ocp, falling back to Dockerfile.
for IMAGE_DOCKERFILE in Dockerfile.ocp Dockerfile; do
if [[ -e "$IMAGE_DOCKERFILE" ]]; then
break
fi
done
fi
# If we built a custom base image, we should use it as a new base in
# the Dockerfile to prevent discrepancies between locally built images.
# Replace all FROM entries with the base-image.
if [[ -n ${BASE_IMAGE_DIR:-} ]]; then
sed -i "s/^FROM [^ ]*/FROM ${BASE_IMAGE_DIR}/g" ${IMAGE_DOCKERFILE}
fi
sudo podman build --authfile $PULL_SECRET_FILE -t ${!IMAGE_VAR} -f $IMAGE_DOCKERFILE .
cd -
sudo podman push --tls-verify=false --authfile $PULL_SECRET_FILE ${!IMAGE_VAR} ${!IMAGE_VAR}
fi
# Translate FOO_BAR_LOCAL_IMAGE -> release entry name "foo-bar" and record
# the manifest substitution in the release-update Dockerfile.
IMAGE_NAME=$(echo ${IMAGE_VAR/_LOCAL_IMAGE} | tr '[:upper:]_' '[:lower:]-')
OLDIMAGE=$(sudo podman run --rm $OPENSHIFT_RELEASE_IMAGE image $IMAGE_NAME)
echo "RUN sed -i 's%$OLDIMAGE%${!IMAGE_VAR}%g' /release-manifests/*" >> $DOCKERFILE
done
# Mirror the release into the local registry when MIRROR_IMAGES is set.
# NOTE(review): under `set -u` this test requires MIRROR_IMAGES to be defined
# (presumably by common.sh/config) — confirm.
if [ ! -z "${MIRROR_IMAGES}" ]; then
# combine global and local secrets
# pull from one registry and push to local one
# hence credentials are different
EXTRACT_DIR=$(mktemp --tmpdir -d "mirror-installer--XXXXXXXXXX")
_tmpfiles="$_tmpfiles $EXTRACT_DIR"
oc adm release mirror \
--insecure=true \
-a ${PULL_SECRET_FILE} \
--from ${OPENSHIFT_RELEASE_IMAGE} \
--to-release-image ${LOCAL_REGISTRY_DNS_NAME}:${LOCAL_REGISTRY_PORT}/localimages/local-release-image:${OPENSHIFT_RELEASE_TAG} \
--to ${LOCAL_REGISTRY_DNS_NAME}:${LOCAL_REGISTRY_PORT}/localimages/local-release-image 2>&1 | tee ${MIRROR_LOG_FILE}
echo "export MIRRORED_RELEASE_IMAGE=$OPENSHIFT_RELEASE_IMAGE" > /tmp/mirrored_release_image
#To ensure that you use the correct images for the version of OpenShift Container Platform that you selected,
#you must extract the installation program from the mirrored content:
if [ -z "$KNI_INSTALL_FROM_GIT" ]; then
oc adm release extract --registry-config "${PULL_SECRET_FILE}" \
--command=openshift-baremetal-install --to "${EXTRACT_DIR}" \
"${LOCAL_REGISTRY_DNS_NAME}:${LOCAL_REGISTRY_PORT}/localimages/local-release-image:${OPENSHIFT_RELEASE_TAG}"
mv -f "${EXTRACT_DIR}/openshift-baremetal-install" ${OCP_DIR}
fi
# Build a local release image, if no *_LOCAL_IMAGE env variables are set then this is just a copy of the release image
sudo podman image build --authfile $PULL_SECRET_FILE -t $OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE -f $DOCKERFILE
sudo podman push --tls-verify=false --authfile $PULL_SECRET_FILE $OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE $OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE
# If we're mirroring images, let's use the local Ironic image instead
OPENSHIFT_RELEASE_VERSION=$(oc adm release info --registry-config="$PULL_SECRET_FILE" "$OPENSHIFT_RELEASE_IMAGE" -o json | jq -r ".config.config.Labels.\"io.openshift.release\"")
IRONIC_LOCAL_IMAGE=${IRONIC_LOCAL_IMAGE:-"${LOCAL_REGISTRY_DNS_NAME}:${LOCAL_REGISTRY_PORT}/localimages/local-release-image:${OPENSHIFT_RELEASE_VERSION}-ironic"}
fi
# Tear down any leftover ironic-related containers from a previous run.
for name in ironic ironic-api ironic-conductor ironic-inspector dnsmasq httpd-${PROVISIONING_NETWORK_NAME} mariadb ipa-downloader; do
sudo podman ps | grep -w "$name$" && sudo podman kill $name
sudo podman ps --all | grep -w "$name$" && sudo podman rm $name -f
done
# Remove existing pod
if sudo podman pod exists ironic-pod ; then
sudo podman pod rm ironic-pod -f
fi
# Create pod
sudo podman pod create -n ironic-pod
# Prefer a locally-built ironic image when one was configured above.
IRONIC_IMAGE=${IRONIC_LOCAL_IMAGE:-$IRONIC_IMAGE}
# Pre-pull images; a pull failure is non-fatal (cached images may suffice).
for IMAGE in ${IRONIC_IMAGE} ${VBMC_IMAGE} ${SUSHY_TOOLS_IMAGE} ; do
sudo -E podman pull --authfile $PULL_SECRET_FILE $IMAGE || echo "WARNING: Could not pull latest $IMAGE; will try to use cached images instead"
done
# Download and checksum-verify the machine OS image if not already cached.
CACHED_MACHINE_OS_IMAGE="${IRONIC_DATA_DIR}/html/images/${MACHINE_OS_IMAGE_NAME}"
if [ ! -f "${CACHED_MACHINE_OS_IMAGE}" ]; then
curl -g --insecure -L -o "${CACHED_MACHINE_OS_IMAGE}" "${MACHINE_OS_IMAGE_URL}"
echo "${MACHINE_OS_IMAGE_SHA256} ${CACHED_MACHINE_OS_IMAGE}" | tee ${CACHED_MACHINE_OS_IMAGE}.sha256sum
sha256sum --strict --check ${CACHED_MACHINE_OS_IMAGE}.sha256sum || ( rm -f "${CACHED_MACHINE_OS_IMAGE}" ; exit 1 )
fi
# Same for the bootstrap machine OS image.
CACHED_MACHINE_OS_BOOTSTRAP_IMAGE="${IRONIC_DATA_DIR}/html/images/${MACHINE_OS_BOOTSTRAP_IMAGE_NAME}"
if [ ! -f "${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}" ]; then
curl -g --insecure -L -o "${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}" "${MACHINE_OS_BOOTSTRAP_IMAGE_URL}"
echo "${MACHINE_OS_BOOTSTRAP_IMAGE_SHA256} ${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}" | tee ${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}.sha256sum
sha256sum --strict --check ${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}.sha256sum || ( rm -f "${CACHED_MACHINE_OS_BOOTSTRAP_IMAGE}" ; exit 1 )
fi
# cached images to the bootstrap VM
sudo podman run -d --net host --privileged --name httpd-${PROVISIONING_NETWORK_NAME} --pod ironic-pod \
--env PROVISIONING_INTERFACE=${PROVISIONING_NETWORK_NAME} \
-v $IRONIC_DATA_DIR:/shared --entrypoint /bin/runhttpd ${IRONIC_IMAGE}
# IPA Downloader - for testing
if [ -n "${IRONIC_IPA_DOWNLOADER_LOCAL_IMAGE:-}" ];
then
sudo -E podman pull --authfile $PULL_SECRET_FILE $IRONIC_IPA_DOWNLOADER_LOCAL_IMAGE
sudo podman run -d --net host --privileged --name ipa-downloader --pod ironic-pod \
-v $IRONIC_DATA_DIR:/shared ${IRONIC_IPA_DOWNLOADER_LOCAL_IMAGE} /usr/local/bin/get-resource.sh
# Block until the downloader container exits (poll interval 1000ms).
sudo podman wait -i 1000 ipa-downloader
fi
function is_running() {
# Succeed (return 0) iff a container named "$1" is currently in the
# "running" state according to podman; fail (return 1) otherwise.
local podname="$1"
if [[ -n "$(sudo podman ps -a --filter "name=${podname}" --filter status=running -q)" ]]; then
return 0
fi
return 1
}
# On libvirt test setups, run virtual BMC emulators (vbmc + sushy-tools)
# so the baremetal flow has BMCs to talk to.
if [ "$NODES_PLATFORM" = "libvirt" ]; then
if ! is_running vbmc; then
# Force remove the pid file before restarting because podman
# has told us the process isn't there but sometimes when it
# dies it leaves the file.
sudo rm -f $WORKING_DIR/virtualbmc/vbmc/master.pid
sudo podman run -d --net host --privileged --name vbmc --pod ironic-pod \
-v "$WORKING_DIR/virtualbmc/vbmc":/root/.vbmc -v "/root/.ssh":/root/ssh \
"${VBMC_IMAGE}"
fi
if ! is_running sushy-tools; then
sudo podman run -d --net host --privileged --name sushy-tools --pod ironic-pod \
-v "$WORKING_DIR/virtualbmc/sushy-tools":/root/sushy -v "/root/.ssh":/root/ssh \
"${SUSHY_TOOLS_IMAGE}"
fi
fi
# Wait for images to be downloaded/ready
# Poll the httpd container until the checksum files are being served.
while ! curl --fail -g http://$(wrap_if_ipv6 ${PROVISIONING_HOST_IP})/images/${MACHINE_OS_IMAGE_NAME}.sha256sum ; do sleep 1 ; done
while ! curl --fail -g http://$(wrap_if_ipv6 ${PROVISIONING_HOST_IP})/images/${MACHINE_OS_BOOTSTRAP_IMAGE_NAME}.sha256sum ; do sleep 1 ; done
# When a local IPA downloader ran, also wait for the IPA ramdisk/kernel.
if [ -n "${IRONIC_IPA_DOWNLOADER_LOCAL_IMAGE:-}" ];
then
while ! curl --fail --head -g http://$(wrap_if_ipv6 ${PROVISIONING_HOST_IP})/images/ironic-python-agent.initramfs ; do sleep 1; done
while ! curl --fail --head -g http://$(wrap_if_ipv6 ${PROVISIONING_HOST_IP})/images/ironic-python-agent.kernel ; do sleep 1; done
fi
|
#!/bin/sh
# Launch the InvenTree background task worker (django-q qcluster) once the
# database is reachable.
echo "Starting InvenTree worker..."
# Give the database container a head start before probing it.
sleep 5
# Wait for the database to be ready
cd $INVENTREE_MNG_DIR
python3 manage.py wait_for_db
sleep 10
# Now we can launch the background worker process
# (Removed a stray trailing "|" that made this a syntax error at end-of-file.)
python3 manage.py qcluster
#pragma once
#include <vector>
#include <string>
#include <Vengine/GameObject.h>
namespace vengine {
// A named collection of GameObjects with lifecycle hooks (start/update/
// render/end) and helpers to create or look up objects by name.
class Scene
{
public:
Scene();
Scene(std::string name); // construct a scene with an explicit name
~Scene();
std::string name; // scene identifier
void update(); // per-frame logic update
void start(); // called when the scene becomes active
void end(); // called when the scene is torn down
void render(); // per-frame draw pass
GameObject* addGameObject(); // create and own a new unnamed object
GameObject* addGameObject(std::string name); // create and own a named object
GameObject* findGameObject(std::string name); // lookup by name; semantics of a miss not visible here
private:
// Owned by value: pointers returned above point into this vector, so
// callers must not hold them across container growth.
std::vector<GameObject> gameObjects;
};
}
|
<reponame>ideacrew/pa_edidb
# Lists, searches, and edits EdiOpsTransaction records.
class EdiOpsTransactionsController < ApplicationController
# Expose sorting and path helpers to the views.
helper_method :sort_column, :sort_direction, :edi_ops_transaction_path
# GET index: searchable (q/qf/qd), sortable, paginated (20 per page) listing.
def index
@q = params[:q]
@qf = params[:qf]
@qd = params[:qd]
@edi_ops_transactions = EdiOpsTransaction.order_by(sort_column.to_sym.send(sort_direction)).search(@q, @qf, @qd).page(params[:page]).per(20)
end
# GET edit: load the transaction to edit.
def edit
@edi_ops_transaction = EdiOpsTransaction.find(params[:id])
end
# PUT/PATCH update: apply submitted attributes; re-render the form on failure.
# NOTE(review): passes params[:edi_ops_transaction] straight to
# update_attributes (no strong-parameter filtering) — confirm mass-assignment
# protection is handled elsewhere.
def update
@edi_ops_transaction = EdiOpsTransaction.find(params[:id])
if @edi_ops_transaction.update_attributes(params[:edi_ops_transaction])
redirect_to edi_ops_transactions_path
else
render "edit"
end
end
private
# Whitelist the sort direction; default to ascending.
def sort_direction
%w[asc desc].include?(params[:direction]) ? params[:direction] : "asc"
end
# Only allow sorting by real model fields; default to submitted_timestamp.
def sort_column
EdiOpsTransaction.fields.include?(params[:sort]) ? params[:sort] : "submitted_timestamp"
end
end
|
'use strict';
/**
* Module dependencies.
*/
const utils = require('../../utils');
const validator = require('validator');
const dictDao = require('../../dao/sys_dict');
const util = require('../../utils');
const menuDao = require('../../dao/sys_menu');
const dictUtil = require('../../utils/dict_utils');
const moment = require('moment');
module.exports = function (app, routeMethod) {
/**
* 创建字典
*/
routeMethod.session('/manage/dict/create','sys:dict:edit');
routeMethod.csurf('/manage/dict/create');
app.get('/manage/dict/create', function (req, res) {
let type = req.query.type;
Promise.all([
menuDao.queryMenuByHref('/manage/dict'),
dictDao.queryMaxSortByType(type)
]).then(result => {
res.render('manage/sys_dict/create', {
currentMenu: result[0],
maxSort: parseInt(result[1] ? result[1] : 0) + 10,
type: type
});
});
});
/**
* 编辑字典
*/
routeMethod.session('/manage/dict/edit','sys:dict:edit');
routeMethod.csurf('/manage/dict/edit');
app.get('/manage/dict/edit', function (req, res) {
let id = req.query.id;
Promise.all([
menuDao.queryMenuByHref('/manage/dict'),
dictDao.queryDictById(id),
]).then(result => {
res.render('manage/sys_dict/create', {
currentMenu: result[0],
dict: result[1]
});
});
});
/**
* 保存一个用户信息
*/
routeMethod.session('/manage/dict/store','sys:dict:edit');
routeMethod.csurf('/manage/dict/store');
app.post('/manage/dict/store',async function (req, res) {
let value = req.body.value;
let label = req.body.label;
let type = req.body.type;
let sort = req.body.sort;
let description = req.body.description;
let remarks = req.body.remarks;
let result = null;
// 有ID就视为修改
if (typeof (req.body.id) != 'undefined' && req.body.id != '') {
result = await dictDao.updateDict(req);
req.session.notice_info = {
info:'修改字典成功!',
type:'success'
};
} else {
result = await dictDao.saveDict(value, label, type, description, sort, remarks, req);
req.session.notice_info = {
info:'保存字典成功!',
type:'success'
};
}
if (result) {
res.json({
result: true
});
} else {
req.session.notice_info = null;
res.json({
result: false,
error: '操作失败请重试!'
});
}
});
/**
* 删除一个字典信息
*/
routeMethod.session('/manage/dict/delete','sys:dict:edit');
routeMethod.csurf('/manage/dict/delete');
app.post('/manage/dict/delete',async function (req, res) {
let result = null;
if (req.body.id) {
let id = req.body.id;
result = await dictDao.delDictById(id);
if (result) {
req.session.notice_info = {
info:'删除字典成功!',
type:'success'
};
res.json({
result: true
});
} else {
res.json({
result: false
});
}
} else {
let ids = req.body.ids;
let idsAry = ids.split('|');
let proIdsAry = idsAry.map(id => {
return dictDao.delDictById(id);
});
Promise.all(proIdsAry).then(results => {
req.session.notice_info = {
info:'删除字典成功!',
type:'success'
};
res.json({
result: true
});
});
}
});
routeMethod.session('/manage/dict','sys:dict:view');
routeMethod.csurf('/manage/dict');
app.get('/manage/dict', function (req, res) {
let currentPage = req.query.page ? req.query.page : 1; // 获取当前页数,如果没有则为1
Promise.all([
menuDao.queryMenuByHref('/manage/dict'),
dictDao.queryAllDict(req, currentPage, 20),
dictDao.queryAllDictPage(req, 20, currentPage),
dictDao.queryDictType()
]).then(result => {
res.render('manage/sys_dict/index', {
currentMenu: result[0],
dicts: result[1],
page: result[2],
dictTypes: result[3]
});
});
});
}; |
package cyclops.async.reactive.futurestream.react.lazy;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import cyclops.async.reactive.futurestream.FutureStream;
import cyclops.async.reactive.futurestream.LazyReact;
import cyclops.async.reactive.futurestream.react.base.BaseJDKStreamTest;
import java.util.Arrays;
import java.util.Vector;
import org.junit.Test;
// Runs the shared JDK-Stream-compatibility test suite against a parallel
// FutureStream, plus a few map/reduce and forEach checks of its own.
public class JDKLazyStreamTest extends BaseJDKStreamTest {
// Factory used by the base suite: wrap the given elements in a parallel FutureStream.
public <U> FutureStream<U> of(U... array) {
return LazyReact.parallelBuilder()
.from(Arrays.asList(array));
}
// map then reduce without a seed: (1..5)*100 summed = 1500.
@Test
public void testMapReduce2() {
assertThat(of(1,
2,
3,
4,
5).map(it -> it * 100)
.reduce((acc, next) -> acc + next)
.get(),
is(1500));
}
// map then reduce with an identity seed of 0.
@Test
public void testMapReduceSeed2() {
assertThat(of(1,
2,
3,
4,
5).map(it -> it * 100)
.reduce(0,
(acc, next) -> acc + next),
is(1500));
}
// Three-argument reduce: accumulator plus combiner for parallel merging.
@Test
public void testMapReduceCombiner2() {
assertThat(of(1,
2,
3,
4,
5).map(it -> it * 100)
.reduce(0,
(acc, next) -> acc + next,
Integer::sum),
is(1500));
}
// forEach delivers every element; Vector is used since the stream is parallel.
// Order is not asserted, only membership.
@Test
public void forEach() {
Vector<Integer> list = new Vector<>();
of(1,
5,
3,
4,
2).forEach(it -> list.add(it));
assertThat(list,
hasItem(1));
assertThat(list,
hasItem(2));
assertThat(list,
hasItem(3));
assertThat(list,
hasItem(4));
assertThat(list,
hasItem(5));
}
}
|
import { Document } from 'mongoose';
import { Question } from '../question/question.interface';
/**
* A survey persisted in MongoDB: a named, ordered list of questions.
*/
export interface Survey extends Document {
readonly name: string; // display name of the survey
readonly initialState: object; // seed state object; exact shape not visible here — confirm against consumers
readonly questions: Question[]; // ordered survey questions
readonly result: string; // NOTE(review): semantics not visible in this file — confirm
}
|
# Program:
# SVN transfer to Git
# Author: Chonpin
# EMail: chonpin[at]gmail.com
# History:
# 2016/12/07 Chonpin First release
clear
# ASCII-art banner.
echo ""
echo ""
echo " ____ __ __ _ _ ____ ____ _ _ "
echo "/ ___| \ \ / / | \ | | |___ \ / ___| (_) | |_ "
echo "\___ \ \ \ / / | \| | __) | | | _ | | | __|"
echo " ___) | \ V / | |\ | / __/ | |_| | | | | |_ "
echo "|____/ \_/ |_| \_| |_____| \____| |_| \__|"
echo ""
echo " Create by : Chonpin"
echo ""
echo ""
echo ""
# Config
# Default values used when the interactive prompts are left empty.
# =================================================================================
config_repository=http://172.19.1.26/repos/ncc/trunk/offline_SQL
config_revision=10000
config_folder=/d/git/test123
config_user=chonpin.hsu
config_repository_git=https://github.com/chonpin/svn2gitxxxxxx.git
# =================================================================================
# Main interactive menu. Loops until an option that calls func_bye (exit)
# is chosen; invalid input simply re-prompts.
function func_start() {
while true; do
echo "Choice option :"
echo "1. Transfer SVN to Git"
echo "2. Push existing Git repository to GitLab"
echo "3. Exit"
read -p "" option
echo ""
case $option in
"1")
func_svn
func_git 1
func_bye
;;
"2")
func_git 2
func_bye
;;
"3")
func_bye
;;
*)
# Invalid input: let the while-loop re-prompt. (Previously this
# recursed into func_start from inside the loop, growing the call
# stack on every bad input for no behavioral gain.)
;;
esac
done
}
# Interactively collect SVN->Git transfer parameters (falling back to the
# config_* defaults) and run the git-svn clone steps (currently the
# destructive commands themselves are commented out).
function func_svn() {
read -p "Git Repository (${config_repository_git}) : " repository_git
read -p "SVN Repository (${config_repository}) : " repository
read -p "Revision number (Empty or 9999) : " revision
read -p "Checkout folder (${config_folder}) : " folder
read -p "UserName (${config_user}) : " user
# Apply defaults for any prompt left empty.
repository_git=${repository_git:-$config_repository_git}
repository=${repository:-$config_repository}
revision=${revision:-$config_revision}
folder=${folder:-$config_folder}
user=${user:-$config_user}
echo ""
echo "Project info : "
echo "Git repository : ${repository_git}"
echo "SVN repository : ${repository}"
echo "revision : ${revision}"
echo "folder : ${folder}"
echo "user : ${user}"
echo ""
echo ""
echo ""
echo "***** Important!!! this action will delete your destination folder. *****"
echo ""
echo ""
echo ""
while true; do
read -p "Are you sure to transfer this project [Y/N]?:" choice
case $choice in
"Y")
# BUG FIX: was `set rCommand=`, which in bash replaces the positional
# parameters with the literal word "rCommand=" instead of clearing
# the variable. Initialize the variable properly instead.
rCommand=""
if [ "$revision" != "Empty" ]; then
rCommand=-r${revision}:HEAD
fi
echo "Get author list and save to author.txt"
#svn log ${repository} --quiet | awk '/^r/ {print $3"="$3"<"$3"@104.com.tw>"}' | sort -u > author.txt
echo "delete destination folder"
#rm -rf $folder
echo "Start transfer..."
#git svn clone ${rCommand} ${repository} -A author.txt --username=${user} ${folder}
echo "Transfer Finished..."
break;
;;
"N")
echo "transfer abort"
func_bye
;;
esac
done
}
# Push the local repository to the configured remote. Mode "2" prompts for
# repository/folder; mode "1" reuses the values collected by func_svn.
function func_git() {
if [ "${1}" == "2" ]; then
read -p "Git Repository (${config_repository_git}) : " repository_git
read -p "Git folder (${config_folder}) : " folder
fi
repository_git=${repository_git:-$config_repository_git}
folder=${folder:-$config_folder}
# NOTE(review): $folder is unquoted — paths containing spaces will break here.
cd $folder
# Append an empty line to README.md (creating it if absent) so there is
# always something to commit.
echo >> README.md
git add README.md
git commit -m "First git commit!!!"
# Replace any existing origin with the target repository and push master.
git remote rm origin
git remote add origin ${repository_git}
git push -u origin master
}
# Print a goodbye message and terminate the whole script.
function func_bye(){
echo "Thank you. Bye..."
exit 0
}
# run shell script
# (Fix: removed a stray trailing "|" after this call, which piped func_start
# into nothing and caused a syntax error at end-of-file.)
func_start
import Vue from 'vue';
// A bare Vue instance used purely as a global event bus ($emit/$on).
const event_bus = new Vue();
export default {
debug: true, // global debug flag
event_bus: event_bus,
// Placeholder for API endpoint configuration.
api: {
},
}
|
#!/bin/sh
# Convert the qwertyMatrix24 text keyboard definitions into a KLA layout,
# chaining one txtkbd2kla pass per layer (altGr, shift, primary) and then
# stamping author/label metadata. $1 is the output layout name prefix.
cat qwertyMatrix24+digits.json \
| ./txtkbd2kla altGr qwertyMatrix24.txt $1-altGr.txt \
| ./txtkbd2kla shift qwertyMatrix24.txt $1-shift.txt \
| ./txtkbd2kla primary qwertyMatrix24+digits.txt $1-main.txt \
| ./txtkbd2kla 'author:' "phil quesnel" \
| ./txtkbd2kla 'label:' "$1"
|
#!/bin/bash
# Start the first member (etcd_1) of a three-node etcd cluster whose peer
# IPs are passed as arguments.
if [ $# -eq 0 ] ; then
echo "Usage: `basename $0` ip1 ip2 ip3"
echo " ip1 ip2 ip3 are the ip address of node etcd_1 etcd_2 etcd_3"
exit 1
fi
# Build the -initial-cluster string "etcd_1=http://ip1:2380,etcd_2=..."
index=1
while [ $# -gt 0 ] ; do
h="etcd_$index"
if [ $index -eq 1 ] ; then
CLUSTER="$h=http://$1:2380"
else
CLUSTER="$CLUSTER,$h=http://$1:2380"
fi
index=$(($index+1))
shift
done
# -initial-advertise-peer-urls : tell others what peer urls of me
# -listen-peer-urls : what peer urls of me
# -listen-client-urls : what client urls to listen
# -advertise-client-urls : tell others what client urls to listen of me
# -initial-cluster-state : new means join a new cluster; existing means join an existing cluster
# : new not means clear
# download etcd
[ ! -f etcd ] && wget http://www.unias.org/trac/docklet/downloads/1 -O etcd.tar.gz && tar xzvf etcd.tar.gz
# NOTE(review): $etcd_1 below is never defined in this script — presumably it
# is expected to be exported by the environment as this node's own IP (the
# first argument). Confirm; as written the URLs expand to "http://:2380".
./etcd -name etcd_1 \
-initial-advertise-peer-urls http://$etcd_1:2380 \
-listen-peer-urls http://$etcd_1:2380 \
-listen-client-urls http://$etcd_1:2379 \
-advertise-client-urls http://$etcd_1:2379 \
-initial-cluster-token etcd-cluster \
-initial-cluster $CLUSTER \
-initial-cluster-state new
|
package edu.washington.cse.instrumentation.analysis;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import soot.Body;
import soot.BodyTransformer;
import soot.Local;
import soot.Modifier;
import soot.PatchingChain;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootField;
import soot.SootFieldRef;
import soot.SootMethod;
import soot.SootMethodRef;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.VoidType;
import soot.jimple.AssignStmt;
import soot.jimple.CastExpr;
import soot.jimple.InvokeExpr;
import soot.jimple.Jimple;
import soot.jimple.JimpleBody;
import soot.jimple.NullConstant;
import soot.jimple.Stmt;
import soot.toolkits.graph.BriefUnitGraph;
import soot.toolkits.graph.UnitGraph;
// Soot transformer for Jasper-compiled JSP classes. internalTransform
// rewrites TagHandlerPool usage into direct allocation; synthesizeBodies
// adds the servlet lifecycle methods (init/getServletConfig/destroy/service)
// that HttpJspBase would normally provide.
public class JasperTransformer extends BodyTransformer {
// Soot signatures of the TagHandlerPool methods that are rewritten below.
private static final String GET_TAG_POOL_SIG =
"<org.apache.jasper.runtime.TagHandlerPool: org.apache.jasper.runtime.TagHandlerPool getTagHandlerPool(javax.servlet.ServletConfig)>";
private static final String RELEASE_TAG_SIG = "<org.apache.jasper.runtime.TagHandlerPool: void release()>";
private static final String REUSE_TAG_SIG = "<org.apache.jasper.runtime.TagHandlerPool: void reuse(javax.servlet.jsp.tagext.Tag)>";
private static final String GET_TAG_SIG = "<org.apache.jasper.runtime.TagHandlerPool: javax.servlet.jsp.tagext.Tag get(java.lang.Class)>";
// Rewrites each pool.get(SomeTag.class) assignment into `new SomeTag()`
// plus an explicit <init> call, and neutralizes getTagHandlerPool() by
// assigning null. Throws if the body does not match the expected
// Jasper-generated shape (get followed immediately by a cast of the result).
@Override
protected void internalTransform(final Body b, final String phaseName, final Map<String, String> options) {
final PatchingChain<Unit> units = b.getUnits();
final UnitGraph ug = new BriefUnitGraph(b);
// Snapshot iterator: we mutate `units` while walking.
for(final Iterator<Unit> it = units.snapshotIterator(); it.hasNext(); ) {
final Unit currUnit = it.next();
final Stmt stmt = (Stmt) currUnit;
if(!(currUnit instanceof AssignStmt)) {
if(!stmt.containsInvokeExpr()) {
continue;
}
final InvokeExpr ie = stmt.getInvokeExpr();
final String calledSig = ie.getMethod().getSignature();
// A get() whose result is discarded has no cast to rewrite — bail out.
if(calledSig.equals(GET_TAG_SIG)) {
throw new RuntimeException("Could not adapt: " + b.getMethod());
}
// For reuse()/release() a nop is inserted after the call; the call
// itself is left in place.
if(calledSig.equals(REUSE_TAG_SIG) ||
calledSig.equals(RELEASE_TAG_SIG)) {
units.insertAfter(Jimple.v().newNopStmt(), currUnit);
// b.validate();
}
continue;
}
final AssignStmt invokeStmt = (AssignStmt) currUnit;
if(!invokeStmt.containsInvokeExpr()) {
continue;
}
final Value lhs = invokeStmt.getLeftOp();
final SootMethod m = invokeStmt.getInvokeExpr().getMethod();
if(!m.getSignature().equals(GET_TAG_SIG) && !m.getSignature().equals(GET_TAG_POOL_SIG)) {
continue;
}
// `pool = getTagHandlerPool(cfg)` becomes `pool = null`.
if(m.getSignature().equals(GET_TAG_POOL_SIG)) {
invokeStmt.setRightOp(NullConstant.v());
continue;
}
// For `x = pool.get(C.class)` we require exactly one successor that
// casts x to a concrete RefType; that type tells us what to `new`.
final List<Unit> succs = ug.getSuccsOf(currUnit);
if(succs.size() != 1) {
throw new RuntimeException("Could not adapt: " + currUnit + " in " + b.getMethod());
}
final Unit nextUnit = succs.get(0);
if(!(nextUnit instanceof AssignStmt) || !(((AssignStmt)nextUnit).getRightOp() instanceof CastExpr)) {
throw new RuntimeException("Could not adapt: " + currUnit + " in " + b.getMethod());
}
final CastExpr castExpr = (CastExpr) ((AssignStmt)nextUnit).getRightOp();
if(castExpr.getOp() != lhs) {
throw new RuntimeException("Could not adapt: " + currUnit + " in " + b.getMethod());
}
final Type castedType = castExpr.getCastType();
if(!(castedType instanceof RefType)) {
throw new RuntimeException("Could not adapt: " + currUnit + " in " + b.getMethod());
}
// Replace the pool call with `new C` and invoke the no-arg constructor.
invokeStmt.getRightOpBox().setValue(Jimple.v().newNewExpr((RefType)castedType));
final SootClass created = ((RefType)castedType).getSootClass();
final Unit invokeConstructor = Jimple.v().newInvokeStmt(Jimple.v().newSpecialInvokeExpr((Local)lhs,
Scene.v().makeMethodRef(created, "<init>", Collections.<Type>emptyList(), VoidType.v(), false)));
units.insertAfter(invokeConstructor, currUnit);
// b.validate();
}
}
// Adds servlet plumbing to a Jasper JSP class: a _config field, init()
// storing the config, getServletConfig() returning it, destroy() calling
// jspDestroy/_jspDestroy, and HTTP + generic service() adapters that
// delegate to _jspService.
public static void synthesizeBodies(final SootClass declaringClass) {
assert declaringClass.getSuperclass().getName().equals("org.apache.jasper.runtime.HttpJspBase") : declaringClass;
// inline jsp implementations
final RefType servletConfigType = RefType.v("javax.servlet.ServletConfig");
final SootClass servletExceptionClass = Scene.v().getSootClass("javax.servlet.ServletException");
// Backing field for the ServletConfig captured in init().
final SootField configField = new SootField("_config", servletConfigType, Modifier.PRIVATE);
declaringClass.addField(configField);
final SootFieldRef configRef = configField.makeRef();
final RefType thisType = RefType.v(declaringClass.getName());
final Jimple jimple = Jimple.v();
{
// init(ServletConfig): this._config = config
final SootMethod initMethod = new SootMethod("init", Collections.<Type>singletonList(servletConfigType),
VoidType.v(), Modifier.PUBLIC, Collections.<SootClass>singletonList(servletExceptionClass));
final JimpleBody jb = jimple.newBody(initMethod);
final PatchingChain<Unit> units = jb.getUnits();
final Local thisLocal = jimple.newLocal("this", thisType);
final Local configLocal = jimple.newLocal("configArg", servletConfigType);
jb.getLocals().add(thisLocal);
jb.getLocals().add(configLocal);
units.add(jimple.newNopStmt());
units.add(jimple.newIdentityStmt(thisLocal, jimple.newThisRef(thisType)));
units.add(jimple.newIdentityStmt(configLocal, jimple.newParameterRef(servletConfigType, 0)));
units.add(jimple.newAssignStmt(jimple.newInstanceFieldRef(thisLocal, configRef), configLocal));
units.add(jimple.newReturnVoidStmt());
declaringClass.addMethod(initMethod);
initMethod.setActiveBody(jb);
}
{
// getServletConfig(): return this._config
final SootMethod getConfigMethod = new SootMethod("getServletConfig", Collections.<Type>emptyList(),
servletConfigType, Modifier.PUBLIC);
final JimpleBody jb = jimple.newBody(getConfigMethod);
final PatchingChain<Unit> units = jb.getUnits();
final Local thisLocal = jimple.newLocal("this", thisType);
final Local configLocal = jimple.newLocal("configArg", servletConfigType);
jb.getLocals().add(thisLocal);
jb.getLocals().add(configLocal);
units.add(jimple.newNopStmt());
units.add(jimple.newIdentityStmt(thisLocal, jimple.newThisRef(thisType)));
units.add(jimple.newAssignStmt(configLocal, jimple.newInstanceFieldRef(thisLocal, configRef)));
units.add(jimple.newReturnStmt(configLocal));
declaringClass.addMethod(getConfigMethod);
getConfigMethod.setActiveBody(jb);
}
{
// destroy(): this.jspDestroy(); this._jspDestroy()
final SootMethod destroyMethod = new SootMethod("destroy", Collections.<Type>emptyList(), VoidType.v(), Modifier.PUBLIC);
final JimpleBody jb = jimple.newBody(destroyMethod);
final PatchingChain<Unit> units = jb.getUnits();
final Local thisLocal = jimple.newLocal("this", thisType);
jb.getLocals().add(thisLocal);
units.add(jimple.newNopStmt());
units.add(jimple.newIdentityStmt(thisLocal, jimple.newThisRef(thisType)));
units.add(jimple.newInvokeStmt(
jimple.newVirtualInvokeExpr(thisLocal, Scene.v().makeMethodRef(declaringClass, "jspDestroy", Collections.<Type>emptyList(), VoidType.v(), false))
));
units.add(jimple.newInvokeStmt(
jimple.newVirtualInvokeExpr(thisLocal, Scene.v().makeMethodRef(declaringClass, "_jspDestroy", Collections.<Type>emptyList(), VoidType.v(), false))
));
units.add(jimple.newReturnVoidStmt());
declaringClass.addMethod(destroyMethod);
destroyMethod.setActiveBody(jb);
}
// service(ServletRequest, ServletResponse) adapter that casts and delegates.
addServiceAdapter(declaringClass, "_jspService");
{
// service(HttpServletRequest, HttpServletResponse): delegate to _jspService.
final RefType responseType = RefType.v("javax.servlet.http.HttpServletResponse");
final RefType requestType = RefType.v("javax.servlet.http.HttpServletRequest");
final List<Type> serviceParams = Arrays.<Type>asList(requestType,responseType);
final SootMethod httpServiceMethod = new SootMethod("service",
serviceParams, VoidType.v(),
Modifier.PUBLIC,
getServiceExceptions());
final JimpleBody b = jimple.newBody(httpServiceMethod);
final Local thisLocal = jimple.newLocal("r0", thisType);
final Local reqLocal = jimple.newLocal("req", requestType);
// NOTE(review): respLocal is declared with requestType here — Jimple local
// types are declared types, so confirm this was intentional.
final Local respLocal = jimple.newLocal("resp", requestType);
b.getLocals().addAll(Arrays.asList(thisLocal, reqLocal, respLocal));
final PatchingChain<Unit> units = b.getUnits();
units.add(jimple.newNopStmt());
units.add(jimple.newIdentityStmt(thisLocal, jimple.newThisRef(thisType)));
units.add(jimple.newIdentityStmt(reqLocal, jimple.newParameterRef(requestType, 0)));
units.add(jimple.newIdentityStmt(respLocal, jimple.newParameterRef(responseType, 1)));
units.add(
jimple.newInvokeStmt(
jimple.newVirtualInvokeExpr(thisLocal, Scene.v().makeMethodRef(declaringClass, "_jspService", serviceParams, VoidType.v(), false), reqLocal, respLocal)
)
);
units.add(jimple.newReturnVoidStmt());
httpServiceMethod.setActiveBody(b);
declaringClass.addMethod(httpServiceMethod);
}
}
// Checked exceptions declared by the synthesized service methods.
private static List<SootClass> getServiceExceptions() {
return Arrays.asList(Scene.v().getSootClass("javax.servlet.ServletException"), Scene.v().getSootClass("java.io.IOException"));
}
// Synthesizes service(ServletRequest, ServletResponse) that casts both
// arguments to their HTTP subtypes and virtually invokes `callee` on this.
public static void addServiceAdapter(final SootClass declaringClass, final String callee) {
final RefType thisType = declaringClass.getType();
final Jimple jimple = Jimple.v();
final RefType requestType = RefType.v("javax.servlet.ServletRequest");
final RefType responseType = RefType.v("javax.servlet.ServletResponse");
final SootMethod serviceMethod = new SootMethod("service", Arrays.<Type>asList(
requestType,
responseType
), VoidType.v(), Modifier.PUBLIC,
getServiceExceptions());
final JimpleBody jb = jimple.newBody(serviceMethod);
final PatchingChain<Unit> units = jb.getUnits();
final Local thisLocal = jimple.newLocal("this", thisType);
final Local reqLocal = jimple.newLocal("r1", requestType);
final Local respLocal = jimple.newLocal("r2", responseType);
final RefType httpRequestType = RefType.v("javax.servlet.http.HttpServletRequest");
final RefType httpResponseType = RefType.v("javax.servlet.http.HttpServletResponse");
final Local t1 = jimple.newLocal("t1", httpRequestType);
final Local t2 = jimple.newLocal("t2", httpResponseType);
jb.getLocals().addAll(Arrays.asList(thisLocal, reqLocal, respLocal, t1, t2));
units.add(jimple.newNopStmt());
units.add(jimple.newIdentityStmt(thisLocal, jimple.newThisRef(thisType)));
units.add(jimple.newIdentityStmt(reqLocal, jimple.newParameterRef(requestType, 0)));
units.add(jimple.newIdentityStmt(respLocal, jimple.newParameterRef(responseType, 1)));
// Downcast the generic servlet arguments before delegating.
units.add(jimple.newAssignStmt(t1, jimple.newCastExpr(reqLocal, httpRequestType)));
units.add(jimple.newAssignStmt(t2, jimple.newCastExpr(respLocal, httpResponseType)));
final SootMethodRef serviceMethodRef = Scene.v().makeMethodRef(declaringClass, callee, Arrays.<Type>asList(
httpRequestType, httpResponseType
), VoidType.v(), false);
units.add(jimple.newInvokeStmt(
jimple.newVirtualInvokeExpr(thisLocal, serviceMethodRef, t1, t2)
));
units.add(jimple.newReturnVoidStmt());
serviceMethod.setActiveBody(jb);
declaringClass.addMethod(serviceMethod);
}
}
|
from typing import List, Dict
from django.core.management import call_command
from django.test import TestCase
from github_data.exceptions import RateLimitExceededError
from github_data.models import GithubUser, GithubRepository
class ScrapeCommandTestCase(TestCase):
    """Integration tests for the ``scrape_git`` management command."""

    def _scrape(self, *args: str, **options: int) -> None:
        """Run ``scrape_git``; skip the current test if Github rate-limits us."""
        try:
            call_command('scrape_git', *args, **options)
        except RateLimitExceededError:  # pragma: no cover
            # skipTest raises unittest.SkipTest, aborting the calling test method.
            self.skipTest('Unable to complete test: Github rate limit exceeded! Try again later.')

    def test_2_specific_users(self) -> None:
        # Positional arguments select specific users by login.
        self._scrape('jcaraballo17', 'Maurier')
        self.assertTrue(GithubUser.objects.filter(login='jcaraballo17').exists())
        self.assertTrue(GithubUser.objects.filter(login='Maurier').exists())
        for login, expected_repos in (('jcaraballo17', 2), ('Maurier', 3)):
            self.assertEqual(
                GithubRepository.objects.filter(owner__login=login).count(),
                expected_repos,
            )

    def test_1_specific_user_with_1_repository(self) -> None:
        # The ``repositories`` option caps how many repos are scraped per user.
        self._scrape('jcaraballo17', repositories=1)
        self.assertTrue(GithubUser.objects.filter(login='jcaraballo17').exists())
        self.assertEqual(
            GithubRepository.objects.filter(owner__login='jcaraballo17').count(), 1
        )

    def test_first_5_users_1_repository(self) -> None:
        # ``users`` limits how many users are scraped in total.
        self._scrape(users=5, repositories=1)
        self.assertEqual(GithubUser.objects.count(), 5)
        self.assertEqual(GithubRepository.objects.count(), 5)

    def test_2_users_all_repositories_since_id(self) -> None:
        # ``since`` starts scraping from a given user id; all repos are fetched.
        self._scrape(since=125, users=2)
        self.assertEqual(GithubUser.objects.count(), 2)
        self.assertEqual(GithubRepository.objects.count(), 66)
|
// Source repository: Codpoe/jimu
import React from 'react';
export interface IconHexagonProps extends React.SVGAttributes<SVGElement> {
  /** Stroke color; defaults to `currentColor`. */
  color?: string;
  /** Rendered width and height; defaults to `1em`. */
  size?: string | number;
  className?: string;
  style?: React.CSSProperties;
}

/**
 * Hexagon icon (Feather icon set) rendered as an inline SVG.
 * Remaining props are spread onto the root `<svg>`, so callers can override
 * any SVG attribute, including the ones set here.
 */
export const IconHexagon: React.FC<IconHexagonProps> = (
  props: IconHexagonProps
): React.ReactElement => {
  // Fix: `React.SFC` is deprecated (renamed to `React.FC`), and `defaultProps`
  // on function components is deprecated as well. Defaults now live in the
  // destructuring below, which resolves identically for callers.
  const { color = 'currentColor', size = '1em', style, ...restProps } = props;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-hexagon"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={{ verticalAlign: 'middle', ...style }}
      {...restProps}
    >
      <path d="M21 16V8a2 2 0 0 0-1-1.73l-7-4a2 2 0 0 0-2 0l-7 4A2 2 0 0 0 3 8v8a2 2 0 0 0 1 1.73l7 4a2 2 0 0 0 2 0l7-4A2 2 0 0 0 21 16z" />
    </svg>
  );
};

export default IconHexagon;
|
require 'spec_helper'
require 'deis-interactive/rails/base'
# Specs for DeisInteractive::Rails::Base#inferred_app, which parses the output
# of `git remote -v` and extracts the app name from the remote named "deis"
# (nil when there is no git repository or no deis remote).
describe DeisInteractive::Rails::Base do
  let(:base) { DeisInteractive::Rails::Base.new(nil, nil) }
  describe "#inferred_app" do
    before do
      # Stub the shell call so the specs never touch a real git repository.
      allow(base).to receive(:git_remote_response).and_return remotes
    end
    context "there is deis repo" do
      # Fix: the fixture URL had been mangled by an automated redaction
      # ("git@<EMAIL>:2222"), leaving an invalid remote string. Restored a
      # host consistent with the "no deis repo" fixture below.
      let(:remotes) { "deis\tssh://git@my.domain.com:2222/my-app.git (fetch)" }
      it "returns the name" do
        expect(base.inferred_app).to eq "my-app"
      end
    end
    context "there is no git repo" do
      # `git remote -v` outside a repository prints this fatal message.
      let(:remotes) { "fatal: Not a git repository (or any of the parent directories): .git" }
      it "returns nil" do
        expect(base.inferred_app).to be_nil
      end
    end
    context "there is no deis repo" do
      # A remote exists, but none named "deis" — nothing to infer from.
      let(:remotes) { "origin\tssh://git@my.domain.com:2222/my-app.git (fetch)" }
      it "returns nil" do
        expect(base.inferred_app).to be_nil
      end
    end
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.