text stringlengths 1 1.05M |
|---|
# Customizes the Devise::Invitable redirect hooks for our invitation flow.
class Users::InvitationsController < Devise::InvitationsController
  # After an invitation is sent, return the inviter to user management.
  def after_invite_path_for(_inviter, _invitee)
    user_management_path
  end

  # After an invitee accepts, send them to the new-volunteer form.
  def after_accept_path_for(_resource)
    new_volunteer_path
  end
end
|
#!/bin/bash
# file: src/bash/pgsql-runner.sh doc at the eof file
umask 022 ;
# set -x # print the commands
# set -v # print each input line as well
# set -e # exit the script if any statement returns a non-true return value. gotcha !!!
# single quotes so $exit_code is expanded when the trap FIRES, not when it is
# set (double quotes expanded the then-empty variable at definition time,
# yielding a bare "exit" that ignored the intended status)
trap 'exit $exit_code' TERM
export TOP_PID=$$
#v1.0.7
#------------------------------------------------------------------------------
# the main function called
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# main: handle the help/usage shortcut flags, otherwise parse the cli args,
# load the run-time vars, verify prerequisites and run the selected actions
#------------------------------------------------------------------------------
main(){
   doInit
   # map the long/short help flags straight to actions, bypassing getopts;
   # the four duplicated branches were collapsed into two pattern lists
   case $1 in
      '-usage'|'--usage')
         actions="print-usage "
      ;;
      '-help'|'--help')
         actions="print-help "
      ;;
   esac
   test -z "$actions" && doParseCmdArgs "$@"
   doSetVars
   doCheckReadyToStart
   doRunActions "$@"
   doExit 0 "# = STOP MAIN = $run_unit "
}
#eof main
# v1.0.7
#------------------------------------------------------------------------------
# the "reflection" func - identify the funcs per file
#------------------------------------------------------------------------------
# Prints every function name defined by the file "$1", one per line.
# Runs in a scrubbed bash (env -i, --noprofile --norc) so only functions from
# the sourced target file are reported, not ones inherited from this shell.
get_function_list () {
env -i bash --noprofile --norc -c '
source "'"$1"'"
typeset -f |
grep '\''^[^{} ].* () $'\'' |
awk "{print \$1}" |
while read -r function_name; do
# double-check the candidate really is a function before emitting it
type "$function_name" | head -n 1 | grep -q "is a function$" || continue
echo "$function_name"
done
'
}
#eof func get_function_list
# v1.0.7
#------------------------------------------------------------------------------
# run all the actions
#------------------------------------------------------------------------------
# Dispatches every token in $actions: for each action find the matching
# "<action>.func.sh" file under funcs/ and call every function it defines;
# the to-* environment-switching actions are handled inline afterwards.
doRunActions(){
cd $product_instance_dir
# no actions selected -> show the usage and stop cleanly
test -z "$actions" && doPrintUsage && doExit 0
# outer loop: one iteration per space-terminated action token
while read -d ' ' action ; do (
#debug doLog "action: \"$action\""
# middle loop: every pluggable action file shipped under funcs/
while read -r func_file ; do (
# inner loop: every function defined inside that file
while read -r function_name ; do (
# the file "foo.func.sh" provides the action named "foo"
action_name=`echo $(basename $func_file)|sed -e 's/.func.sh//g'`
test "$action_name" != $action && continue
doLog "INFO running action :: $action_name":"$function_name"
# run each function of the matching action file
test "$action_name" == "$action" && $function_name
);
done< <(get_function_list "$func_file")
);
done < <(find src/bash/pgsql-runner/funcs -type f -name '*.sh')
# built-in actions handled outside the pluggable funcs/ files
test "$action" == 'to-dev' && doChangeEnvType 'dev'
test "$action" == 'to-tst' && doChangeEnvType 'tst'
test "$action" == 'to-git' && doChangeEnvType 'git'
test "$action" == 'to-qas' && doChangeEnvType 'qas'
test "$action" == 'to-prd' && doChangeEnvType 'prd'
[[ $action == to-ver=* ]] && doChangeVersion $action
[[ $action == to-app=* ]] && doCloneToApp $action
);
done < <(echo "$actions")
}
#eof func doRunActions
#v1.2.6
#------------------------------------------------------------------------------
# register the run-time vars before the call of the $0
#------------------------------------------------------------------------------
# Registers the run-time vars needed by every other function: the calling
# dir (restored by doExit), the script's own dir, a per-PID temp dir, and a
# snapshot of the shell variables (diffed later in doSetVars).
doInit(){
call_start_dir=`pwd`
# absolute dir of this script, resolved via perl so it works on both
# cygwin and linux regardless of how the script was invoked
run_unit_bash_dir=$(perl -e 'use File::Basename; use Cwd "abs_path"; print dirname(abs_path(@ARGV[0]));' -- "$0")
tmp_dir="$run_unit_bash_dir/tmp/.tmp.$$"
mkdir -p "$tmp_dir"
# snapshot the variables BEFORE this run defines its own
( set -o posix ; set )| sort >"$tmp_dir/vars.before"
# run_unit is the script base name without the extension
my_name_ext=`basename $0`
run_unit=${my_name_ext%.*}
host_name=`hostname -s`
}
#eof doInit
#v1.0.7
#------------------------------------------------------------------------------
# parse the single letter command line args
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# parse the single letter command line args
# -a <action>   may repeat; actions accumulate space-separated
# -c <unit>     override the run unit name
# -i <file>     include file
# -h            print the help
#------------------------------------------------------------------------------
doParseCmdArgs(){
   # note: -h is a plain flag, so it must NOT be followed by ':' in the
   # optstring (the old ":a:c:i:h:" made "-h" demand an argument)
   while getopts ":a:c:i:h" opt; do
      case $opt in
         a)
            actions="$actions$OPTARG "
         ;;
         c)
            # the old code appended a stray trailing space to run_unit,
            # which corrupts every file name derived from it
            export run_unit="$OPTARG"
         ;;
         i)
            include_file="$OPTARG"
         ;;
         h)
            doPrintHelp
         ;;
         \?)
            doExit 2 "Invalid option: -$OPTARG"
         ;;
         :)
            doExit 2 "Option -$OPTARG requires an argument."
         ;;
      esac
   done
}
#eof func doParseCmdArgs
#v1.0.7
#------------------------------------------------------------------------------
# create an example host dependant ini file
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# create an example host dependant ini file: a single [MainSection] with
# simple name=value pairs derived from the current run unit and env type
#------------------------------------------------------------------------------
doCreateDefaultConfFile(){
   # group all the writes and append them to the conf file in one redirect
   {
      echo -e "#file: $cnf_file \n\n"
      echo -e "[MainSection] \n"
      echo -e "#use simple var_name=var_value syntax \n"
      echo -e "#the name of this application "
      echo -e "app_name=$run_unit\n"
      echo -e "#the e-mails to send the package to "
      echo -e "Emails=some.email@company.com\n"
      echo -e "#the name of this application's db"
      echo -e "db_name=$env_type""_""$run_unit\n\n"
      echo -e "#eof file: $cnf_file"
   } >> $cnf_file
}
#eof func doCreateDefaultConfFile
#v1.0.7
#------------------------------------------------------------------------------
# perform the checks to ensure that all the vars needed to run are set
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# perform the checks to ensure that all the vars needed to run are set
# and the required binaries (zip, perl) are installed; exit code 1 otherwise
#------------------------------------------------------------------------------
doCheckReadyToStart(){
   # bootstrap a default conf file on the very first run
   test -f $cnf_file || doCreateDefaultConfFile
   # check http://stackoverflow.com/a/677212/65706
   # but which works for both cygwin and Ubuntu
   # use the builtin "command -v" for BOTH probes (the old code mixed in
   # "which", whose output format is not portable) and silence the path
   # that is printed on success
   command -v zip >/dev/null 2>&1 || { echo >&2 "The zip binary is missing ! Aborting ..."; exit 1; }
   command -v perl >/dev/null 2>&1 || { echo >&2 "The perl binary is missing ! Aborting ..."; exit 1; }
}
# v1.2.7
#------------------------------------------------------------------------------
# clean and exit with passed status and message
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# clean and exit with passed status and message
# usage: doExit <exit-code> <message...>
#------------------------------------------------------------------------------
doExit(){
   # the old body never read its arguments - it used the globals $exit_code /
   # $exit_msg (set elsewhere, or not at all), so "doExit 2 msg" could exit 0
   exit_code=$1
   exit_msg="$*"
   exit_msg="${exit_msg#* }"   # drop the leading exit-code token, keep the message
   if (( $exit_code != 0 )); then
      exit_msg=" ERROR --- exit_code $exit_code --- exit_msg : $exit_msg"
      >&2 echo "$exit_msg" #echo to STDERR !
      # doSendReport # this is very often overkill ...
      doLog "FATAL STOP FOR $run_unit RUN with: "
      doLog "FATAL exit_code: $exit_code exit_msg: $exit_msg"
   else
      doLog "INFO STOP FOR $run_unit RUN with: "
      doLog "INFO STOP FOR $run_unit RUN: $exit_code $exit_msg"
   fi
   doCleanAfterRun
   cd $call_start_dir
   # on error signal the top-level shell so the TERM trap fires
   #src: http://stackoverflow.com/a/9894126/65706
   test $exit_code -ne 0 && kill -s TERM $TOP_PID
   test $exit_code -eq 0 && exit 0
}
#eof func doExit
#v1.0.7
#------------------------------------------------------------------------------
# echo pass params and print them to a log file and terminal
# with timestamp and $host_name and $0 PID
# usage:
# doLog "INFO some info message"
# doLog "DEBUG some debug message"
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# echo pass params and print them to a log file and terminal
# with timestamp and $host_name and $0 PID
# usage:
# doLog "INFO some info message"
# doLog "DEBUG some debug message"
#------------------------------------------------------------------------------
doLog(){
   # first word is the severity, the rest is the message
   type_of_msg=$(echo $*|cut -d" " -f1)
   msg="$(echo $*|cut -d" " -f2-)"
   # DEBUG messages are printed only when explicitly enabled
   [[ $type_of_msg == DEBUG ]] && [[ $do_print_debug_msgs -ne 1 ]] && return
   [[ $type_of_msg == INFO ]] && type_of_msg="INFO " # pad so the columns align
   # print to the terminal if we have one
   test -t 1 && echo " [$type_of_msg] `date "+%Y.%m.%d-%H:%M:%S"` [pgsql-runner][@$host_name] [$$] $msg "
   # define default log file none specified in cnf file
   # quote the expansions: unquoted, an empty/spacey value broke test -z
   # and mkdir received split words
   test -z "$log_file" && \
   mkdir -p "$product_instance_dir/dat/log/bash" && \
   log_file="$product_instance_dir/dat/log/bash/$run_unit.`date "+%Y%m"`.log"
   echo " [$type_of_msg] `date "+%Y.%m.%d-%H:%M:%S"` [$run_unit][@$host_name] [$$] $msg " >> "$log_file"
}
#eof func doLog
#v1.1.0
#------------------------------------------------------------------------------
# cleans the unneeded during after run-time stuff
# do put here the after cleaning code
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# after-run cleanup hook: wipes the per-run temporary directory (and
# everything under it), logging the removal
#------------------------------------------------------------------------------
doCleanAfterRun(){
   doRunCmdAndLog "rm -fvr $tmp_dir"
}
#v1.0.7
#------------------------------------------------------------------------------
# run a command and log the call and its output to the log_file
# doPrintHelp: doRunCmdAndLog "$cmd"
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# run a command and log the call and its output to the log_file
# doPrintHelp: doRunCmdAndLog "$cmd"
# failures are LOGGED but do not stop the run (contrast doRunCmdOrExit)
#------------------------------------------------------------------------------
doRunCmdAndLog(){
cmd="$*" ;
doLog "DEBUG running cmd and log: \"$cmd\""
# run via plain word splitting (no eval) capturing stdout+stderr together
msg=$($cmd 2>&1)
ret_cmd=$?
error_msg=": Failed to run the command:
\"$cmd\" with the output:
\"$msg\" !!!"
# non-zero status -> log the error, then carry on regardless
[ $ret_cmd -eq 0 ] || doLog "$error_msg"
doLog "DEBUG : cmdoutput : \"$msg\""
}
#eof func doRunCmdAndLog
#v1.0.7
#------------------------------------------------------------------------------
# run a command on failure exit with message
# doPrintHelp: doRunCmdOrExit "$cmd"
# call by:
# set -e ; doRunCmdOrExit "$cmd" ; set +e
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# run a command; on failure exit with message
# doPrintHelp: doRunCmdOrExit "$cmd"
# call by:
# set -e ; doRunCmdOrExit "$cmd" ; set +e
#------------------------------------------------------------------------------
doRunCmdOrExit(){
cmd="$*" ;
doLog "DEBUG running cmd or exit: \"$cmd\""
# run via plain word splitting (no eval) capturing stdout+stderr together
msg=$($cmd 2>&1)
ret_cmd=$?
# if an error occurred during the execution exit with that status and message
error_msg=": FATAL : Failed to run the command \"$cmd\" with the output \"$msg\" !!!"
[ $ret_cmd -eq 0 ] || doExit "$ret_cmd" "$error_msg"
# if no error occurred just log the output
doLog "DEBUG : cmdoutput : \"$msg\""
}
#v1.0.7
#------------------------------------------------------------------------------
# set the variables from the $0.$host_name.cnf file which has ini like syntax
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# set the variables from the $0.$host_name.cnf file which has ini like syntax
# also derives the product/org dir hierarchy from the script location,
# sources every *func.sh action file, and logs the diff of shell variables
# created since the doInit snapshot
#------------------------------------------------------------------------------
doSetVars(){
cd $run_unit_bash_dir
# the script lives 3 levels below the product instance dir
for i in {1..3} ; do cd .. ; done ;
export product_instance_dir=`pwd`;
# include all the func files to fetch their funcs
while read -r func_file ; do . "$func_file" ; done < <(find . -name "*func.sh")
# while read -r func_file ; do echo "$func_file" ; done < <(find . -name "*func.sh")
# the instance dir name is dot-separated: field 5 is the env type
# (dev, tst, prd), fields 2-4 are the product version
env_type=$(echo `basename "$product_instance_dir"`|cut -d'.' -f5)
product_version=$(echo `basename "$product_instance_dir"`|cut -d'.' -f2-4)
product_instance_env_name=$(basename "$product_instance_dir")
# walk up the tree, recording each ancestor dir of the hierarchy
cd ..
product_dir=`pwd`;
cd ..
product_base_dir=`pwd`;
org_dir=`pwd`
org_name=$(echo `basename "$org_dir"`)
cd ..
org_base_dir=`pwd`;
cd "$run_unit_bash_dir/"
# start set default vars
do_print_debug_msgs=0
# if the db_name is NOT pre-set in the calling shell set it from own conf file
test -z "$db_name" && doParseConfFile
# if the db_name IS pre-set in the calling-shell set the script run vars
# by not overriding the pre-set in the shell vars
test -z "$db_name" || doSetUndefinedShellVarsFromCnfFile
# second snapshot; diffed below against the vars.before from doInit
( set -o posix ; set ) | sort -n >"$tmp_dir/vars.after"
doLog "INFO # --------------------------------------"
doLog "INFO # -----------------------"
doLog "INFO # === START MAIN === $run_unit"
doLog "INFO # -----------------------"
doLog "INFO # --------------------------------------"
exit_code=0
doLog "INFO using the following vars:"
# show only the variables this run added or changed since doInit
cmd="$(comm $tmp_dir/vars.before $tmp_dir/vars.after | perl -ne 's#\s+##g;print "\n $_ "' )"
echo -e "$cmd"
# and clear the screen
printf "\033[2J";printf "\033[0;0H"
}
#eof func doSetVars
#------------------------------------------------------------------------------
# set vars from the cnf file, but only if they are not pre-set in the calling shell
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# set vars from the cnf file, but only if they are not pre-set in the
# calling shell (each generated command is "test -z ... && export ...")
#------------------------------------------------------------------------------
doSetUndefinedShellVarsFromCnfFile(){
   # set a default cnfiguration file
   cnf_file="$run_unit_bash_dir/$run_unit.cnf"
   # however if there is a host dependant cnf file override it
   test -f "$run_unit_bash_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$run_unit_bash_dir/$run_unit.$host_name.cnf"
   # if we have perl apps they will share the same cnfiguration settings with this one
   test -f "$product_instance_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$product_instance_dir/$run_unit.$host_name.cnf"
   # however if there is a host dependant and env-aware cnf file override it
   # NOTE(review): the tested path has no $env_type while the assigned one
   # does - kept as found to preserve behaviour; confirm which is intended
   test -f "$run_unit_bash_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$run_unit_bash_dir/$run_unit.$env_type.$host_name.cnf"
   INI_SECTION=MainSection
   # rewrite every [MainSection] "name=value" line into a guarded export
   vars_to_set=`sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \
      -e 's/#.*$//' \
      -e 's/[[:space:]]*$//' \
      -e 's/^[[:space:]]*//' \
      -e "s/^\(.*\)=\([^\"']*\)$/test -z \"\$\1\" \&\& export \1=\"\2\"/" \
      < $cnf_file \
      | sed -n -e "/^\[$INI_SECTION\]/,/^\s*\[/{/^[^#].*\=.*/p;}"`
   # feed the generated commands line by line via a here-string; the old
   # 'done < "$vars_to_set"' treated the variable's CONTENT as a file name,
   # so the loop never ran and no variable was ever exported
   while IFS=' ' read -r var_to_set
   do
      echo "running: $var_to_set"
      eval "$var_to_set"
   done <<< "$vars_to_set"
   vars_to_set=""
}
#eof func doSetShellVarsFromCnfFile
#v1.0.7
#------------------------------------------------------------------------------
# parse the ini like $0.$host_name.cnf and set the variables
# cleans the unneeded during after run-time stuff. Note the MainSection
# courtesy of : http://mark.aufflick.com/blog/2007/11/08/parsing-ini-files-with-sed
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# parse the ini like $0.$host_name.cnf and set the variables
# cleans the unneeded during after run-time stuff. Note the MainSection
# courtesy of : http://mark.aufflick.com/blog/2007/11/08/parsing-ini-files-with-sed
#------------------------------------------------------------------------------
doParseConfFile(){
   # set a default cnfiguration file
   cnf_file="$run_unit_bash_dir/$run_unit.cnf"
   # however if there is a host dependant cnf file override it
   test -f "$run_unit_bash_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$run_unit_bash_dir/$run_unit.$host_name.cnf"
   # if we have perl apps they will share the same cnfiguration settings with this one
   test -f "$product_instance_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$product_instance_dir/$run_unit.$host_name.cnf"
   # however if there is a host dependant and env-aware cnf file override it
   # NOTE(review): the tested path has no $env_type while the assigned one
   # does - kept as found to preserve behaviour; confirm which is intended
   test -f "$run_unit_bash_dir/$run_unit.$host_name.cnf" \
      && cnf_file="$run_unit_bash_dir/$run_unit.$env_type.$host_name.cnf"
   # yet finally override if passed as argument to this function
   # braces make BOTH the assignment and the shift conditional; the old
   # "|| cnf_file=$1;shift 1;" ran shift unconditionally and errored when
   # the function was called with no arguments
   test -z "$1" || { cnf_file=$1 ; shift ; }
   #debug echo "@doParseConfFile cnf_file:: $cnf_file" ; sleep 6
   # could be later on parametrized ...
   INI_SECTION=MainSection
   # rewrite the [MainSection] "name=value" lines into name="value" and eval them
   eval `sed -e 's/[[:space:]]*\=[[:space:]]*/=/g' \
      -e 's/#.*$//' \
      -e 's/[[:space:]]*$//' \
      -e 's/^[[:space:]]*//' \
      -e "s/^\(.*\)=\([^\"']*\)$/\1=\"\2\"/" \
      < $cnf_file \
      | sed -n -e "/^\[$INI_SECTION\]/,/^\s*\[/{/^[^#].*\=.*/p;}"`
}
#eof func doParseConfFile
# Action !!!
# entry point: forward all command line arguments to main
main "$@"
#
#----------------------------------------------------------
# Purpose:
# a simplistic app stub with simplistic source control and
# cloning or morphing functionalities ...
#----------------------------------------------------------
#
#----------------------------------------------------------
# Requirements: bash , perl , ctags
#
#----------------------------------------------------------
#
#----------------------------------------------------------
# EXIT CODES
# 0 --- Successfull completion
# 1 --- required binary not installed
# 2 --- Invalid options
# 3 --- deployment file not found
# 4 --- perl syntax check error
#----------------------------------------------------------
#
# VersionHistory:
#------------------------------------------------------------------------------
# 1.0.0 --- 2016-09-11 12:24:15 -- init from bash-stub
#----------------------------------------------------------
#
#eof file: pgsql-runner.sh v1.0.7
|
<filename>packages/gimbal/src/command/index.ts
import program, { Command as CommandType } from 'commander';
import Config from '@/config';
import EventEmitter from '@/event';
import Logger from '@/logger';
import output from '@/output';
import { StartEvent, EndEvent, ActionStartEvent, ActionEndEvent, Report } from '@/typings/command';
import { CommandOptions } from '@/typings/utils/command';
import { getOptionsFromCommand } from '@/utils/command';
import comment from '@/vcs/comment';
import reconcileReports from './reconcile';
/* eslint-disable-next-line @typescript-eslint/no-explicit-any */
// An action receives the parsed CLI options plus the raw positional args and
// resolves with whatever report(s) the command produces.
type Action = (commandOptions: CommandOptions, args?: string[]) => Promise<any>;

// Lets an option default be computed from the already-parsed options.
type DefaultValueFn = (options: CommandOptions) => CommandOptions;

// Describes one commander flag definition.
interface Option {
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  defaultValue?: any | DefaultValueFn;
  description?: string;
  flag: string;
  /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
  process?: ((arg1: any, arg2: any) => void) | RegExp;
}

// NOTE(review): this interface shares the name "Config" with the default
// import from '@/config'; it compiles only because the import contributes the
// value side and this declares the type side - confirm the shadowing is
// intentional.
interface Config {
  action: Action;
  command: string;
}
/**
 * Wraps a single CLI command: fires lifecycle events around the configured
 * action, reconciles the report(s) it returns, emits output and a VCS
 * comment, and exits non-zero on failure.
 */
class Command {
  private action: Action;

  private command: string;

  public constructor(config: Config) {
    this.action = config.action;
    this.command = config.command;
  }

  /**
   * Execute the action with full lifecycle eventing, in this order:
   * start -> action/start -> action/end -> end.
   * Exits the process with code 1 when the report is unsuccessful or any
   * step throws.
   */
  public async run(args: string[] = []): Promise<void> {
    try {
      const commandOptions = getOptionsFromCommand(program, undefined, Config);
      const startEvent: StartEvent = {
        args,
        commandOptions,
        command: this,
      };

      await EventEmitter.fire(`command/${this.command}/start`, startEvent);

      // lazily load the config on first run, rooted at the parsed cwd
      if (!Config.isLoaded) {
        await Config.load(commandOptions.cwd, commandOptions);
      }

      const actionStartEvent: ActionStartEvent = {
        args,
        commandOptions,
        command: this,
      };

      await EventEmitter.fire(`command/${this.command}/action/start`, actionStartEvent);

      // the action may return one report or many; fold them into one
      const reports: Report | Report[] = await this.action(commandOptions, args);
      const report: Report = reconcileReports(reports);

      const actionEndEvent: ActionEndEvent = {
        args,
        commandOptions,
        command: this,
        report,
      };

      await EventEmitter.fire(`command/${this.command}/action/end`, actionEndEvent);

      await output(report, commandOptions);
      await comment(report, commandOptions);

      const endEvent: EndEvent = {
        args,
        commandOptions,
        command: this,
        report,
      };

      await EventEmitter.fire(`command/${this.command}/end`, endEvent);

      if (!report.success) {
        process.exit(1);
      }
    } catch (error) {
      Logger.log(error);
      process.exit(1);
    }
  }
}
/**
 * Parse process.argv ahead of commander's normal dispatch so option values
 * are available before any command action runs.
 * NOTE(review): program.normalize/parseOptions are commander internals that
 * changed in later commander majors - confirm against the pinned version.
 */
export const preparseOptions = (): CommandOptions => {
  const parsed = program.parseOptions(program.normalize(process.argv.slice(2)));
  // match the first positional argument against a registered sub-command
  const match =
    parsed.args[0] && program.commands.find((command: CommandType): boolean => command.name() === parsed.args[0]);
  const cmd = match || program;

  if (!match && parsed.args[0]) {
    Logger.log(`The "${parsed.args[0]}" command was not found`);
  }

  cmd.parseOptions(parsed.args); // this applies option values onto the command/program

  return getOptionsFromCommand(cmd, undefined, Config);
};
export default Command;
|
<filename>clusters/server/tests/recon_test.py
import unittest
from src.factory import DayFactory, TransactionsFactory
from src.day import Day
from src.recon import Recon
class ReconTest(unittest.TestCase):
    """End-to-end check for Recon.compare over two one-day snapshots."""

    def setUp(self):
        # NOTE(review): both days are created with the label "D0" - the second
        # was probably meant to be "D1"; confirm whether the label matters to
        # DayFactory/Recon before changing it.
        self.D0: Day = DayFactory().create("D0", ["AAPL 100", "GOOG 200", "SP500 175.75"], 1000)
        self.D1: Day = DayFactory().create("D0", ["AAPL 120", "GOOG 220", "SP500 175.75"], 10000)
        self.stock_list = ["AAPL", "GOOG", "SP500"]
        self.r = Recon()

    def test_compare(self):
        # expected: per-stock values plus the cash delta between the two days
        self.assertEqual(self.r.compare(self.D0, self.D1), {'AAPL': 20.0, 'GOOG': 20.0, 'Cash': 9000})


if __name__ == '__main__':
    unittest.main()
|
<reponame>ctgriffiths/twister
/*
File: PluginsLoader.java ; This file is part of Twister.
Version: 2.001
Copyright (C) 2012-2013 , Luxoft
Authors: <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import java.io.File;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Iterator;
import java.util.ServiceLoader;
import com.twister.plugin.twisterinterface.TwisterPluginInterface;
import java.io.InputStream;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class PluginsLoader {
private static URLClassLoader sysLoader;
private static Class[] parameters = new Class[]{URL.class};
public static void setClassPath(){
try{
if(sysLoader==null){
sysLoader= new URLClassLoader(new URL[]{},MainRepository.class.getClassLoader());
}
addDirToClasspath(new File(MainRepository.temp+MainRepository.bar+"components"));
}
catch(Exception e){e.printStackTrace();}}
public static Iterator<TwisterPluginInterface> getPlugins() {
return ServiceLoader.load(TwisterPluginInterface.class,sysLoader).iterator();}
public static void addDirToClasspath(File directory) throws Exception{
addURL(directory.toURI().toURL());
}
public static void addURL(URL u) throws Exception{
System.out.println("URL: "+u.toString());
URL urls[] = sysLoader.getURLs();
for(int i=0;i<urls.length;i++){
if(urls[i].toString().equalsIgnoreCase(u.toString())){
return;}}
Class sysClass = URLClassLoader.class;
try{Method method = sysClass.getDeclaredMethod("addURL", parameters);
method.setAccessible(true);
method.invoke(sysLoader, new Object[]{u});}
catch(Exception e){e.printStackTrace();}}} |
<reponame>brighteningStar/rp
import {Errors} from './Errors';
/**
 * Thin wrapper around a form's data, validation errors, and submission
 * state. NOTE(review): relies on a global `axios` (it is not imported
 * here) - confirm it is attached in the app bootstrap.
 */
export class Form {
    constructor(data) {
        this.originalData = data;
        this.copyDataToForm(data);
        this.errors = new Errors();
        this.loading = false;
    }

    /** Blank out every field and clear any recorded errors. */
    reset() {
        for (let field in this.originalData) {
            this[field] = '';
        }
        this.errors.clear();
    }

    /** Collect the current field values into a plain payload object. */
    data() {
        let data = {};
        for (let property in this.originalData) {
            data[property] = this[property];
        }
        return data;
    }

    /** POST the form data to $uri. */
    post($uri) {
        return this.submit('post', $uri);
    }

    /** PUT the form data to $uri. */
    put($uri) {
        return this.submit('put', $uri);
    }

    /**
     * Shared submission path (post/put were verbatim duplicates): set the
     * loading flag, send the payload, and resolve/reject with the server's
     * response data exactly as before.
     */
    submit(method, $uri) {
        this.loading = true;
        return new Promise((resolve, reject) => {
            axios[method]($uri, this.data())
                .then(response => {
                    this.onSuccess(response.data);
                    resolve(response.data);
                })
                .catch(errors => {
                    this.onFail(errors.response.data.errors);
                    reject(errors.response.data.errors);
                })
        });
    }

    /** Success handler: stop loading and reset the fields. */
    onSuccess(data) {
        this.loading = false;
        this.reset();
    }

    /** Failure handler: stop loading and record the validation errors. */
    onFail(errors) {
        this.loading = false;
        this.errors.record(errors)
    }

    /** Mirror each property of `data` onto the form instance itself. */
    copyDataToForm(data) {
        for (let field in data) {
            this[field] = data[field];
        }
    }
}
|
<reponame>AmatanHead/collective-blog
"""Form field for the markdown field
It is based on the `CharField`.
Unlike `CharField`, it works with `Markdown` objects.
"""
from django import forms
from django.core import exceptions
from .datatype import Markdown
from .renderer import BaseRenderer
from .widgets import MarkdownTextarea
class MarkdownFormField(forms.fields.CharField):
    """Form field for editing :class:`Markdown` objects.

    Accepts three validator families: ``source_validators`` (run against the
    markdown source), ``html_validators`` (run against the rendered html) and
    plain ``validators`` (run against the `Markdown` object itself).
    """
    # TODO tests

    def __init__(self, *args, **kwargs):
        """Form field for editing markdown objects"""
        source_validators = kwargs.pop('source_validators', [])
        _source_validators = kwargs.pop('_source_validators', [])
        html_validators = kwargs.pop('html_validators', [])
        _html_validators = kwargs.pop('_html_validators', [])
        validators = kwargs.pop('validators', [])
        _validators = kwargs.pop('_validators', [])
        self.html_validators = html_validators + _html_validators
        self.source_validators = source_validators + _source_validators
        self.markdown_cls = kwargs.pop('markdown', Markdown)
        self.renderer = kwargs.pop('renderer', BaseRenderer())
        defaults = {'widget': MarkdownTextarea}
        defaults.update(kwargs)
        super(MarkdownFormField, self).__init__(*args, **defaults)
        # extend AFTER super().__init__: CharField.__init__ rebuilds
        # self.validators, which silently discarded the pre-super assignment
        # the old code made
        self.validators += validators + _validators

    def run_validator(self, validator, *args, **kwargs):
        """Run a single validator on a value.

        :param validator: A callable. The validator to run.
        :param args: Arguments for the validator
        :param kwargs: Keyword arguments for the validator
        :return: List of errors (empty when the validator passes).
        """
        # the old body ignored `validator` and looped over self.validators,
        # returning after the first success - run the passed validator instead
        try:
            validator(*args, **kwargs)
            return []
        except exceptions.ValidationError as e:
            if hasattr(e, 'code') and e.code in self.error_messages:
                e.message = self.error_messages[e.code]
            return e.error_list

    def run_validators(self, value):
        """Run all validator families on the given value.

        :param value: the `Markdown` class instance which needs validation.
        :return: None
        :raise: ValidationError
        """
        if value in self.empty_values:
            return
        errors = []
        # NOTE(review): each validator receives its value wrapped in a list
        # (preserved from the original call shape) - confirm the validators
        # expect a list rather than the bare value.
        for validator in iter(self.source_validators):
            errors.extend(self.run_validator(validator, [value.source]))
        for validator in iter(self.html_validators):
            errors.extend(self.run_validator(validator, [value.html]))
        for validator in iter(self.validators):
            errors.extend(self.run_validator(validator, [value]))
        if errors:
            raise exceptions.ValidationError(errors)

    def validate(self, value):
        """Check the required constraint (run_validators handles the rest).

        :param value: `Markdown` class instance which needs to be validated.
        """
        if value in self.empty_values and self.required:
            raise exceptions.ValidationError(self.error_messages['required'], code='required')

    def to_python(self, value):
        """Text to python.

        :param value: Source string, `Markdown`, or None.
        :return: None or `Markdown`.
        """
        # '' is rendered into an (empty) Markdown object; other empty values
        # (e.g. None) pass through unchanged
        if value in self.empty_values and value != '':
            return value
        elif isinstance(value, self.markdown_cls):
            return value
        else:
            return self.markdown_cls(self.renderer, value)

    def prepare_value(self, value):
        """Python to text.

        :param value: A `Markdown` class instance.
        :return: Source text that will be displayed in a widget.
        """
        if isinstance(value, self.markdown_cls):
            return value.source
        else:
            return value
|
<reponame>trufflesuite/scaffold-eth
const { execSync } = require("child_process");

// everything after the script name, with a leading space for concatenation
const args = " " + process.argv.slice(2).join(" ");
const execOption = { stdio: "inherit" };

const main = async () => {
  // use the caller-supplied --network if present, otherwise localhost
  const hasNetwork = args.includes("network");
  const formatArgs = hasNetwork ? args : "--network localhost";

  // run the truffle migration on the chosen network
  const migrateCommand = hasNetwork
    ? `truffle migrate${args}`
    : `truffle migrate${args} --network localhost`;
  execSync(migrateCommand, execOption);

  // format properly for scaffold-eth using the same network
  execSync(`node scripts/scaffoldFormat.js ${formatArgs}`, execOption);
};

main();
|
class Article:
    """A named article identified by a serial id of the form ``S-<n>``."""

    def __init__(self, name, serial_id):
        self.name = name
        self.serial_id = serial_id

    @classmethod
    def generate_serial_id(cls, existing_serial_ids):
        """Return the next free serial id, one past the highest in use."""
        if existing_serial_ids:
            highest = max(int(sid.split('-')[1]) for sid in existing_serial_ids)
            return f"S-{highest + 1}"
        return "S-1"
# Example usage: the next id after S-1..S-3 is S-4
existing_serial_ids = ["S-1", "S-2", "S-3"]
new_article = Article("Example Article", Article.generate_serial_id(existing_serial_ids))
print(new_article.serial_id)  # Output: S-4
<reponame>ysdtkm/manual_graph_analyzer
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
import cgi
import cgitb
import datetime
from hashlib import md5
import os
import pickle
import shutil
from string import Template
import sys
from classes import State, Node
def encode(ustr):
    """Return the hex md5 digest of a unicode string (stable content key)."""
    return md5(ustr.encode("utf8")).hexdigest()
def get_template():
    """Return the HTML page skeleton; the caller substitutes ``${body}``."""
    return Template("""
<html>
<body>
${body}
</body>
</html>
""")
def dump_fieldstorage(fo):
    """Write every CGI field/value pair to stderr (debugging aid)."""
    for key in fo.keys():
        sys.stderr.write(f"Field: {key}, Value: {fo[key].value}\n")
def main_process(fo):
    """Load (or initialise) the persisted State, apply the submitted form
    actions, atomically persist the new state, and return the page body."""
    fname = "state.pkl"
    dump_fieldstorage(fo)
    if os.path.isfile(fname):
        with open(fname, "rb") as f:
            st = pickle.load(f)
    else:
        st = State()
    st = edit_state(st, fo)
    bd = get_main_body(st)
    # dump to a temp file first, then move into place: avoids a corrupt
    # state.pkl if the process dies mid-write
    with open("tmp.pkl", "wb") as f:
        pickle.dump(st, f)
    shutil.move("tmp.pkl", fname)
    return bd
def console(obj):
    """Write a one-line debug representation of *obj* to stderr."""
    sys.stderr.write(f"{obj}\n")
def edit_state(st, fo):
    """Mutate/replace the graph state according to the submitted form.

    One submit button (Add/Next/Clear/Rename/Merge/Delete) is expected per
    request; the first matching branch wins.
    """
    if "Add" in fo and "child" in fo:
        console("Add")
        cname = fo["child"].value
        # reuse the id of an existing node with this name, else mint one
        if st.name_exists(cname):
            cid = st.id_by_name(cname)
        else:
            cid = generate_time_hash()
        pids = set()
        if "parent" in fo:
            na = fo["parent"].value
            if st.name_exists(na):
                pids = {st.id_by_name(na)}
        st.enqueue(Node(cid, cname, pids))
    elif "Next" in fo and st.non_empty_queue():
        console("Next")
        st.pop()
    elif "Clear" in fo and "option" in fo and fo["option"].value == "clear":
        # destructive reset requires typing the literal confirmation "clear"
        console("Clear")
        st = State()
    elif "Rename" in fo and "edited" in fo and "To" in fo:
        console("Rename")
        # refuse to rename onto a name that is already taken
        if st.name_exists(fo["To"].value):
            return st
        if st.name_exists(fo["edited"].value):
            nid = st.id_by_name(fo["edited"].value)
            st.nodes[nid].name = fo["To"].value
    elif "Merge" in fo and "edited" in fo and "Mto" in fo:
        # both endpoints must exist for a merge
        if st.name_exists(fo["edited"].value) and st.name_exists(fo["Mto"].value):
            st.merge_names(fo["edited"].value, fo["Mto"].value)
    elif "Delete" in fo and "edited" in fo:
        console("Delete")
        edited = fo["edited"].value
        if st.name_exists(edited):
            did = st.id_by_name(edited)
            st.delete_by_id(did)
    return st
def get_main_body(st):
    """Render the full HTML body for the current state.

    Also saves the graph image; the timestamp query string on graph.png
    defeats browser caching.
    """
    states = str(st)  # NOTE(review): unused - kept as found
    st.save_graph()
    rd = datetime.datetime.now().strftime("%H%M%S%f")
    # pre-fill the parent field with the head of the breadth-first queue
    if st.non_empty_queue():
        pid = st.pop(noremove=True)
        p = st.nodes[pid].name
    else:
        p = ""
    bo = f"""
<h1>Semi-manual graph analyzer (with breadth-first prompt)</h1>
<h3>Current state</h3>
<img src="../graph.png?dummy={rd}">
<br/>
<h3>Add node/edge</h3>
<form action="server.py" method="post">
Parent name (optional): <input type="text" name="parent" value="{p}"><br/>
Name: <input type="text" name="child" value="" autofocus><br/>
<input type="submit" value="Add" name="Add">
<input type="submit" value="Next in queue" name="Next"><br/>
</form>
<br/>
<h3>Edit</h3>
<form action="server.py" method="post">
Original name: <input type="text" name="edited" value=""><br/>
↓<br/>
<input type="submit" value="Rename to" name="Rename"><input type="text" name="To" value="">
<br/>
<input type="submit" value="Merge to" name="Merge"><input type="text" name="Mto" value="">
<br/>
<input type="submit" value="Delete" name="Delete">
</form>
<form action="server.py" method="post">
<input type="submit" value="All clear" name="Clear">
<input type="text" name="option" value="Type 'clear' to confirm" onfocus='this.value=""'>
</form>
<br/>
<h3>Debug info</h3>
Queue:<br>
{st.html_queue()}<br>
<br>
Nodes:<br>
{st.html_nodes()}
"""
    return bo
def generate_time_hash():
    """md5 of the current timestamp (microsecond resolution) -> unique-ish id."""
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S%f")
    return md5(stamp.encode("utf8")).hexdigest()
def main():
    """CGI entry point: parse the form, process it, emit the HTML response."""
    cgitb.enable()
    fo = cgi.FieldStorage()
    msg = main_process(fo)
    # headers first, then a blank line, then the document itself
    print("Content-Type: text/html;charset=utf-8")
    print("")
    di = {"body": msg}
    print(get_template().substitute(di))


if __name__ == "__main__":
    main()
|
<filename>include/tev/Lazy.h
// This file was developed by <NAME> <<EMAIL>>.
// It is published under the BSD 3-Clause License within the LICENSE file.
#pragma once
#include <tev/Common.h>
#include <tev/ThreadPool.h>
#include <chrono>
#include <functional>
#include <future>
TEV_NAMESPACE_BEGIN
// Encapsulates a lazy, potentially asynchronous computation
// of some value. The public interface of this object is not
// thread-safe, i.e. it is expected to never be used from
// multiple threads at once.
template <typename T>
class Lazy {
public:
    // Convenience overload: no thread pool, so computeAsync() falls back
    // to std::async.
    Lazy(std::function<T(void)> compute)
    : Lazy{compute, nullptr} {
    }

    // When threadPool is non-null, computeAsync() enqueues there instead of
    // spawning a std::async task.
    Lazy(std::function<T(void)> compute, ThreadPool* threadPool)
    : mThreadPool{threadPool}, mCompute{compute} {
    }

    // Returns the value: cached if available, otherwise blocks on a pending
    // async computation, otherwise computes synchronously. Caches the result.
    T get() {
        if (mIsComputed) {
            return mValue;
        }
        if (mAsyncValue.valid()) {
            mValue = mAsyncValue.get();
        } else {
            mValue = compute();
        }
        mIsComputed = true;
        return mValue;
    }

    // True once get() would return without blocking.
    bool isReady() const {
        if (mIsComputed) {
            TEV_ASSERT(
                !mAsyncValue.valid(),
                "There should never be a background computation while the result is already available."
            );
            return true;
        }
        if (!mAsyncValue.valid()) {
            return false;
        }
        // zero-timeout wait == non-blocking poll of the future
        return mAsyncValue.wait_for(std::chrono::seconds{0}) == std::future_status::ready;
    }

    // Completion timestamp; before the value is ready this returns "now",
    // i.e. a moving target rather than a fixed point in time.
    std::chrono::steady_clock::time_point becameReadyAt() const {
        if (!isReady()) {
            return std::chrono::steady_clock::now();
        } else {
            return mBecameReadyAt;
        }
    }

    // Starts the computation in the background (thread pool if present,
    // std::async otherwise). No-op if already computed or already running.
    void computeAsync() {
        // No need to perform an async computation if we
        // already computed the value before or if one is
        // already running.
        if (mAsyncValue.valid() || mIsComputed) {
            return;
        }
        if (mThreadPool) {
            mAsyncValue = mThreadPool->enqueueTask([this]() {
                T result = compute();
                // NOTE(review): written on the worker thread and read via
                // becameReadyAt() on the owner thread; appears to rely on the
                // class's documented single-threaded-use contract - confirm.
                mBecameReadyAt = std::chrono::steady_clock::now();
                return result;
            }, true);
        } else {
            mAsyncValue = std::async(std::launch::async, [this]() {
                T result = compute();
                mBecameReadyAt = std::chrono::steady_clock::now();
                return result;
            });
        }
    }

private:
    // Runs the stored computation once, then drops the callable so any
    // resources captured by it are released.
    T compute() {
        T result = mCompute();
        mCompute = std::function<T(void)>{};
        return result;
    }

    // If this thread pool is present, use it to run tasks
    // instead of std::async.
    ThreadPool* mThreadPool = nullptr;
    std::function<T(void)> mCompute;
    std::future<T> mAsyncValue;
    T mValue;                 // cached result; valid only when mIsComputed
    bool mIsComputed = false;
    std::chrono::steady_clock::time_point mBecameReadyAt;
};
TEV_NAMESPACE_END
|
<filename>lib/car/obj/src/ascend_ll.c
/* **** Notes
Ascend the current temporary knot position - i.e., (*argp).t in a pointer of the knot structure.
Deprecated..
di: Output — receives the address of the successor knot (i.e., (*si).s) through this pointer-to-pointer to the knot structure.
si: Input — the current temporary knot (e.g., the leading knot, (*argp).l) whose successor address is taken.
example:
r = ascend_ll(&((*argp).t),(*argp).t);
Remarks:
Based on a doubly linked list
*/
# define C_CODE_STDS
# define CAR
# include "../../../incl/config.h"
/*
 * Advance *di to the successor of knot `si` in the doubly linked list.
 *
 * di: out — on success receives the address of si's successor, (*si).s.
 * si: in  — the knot whose successor is taken.
 *
 * Returns 0x00 when either pointer argument is NULL; 0x01 otherwise.
 * Note: when si has no successor (end of list) the function returns 0x01
 * but leaves *di untouched.
 */
signed(__cdecl ascend_ll(KNOT(**di),KNOT(*si))) {
    /* **** DATA, BSS and STACK */
    auto KNOT *cache;
    /* **** CODE/TEXT */
    if(!di) return(0x00);
    if(!si) return(0x00);
    if(!((*si).s)) return(0x01);
    // It is at the terminating address i.e., (*argp).b
    cache = ((*si).s);
    *(di) = (cache);
    return(0x01);
}
|
package ch.hslu.pcp;
/**
 * Singly linked LIFO stack of {@code Element} nodes.
 */
public class Stack implements Stackable {

    /** Sentinel value wrapped by top() when the stack is empty. */
    private static final int DEFAULT_VALUE = Integer.MIN_VALUE;

    /** Top of the stack; null when the stack is empty. */
    private Element head;

    /**
     * Pushes an element onto the stack.
     *
     * @param element node to place on top; its next pointer is rewritten.
     */
    @Override
    public void push(Element element) {
        // Always (re)link the element. When the stack is empty, head is null,
        // so this also clears a stale next reference left over from a previous
        // use of the Element instance. The old code skipped setNext() for an
        // empty stack, which could corrupt size()/print() with a reused node.
        element.setNext(head);
        head = element;
    }

    /**
     * @return the top element, or a fresh Element holding DEFAULT_VALUE when
     *         the stack is empty.
     */
    @Override
    public Element top() {
        if (!isEmpty()) {
            return head;
        } else {
            return new Element(DEFAULT_VALUE);
        }
    }

    /**
     * Removes the top element.
     *
     * @return true if an element was removed, false when the stack was empty.
     */
    @Override
    public boolean pop() {
        if (isEmpty()) {
            return false;
        }
        head = head.getNext();
        return true;
    }

    /** Prints all values top-to-bottom, or a message when the stack is empty. */
    @Override
    public void print() {
        var message = new StringBuilder();
        if (!isEmpty()) {
            message.append("print - Stack contains: ");
            var current = head;
            while (current != null) {
                message.append(current.getValue()).append(", ");
                current = current.getNext();
            }
            message.append("top element = ").append(top().getValue());
        } else {
            message.append("print - Stack is empty");
        }
        System.out.println(message);
    }

    /** @return true when the stack holds no elements. */
    @Override
    public boolean isEmpty() {
        return head == null;
    }

    /** @return number of elements, counted by walking the chain (O(n)). */
    @Override
    public int size() {
        var count = 0;
        var element = head;
        while (element != null) {
            count++;
            element = element.getNext();
        }
        return count;
    }
}
|
import { ValueObject } from '../../../core/valueObject';
import { Result } from '../../../core/resolve';
import { Url } from '../../../common/url';
/** Shape of the value wrapped by {@link RemoteUrl}. */
interface RemoteUrlProps {
  value: string;
}

/**
 * Value object representing a validated remote URL.
 * Instances are only obtainable through {@link RemoteUrl.create}.
 */
export class RemoteUrl extends ValueObject<RemoteUrlProps> {
  private constructor(props: RemoteUrlProps) {
    super(props);
  }

  /** The raw URL string. */
  get value(): string {
    return this.props.value;
  }

  /**
   * Validates the raw input as a URL (field name 'remoteUrl', required)
   * and wraps it. Yields `[instance, null]` on success, `[null, error]`
   * when validation fails.
   */
  public static create(json: any): Result<RemoteUrl> {
    const [url, error] = Url.create(json, 'remoteUrl', false);
    if (error) {
      return [null, error];
    }
    return [new RemoteUrl({ value: url!.value }), null];
  }

  /** Serializes this value object back to its DTO representation. */
  public toDto(): any {
    const dto = { remoteUrl: this.props.value };
    return dto;
  }
}
|
/*
* Copyright 2019 Adobe. All rights reserved.
* This file is licensed to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
* OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
import path from 'path';
import fs from 'fs/promises';
/**
 * Dynamically registers every `.js` file in `dir` as a yargs command.
 * Each module's default export is handed to `yargs.command()`.
 *
 * @param {object} yargs yargs instance to register commands on
 * @param {string} dir directory to scan for command modules
 */
export async function loadCommands(yargs, dir) {
  const entries = await fs.readdir(dir);
  const scripts = entries.filter((name) => name.endsWith('.js'));
  for (const script of scripts) {
    // eslint-disable-next-line no-await-in-loop
    const module = await import(path.resolve(dir, script));
    yargs.command(module.default);
  }
}
|
<filename>src/tests/components/modals/EditRoom.test.js
import React from 'react'
import {shallow} from 'enzyme'
import {EditRoom} from '../../../components/modals/EditRoom'
// Snapshot test: shallow-renders the EditRoom modal with no props and
// checks the rendered element tree against the stored snapshot.
test(`Render 'Edit Room' Modal`, () => {
    const wrapper = shallow(<EditRoom/>)
    expect(wrapper.getElement()).toMatchSnapshot()
})
docker run -it --rm -v $PWD:/tmp -w /tmp skywills87/card-detect-base python ./id_card_detection_image.py
docker run -it --rm \
-v $PWD:/tmp \
-v $PWD:/tmp/source \
-v $PWD:/tmp/target/ \
-w /tmp \
skywills87/card-detect-base python ./cropped.py -s /tmp/source -t /tmp/target
docker run -it --rm \
-v $PWD:/tmp \
-v /Users/williamkhoo/Desktop/projects/main/mxw/kyc/edge_detect/cn:/tmp/source \
-v /Users/williamkhoo/Desktop/projects/main/mxw/kyc/cn_cropped2:/tmp/target/ \
-w /tmp \
skywills87/card-detect-base python ./cropped.py -s /tmp/source -t /tmp/target
docker build -f gpu.dockerfile -t skywills87/opencv-cuda-tensorflow:gpu-jupyter .
docker run -it --rm -v $(realpath ~/notebooks):/tf/notebooks -p 8888:8888 skywills87/opencv-tensorflow:jupyter
docker run -it --rm -v $(realpath ~/notebooks):/tf/notebooks -p 8888:8888 opencv-tensorflow:jupyter
docker run -it --rm --gpus all -v $(realpath ~/notebooks):/tf/notebooks -p 8888:8888 skywills87/opencv-tensorflow:gpu-jupyter
https://github.com/datamachines/cuda_tensorflow_opencv
docker run -it --rm --gpus all -v $(realpath ~/notebooks):/dmc -p 8888:8888 skywills87/opencv-cudnn-tensorflow:gpu-jupyter |
#!/bin/bash
# upload.sh — send a file over a serial line with the XMODEM `sx` tool.
# Usage: ./upload.sh <filename> <serial>  (example: ./upload.sh image.bin /dev/ttyUSB0)

if [ "$#" -ne 2 ]; then
    echo "Usage: $0 <filename> <serial> (example: ./upload.sh image.bin /dev/ttyUSB0)"
    exit 1
fi

# Configure the serial line: 115200 baud, 8 data bits, no parity, one stop
# bit, XON/XOFF flow control disabled. Arguments are quoted so device paths
# and filenames containing spaces work (the original left them unquoted).
sudo stty -F "$2" 115200 cs8 -parenb -cstopb -ixoff

# sx reads protocol responses from the device on stdin and writes protocol
# data to it on stdout, transferring the file given as its argument.
sx "$1" < "$2" > "$2"
|
#!/bin/bash
# Live diagnostics loop: every 5 seconds print the top memory consumers,
# the latest generation_request row from Postgres, and the newest DEBUG
# log lines, then clear the screen and repeat.
while true; do
    echo 'Diagnostics as at ' $(date)
    echo 'top'
    # Batch-mode top sorted by memory; keep the %MEM, TIME+ and COMMAND
    # columns for the four busiest processes.
    top -bn1 -o +%MEM | head -n10 | tail -n4 | awk '{ printf("%-8s %-8s %-8s\n", $9, $10, $12); }'
    echo
    echo 'db'
    # Extract the DB password from SQLALCHEMY_DATABASE_URI in .env:
    # the text between the URI's second ':' and the '@'.
    pass=$(cat .env | grep SQLALCHEMY_DATABASE_URI | awk -F':' '{ print $3 }' | awk -F'@' '{ print $1 }')
    # Extra psql flags may be passed through via "$@".
    PGPASSWORD=$pass psql --user=vc --host=127.0.0.1 vc "$@" -A -c "
SELECT id, started, completed, failed, steps_completed, steps_total
FROM generation_request WHERE id = (SELECT MAX(id) FROM generation_request)
"
    echo
    echo 'logs'
    grep "DEBUG.*DEBUG" log/vc.* | tail -n10
    # Sleep while the output is visible, then wipe for the next iteration.
    sleep 5
    clear
done
|
#!/bin/bash
# Builds the kubearmor/kubearmor-cos-auditd Docker image from this script's
# own directory, removing previously built images of the same name first.

# Resolve the directory containing this script and work from there;
# bail out if the cd fails rather than building from the wrong place.
AUDITD_PATH=$(dirname "$(realpath "$0")")
cd "$AUDITD_PATH" || exit 1

# check version (first argument; defaults to "latest")
VERSION=latest
if [ -n "$1" ]; then
    VERSION=$1
fi

# remove old images
docker images | grep kubearmor/kubearmor-cos-auditd | awk '{print $3}' | xargs -I {} docker rmi -f {} 2> /dev/null
echo "[INFO] Removed existing kubearmor/kubearmor-cos-auditd images"

# build a new image; branch directly on docker's exit status instead of
# the non-portable `[ $? == 0 ]` test used before
echo "[INFO] Building kubearmor/kubearmor-cos-auditd:$VERSION"
if docker build -t "kubearmor/kubearmor-cos-auditd:$VERSION" .; then
    echo "[PASSED] Built kubearmor/kubearmor-cos-auditd:$VERSION"
    exit 0
else
    echo "[FAILED] Failed to build kubearmor/kubearmor-cos-auditd:$VERSION"
    exit 1
fi
|
#!/usr/bin/env bash
# Runs the step1/hello-service image detached as container "hello-service-2"
# on the default network, publishing container port 8080 on host port 8082.
docker run --name hello-service-2 -p 8082:8080 --net=default -d step1/hello-service
|
#!/bin/bash
# Downloads the ACL IMDB sentiment dataset, normalizes the raw review text,
# trains doc2vecc document embeddings, and emits liblinear-format
# train/test files (label 1 for the first 12500 docs of each half, -1 after).

# Lowercase "$1" and pad punctuation with spaces, writing the result to "$1"-norm.
normalize_text() {
awk '{print tolower($0);}' < "$1" | LC_ALL=C sed -e 's/\./ \. /g' -e 's/<br \/>/ /g' -e 's/"/ " /g' \
-e 's/,/ , /g' -e 's/(/ ( /g' -e 's/)/ ) /g' -e 's/\!/ \! /g' -e 's/\?/ \? /g' \
-e 's/\;/ \; /g' -e 's/\:/ \: /g' > "$1"-norm
}

wget http://ai.stanford.edu/~amaas/data/sentiment/aclImdb_v1.tar.gz -O aclImdb_v1.tar.gz
tar -xf aclImdb_v1.tar.gz
## normalize the data
cd aclImdb || (echo 'aclImdb directory not found' && exit 1)
for j in train/pos train/neg test/pos test/neg train/unsup; do
rm -f temp
rm -f $j/norm.txt
# Concatenate every review in the split into `temp`, one document per line.
for i in "$j"/*; do cat "$i" >> temp; echo '' >> temp; done
normalize_text temp
mv temp-norm $j/norm.txt
done
cat train/pos/norm.txt train/neg/norm.txt train/unsup/norm.txt test/pos/norm.txt test/neg/norm.txt > alldata.txt
## shuffle the training set
shuf alldata.txt > alldata-shuf.txt
cd ..
# Build doc2vecc (Release) before training.
cd build || (echo 'build directory not found' && exit 1)
cmake -DCMAKE_BUILD_TYPE=Release .. && cmake --build .
cd ..
# this script trains on all the data (train/test/unsup), you could also remove the test documents from the learning of word/document representation
time build/doc2vecc -train ./aclImdb/alldata-shuf.txt -word wordvectors.txt -output docvectors.txt -cbow 1 -size 100 -window 10 -negative 5 -hs 0 -sample 0 -binary 0 -iter 20 -min-count 10 -test ./aclImdb/alldata.txt -sentence-sample 0.1 -save-vocab alldata.vocab
# Convert the first/last 25000 document vectors to labeled SVM-light rows:
# index:value pairs, label 1 for the first 12500 of each block, -1 after.
head -n 25000 docvectors.txt | awk 'BEGIN{a=0;}{if (a<12500) printf "1 "; else printf "-1 "; for (b=1; b<=NF; b++) printf b ":" $(b) " "; print ""; a++;}' > train.txt
tail -n 25000 docvectors.txt | awk 'BEGIN{a=0;}{if (a<12500) printf "1 "; else printf "-1 "; for (b=1; b<=NF; b++) printf b ":" $(b) " "; print ""; a++;}' > test.txt
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.iosPaperOutline = void 0;
var iosPaperOutline = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M112,64v16v320h16V80h304v337.143c0,8.205-6.652,14.857-14.857,14.857H94.857C86.652,432,80,425.348,80,417.143V128h16v-16\r\n\t\tH64v305.143C64,434.157,77.843,448,94.857,448h322.285C434.157,448,448,434.157,448,417.143V64H112z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M112,64v16v320h16V80h304v337.143c0,8.205-6.652,14.857-14.857,14.857H94.857C86.652,432,80,425.348,80,417.143V128h16v-16\r\n\t\tH64v305.143C64,434.157,77.843,448,94.857,448h322.285C434.157,448,448,434.157,448,417.143V64H112z"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "160",
"y": "112",
"width": "128",
"height": "16"
},
"children": [{
"name": "rect",
"attribs": {
"x": "160",
"y": "112",
"width": "128",
"height": "16"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "160",
"y": "192",
"width": "240",
"height": "16"
},
"children": [{
"name": "rect",
"attribs": {
"x": "160",
"y": "192",
"width": "240",
"height": "16"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "160",
"y": "272",
"width": "192",
"height": "16"
},
"children": [{
"name": "rect",
"attribs": {
"x": "160",
"y": "272",
"width": "192",
"height": "16"
},
"children": []
}]
}, {
"name": "rect",
"attribs": {
"x": "160",
"y": "352",
"width": "240",
"height": "16"
},
"children": [{
"name": "rect",
"attribs": {
"x": "160",
"y": "352",
"width": "240",
"height": "16"
},
"children": []
}]
}]
}]
};
exports.iosPaperOutline = iosPaperOutline; |
# Teardown runs after each test; `set -u` makes expanding the never-assigned
# $unset_parameter an error, so teardown itself fails deliberately.
teardown() {
set -u
echo "$unset_parameter"
}

# The test body is a no-op (`:`): the suite exercises how bats reports a
# failure that originates in teardown rather than in the test itself.
@test "referencing unset parameter fails in teardown" {
:
}
|
#!/bin/bash
# Counts the whitespace-separated words in ~/.bashrc and reports the
# character length of each one.
count=0
# Word splitting of the unquoted $(cat ...) is intentional here: it is what
# breaks the file into words.
for i in $(cat ~/.bashrc); do
    count=$((count+1))
    # ${#i} gives the word's length directly, replacing the original
    # `echo -n $i | wc -c`, which forked a wc process per word and could
    # mangle words through unquoted expansion.
    echo "Word $count ($i) contains ${#i} characters"
done
|
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lettuce.core;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.Closeable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import javax.inject.Inject;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import io.lettuce.core.api.StatefulRedisConnection;
import io.lettuce.core.api.sync.RedisCommands;
import io.lettuce.core.codec.RedisCodec;
import io.lettuce.core.codec.StringCodec;
import io.lettuce.core.protocol.ProtocolVersion;
import io.lettuce.core.pubsub.RedisPubSubAdapter;
import io.lettuce.core.pubsub.StatefulRedisPubSubConnection;
import io.lettuce.test.LettuceExtension;
import io.lettuce.test.Wait;
import io.lettuce.test.condition.EnabledOnCommand;
/**
* Integration tests for server-side assisted cache invalidation.
*
* @author <NAME>
*/
@ExtendWith(LettuceExtension.class)
@EnabledOnCommand("ACL")
public class ClientsideCachingIntegrationTests extends TestSupport {
// Client under test; injected once per class by LettuceExtension.
private final RedisClient redisClient;

/**
 * @param redisClient injected Redis client shared by all tests here.
 */
@Inject
public ClientsideCachingIntegrationTests(RedisClient redisClient) {
    this.redisClient = redisClient;
}
/**
 * Flush the database before each test so invalidation counts and cached
 * keys start from a clean slate.
 */
@BeforeEach
void setUp() {
    try (StatefulRedisConnection<String, String> connection = redisClient.connect()) {
        connection.sync().flushdb();
    }
}
/**
 * RESP2 client-side caching: invalidations must be redirected to a
 * separate Pub/Sub connection (CLIENT TRACKING ... REDIRECT) and arrive
 * as messages on the "__redis__:invalidate" channel.
 */
@Test
void clientCachingResp2() {
    ClientOptions resp2 = ClientOptions.builder().protocolVersion(ProtocolVersion.RESP2).build();
    redisClient.setOptions(resp2);
    StatefulRedisConnection<String, String> data = redisClient.connect();
    RedisCommands<String, String> commands = data.sync();
    StatefulRedisPubSubConnection<String, String> pubSub = redisClient.connectPubSub();
    List<String> invalidations = new CopyOnWriteArrayList<>();
    // Redirect invalidation notifications for keys read on `data` to the
    // pub/sub connection's client id.
    commands.clientTracking(TrackingArgs.Builder.enabled().redirect(pubSub.sync().clientId()));
    pubSub.addListener(new RedisPubSubAdapter<String, String>() {
        @Override
        public void message(String channel, String message) {
            if (channel.equals("__redis__:invalidate")) {
                invalidations.add(message);
            }
        }
    });
    pubSub.sync().subscribe("__redis__:invalidate");
    // Reading the keys registers tracking interest; no invalidations yet.
    commands.get("key1");
    commands.get("key2");
    assertThat(invalidations).isEmpty();
    // Writing the tracked keys triggers invalidation messages.
    Map<String, String> keys = new HashMap<>();
    keys.put("key1", "value1");
    keys.put("key2", "value2");
    commands.mset(keys);
    Wait.untilEquals(2, invalidations::size).waitOrTimeout();
    assertThat(invalidations).contains("key1", "key2");
    data.close();
    pubSub.close();
}
/**
 * RESP3 client-side caching: invalidations arrive as push messages on the
 * data connection itself; no separate Pub/Sub connection is required.
 */
@Test
void clientCachingResp3() {
    // NOTE(review): local is named resp2 but configures RESP3.
    ClientOptions resp2 = ClientOptions.builder().protocolVersion(ProtocolVersion.RESP3).build();
    redisClient.setOptions(resp2);
    StatefulRedisConnection<String, String> data = redisClient.connect();
    RedisCommands<String, String> commands = data.sync();
    List<String> invalidations = new CopyOnWriteArrayList<>();
    commands.clientTracking(TrackingArgs.Builder.enabled());
    // Push messages of type "invalidate" carry the invalidated keys as the
    // second element of their decoded content.
    data.addListener(message -> {
        if (message.getType().equals("invalidate")) {
            invalidations.addAll((List) message.getContent(StringCodec.UTF8::decodeKey).get(1));
        }
    });
    commands.get("key1");
    commands.get("key2");
    assertThat(invalidations).isEmpty();
    Map<String, String> keys = new HashMap<>();
    keys.put("key1", "value1");
    keys.put("key2", "value2");
    commands.mset(keys);
    Wait.untilEquals(2, invalidations::size).waitOrTimeout();
    assertThat(invalidations).contains("key1", "key2");
    data.close();
}
/**
 * Read-through: a miss in the client-side map is resolved from Redis and
 * the fetched value is stored in the map afterwards.
 */
@Test
void serverAssistedCachingShouldFetchValueFromRedis() {
    Map<String, String> clientCache = new ConcurrentHashMap<>();
    // A second connection acts as the external writer.
    StatefulRedisConnection<String, String> otherParty = redisClient.connect();
    RedisCommands<String, String> commands = otherParty.sync();
    commands.set(key, value);
    StatefulRedisConnection<String, String> connection = redisClient.connect();
    CacheFrontend<String, String> frontend = ClientSideCaching.enable(CacheAccessor.forMap(clientCache), connection,
    TrackingArgs.Builder.enabled().noloop());
    assertThat(clientCache).isEmpty();
    String shouldExist = frontend.get(key);
    assertThat(shouldExist).isNotNull();
    // The read-through populated the first tier.
    assertThat(clientCache).hasSize(1);
    otherParty.close();
    frontend.close();
}
/**
 * Expiry in Redis must invalidate the corresponding client-side entry.
 */
@Test
void serverAssistedCachingShouldExpireValueFromRedis() throws InterruptedException {
    Map<String, String> clientCache = new ConcurrentHashMap<>();
    StatefulRedisConnection<String, String> otherParty = redisClient.connect();
    RedisCommands<String, String> commands = otherParty.sync();
    StatefulRedisConnection<String, String> connection = redisClient.connect();
    CacheFrontend<String, String> frontend = ClientSideCaching.enable(CacheAccessor.forMap(clientCache), connection,
    TrackingArgs.Builder.enabled());
    // make sure value exists in Redis
    // client-side cache is empty
    commands.set(key, value);
    // Read-through into Redis
    String cachedValue = frontend.get(key);
    assertThat(cachedValue).isNotNull();
    // client-side cache holds the same value
    assertThat(clientCache).hasSize(1);
    // now, the key expires
    commands.pexpire(key, 1);
    // a while later
    Thread.sleep(200);
    // the expiration reflects in the client-side cache
    assertThat(clientCache).isEmpty();
    assertThat(frontend.get(key)).isNull();
    otherParty.close();
    frontend.close();
}
/**
 * When neither the client-side cache nor Redis holds the key, the value
 * loader supplies it; the loaded value is written back to both tiers and
 * an external write afterwards invalidates the client-side entry.
 */
@Test
void serverAssistedCachingShouldUseValueLoader() throws InterruptedException {
    Map<String, String> clientCache = new ConcurrentHashMap<>();
    StatefulRedisConnection<String, String> otherParty = redisClient.connect();
    RedisCommands<String, String> commands = otherParty.sync();
    StatefulRedisConnection<String, String> connection = redisClient.connect();
    CacheFrontend<String, String> frontend = ClientSideCaching.enable(CacheAccessor.forMap(clientCache), connection,
    TrackingArgs.Builder.enabled().noloop());
    String shouldLoad = frontend.get(key, () -> "myvalue");
    assertThat(shouldLoad).isEqualTo("myvalue");
    assertThat(clientCache).hasSize(1);
    assertThat(commands.get(key)).isEqualTo("myvalue");
    // External write invalidates the client-side entry shortly after.
    commands.set(key, value);
    Thread.sleep(100);
    assertThat(clientCache).isEmpty();
    otherParty.close();
    frontend.close();
}
/**
 * Interface defining a cache frontend for common cache retrieval operations using Redis server-side caching
 * assistance.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 */
interface CacheFrontend<K, V> extends Closeable {
    /**
     * Return the value to which this cache maps the specified key.
     * <p>
     * Note: This method does not allow for differentiating between a cached {@literal null} value and no cache entry found
     * at all.
     *
     * @param key the key whose associated value is to be returned.
     * @return the value to which this cache maps the specified key (which may be {@literal null} itself), or also
     * {@literal null} if the cache contains no mapping for this key.
     * @see CacheAccessor#get(Object)
     * @see RedisCache#get(Object)
     */
    V get(K key);
    /**
     * Return the value to which this cache maps the specified key, obtaining that value from {@code valueLoader} if
     * necessary. This method provides a simple substitute for the conventional "if cached, return; otherwise create, cache
     * and return" pattern.
     *
     * If the {@code valueLoader} throws an exception, it is wrapped in a {@link ValueRetrievalException}
     *
     * @param key the key whose associated value is to be returned
     * @param valueLoader the value loader that is used to obtain the value if the client-side cache and Redis cache are not
     * associated with a value.
     * @return the value to which this cache maps the specified key.
     * @throws ValueRetrievalException if the {@code valueLoader} throws an exception or returns a {@literal null} value.
     */
    V get(K key, Callable<V> valueLoader);
    /**
     * Closes this cache frontend and releases any system resources associated with it. If the frontend is already closed
     * then invoking this method has no effect.
     */
    @Override
    void close();
}
/**
 * Interface defining access to the client-side cache. The cache must support value retrieval, value update (for Redis Cache
 * read-through so values obtained from Redis get written into the client-side cache) and removal (used for invalidations).
 *
 * @param <K> Key type.
 * @param <V> Value type.
 */
interface CacheAccessor<K, V> {
    /**
     * Obtain a {@link CacheAccessor} for a cache object implementing {@link Map}. The returned accessor delegates each
     * operation directly to the given map (see {@link MapCacheAccessor}).
     *
     * @param map the cache.
     * @param <K> Key type.
     * @param <V> Value type.
     * @return a {@link CacheAccessor} backed by a {@link Map} implementation.
     */
    static <K, V> CacheAccessor<K, V> forMap(Map<K, V> map) {
        return new MapCacheAccessor<>(map);
    }
    /**
     * Return the value to which this cache maps the specified key.
     * <p>
     * Note: This method does not allow for differentiating between a cached {@literal null} value and no cache entry found
     * at all.
     *
     * @param key the key whose associated value is to be returned.
     * @return the value to which this cache maps the specified key (which may be {@literal null} itself), or also
     * {@literal null} if the cache contains no mapping for this key.
     */
    V get(K key);
    /**
     * Associate the specified value with the specified key in this cache.
     * <p>
     * If the cache previously contained a mapping for this key, the old value is replaced by the specified value.
     * <p>
     * Actual registration may be performed in an asynchronous or deferred fashion, with subsequent lookups possibly not
     * seeing the entry yet. This may for example be the case with transactional cache decorators.
     *
     * @param key the key with which the specified value is to be associated.
     * @param value the value to be associated with the specified key.
     */
    void put(K key, V value);
    /**
     * Evict the mapping for this key from this cache if it is present.
     * <p>
     * Actual eviction may be performed in an asynchronous or deferred fashion, with subsequent lookups possibly still
     * seeing the entry.
     *
     * @param key the key whose mapping is to be removed from the cache.
     */
    void evict(K key);
}
/**
 * {@link CacheAccessor} implementation for {@link Map}-based cache implementations.
 * Thin adapter: every operation delegates directly to the backing map.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 */
static class MapCacheAccessor<K, V> implements CacheAccessor<K, V> {
    private final Map<K, V> map;
    MapCacheAccessor(Map<K, V> map) {
        this.map = map;
    }
    @Override
    public V get(K key) {
        return map.get(key);
    }
    @Override
    public void put(K key, V value) {
        map.put(key, value);
    }
    @Override
    public void evict(K key) {
        map.remove(key);
    }
}
/**
 * Interface defining common Redis Cache operations.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 */
interface RedisCache<K, V> {
    /**
     * Retrieve a {@code value} from Redis for the given cache {@code key}.
     *
     * @param key the key whose associated value is to be returned.
     * @return the value to which this Redis cache value maps the specified key (which may be {@literal null} itself), or
     * also {@literal null} if the Redis cache contains no mapping for this key.
     */
    V get(K key);
    /**
     * Associate the specified value with the specified key in this Redis cache.
     *
     * @param key the key with which the specified value is to be associated.
     * @param value the value to be associated with the specified key.
     */
    void put(K key, V value);
    /**
     * Register an invalidation {@code listener} that is notified if a key in this Redis cache expires or gets modified.
     *
     * @param listener the listener to notify.
     */
    void addInvalidationListener(java.util.function.Consumer<? super K> listener);
    /**
     * Closes this Redis cache and releases any connections associated with it. If the cache is already closed then invoking
     * this method has no effect.
     */
    void close();
}
/**
 * Utility to provide server-side assistance for client-side caches. This is a {@link CacheFrontend} that represents a
 * two-level cache backed by a client-side and a Redis cache.
 *
 * @param <K> Key type.
 * @param <V> Value type.
 */
static class ClientSideCaching<K, V> implements CacheFrontend<K, V> {
    private final CacheAccessor<K, V> cacheAccessor;
    private final RedisCache<K, V> redisCache;
    // Listeners notified for each invalidated key; the cache accessor's
    // evict is registered here so invalidations clear the first tier.
    private final List<java.util.function.Consumer<K>> invalidationListeners = new CopyOnWriteArrayList<>();
    private ClientSideCaching(CacheAccessor<K, V> cacheAccessor, RedisCache<K, V> redisCache) {
        this.cacheAccessor = cacheAccessor;
        this.redisCache = redisCache;
    }
    /**
     * Enable server-assisted Client side caching for the given {@link CacheAccessor} and {@link StatefulRedisConnection}.
     * <p>
     * Note that the {@link CacheFrontend} is associated with a Redis connection. Make sure to {@link CacheFrontend#close()
     * close} the frontend object to release the Redis connection after use.
     *
     * @param cacheAccessor the accessor used to interact with the client-side cache.
     * @param connection the Redis connection to use. The connection will be associated with {@link CacheFrontend} and must
     * be closed through {@link CacheFrontend#close()}.
     * @param tracking the tracking parameters.
     * @param <K> Key type.
     * @param <V> Value type.
     * @return the {@link CacheFrontend} for value retrieval.
     */
    public static <K, V> CacheFrontend<K, V> enable(CacheAccessor<K, V> cacheAccessor,
    StatefulRedisConnection<K, V> connection, TrackingArgs tracking) {
        connection.sync().clientTracking(tracking);
        return create(cacheAccessor, connection);
    }
    /**
     * Create a server-assisted Client side caching for the given {@link CacheAccessor} and {@link StatefulRedisConnection}.
     * This method expects that client key tracking is already configured.
     * <p>
     * Note that the {@link CacheFrontend} is associated with a Redis connection. Make sure to {@link CacheFrontend#close()
     * close} the frontend object to release the Redis connection after use.
     *
     * @param cacheAccessor the accessor used to interact with the client-side cache.
     * @param connection the Redis connection to use. The connection will be associated with {@link CacheFrontend} and must
     * be closed through {@link CacheFrontend#close()}.
     * @param <K> Key type.
     * @param <V> Value type.
     * @return the {@link CacheFrontend} for value retrieval.
     */
    public static <K, V> CacheFrontend<K, V> create(CacheAccessor<K, V> cacheAccessor,
    StatefulRedisConnection<K, V> connection) {
        // NOTE(review): assumes the connection is a StatefulRedisConnectionImpl;
        // other StatefulRedisConnection implementations would fail this cast.
        StatefulRedisConnectionImpl<K, V> connectionImpl = (StatefulRedisConnectionImpl) connection;
        RedisCodec<K, V> codec = connectionImpl.getCodec();
        RedisCache<K, V> redisCache = new DefaultRedisCache<>(connection, codec);
        return create(cacheAccessor, redisCache);
    }
    private static <K, V> CacheFrontend<K, V> create(CacheAccessor<K, V> cacheAccessor, RedisCache<K, V> redisCache) {
        ClientSideCaching<K, V> caching = new ClientSideCaching<>(cacheAccessor, redisCache);
        // Wire Redis invalidation messages through to registered listeners,
        // and evict invalidated keys from the client-side cache.
        redisCache.addInvalidationListener(caching::notifyInvalidate);
        caching.addInvalidationListener(cacheAccessor::evict);
        return caching;
    }
    // Fan a single invalidated key out to all registered listeners.
    private void notifyInvalidate(K key) {
        for (java.util.function.Consumer<K> invalidationListener : invalidationListeners) {
            invalidationListener.accept(key);
        }
    }
    @Override
    public void close() {
        redisCache.close();
    }
    public void addInvalidationListener(java.util.function.Consumer<K> invalidationListener) {
        invalidationListeners.add(invalidationListener);
    }
    @Override
    public V get(K key) {
        // Two-tier read-through: client-side cache first, then Redis.
        // A cached null cannot be distinguished from a miss (see interface doc).
        V value = cacheAccessor.get(key);
        if (value == null) {
            value = redisCache.get(key);
            if (value != null) {
                cacheAccessor.put(key, value);
            }
        }
        return value;
    }
    @Override
    public V get(K key, Callable<V> valueLoader) {
        V value = cacheAccessor.get(key);
        if (value == null) {
            value = redisCache.get(key);
            if (value == null) {
                try {
                    value = valueLoader.call();
                } catch (Exception e) {
                    throw new ValueRetrievalException(
                    String.format("Value loader %s failed with an exception for key %s", valueLoader, key), e);
                }
                if (value == null) {
                    throw new ValueRetrievalException(
                    String.format("Value loader %s returned a null value for key %s", valueLoader, key));
                }
                redisCache.put(key, value);
                // register interest in key
                redisCache.get(key);
            }
            cacheAccessor.put(key, value);
        }
        return value;
    }
    /**
     * Default {@link RedisCache} implementation using {@code GET} and {@code SET} operations to map cache values to
     * top-level keys.
     *
     * @param <K> Key type.
     * @param <V> Value type.
     */
    static class DefaultRedisCache<K, V> implements RedisCache<K, V> {
        private final StatefulRedisConnection<K, V> connection;
        private final RedisCodec<K, V> codec;
        public DefaultRedisCache(StatefulRedisConnection<K, V> connection, RedisCodec<K, V> codec) {
            this.connection = connection;
            this.codec = codec;
        }
        @Override
        public V get(K key) {
            return connection.sync().get(key);
        }
        @Override
        public void put(K key, V value) {
            connection.sync().set(key, value);
        }
        @Override
        public void addInvalidationListener(java.util.function.Consumer<? super K> listener) {
            connection.addListener(message -> {
                // Push messages of type "invalidate" carry the list of
                // invalidated keys as their second decoded content element.
                if (message.getType().equals("invalidate")) {
                    List<Object> content = message.getContent(codec::decodeKey);
                    List<K> keys = (List<K>) content.get(1);
                    keys.forEach(listener);
                }
            });
        }
        @Override
        public void close() {
            connection.close();
        }
    }
}
/**
 * Wrapper exception to be thrown from {@link CacheFrontend#get(Object, Callable)} in case of the value loader callback
 * failing with an exception. Extends {@link RedisException} so callers can handle it as a client error.
 */
@SuppressWarnings("serial")
static class ValueRetrievalException extends RedisException {
    /**
     * Create a {@code ValueRetrievalException} with the specified detail message.
     *
     * @param msg the detail message.
     */
    public ValueRetrievalException(String msg) {
        super(msg);
    }
    /**
     * Create a {@code ValueRetrievalException} with the specified detail message and nested exception.
     *
     * @param msg the detail message.
     * @param cause the nested exception.
     */
    public ValueRetrievalException(String msg, Throwable cause) {
        super(msg, cause);
    }
}
}
|
<reponame>DevopsChina/conf
import '@polymer/iron-icon';
import { html, PolymerElement } from '@polymer/polymer';
import 'plastic-image';
import { ReduxMixin } from '../mixins/redux-mixin';
import { dialogsActions, sessionsActions, toastActions } from '../redux/actions';
import { DIALOGS } from '../redux/constants';
import { store } from '../redux/store';
import { getVariableColor, toggleQueryParam } from '../utils/functions';
import './shared-styles';
import './text-truncate';
class SessionElement extends ReduxMixin(PolymerElement) {
constructor() {
super(...arguments);
this.user = {};
this.session = {};
this.featuredSessions = {};
}
static get template() {
return html `
<style include="shared-styles flex flex-alignment positioning">
:host {
display: block;
background-color: var(--primary-background-color);
border-bottom: 1px solid var(--border-light-color);
height: 100%;
border-radius: var(--border-radius);
}
.session {
height: 100%;
color: var(--primary-text-color);
overflow: hidden;
}
.session:hover {
background-color: var(--additional-background-color);
}
.session-icon {
--iron-icon-width: 88px;
--iron-icon-height: 88px;
--iron-icon-fill-color: var(--border-light-color);
position: absolute;
right: 40px;
bottom: -4px;
}
.session-header,
.session-content,
.session-footer {
padding: 16px;
z-index: 1;
}
.session-header {
padding-bottom: 8px;
}
.language {
margin-left: 8px;
font-size: 12px;
text-transform: uppercase;
color: var(--secondary-text-color);
}
.session-content {
padding-top: 0;
padding-bottom: 40px;
}
.bookmark-session,
.feedback-action {
color: var(--secondary-text-color);
}
.session[featured] .bookmark-session {
color: var(--default-primary-color);
}
.bookmark-session:hover,
.feedback-action:hover {
color: var(--default-primary-color);
}
.session-title {
font-size: 20px;
line-height: 1.2;
}
.session-description {
margin-top: 8px;
}
.session-meta {
margin: 0;
padding: 0;
font-size: 12px;
color: var(--secondary-text-color);
}
.session-footer {
font-size: 14px;
}
.speakers {
margin-top: 10px;
}
.speaker:not(:last-of-type) {
padding-bottom: 10px;
}
.speaker-photo {
margin-right: 12px;
width: 32px;
height: 32px;
background-color: var(--secondary-background-color);
border-radius: 50%;
overflow: hidden;
transform: translateZ(0);
}
.speaker-name {
margin-bottom: 4px;
line-height: 1.2;
}
.speaker-title {
font-size: 12px;
line-height: 1;
}
@media (min-width: 640px) {
:host {
border: 1px solid var(--border-light-color);
border-top: 0;
}
}
@media (min-width: 812px) {
:host {
border: 1px solid var(--border-light-color);
}
}
</style>
<a
class="session"
href$="/schedule/[[dayName]]?[[toggleQueryParam(queryParams, 'sessionId', session.id)]]"
featured$="[[isFeatured]]"
layout
vertical
relative
>
<iron-icon class="session-icon" icon="hoverboard:[[session.icon]]"></iron-icon>
<div class="session-header" layout horizontal justified>
<div flex>
<h3 class="session-title">[[session.title]]</h3>
<text-truncate lines="3">
<div class="session-description">[[summary]]</div>
</text-truncate>
</div>
<span class="language">[[slice(session.language, 2)]]</span>
</div>
<div class="session-content" flex layout horizontal justified>
<div class="session-meta">
<div hidden$="[[!session.complexity]]">[[session.complexity]]</div>
</div>
<div class="session-actions">
<iron-icon
class="feedback-action"
hidden="[[!_acceptingFeedback()]]"
icon="hoverboard:insert-comment"
on-click="_toggleFeedback"
></iron-icon>
<iron-icon
class="bookmark-session"
hidden="[[_acceptingFeedback()]]"
icon="hoverboard:[[_getFeaturedSessionIcon(featuredSessions, session.id)]]"
on-click="_toggleFeaturedSession"
></iron-icon>
</div>
</div>
<div class="session-footer">
<div layout horizontal justified center-aligned>
<div class="session-meta" flex>
<span hidden$="[[!session.duration.hh]]">
[[session.duration.hh]] hour[[_getEnding(session.duration.hh)]]
</span>
<span hidden$="[[!session.duration.mm]]">
[[session.duration.mm]] min[[_getEnding(session.duration.mm)]]
</span>
</div>
<div hidden$="[[!session.tags.length]]">
<template is="dom-repeat" items="[[session.tags]]" as="tag">
<span class="tag" style$="color: [[getVariableColor(tag)]]">[[tag]]</span>
</template>
</div>
</div>
<div class="speakers" hidden$="[[!session.speakers.length]]">
<template is="dom-repeat" items="[[session.speakers]]" as="speaker">
<div class="speaker" layout horizontal center>
<plastic-image
class="speaker-photo"
srcset="[[speaker.photoUrl]]"
sizing="cover"
lazy-load
preload
fade
></plastic-image>
<div class="speaker-details" flex>
<div class="speaker-name">[[speaker.name]]</div>
<div class="speaker-title">[[_join(speaker.company, speaker.country)]]</div>
</div>
</div>
</template>
</div>
</div>
</a>
`;
}
  // Tag name under which this element is registered.
  static get is() {
    return 'session-element';
  }
  // Polymer property declarations; `computed` bindings re-run whenever any
  // of their listed dependencies change.
  static get properties() {
    return {
      user: Object,
      session: Object,
      // Map of sessionId -> truthy when the current user bookmarked it.
      featuredSessions: Object,
      queryParams: String,
      dayName: String,
      sessionColor: {
        type: String,
        computed: 'getVariableColor(session.mainTag)',
      },
      isFeatured: {
        type: String,
        computed: '_isFeatured(featuredSessions, session.id)',
      },
      summary: {
        type: String,
        computed: '_summary(session.description)',
      },
    };
  }
  // Truthy when the session is bookmarked.  Returns the raw map entry (which
  // may be undefined/null rather than a strict boolean) or false when either
  // argument is missing.
  _isFeatured(featuredSessions, sessionId) {
    if (!featuredSessions || !sessionId)
      return false;
    return featuredSessions[sessionId];
  }
_getEnding(number) {
return number > 1 ? 's' : '';
}
_summary(description = '') {
const indexes = [
description.indexOf('\n'),
description.indexOf('<br'),
description.length,
].filter((index) => index > 0);
return description.slice(0, Math.min(...indexes));
}
  // Icon name for the bookmark toggle.  The parameters are unused in the
  // body but make Polymer recompute this binding whenever featuredSessions
  // changes; the state itself is read from the computed `isFeatured`.
  _getFeaturedSessionIcon(featuredSessions, sessionId) {
    return this.isFeatured ? 'bookmark-check' : 'bookmark-plus';
  }
  // Bookmark/unbookmark this session for the signed-in user; prompts for
  // sign-in otherwise.  Strings like '{$ ... $}' are build-time i18n
  // placeholders substituted before deployment.
  _toggleFeaturedSession(event) {
    event.preventDefault();
    event.stopPropagation();
    if (!this.user.signedIn) {
      toastActions.showToast({
        message: '{$ schedule.saveSessionsSignedOut $}',
        action: {
          title: 'Sign in',
          callback: () => {
            dialogsActions.openDialog(DIALOGS.SIGNIN);
          },
        },
      });
      return;
    }
    // A null entry (rather than a deleted key) signals removal upstream.
    const sessions = Object.assign({}, this.featuredSessions, {
      [this.session.id]: !this.featuredSessions[this.session.id] ? true : null,
    });
    store.dispatch(sessionsActions.setUserFeaturedSessions(this.user.uid, sessions));
  }
  // Open the feedback dialog for this session.
  _toggleFeedback(event) {
    event.preventDefault();
    event.stopPropagation();
    dialogsActions.openDialog(DIALOGS.FEEDBACK, this.session);
  }
  // Feedback is accepted from the session's start time until one week after.
  // '{$ timezoneOffset $}' is a build-time constant (minutes); note the
  // parseInt call has no explicit radix.
  _acceptingFeedback() {
    const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000;
    const ONE_MINUTE_MS = 60 * 1000;
    const now = new Date();
    // Shift the session's venue-local start time into the viewer's timezone.
    const convertedTimezoneDate = new Date(new Date(`${this.session.day} ${this.session.startTime}`).getTime() +
      (parseInt('{$ timezoneOffset $}') - now.getTimezoneOffset()) * ONE_MINUTE_MS);
    const diff = now.getTime() - convertedTimezoneDate.getTime();
    return diff > 0 && diff < ONE_WEEK_MS;
  }
_join(company, country) {
return [company, country].filter(Boolean).join(' / ');
}
  // Template-callable wrapper around the shared toggleQueryParam helper.
  toggleQueryParam(currentQueryParams, key, value) {
    return toggleQueryParam(currentQueryParams, key, value);
  }
  // Resolve a CSS custom-property color for a value, scoped to this element.
  getVariableColor(value) {
    return getVariableColor(this, value);
  }
slice(text, number) {
return text && text.slice(0, number);
}
}
// Register <session-element> with the custom-elements registry.
window.customElements.define(SessionElement.is, SessionElement);
//# sourceMappingURL=session-element.js.map |
<reponame>Axam/nsx-web
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import sys
import tempfile
from fabric import api as fab
LOG = logging.getLogger(__name__)
def ssh_status(ssh):
    """Return True when an SSH connection to ``ssh.host`` can be opened.

    Runs a no-op ``echo`` through fabric using the key stored under the
    environment directory.  Any failure (timeout, auth, network) is logged
    at debug level and reported as False.
    """
    LOG.debug('Trying to get ssh status')
    with fab.settings(
            host_string=ssh.host,
            user=ssh.user,
            key_filename=os.path.join(ssh.env.envdir, ssh.key_filename),
            timeout=ssh.timeout):
        try:
            with fab.hide('running', 'stdout', 'stderr'):
                fab.run('echo')
            LOG.debug('Ssh connection is available')
            return True
        except SystemExit:
            # fabric's abort() raises SystemExit; propagate it.
            sys.exit()
        except Exception:
            LOG.debug('Ssh connection is not available')
            return False
def ssh_put_content(ssh, file_content, remote_filename):
    """Write ``file_content`` to ``remote_filename`` on the remote host.

    The content is staged in a local NamedTemporaryFile and uploaded with
    fabric's ``put``.  Raises on upload failure after logging the error.
    """
    LOG.debug('Trying to put content into remote file: %s' % remote_filename)
    with fab.settings(
            host_string=ssh.host,
            user=ssh.user,
            key_filename=os.path.join(ssh.env.envdir, ssh.key_filename),
            timeout=ssh.timeout):
        with tempfile.NamedTemporaryFile() as f:
            f.write(file_content)
            # Flush Python's buffers before fabric reads the file from disk;
            # without this a short or empty file could be uploaded.
            f.flush()
            try:
                fab.put(f.file, remote_filename)
            except SystemExit:
                # fabric's abort() raises SystemExit; propagate it.
                sys.exit()
            except Exception:
                LOG.error('Error while putting content into '
                          'remote file: %s' % remote_filename)
                raise
def ssh_put_file(ssh, filename, remote_filename):
    """Upload local ``filename`` to ``remote_filename`` on the remote host.

    Raises on failure after logging the error.
    """
    LOG.debug('Trying to put file on remote host: '
              'local=%s remote=%s' % (filename, remote_filename))
    with fab.settings(
            host_string=ssh.host,
            user=ssh.user,
            key_filename=os.path.join(ssh.env.envdir, ssh.key_filename),
            timeout=ssh.timeout):
        try:
            fab.put(filename, remote_filename)
        except SystemExit:
            # fabric's abort() raises SystemExit; propagate it.
            sys.exit()
        except Exception:
            LOG.error('Error while putting file on remote host: '
                      'local=%s remote=%s' % (filename, remote_filename))
            raise
def ssh_run(ssh, command, command_timeout=10):
    """Run ``command`` on the remote host and return fabric's result.

    ``warn_only`` keeps non-zero exit codes from aborting; ``pty=True``
    allocates a pseudo-terminal.  Raises on transport-level failure.
    """
    LOG.debug('Trying to run command on remote host: %s' % command)
    with fab.settings(
            host_string=ssh.host,
            user=ssh.user,
            key_filename=os.path.join(ssh.env.envdir, ssh.key_filename),
            timeout=ssh.timeout,
            command_timeout=command_timeout,
            warn_only=True):
        try:
            with fab.hide('running', 'stdout', 'stderr'):
                return fab.run(command, pty=True)
        except SystemExit:
            # fabric's abort() raises SystemExit; propagate it.
            sys.exit()
        except Exception:
            # Fixed copy-pasted message (previously said "putting file").
            LOG.error('Error while running command on remote host: '
                      '%s' % command)
            raise
|
#!/bin/bash
#
# Create a tar file containing wiki files
# from the mediawiki docker container.
set -eux

CONTAINER_NAME="stormy_mw"
STAMP="$(date +"%Y%m%d")"   # e.g. 20200101

# Print usage text and exit non-zero.
function usage {
    set +x
    echo ""
    echo "wikifiles_dump.sh script:"
    echo ""
    echo "Create a tar file containing wiki files"
    echo "from the mediawiki docker container."
    echo ""
    echo " ./wikifiles_dump.sh"
    echo ""
    echo "Example:"
    echo ""
    echo " ./wikifiles_dump.sh"
    echo " (creates ${POD_CHARLESREID1_BACKUP_DIR}/20200101/wikifiles_20200101.tar.gz)"
    echo ""
    exit 1;
}

# Backup files must be owned by the invoking user, not root.
if [ "$(id -u)" == "0" ]; then
    echo ""
    echo ""
    echo "This script should NOT be run as root!"
    echo ""
    echo ""
    exit 1;
fi

if [ "$#" == "0" ]; then
    # Fail early with a clear message when the destination is not configured;
    # otherwise the backup would silently land under "/<date>/".
    if [ -z "${POD_CHARLESREID1_BACKUP_DIR:-}" ]; then
        echo "Error: POD_CHARLESREID1_BACKUP_DIR is not set" >&2
        exit 1
    fi

    TARGET="wikifiles_${STAMP}.tar.gz"
    BACKUP_TARGET="${POD_CHARLESREID1_BACKUP_DIR}/${STAMP}/${TARGET}"

    echo ""
    echo "pod-charlesreid1: wikifiles_dump.sh"
    echo "-----------------------------------"
    echo ""
    echo "Backup directory: ${POD_CHARLESREID1_BACKUP_DIR}"
    echo "Backup target: ${BACKUP_TARGET}"
    echo ""

    # All expansions are quoted so paths containing spaces cannot word-split.
    mkdir -p "${POD_CHARLESREID1_BACKUP_DIR}/${STAMP}"
    DOCKER="$(which docker)"
    DOCKERX="${DOCKER} exec -t"

    echo "Step 1: Compress wiki files inside container"
    ${DOCKERX} "${CONTAINER_NAME}" /bin/tar czf "/tmp/${TARGET}" /var/www/html/images

    echo "Step 2: Copy tar.gz file out of container"
    mkdir -p "$(dirname "${BACKUP_TARGET}")"
    ${DOCKER} cp "${CONTAINER_NAME}:/tmp/${TARGET}" "${BACKUP_TARGET}"

    echo "Step 3: Clean up tar.gz file"
    ${DOCKERX} "${CONTAINER_NAME}" /bin/rm -f "/tmp/${TARGET}"

    echo "Done."
else
    usage
fi
|
package bd.edu.daffodilvarsity.classmanager.adapters.recyclerViewAdapters;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import java.util.ArrayList;
import bd.edu.daffodilvarsity.classmanager.R;
/**
 * RecyclerView adapter that renders a flat list of empty room numbers,
 * one TextView per row (layout: list_item_empty_room).
 */
public class EmptyRoomsRecyclerViewAdapter extends RecyclerView.Adapter<EmptyRoomsRecyclerViewAdapter.ViewHolder> {
    // Backing data: the room numbers to display (held by reference, not copied).
    private ArrayList<String> emptyRooms;
    public EmptyRoomsRecyclerViewAdapter(ArrayList<String> emptyRooms) {
        this.emptyRooms = emptyRooms;
    }
    @NonNull
    @Override
    public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.list_item_empty_room,parent,false);
        return new ViewHolder(view);
    }
    @Override
    public void onBindViewHolder(@NonNull ViewHolder holder, int position) {
        holder.roomNo.setText(emptyRooms.get(position));
    }
    @Override
    public int getItemCount() {
        return emptyRooms.size();
    }
    // Caches the single room-number TextView for one row.
    class ViewHolder extends RecyclerView.ViewHolder {
        TextView roomNo;
        ViewHolder(@NonNull View itemView) {
            super(itemView);
            roomNo = itemView.findViewById(R.id.room_no);
        }
    }
}
|
let canvas, ctx;
// Live streak particles and the cap on how many exist at once.
const particles = [];
const maxParticles = 1000;
// Random integer in [a, b], inclusive at both ends.
const getRandomInt = (a, b) => a + Math.floor(Math.random() * (b - a + 1));
// Random value between a and b, returned as a STRING with two decimals
// (toFixed); callers only interpolate it into an rgba() string.
const getRandomFloat = (a, b) => (b + Math.random() * (a - b)).toFixed(2);
// Build one falling streak.  With `init` truthy the particle starts at a
// random vertical position (initial screen fill); otherwise it starts just
// above the top edge (y = -height).
const generateNewParticle = (init) => {
    const height = getRandomInt(10, 15);
    const y = (init ? getRandomInt(0, canvas.height) : 0) - height;
    return {
        speed: getRandomInt(15, 25),
        opacity: getRandomFloat(0.1, 0.9),
        x: getRandomInt(0, window.innerWidth),
        y, height
    };
};
// Keep the canvas backing store in sync with the window size (assigning
// width/height also clears the canvas, so only assign when changed).
const resizeCanvas = () => {
    if (canvas.width !== window.innerWidth) canvas.width = window.innerWidth;
    if (canvas.height !== window.innerHeight) canvas.height = window.innerHeight;
};
// Animation frame: clear, draw every particle as a vertical line, advance
// each by its speed, and recycle particles that fall off the bottom.
const draw = () => {
    resizeCanvas();
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.lineWidth = 1;
    ctx.lineCap = "round";
    for (let i = 0; i < particles.length; i++) {
        const p = particles[i];
        ctx.strokeStyle = `rgba(183, 217, 247, ${ p.opacity })`;
        ctx.beginPath();
        ctx.moveTo(p.x, p.y);
        ctx.lineTo(p.x, p.y + p.height);
        ctx.stroke();
        particles[i].y += p.speed;
        // Off-screen: replace with a fresh particle entering from the top.
        if (particles[i].y >= canvas.height) particles[i] = generateNewParticle();
    };
    window.requestAnimationFrame(draw);
};
// Bootstrap: grab the canvas, pre-fill the particle pool at random heights,
// then start the animation loop.
document.addEventListener("DOMContentLoaded", () => {
    canvas = document.querySelector("canvas");
    ctx = canvas.getContext("2d");
    resizeCanvas();
    for (let i = 0; i < maxParticles; i++) {
        particles.push(generateNewParticle(true));
    };
    draw();
});
<gh_stars>0
import { TelegramAccountEntity } from '../../entities/telegram-account.entity';
// DI token used to bind/resolve an IAddTelegramAccountToDatabasePort implementation.
export const AddTelegramAccountToDatabasePortSymbol = Symbol('AddTelegramAccountToDatabasePort');
// Outbound (driven) port: persist a Telegram account entity.
export interface IAddTelegramAccountToDatabasePort {
  addTelegramAccountToDatabase(account: TelegramAccountEntity): Promise<void>;
}
|
#!/bin/bash
# Audit /etc/passwd: report every primary group ID (passwd field 4) that has
# no matching GID entry (group field 3) in /etc/group.
awk -F: '{print $4}' /etc/passwd | while read -r gid; do
    # Anchor the GID to the third colon-separated field.  The original
    # pattern "^.*?:[^:]*:$gid:" relied on a lazy "*?", which POSIX ERE does
    # not support — the greedy ".*" could span several fields and match a
    # GID in the wrong column.
    if ! grep -E -q "^[^:]*:[^:]*:$gid:" /etc/group; then
        echo "The group ID \"$gid\" does not exist in /etc/group"
    fi
done
|
# Bootstrap the Parse platform on a Docker Swarm cluster: shared overlay
# network and persistent Mongo volume first, then the four stacks
# (MongoDB, Parse Server, Parse Dashboard, ingress).
docker network create -d overlay parse_net
docker volume create mongo_data
docker stack deploy -c mongodb.yml parse_01
docker stack deploy -c parse.yml parse_02
docker stack deploy -c parse_dashboard.yml parse_03
docker stack deploy -c ingress.yml parse_04
|
#!/bin/sh
# CocoaPods "Copy Pods Resources" build-phase script (auto-generated).
set -e
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain resources to rsync at the end of the run.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
XCASSET_FILES=()
# Map Xcode's TARGETED_DEVICE_FAMILY onto ibtool/actool --target-device args.
case "${TARGETED_DEVICE_FAMILY}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Install one pod resource, dispatching on its extension: storyboards/xibs
# are compiled with ibtool, Core Data models with momc/mapc, frameworks are
# rsynced, xcassets are queued for a single actool run, and everything else
# is appended to the plain-copy manifest.
install_resource()
{
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync -av $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      rsync -av "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\""
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\""
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\""
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH"
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Install the pod resources for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "BaiduMapKit/BaiduMapKit/BaiduMapAPI_Map.framework/Resources/mapapi.bundle"
  install_resource "MJRefresh/MJRefresh/MJRefresh.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "BaiduMapKit/BaiduMapKit/BaiduMapAPI_Map.framework/Resources/mapapi.bundle"
  install_resource "MJRefresh/MJRefresh/MJRefresh.bundle"
fi
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # BUGFIX: the glob "*" must sit OUTSIDE the quotes.  Quoted as
    # "${PODS_ROOT}*" it was a literal string comparison, so pod-owned
    # xcassets were wrongly re-added to the actool input list.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"
  printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
<reponame>lananh265/social-network
"use strict";
// Auto-generated icon module: Material "border_inner" (two-tone variant).
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.ic_border_inner_twotone = void 0;
// SVG description (viewBox + path children) consumed by an icon renderer.
var ic_border_inner_twotone = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M0 0h24v24H0V0z",
      "fill": "none"
    },
    "children": []
  }, {
    "name": "path",
    "attribs": {
      "d": "M3 15h2v2H3zM3 3h2v2H3zm0 16h2v2H3zm8 2h2v-8h8v-2h-8V3h-2v8H3v2h8zm-4-2h2v2H7zm12-4h2v2h-2zm-4 4h2v2h-2zm4 0h2v2h-2zM3 7h2v2H3zm16 0h2v2h-2zM7 3h2v2H7zm8 0h2v2h-2zm4 0h2v2h-2z"
    },
    "children": []
  }]
};
exports.ic_border_inner_twotone = ic_border_inner_twotone;
import AutoPosterSchema from './AutoPosterSchema.js';
export {
AutoPosterSchema,
};
|
<gh_stars>1-10
package br.com.jmsstudio.jumper.elements;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import br.com.jmsstudio.jumper.R;
import br.com.jmsstudio.jumper.graphics.Screen;
/**
* Created by jms on 12/01/17.
*/
public class Pipe {
    // Gap between the two pipe segments and their fixed on-screen width.
    private static final int PIPE_HEIGHT = 250;
    private static final int PIPE_WIDTH = 100;
    private Screen screen;
    // Left edge of the pipe pair; decreases as the pipe scrolls left.
    private int position;
    private int bottomPipeHeight;
    private int upperPipeHeight;
    private Bitmap bottomPipe;
    private Bitmap upperPipe;
    public Pipe(Screen screen, int position, Context context) {
        this.screen = screen;
        this.position = position;
        // NOTE(review): the two getRandomValue() calls are independent, so
        // upper and lower heights are not derived from one shared offset —
        // confirm the resulting gap size is intentional.
        this.bottomPipeHeight = screen.getHeight() - PIPE_HEIGHT - getRandomValue();
        this.upperPipeHeight = PIPE_HEIGHT + getRandomValue();
        Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), R.drawable.cano);
        this.bottomPipe = Bitmap.createScaledBitmap(bitmap, PIPE_WIDTH, this.bottomPipeHeight, false);
        this.upperPipe = Bitmap.createScaledBitmap(bitmap, PIPE_WIDTH, this.upperPipeHeight, false);
    }
    public void draw(Canvas canvas) {
        drawUpperPipe(canvas);
        drawBottomPipe(canvas);
    }
    public void drawBottomPipe(Canvas canvas) {
        // canvas.drawRect(this.position, bottomPipeHeight, this.position + PIPE_WIDTH, screen.getHeight(), ColorHelper.getPipeColor());
        canvas.drawBitmap(this.bottomPipe, this.position, this.bottomPipeHeight, null);
    }
    public void drawUpperPipe(Canvas canvas) {
        // canvas.drawRect(this.position, 0, this.position + PIPE_WIDTH, upperPipeHeight, ColorHelper.getPipeColor());
        canvas.drawBitmap(this.upperPipe, this.position, 0, null);
    }
    // Scroll 5px left per tick.
    public void move() {
        this.position -= 5;
    }
    // Random vertical jitter in [0, 200).
    public int getRandomValue() {
        return (int) (Math.random() * 200);
    }
    public boolean isOutOfTheScreen() {
        return position + PIPE_WIDTH < 0;
    }
    public int getPosition() {
        return position;
    }
    // Axis-aligned overlap test between the bird and either pipe segment.
    public boolean collidedWithBird(Bird bird) {
        boolean yAxisCollided = bird.getVerticalTopPosition() <= this.upperPipeHeight || bird.getVerticalBottomPosition() >= this.bottomPipeHeight;
        boolean xAxisCollided = bird.getRightPosition() > this.position && this.position > 0;
        return xAxisCollided && yAxisCollided;
    }
}
|
// Express demo app: handlebars views, env-dependent logging, sessions,
// flash messages, and newsletter/vacation-photo form handling.
var express = require('express');
var fortune = require('./lib/fortune.js');
var formidable = require('formidable');
var credentials = require('./credentials.js');
var app = express();
app.set('port', process.env.PORT || 3000);
// Handlebars engine with a {{#section}} helper for per-page head/script blocks.
var handlebars = require('express-handlebars').create({
    defaultLayout:'main',
    helpers: {
        section: function(name, options) {
            if (!this._sections) this._sections = {};
            this._sections[name] = options.fn(this);
            return null;
        }
    }
});
app.engine('handlebars', handlebars.engine);
app.set('view engine', 'handlebars');
// Console logging in development, file logging in production.
switch(app.get('env')) {
    case 'development':
        app.use(require('morgan')('dev'));
        break;
    case 'production':
        app.use(require('express-logger')({
            path: __dirname + '/log/requests.log'
        }));
        break;
}
app.use(express.static(__dirname + '/public'));
// Expose ?test=1 page-test flag to views (never in production).
app.use(function(req, res, next) {
    res.locals.showTests = app.get('env') !== 'production' &&
        req.query.test === '1';
    next();
});
app.use(require('body-parser').urlencoded({ extended: true }));
app.use(require('cookie-parser')(credentials.cookieSecret));
app.use(require('express-session')({
    resave: false,
    saveUninitialized: false,
    secret: credentials.cookieSecret,
}));
// flash message middleware
app.use(function(req, res, next){
    // if there's a flash message, transfer
    // it to the context, then clear it
    res.locals.flash = req.session.flash;
    delete req.session.flash;
    next();
});
app.get('/thank-you', function(req, res){
    res.render('thank-you');
});
app.get('/newsletter', function(req, res) {
    // res.render('newsletter', { csrf: 'CSRF token goes here' });
    res.render('newsletter');
});
// Pragmatic email pattern (intentionally not full RFC 5322).
var VALID_EMAIL_REGEX = /^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+$/;
// Newsletter signup: JSON response for XHR, flash + redirect for browsers.
app.post('/newsletter', function(req, res){
    var name = req.body.name || '', email = req.body.email || '';
    // input validation
    if(!email.match(VALID_EMAIL_REGEX)) {
        // if (1) {
        if(req.xhr) return res.json({ error: 'Invalid name email address.' });
        req.session.flash = {
            type: 'danger',
            intro: 'Validation error!',
            message: 'The email address you entered was not valid.',
        };
        return res.redirect(303, '/newsletter/archive');
    }
    // NOTE(review): NewsletterSignup is never defined or required anywhere
    // in this file — reaching this line throws a ReferenceError.
    new NewsletterSignup({ name: name, email: email }).save(function(err){
        if(err) {
            if(req.xhr) return res.json({ error: 'Database error.' });
            req.session.flash = {
                type: 'danger',
                intro: 'Database error!',
                message: 'There was a database error; please try again later.',
            };
            return res.redirect(303, '/newsletter/archive');
        }
        if(req.xhr) return res.json({ success: true });
        req.session.flash = {
            type: 'success',
            intro: 'Thank you!',
            message: 'You have now been signed up for the newsletter.',
        };
        return res.redirect(303, '/newsletter/archive');
    });
});
// Generic form target: JSON for XHR/JSON clients, redirect otherwise.
app.post('/process', function(req, res) {
    // console.log('Form (from querystring): ' + req.query.form);
    // console.log('CSRF token (from hidden form field): ' + req.body._csrf);
    // console.log('Name (from visible form field): ' + req.body.name);
    // console.log('Email (from visible form field): ' + req.body.email);
    // res.redirect(303, '/thank-you');
    if (req.xhr || req.accepts('json,html') === 'json') {
        res.send({success: true});
    } else {
        res.redirect(303, '/thank-you');
    }
});
// Dummy weather data for the partials demo.
// Shape: { locations: [{ name, forecastUrl, iconUrl, weather, temp }, ...] }
function getWeatherData() {
    var makeLocation = function(name, forecastUrl, iconUrl, weather, temp) {
        return { name: name, forecastUrl: forecastUrl, iconUrl: iconUrl, weather: weather, temp: temp };
    };
    return {
        locations: [
            makeLocation('Portland',
                'http://www.wunderground.com/US/OR/Portland.html',
                'http://icons-ak.wxug.com/i/c/k/cloudy.gif',
                'Overcast', '54.1 F (12.3 C)'),
            makeLocation('Bend',
                'http://www.wunderground.com/US/OR/Bend.html',
                'http://icons-ak.wxug.com/i/c/k/partlycloudy.gif',
                'Partly Cloudy', '55.0 F (12.8 C)'),
            makeLocation('Manzanita',
                'http://www.wunderground.com/US/OR/Manzanita.html',
                'http://icons-ak.wxug.com/i/c/k/rain.gif',
                'Light Rain', '55.0 F (12.8 C)'),
        ],
    };
}
// Inject weather data into every view via the weatherContext partial.
app.use(function(req, res, next) {
    if(!res.locals.partials) res.locals.partials = {};
    res.locals.partials.weatherContext = getWeatherData();
    next();
});
app.get('/', function(req, res) {
    res.render('home');
});
app.get('/about', function(req, res) {
    res.render('about', {
        fortune: fortune.getFortune(),
        pageTestScript: '/qa/tests-about.js'
    });
});
app.get('/tours/hood-river', function(req, res) {
    res.render('tours/hood-river');
});
app.get('/tours/request-group-rate', function(req, res) {
    res.render('tours/request-group-rate');
});
app.get('/tours/oregon-coast', function(req, res) {
    res.render('tours/oregon-coast');
});
app.get('/jquery-test', function(req, res){
    res.render('jquery-test');
});
app.get('/nursery-rhyme', function(req, res){
    res.render('nursery-rhyme');
});
app.get('/data/nursery-rhyme', function(req, res){
    res.json({
        animal: 'squirrel',
        bodyPart: 'tail',
        adjective: 'bushy',
        noun: 'heck',
    });
});
// Render the upload form; year/month feed the POST URL below.
app.get('/contest/vacation-photo', function(req, res){
    var now = new Date();
    res.render('contest/vacation-photo', { year: now.getFullYear(), month: now.getMonth() });
});
// Multipart upload handled by formidable; fields/files are only logged.
app.post('/contest/vacation-photo/:year/:month', function(req, res){
    var form = new formidable.IncomingForm();
    form.parse(req, function(err, fields, files){
        if(err) return res.redirect(303, '/error');
        console.log('received fields:');
        console.log(fields);
        console.log('received files:');
        console.log(files);
        res.redirect(303, '/thank-you');
    });
});
// Custom 404 page: this catch-all runs only when no earlier route matched.
app.use(function(req, res, next) {
    res.status(404); // was 400 — a missing page is 404 Not Found, not Bad Request
    res.render('404');
});
// Error handler (4-arg signature marks it as such for Express): log the
// stack and render the 500 page.
app.use(function(err, req, res, next) {
    console.error(err.stack);
    res.status(500);
    res.render('500');
});
app.listen(app.get('port'), function() {
    console.log('Express started in ' + app.get('env') +
        ' mode on http://localhost:' + app.get('port') +
        '; press Ctrl-C to terminate.');
});
|
let str = "Hello World!"

// Tally how many times each character appears in `str`.
var frequencyMap = [Character: Int]()
for char in str {
    frequencyMap[char, default: 0] += 1
}

// Print each character with its count (dictionary order is unspecified).
for (char, count) in frequencyMap {
    print("\(char): \(count)")
}
// Output:
// H: 1
// e: 1
// l: 3
// o: 2
// : 1
// W: 1
// r: 1
// d: 1
// !: 1 |
<gh_stars>10-100
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2018.03.30 at 02:21:16 PM MDT
//
package net.opengis.kml._220;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for unitsEnumType.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="unitsEnumType">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="fraction"/>
* <enumeration value="pixels"/>
* <enumeration value="insetPixels"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "unitsEnumType")
@XmlEnum
public enum UnitsEnumType {
    @XmlEnumValue("fraction")
    FRACTION("fraction"),
    @XmlEnumValue("pixels")
    PIXELS("pixels"),
    @XmlEnumValue("insetPixels")
    INSET_PIXELS("insetPixels");
    // The XML lexical value bound to this constant.
    private final String value;
    UnitsEnumType(String v) {
        value = v;
    }
    /** Returns the XML lexical value of this constant. */
    public String value() {
        return value;
    }
    /**
     * Returns the constant whose XML value equals {@code v}.
     * @throws IllegalArgumentException if no constant matches
     */
    public static UnitsEnumType fromValue(String v) {
        for (UnitsEnumType c: UnitsEnumType.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
|
// Unit tests for BankAccount (Mocha + Chai `expect`); "<NAME>" is a
// redacted fixture placeholder.
describe("BankAccount", function() {
  it("is initialized with different properties", function() {
    var newAccount = new BankAccount("<NAME>", 350);
    expect(newAccount.name).to.equal("<NAME>");
    expect(newAccount.balance).to.equal(350);
  });
  it("deposits money into the account", function() {
    var newAccount = new BankAccount("<NAME>", 350);
    newAccount.deposit(100);
    expect(newAccount.balance).to.equal(450);
  });
  it("withdraws money out of bank account", function() {
    var newAccount = new BankAccount("<NAME>", 350);
    newAccount.withdraw(100);
    expect(newAccount.balance).to.equal(250);
  });
  // Overdraft attempts leave the balance alone and return an error string.
  it("returns error if not enough funds available", function() {
    var newAccount = new BankAccount("<NAME>", 350);
    expect(newAccount.withdraw(400)).to.equal("yo broke")
  });
});
|
#!/bin/bash
set -euo pipefail
# Build a Linux amd64 static binary, then bake it into the Docker image.
# BUGFIX: the variable is GOARCH, not "GARCH" — the typo was silently
# ignored by the Go toolchain, so the host architecture was built instead.
env GOOS=linux GOARCH=amd64 CGO_ENABLED=0 go build
docker build -t armakuni/circleci-workflow-dashboard .
|
# types/common.sh
# Run mypy, preferring a user-level pip install, then a global pip install,
# then whatever "mypy" resolves to on PATH.
mypy_() {
  local on_path=mypy
  local global_install=/usr/local/bin/mypy
  local user_install=~/.local/bin/mypy

  if test -x $user_install; then
    $user_install "$@"
  elif test -x $global_install; then
    $global_install "$@"
  else
    $on_path "$@"
  fi
}
# Type-check files in Python-2 mode; MYPYPATH lets 'import libc' resolve
# against the stub native/libc.pyi.
typecheck() {
  # we 'import libc' but the source is native/libc.{c,pyi}
  MYPYPATH=.:native PYTHONPATH=. mypy_ --py2 "$@"
}
readonly MYPY_FLAGS='--strict --no-implicit-optional --no-strict-optional'
readonly OSH_EVAL_MANIFEST='types/osh-eval-manifest.txt'
readonly COMMENT_RE='^[ ]*#'
# Emit the osh-eval manifest with comment lines stripped.
osh-eval-manifest() {
  egrep -v "$COMMENT_RE" $OSH_EVAL_MANIFEST # allow comments
}
|
package auth_test
import (
"errors"
"testing"
"github.com/revel/modules/auth/basic"
"github.com/revel/modules/auth/basic/driver/secret"
)
// User is a test fixture satisfying the auth user contract; it embeds
// secret.BcryptAuth as its SecurityDriver.  "<PASSWORD>"/"<EMAIL>" are
// redacted fixture placeholders.
type User struct {
	email    string
	password string
	hashpass string
	secret.BcryptAuth // SecurityDriver for testing
}

// NewUser builds a User with the given credentials (hash not yet computed)
// and wires the embedded driver back to the user via UserContext.
func NewUser(email, pass string) *User {
	u := &User{
		email:    email,
		password: <PASSWORD>,
	}
	u.UserContext = u
	return u
}

// UserId returns the lookup key used by the store (the email).
func (self *User) UserId() string {
	return self.email
}

// Secret returns the plaintext password.
func (self *User) Secret() string {
	return self.password
}

// HashedSecret returns the stored bcrypt hash.
func (self *User) HashedSecret() string {
	return self.hashpass
}

// SetHashedSecret records the bcrypt hash loaded from the store.
func (self *User) SetHashedSecret(hpass string) {
	self.hashpass = hpass
}
// func (self *User) Load() string
// TestStore is an in-memory user store mapping user id -> hashed password.
type TestStore struct {
	data map[string]string
}

// Save hashes the user's plaintext secret and records it under the user id.
func (self *TestStore) Save(user interface{}) error {
	u, ok := user.(*User)
	if !ok {
		return errors.New("TestStore.Save() expected arg of type User")
	}
	hPass, err := u.HashSecret(u.Secret())
	if err != nil {
		return err
	}
	self.data[u.UserId()] = hPass
	return nil
}

// Load copies the stored hash into the user; errors when the id is unknown.
func (self *TestStore) Load(user interface{}) error {
	u, ok := user.(*User)
	if !ok {
		return errors.New("TestStore.Load() expected arg of type User")
	}
	hpass, ok := self.data[u.UserId()]
	if !ok {
		return errors.New("Record Not Found")
	}
	u.SetHashedSecret(hpass)
	return nil
}
// TestPasswordHash verifies that a non-empty secret hashes successfully and
// that an empty secret is rejected by the driver.
func TestPasswordHash(t *testing.T) {
	auth.Store = &TestStore{
		data: make(map[string]string),
	}
	u := NewUser("<EMAIL>", "<PASSWORD>")
	fail := NewUser("<EMAIL>", "")
	var err error
	u.hashpass, err = u.HashSecret(u.password)
	if err != nil {
		t.Errorf("Should have hashed password, get error: %v\n", err)
	}
	fail.hashpass, err = fail.HashSecret(fail.password)
	if err == nil {
		t.Errorf("Should have failed hashing\n")
	}
}
// TestAuthenticate walks the full flow: register a user, then authenticate
// once with the right password and once with a wrong one.
func TestAuthenticate(t *testing.T) {
	auth.Store = &TestStore{
		data: make(map[string]string),
	}
	// user registered a long time ago
	u := NewUser("<EMAIL>", "<PASSWORD>")
	err := auth.Store.Save(u)
	if err != nil {
		t.Errorf("Should have saved user: %v", err)
	}
	// users now logging in
	pass := NewUser("<EMAIL>", "<PASSWORD>")
	fail := NewUser("<EMAIL>", "invalid")
	// valid user is now trying to login
	// check user in DB
	err = auth.Store.Load(pass)
	if err != nil {
		t.Errorf("Should have loaded pass user: %v\n", err)
	}
	// check credentials
	ok, err := pass.Authenticate()
	if !ok || err != nil {
		t.Errorf("Should have authenticated user")
	}
	// invalid user is now trying to login
	err = auth.Store.Load(fail)
	if err != nil {
		t.Errorf("Should have loaded fail user")
	}
	// this should fail
	ok, err = fail.Authenticate()
	if ok || err != nil {
		t.Errorf("Should have failed to authenticate user: %v\n", err)
	}
}
|
<gh_stars>1000+
/*
* Copyright (C) 2015 The Dagger Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dagger.producers;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import com.google.common.testing.EqualsTester;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests {@link Produced}.
*/
@RunWith(JUnit4.class)
public class ProducedTest {
  // Produced.successful wraps a value; get() returns it unchanged.
  @Test public void successfulProduced() throws ExecutionException {
    Object o = new Object();
    assertThat(Produced.successful(5).get()).isEqualTo(5);
    assertThat(Produced.successful("monkey").get()).isEqualTo("monkey");
    assertThat(Produced.successful(o).get()).isSameInstanceAs(o);
  }
  // Produced.failed wraps a throwable; get() rethrows it inside an
  // ExecutionException whose cause is the original instance.
  @Test public void failedProduced() {
    RuntimeException cause = new RuntimeException("monkey");
    try {
      Produced.failed(cause).get();
      fail();
    } catch (ExecutionException e) {
      assertThat(e).hasCauseThat().isSameInstanceAs(cause);
    }
  }
  // equals/hashCode: equal for same wrapped value or same failure instance,
  // distinct across groups.
  @Test public void producedEquivalence() {
    RuntimeException e1 = new RuntimeException("monkey");
    RuntimeException e2 = new CancellationException();
    new EqualsTester()
        .addEqualityGroup(Produced.successful(132435), Produced.successful(132435))
        .addEqualityGroup(Produced.successful("hi"), Produced.successful("hi"))
        .addEqualityGroup(Produced.failed(e1), Produced.failed(e1))
        .addEqualityGroup(Produced.failed(e2), Produced.failed(e2))
        .testEquals();
  }
}
|
import team
import consts
from nba_py import constants
from numpy import mean
# Column offsets into a raw season-stats row (as returned via nba_py), keyed
# by stat-category name; 'year' is the season-label column (e.g. '2017-18').
name_to_index = {consts.points: 32, consts.assists: 27, consts.rebounds: 26, consts.steals: 29, consts.blocks: 31,
                 'year': 3}
class League:
    """League-wide view over every NBA team's historical statistics."""

    def __init__(self):
        # One Team object per franchise known to nba_py.
        self.teams = get_teams_info()

    def calculate_league_average(self, from_year, to_year):
        """Return {category: league mean} for seasons in [from_year, to_year].

        A team contributes its own per-category average only when that
        average is positive (teams without data in range are skipped).
        Returns None when no team has points data in the range, matching
        the original behaviour.
        """
        # Deduplicated: the five identical per-category stanzas collapsed
        # into one loop over the category names.
        categories = (consts.points, consts.rebounds, consts.steals,
                      consts.blocks, consts.assists)
        samples = {category: [] for category in categories}
        for team_object in self.teams:
            for category in categories:
                team_average = get_average(
                    category, team_object.team_stats, from_year, to_year)
                if team_average > 0:
                    samples[category].append(team_average)
        if not samples[consts.points]:
            return None
        return {category: mean(values)
                for category, values in samples.items()}
def get_teams_info():
    """Build a team.Team object for every team in nba_py's TEAMS table."""
    teams = []
    # .items() works on both Python 2 and 3; the original .iteritems() is
    # Python-2-only and raises AttributeError under Python 3.
    for team_name, team_info in constants.TEAMS.items():
        team_object = team.Team(team_info['id'])
        teams.append(team_object)
    return teams
def get_average(category, stats, from_year, to_year):
    """Average one stat category over the seasons in [from_year, to_year].

    Season strings look like '1999-00'; the leading 4-digit year is obtained
    by stripping the last three characters.  Returns 0 when no positive
    values fall inside the (clamped) range.
    """
    # Clamp both endpoints to the supported 1960-61 .. 2017-18 range.
    if int(from_year[:-3]) < 1960:
        from_year = '1960-61'
    if int(from_year[:-3]) > 2017:
        from_year = '2017-18'
    if int(to_year[:-3]) > 2017:
        to_year = '2017-18'
    if int(to_year[:-3]) < 1960:
        to_year = '1960-61'
    # Hoisted out of the loop: both bounds are loop-invariant.
    start = int(from_year[:-3])
    end = int(to_year[:-3])
    averages = []
    for year in stats:
        season = int(year[name_to_index['year']][:-3])
        if start <= season <= end:
            value = year[name_to_index[category]]
            # Non-positive entries mean "no data"; skip them.
            if value > 0:
                averages.append(value)
        # Early exit past the range; assumes stats are sorted ascending by
        # season — TODO confirm against the data source.
        if season > end:
            break
    if not averages:
        return 0
    return mean(averages)
# Smoke-test entry point: constructing the league fetches all team info.
if __name__ == '__main__':
    league = League()
|
#!/bin/bash
# Maintenance script (cron-style): renew certificates via the certbot
# service, signal nginx to reload on success, then prune unused Docker data.
COMPOSE="/usr/local/bin/docker-compose --no-ansi"
DOCKER="/usr/bin/docker"
# shellcheck disable=SC2164
cd /home/masha/almond-be/
# NOTE(review): --dry-run performs a *test* renewal only — confirm this is
# intentional; otherwise certificates are never actually renewed by this job.
$COMPOSE run certbot renew --dry-run && $COMPOSE kill -s SIGHUP webserver
$DOCKER system prune -af
|
#include "../../../include/odfaeg/Graphics/viewportMatrix.h"
namespace odfaeg {
namespace graphic {
ViewportMatrix::ViewportMatrix() {
viewportUpdated = true;
scale = math::Vec3f(1.f, 1.f, 1.f);
position = math::Vec3f(-1.f, -1.f, -1.f);
size = math::Vec3f(2.f, 2.f, 2.f);
}
void ViewportMatrix::setViewport(math::Vec3f position, math::Vec3f size) {
this->position = position;
this->size = size;
viewportUpdated = true;
}
void ViewportMatrix::setScale(math::Vec3f scale) {
this->scale = scale;
viewportUpdated = true;
}
math::Vec3f ViewportMatrix::toViewportCoordinates(math::Vec3f vec) {
if (viewportUpdated) {
viewport.m11 = std::abs(size.x) * 0.5f;
viewport.m12 = 0.f;
viewport.m13 = 0.f;
viewport.m14 = std::abs(size.x) * 0.5f + position.x;
viewport.m21 = 0.f;
viewport.m22 = std::abs(size.y) * 0.5f;
viewport.m23 = 0.f;
viewport.m24 = std::abs(size.y) * 0.5f + position.y;
viewport.m31 = 0.f;
viewport.m32 = 0.f;
viewport.m33 = std::abs(size.z) * 0.5f;
viewport.m34 = std::abs(size.z) * 0.5f + position.z;
viewport.m41 = std::min(position.x, size.x);
viewport.m42 = std::min(position.y, size.y);
viewport.m43 = std::min(position.y, size.z);
viewport.m44 = 1.f;
viewportUpdated = false;
}
return viewport * vec;
}
math::Vec3f ViewportMatrix::toNormalizedCoordinates(math::Vec3f vec) {
if (viewportUpdated) {
viewport.m11 = std::abs(size.x) * 0.5f;
viewport.m12 = 0.f;
viewport.m13 = 0.f;
viewport.m14 = std::abs(size.x) * 0.5f + position.x;
viewport.m21 = 0.f;
viewport.m22 = std::abs(size.y) * 0.5f;
viewport.m23 = 0.f;
viewport.m24 = std::abs(size.y) * 0.5f + position.y;
viewport.m31 = 0.f;
viewport.m32 = 0.f;
viewport.m33 = std::abs(size.z) * 0.5f;
viewport.m34 = std::abs(size.z) * 0.5f + position.z;
viewport.m41 = std::min(position.x, size.x);
viewport.m42 = std::min(position.y, size.y);
viewport.m43 = std::min(position.y, size.z);
viewport.m44 = 1.f;
viewportUpdated = false;
}
//std::cout<<"matrix : "<<viewport<<std::endl;
return viewport.inverse() * vec;
}
math::Matrix4f ViewportMatrix::getMatrix() {
if (viewportUpdated) {
viewport.m11 = std::abs(size.x) * 0.5f;
viewport.m12 = 0.f;
viewport.m13 = 0.f;
viewport.m14 = std::abs(size.x) * 0.5f + position.x;
viewport.m21 = 0.f;
viewport.m22 = std::abs(size.y) * 0.5f;
viewport.m23 = 0.f;
viewport.m24 = std::abs(size.y) * 0.5f + position.y;
viewport.m31 = 0.f;
viewport.m32 = 0.f;
viewport.m33 = std::abs(size.z) * 0.5f;
viewport.m34 = std::abs(size.z) * 0.5f + position.z;
viewport.m41 = std::min(position.x, size.x);
viewport.m42 = std::min(position.y, size.y);
viewport.m43 = std::min(position.y, size.z);
viewport.m44 = 1.f;
viewportUpdated = false;
}
return viewport;
}
}
}
|
#!/bin/bash
# Postgres container init script (docker-entrypoint-initdb.d): provisions the
# test role and database using the default superuser credentials.
set -e
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
    CREATE USER villagebook_test;
    CREATE DATABASE villagebook_test;
    GRANT ALL PRIVILEGES ON DATABASE villagebook_test TO villagebook_test;
EOSQL
|
#!/usr/bin/env bash
# Replays the recorded reset commands for a Tilt workflow resource, one
# command per line, from the workflow's state file.
set -eou pipefail
if [ $# -ne 2 ]
then
  # Fixed: the message previously said "<workflow_name> <resource_name>",
  # the reverse of how the positional arguments are consumed below.
  echo "usage: $0 <resource_name> <workflow_name>"
  exit 1
fi
resource_name=$1
workflow_name=$2
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
state_dir="${TILT_WORKFLOW_STATE_PATH:-$script_dir}"
# $CMD_LINE is deliberately unquoted so the shell word-splits each recorded
# line back into a command and its arguments.
while read -r CMD_LINE
do
  $CMD_LINE
done < "${state_dir}/workflow_reset_cmds-$resource_name-$workflow_name.tmp"
|
/*
* dbtype_PropertyDirectory.cpp
*/
#include <string>
#include <stddef.h>
#include <vector>
#include <map>
#include <sstream>
#include <ostream>
#include "logging/log_Logger.h"
#include "osinterface/osinterface_OsTypes.h"
#include "dbtypes/dbtype_PropertyDirectory.h"
#include "dbtypes/dbtype_PropertyData.h"
#include <boost/tokenizer.hpp>
#include <boost/algorithm/string/trim.hpp>
namespace
{
// @see PropertyEntity
/** Currently this can only be one character */
static const std::string PATH_SEPARATOR = "/";
static const std::string LISTING_SEPARATOR = ": ";
static const MG_UnsignedInt MAX_TO_STRING_BYTES = 1024000;
}
namespace mutgos
{
namespace dbtype
{
    // ----------------------------------------------------------------------
    // Default constructor: starts with an empty directory and an empty
    // last-accessed cache.
    PropertyDirectory::PropertyDirectory()
      : last_accessed_name_ptr(0),
        last_accessed_entry_ptr(0)
    {
    }
    // ----------------------------------------------------------------------
    // Destructor: clear() deletes every owned PropertyData and subdirectory.
    PropertyDirectory::~PropertyDirectory()
    {
        clear();
    }
    // ----------------------------------------------------------------------
    // Copy constructor: delegates to operator= for a deep copy.  The cache
    // pointers start null because rhs's cache refers to rhs's own internals.
    PropertyDirectory::PropertyDirectory(const PropertyDirectory &rhs)
      : last_accessed_name_ptr(0),
        last_accessed_entry_ptr(0)
    {
        operator=(rhs);
    }
    // ----------------------------------------------------------------------
    // Deep-copy assignment: clears this directory, then clones every
    // PropertyData value and child PropertyDirectory from rhs.
    PropertyDirectory &PropertyDirectory::operator=(
        const PropertyDirectory &rhs)
    {
        if (&rhs != this)
        {
            clear();
            PropertyDirectoryMap::iterator insert_iter;
            for (PropertyDirectoryMap::const_iterator
                    copy_iter = rhs.property_map.begin();
                copy_iter != rhs.property_map.end();
                ++copy_iter)
            {
                // Insert an empty entry, then clone the data (first) and the
                // subdirectory (second) into it when present.
                insert_iter = property_map.insert(
                    std::make_pair(copy_iter->first, DirectoryEntry(0,0))).first;
                if (copy_iter->second.first)
                {
                    insert_iter->second.first = copy_iter->second.first->clone();
                }
                if (copy_iter->second.second)
                {
                    insert_iter->second.second = copy_iter->second.second->clone();
                }
            }
        }
        return *this;
    }
    // ----------------------------------------------------------------------
    // Polymorphic deep copy; the caller owns the returned pointer.
    PropertyDirectory *PropertyDirectory::clone(void) const
    {
        return new PropertyDirectory(*this);
    }
    // ----------------------------------------------------------------------
    // Deep equality: two directories are equal when they contain the same
    // entry names, with pairwise-equal data values and (recursively) equal
    // subdirectories.
    bool PropertyDirectory::operator==(const PropertyDirectory &rhs) const
    {
        if (this == &rhs)
        {
            return true;
        }
        if (property_map.size() != rhs.property_map.size())
        {
            return false;
        }
        // Exactly the same size, so do a entry-by-entry deep comparison.
        // Both maps are sorted identically, so a parallel walk suffices.
        //
        PropertyDirectoryMap::const_iterator equal_iter =
            property_map.begin();
        PropertyDirectoryMap::const_iterator rhs_equal_iter =
            rhs.property_map.begin();
        for (; (equal_iter != property_map.end()) and
            (rhs_equal_iter != rhs.property_map.end());
            ++equal_iter, ++rhs_equal_iter)
        {
            // Entry name
            //
            if (equal_iter->first != rhs_equal_iter->first)
            {
                // Directory entry names are not equal.
                return false;
            }
            // Contents of entry
            //
            if (equal_iter->second.first)
            {
                if (rhs_equal_iter->second.first)
                {
                    if (*(equal_iter->second.first) !=
                        *(rhs_equal_iter->second.first))
                    {
                        return false;
                    }
                }
                else
                {
                    return false;
                }
            }
            else if (rhs_equal_iter->second.first)
            {
                return false;
            }
            // Contents of subdirectory, if any
            //
            if (equal_iter->second.second)
            {
                if (rhs_equal_iter->second.second)
                {
                    if (*(equal_iter->second.second) !=
                        *(rhs_equal_iter->second.second))
                    {
                        return false;
                    }
                }
                else
                {
                    return false;
                }
            }
            else if (rhs_equal_iter->second.second)
            {
                return false;
            }
        }
        // Nothing triggered a mismatch, so we must be equal.
        return true;
    }
// ----------------------------------------------------------------------
bool PropertyDirectory::operator!=(const PropertyDirectory &rhs) const
{
return not operator==(rhs);
}
// ----------------------------------------------------------------------
std::string PropertyDirectory::to_string(void) const
{
std::ostringstream result;
std::vector<ToStringPosition> dir_stack;
// Prime the loop to start here.
//
dir_stack.push_back(ToStringPosition(
std::string(),
property_map.begin(),
&property_map));
while (not dir_stack.empty())
{
ToStringPosition
¤t_position = dir_stack.back();
if (current_position.path_iter == current_position.dir_ptr->end())
{
// No more entries this deep, so pop it and loop back for the
// next.
dir_stack.pop_back();
}
else
{
// Print entry path and value
//
if (current_position.path_iter->second.first)
{
result << " "
<< current_position.path_prefix
<< current_position.path_iter->first
<< LISTING_SEPARATOR
<< current_position.path_iter->second.first->
get_as_short_string()
<< std::endl;
}
// If entry is a propdir, push back iterator and updated path to
// back.
//
if (current_position.path_iter->second.second and
(not current_position.path_iter->second.second->
property_map.empty()))
{
dir_stack.push_back(
ToStringPosition(
current_position.path_prefix +
current_position.path_iter->first +
PATH_SEPARATOR,
current_position.path_iter->second.second->
property_map.begin(),
¤t_position.path_iter->second.second->
property_map));
}
++current_position.path_iter;
// If result >= limit, then append '...' at bottom and exit.
if (result.tellp() >= MAX_TO_STRING_BYTES)
{
result << "..." << std::endl;
break;
}
}
}
return result.str();
}
// ----------------------------------------------------------------------
PropertyData *PropertyDirectory::get_property_data(const std::string &path)
{
PropertyData *result_ptr = 0;
DirectoryEntry *entry_ptr = parse_directory_path(path, false);
if (entry_ptr)
{
result_ptr = entry_ptr->first;
}
return result_ptr;
}
// ----------------------------------------------------------------------
PropertyDirectory *PropertyDirectory::get_property_directory(
const std::string &path)
{
PropertyDirectory *result_ptr = 0;
DirectoryEntry *entry_ptr = parse_directory_path(path, false);
if (entry_ptr)
{
result_ptr = entry_ptr->second;
}
return result_ptr;
}
    // ----------------------------------------------------------------------
    // Returns the full path of the sibling entry that sorts immediately
    // after the entry at 'path', or an empty string when 'path' is not found
    // or is already the last entry.  Relies on (and refreshes) the
    // per-directory last-accessed cache populated by parse_directory_path().
    std::string PropertyDirectory::get_next_property(const std::string &path)
    {
        std::string result;
        DirectoryPath search_path;
        DirectoryEntry *entry_ptr =
            parse_directory_path(path, false, &search_path);
        if (entry_ptr)
        {
            // Found something, so find it again in the parent's map, and go
            // forward one to find what's next.  Cache the result in case
            // the caller plans to look at the contents.
            //
            PropertyDirectory *parent_ptr = search_path.back();
            PropertyDirectoryMap::iterator parent_iter =
                parent_ptr->property_map.find(
                    *parent_ptr->last_accessed_name_ptr);
            if (parent_iter != parent_ptr->property_map.end())
            {
                ++parent_iter;
                if (parent_iter != parent_ptr->property_map.end())
                {
                    // Not at the end, so cache it and build the return path.
                    //
                    parent_ptr->last_accessed_name_ptr = &(parent_iter->first);
                    parent_ptr->last_accessed_entry_ptr =
                        &(parent_iter->second);
                    // Build the path by using the last accessed cache.
                    //
                    for (DirectoryPath::iterator path_iter =
                            search_path.begin();
                        path_iter != search_path.end();
                        ++path_iter)
                    {
                        result += PATH_SEPARATOR +
                            *(*path_iter)->last_accessed_name_ptr;
                    }
                }
            }
        }
        return result;
    }
    // ----------------------------------------------------------------------
    // Returns the full path of the sibling entry that sorts immediately
    // before the entry at 'path', or an empty string when 'path' is not
    // found or is already the first entry.  Mirror image of
    // get_next_property(); also refreshes the last-accessed cache.
    std::string PropertyDirectory::get_previous_property(
        const std::string &path)
    {
        std::ostringstream result;
        DirectoryPath search_path;
        DirectoryEntry *entry_ptr =
            parse_directory_path(path, false, &search_path);
        if (entry_ptr)
        {
            // Found something, so find it again in the parent's map, and go
            // back one to find what precedes it.  Cache the result in case
            // the caller plans to look at the contents.
            //
            PropertyDirectory *parent_ptr = search_path.back();
            PropertyDirectoryMap::iterator parent_iter =
                parent_ptr->property_map.find(
                    *parent_ptr->last_accessed_name_ptr);
            // Make sure the entry was found and not at the beginning.
            // If it's at the beginning, we can't go backwards any further so
            // we can just stop.
            //
            if ((parent_iter != parent_ptr->property_map.end()) and
                (parent_iter != parent_ptr->property_map.begin()))
            {
                --parent_iter;
                // Not at the beginning, so cache it and build the return path.
                //
                parent_ptr->last_accessed_name_ptr = &(parent_iter->first);
                parent_ptr->last_accessed_entry_ptr =
                    &(parent_iter->second);
                // Build the path by using the last accessed cache.
                //
                for (DirectoryPath::iterator path_iter = search_path.begin();
                    path_iter != search_path.end();
                    ++path_iter)
                {
                    result << PATH_SEPARATOR
                           << *((*path_iter)->last_accessed_name_ptr);
                }
            }
        }
        return result.str();
    }
// ----------------------------------------------------------------------
std::string PropertyDirectory::get_first_property(const std::string &path)
{
std::string result;
get_property_edge(path, false, result);
return result;
}
// ----------------------------------------------------------------------
std::string PropertyDirectory::get_last_property(const std::string &path)
{
std::string result;
get_property_edge(path, true, result);
return result;
}
    // ----------------------------------------------------------------------
    // Removes the data value at 'path'.  When the entry also holds a
    // subdirectory only the data is deleted and the entry remains;
    // otherwise the now-empty entry is removed entirely.
    void PropertyDirectory::delete_property_data(const std::string &path)
    {
        DirectoryEntry *entry_ptr = parse_directory_path(path, false);
        if (entry_ptr)
        {
            if (entry_ptr->second)
            {
                // A directory, so just delete the data.
                delete entry_ptr->first;
                entry_ptr->first = 0;
            }
            else
            {
                // This is not a directory, so delete the entire property.
                delete_property(path);
            }
        }
    }
    // ----------------------------------------------------------------------
    // Deletes the entry at 'path' entirely: its data, its subdirectory
    // (recursively, via the subdirectory's destructor), and its slot in the
    // parent's map and cache.
    void PropertyDirectory::delete_property(const std::string &path)
    {
        DirectoryPath search_path;
        DirectoryEntry *entry_ptr =
            parse_directory_path(path, false, &search_path);
        if (entry_ptr)
        {
            // Delete the data
            //
            delete entry_ptr->first;
            entry_ptr->first = 0;
            // Delete anything inside the directory
            delete entry_ptr->second;
            entry_ptr->second = 0;
            // Remove it from the map and cache.
            // A trick here:  The property we need to delete is always the
            // last accessed one in the parent.  So we use that for the
            // property name.
            //
            PropertyDirectory *parent_ptr = search_path.back();
            if (not parent_ptr->last_accessed_name_ptr)
            {
                LOG(fatal, "dbtype", "delete_property",
                    "Cache is null!  Cannot delete " + path);
            }
            else
            {
                parent_ptr->property_map.erase(
                    *(parent_ptr->last_accessed_name_ptr));
                parent_ptr->last_accessed_name_ptr = 0;
                parent_ptr->last_accessed_entry_ptr = 0;
            }
        }
    }
    // ----------------------------------------------------------------------
    // Stores a clone of 'data' at 'path', creating intermediate directories
    // as needed and replacing any existing value.  Returns true when the
    // path could be resolved or created.
    bool PropertyDirectory::set_property(
        const std::string &path,
        const PropertyData &data)
    {
        DirectoryEntry *entry_ptr = parse_directory_path(path, true);
        if (entry_ptr)
        {
            if (entry_ptr->first)
            {
                // Delete what's currently there.
                //
                delete entry_ptr->first;
                entry_ptr->first = 0;
            }
            // Copy the new value in.
            //
            entry_ptr->first = data.clone();
        }
        // Pointer-to-bool conversion: non-null means success.
        return entry_ptr;
    }
// ----------------------------------------------------------------------
bool PropertyDirectory::does_property_exist(const std::string &path)
{
return parse_directory_path(path, false);
}
// ----------------------------------------------------------------------
bool PropertyDirectory::is_property_directory(const std::string &path)
{
DirectoryEntry *entry_ptr = parse_directory_path(path, false);
return (entry_ptr ? entry_ptr->second : 0);
}
    // ----------------------------------------------------------------------
    // Deletes every entry's data and subdirectory (subdirectories clean up
    // recursively via their destructors), empties the map, and resets the
    // last-accessed cache.
    void PropertyDirectory::clear(void)
    {
        for (PropertyDirectoryMap::iterator delete_iter = property_map.begin();
            delete_iter != property_map.end();
            ++delete_iter)
        {
            delete delete_iter->second.first;
            delete delete_iter->second.second;
            delete_iter->second.first = 0;
            delete_iter->second.second = 0;
        }
        property_map.clear();
        last_accessed_entry_ptr = 0;
        last_accessed_name_ptr = 0;
    }
    // ----------------------------------------------------------------------
    // Estimates the heap memory consumed by this directory: per-entry map
    // overhead, key strings, and the recursive usage reported by each data
    // value and subdirectory.
    size_t PropertyDirectory::mem_used(void) const
    {
        size_t memory_used = property_map.size();
        for (PropertyDirectoryMap::const_iterator
            mem_iter = property_map.begin();
            mem_iter != property_map.end();
            ++mem_iter)
        {
            memory_used += mem_iter->first.size();
            memory_used += sizeof(*mem_iter);
            if (mem_iter->second.first)
            {
                memory_used += mem_iter->second.first->mem_used();
            }
            if (mem_iter->second.second)
            {
                memory_used += mem_iter->second.second->mem_used();
            }
        }
        return memory_used;
    }
    // ----------------------------------------------------------------------
    // Finds (or, when 'create' is set, inserts) the entry for a single
    // segment name within this directory, consulting the one-entry
    // last-accessed cache first.  Returns null when absent and not creating.
    PropertyDirectory::DirectoryEntry *PropertyDirectory::get_directory_entry(
        const std::string &name,
        const bool create)
    {
        if (last_accessed_name_ptr)
        {
            // Try the cache first.
            //
            if ((*last_accessed_name_ptr) == name)
            {
                // Found in cache.
                return last_accessed_entry_ptr;
            }
        }
        PropertyDirectoryMap::iterator prop_iter = property_map.find(name);
        if (prop_iter == property_map.end())
        {
            // Not found.  See if we need to create it.
            if (not create)
            {
                return 0;
            }
            else
            {
                // Create the entry, cache it, and return.
                prop_iter = property_map.insert(
                    std::make_pair(name, DirectoryEntry(0,0))).first;
                last_accessed_name_ptr = &(prop_iter->first);
                last_accessed_entry_ptr = &(prop_iter->second);
                return last_accessed_entry_ptr;
            }
        }
        else
        {
            // Found something.  Cache it and return.
            //
            last_accessed_name_ptr = &(prop_iter->first);
            last_accessed_entry_ptr = &(prop_iter->second);
            return last_accessed_entry_ptr;
        }
        // Unreachable: every branch above returns.
        return 0;
    }
// ----------------------------------------------------------------------
PropertyDirectory::DirectoryEntry *PropertyDirectory::parse_directory_path(
const std::string &path,
const bool create,
PropertyDirectory::DirectoryPath *path_ptr)
{
DirectoryEntry *current_entry_ptr = 0;
PropertyDirectory *current_propdir_ptr = this;
std::string trimmed_path = boost::trim_copy(path);
if (trimmed_path.empty())
{
// Empty paths are not valid.
return 0;
}
// Remove any prefixed separators, since they are not needed.
//
const size_t trim_index =
trimmed_path.find_first_not_of(PATH_SEPARATOR);
if (trim_index and (trim_index != std::string::npos))
{
// Get rid of prefix separator since it's not needed.
trimmed_path = (trimmed_path.size() == (trim_index + 1)) ?
"" : trimmed_path.substr(trim_index + 1);
}
boost::char_separator<char> sep(PATH_SEPARATOR.c_str());
boost::tokenizer<boost::char_separator<char> >
tokens(trimmed_path, sep);
// Go through the path one segment at a time, traversing the property
// directories until either the end is found, or a segment cannot
// be located.
//
for (boost::tokenizer<boost::char_separator<char> >::iterator
tok_iter = tokens.begin();
tok_iter != tokens.end(); ++tok_iter)
{
// Skip empty tokens. This might happen if there are multiple
// separators in a row, or if it ends in a separator.
//
if (not (*tok_iter).empty())
{
if (not current_propdir_ptr)
{
current_entry_ptr = 0;
break;
}
if (path_ptr)
{
path_ptr->push_back(current_propdir_ptr);
}
current_entry_ptr = current_propdir_ptr->get_directory_entry(
*tok_iter, create);
if (not current_entry_ptr)
{
// Couldn't find a segment.
break;
}
else
{
current_propdir_ptr = current_entry_ptr->second;
}
}
}
if (path_ptr and (not current_entry_ptr))
{
// Nothing found, so leave the path empty.
path_ptr->clear();
}
return current_entry_ptr;
}
    // ----------------------------------------------------------------------
    // Shared implementation of get_first_property()/get_last_property():
    // writes into edge_path the path of the first (or, when 'last' is set,
    // the last) entry inside the directory at 'path', or clears edge_path
    // when 'path' does not resolve to a non-empty directory.
    void PropertyDirectory::get_property_edge(
        const std::string &path,
        const bool last,
        std::string &edge_path)
    {
        std::string trimmed_path = boost::trim_copy(path);
        edge_path.clear();
        if (not trimmed_path.empty())
        {
            bool add_separator = true;
            // If they already have a slash at the end, no need to add another
            // one
            //
            if (trimmed_path[trimmed_path.size() - 1] == PATH_SEPARATOR[0])
            {
                add_separator = false;
            }
            // Parse the path, then simply append the first entry at the end.
            //
            DirectoryEntry *entry_ptr = parse_directory_path(path, false);
            if (entry_ptr and entry_ptr->second and
                (not entry_ptr->second->property_map.empty()))
            {
                edge_path = trimmed_path;
                if (add_separator)
                {
                    edge_path += PATH_SEPARATOR;
                }
                edge_path += (last ?
                    entry_ptr->second->property_map.rbegin()->first :
                    entry_ptr->second->property_map.begin()->first);
            }
        }
    }
} /* namespace dbtype */
} /* namespace mutgos */
|
package com.github.shimmerjordan.exam.mapper;
import com.github.shimmerjordan.common.core.persistence.CrudMapper;
import com.github.shimmerjordan.exam.api.module.Answer;
import org.apache.ibatis.annotations.Mapper;
import java.util.List;
/**
 * Mapper for exam answers.
 *
 * @author shimmerjordan
 * @date 2021/03/08 21:09
 */
@Mapper
public interface AnswerMapper extends CrudMapper<Answer> {

    /**
     * Finds an answer by user ID, examination ID, exam record ID and
     * question ID (taken from the populated fields of the given entity).
     *
     * @param answer answer
     * @return Answer
     * @author shimmerjordan
     * @date 2021/01/21 19:38
     */
    Answer getAnswer(Answer answer);

    /**
     * Finds all answers belonging to the given exam record ID.
     *
     * @param examRecordId examRecordId
     * @return List
     * @author shimmerjordan
     * @date 2021/02/21 1:08
     */
    List<Answer> findListByExamRecordId(Long examRecordId);
}
|
<gh_stars>0
/* jshint indent: 1 */
// Sequelize model for the legacy DOR_CUV_SUBS table (current-use-value
// sub-records).  camelCase attributes map onto the uppercase column names
// via `field`; every column is nullable and the table has no managed
// createdAt/updatedAt timestamp columns.
module.exports = function(sequelize, DataTypes) {
 return sequelize.define('dorCuvSubs', {
 coname: {
  type: DataTypes.CHAR,
  allowNull: true,
  field: 'CONAME'
 },
 conum: {
  type: DataTypes.CHAR,
  allowNull: true,
  field: 'CONUM'
 },
 digyr: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'DIGYR'
 },
 realkey: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'REALKEY'
 },
 landkey: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'LANDKEY'
 },
 subType: {
  type: DataTypes.CHAR,
  allowNull: true,
  field: 'SUB_TYPE'
 },
 descrip: {
  type: DataTypes.STRING,
  allowNull: true,
  field: 'DESCRIP'
 },
 ltype: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'LTYPE'
 },
 lclass: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'LCLASS'
 },
 acres: {
  type: DataTypes.DOUBLE,
  allowNull: true,
  field: 'ACRES'
 },
 cuvMatch: {
  type: DataTypes.INTEGER,
  allowNull: true,
  field: 'CUV_MATCH'
 },
 createdate: {
  type: DataTypes.DATEONLY,
  allowNull: true,
  field: 'CREATEDATE'
 }
 }, {
  tableName: 'DOR_CUV_SUBS',
  timestamps: false
 });
};
|
#!/bin/sh
# CocoaPods-style frameworks-embedding script for the Chillapp target:
# copies built frameworks (and any Swift runtime dylibs they link against)
# into the app bundle's Frameworks folder, re-signing when required.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the named framework into the app's Frameworks folder, re-signs it
# when the build settings require it, and embeds any Swift runtime
# libraries the binary links against.
install_framework()
{
  local source="${BUILT_PRODUCTS_DIR}/Pods-Chillapp/$1"
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source=$(readlink "${source}")
  fi
  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" ${source} ${destination}"
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" "${source}" "${destination}"
  # Resign the code if required by the build settings to avoid unstable apps
  # POSIX test(1) uses '=' for string equality; '==' is a bashism that fails
  # under strict /bin/sh implementations such as dash.
  if [ "${CODE_SIGNING_REQUIRED}" = "YES" ]; then
    code_sign "${destination}/$1"
  fi
  # Embed linked Swift runtime libraries
  local basename
  basename=$(echo $1 | sed -E s/\\..+// && exit ${PIPESTATUS[0]})
  local swift_runtime_libs
  swift_runtime_libs=$(xcrun otool -LX "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/$1/${basename}" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
  for lib in $swift_runtime_libs; do
    echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
    rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
    if [ "${CODE_SIGNING_REQUIRED}" = "YES" ]; then
      code_sign "${destination}/${lib}"
    fi
  done
}
# Signs a framework with the provided identity
code_sign() {
  # Use the current code_sign_identity taken from the Xcode build settings.
  echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
  echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements $1"
  /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1"
}
# Embed the Pods frameworks.  Debug and Release previously had two
# identical branches; a case statement expresses the same behavior once.
case "$CONFIGURATION" in
  Debug|Release)
    install_framework 'SwiftyJSON.framework'
    install_framework 'THContactPicker.framework'
    ;;
esac
|
package decorator
import (
"fmt"
"github.com/aws/aws-sdk-go/aws/session"
sparta "github.com/mweagle/Sparta"
cfCustomResources "github.com/mweagle/Sparta/aws/cloudformation/resources"
gocf "github.com/mweagle/go-cloudformation"
"github.com/sirupsen/logrus"
)
// S3ArtifactPublisherDecorator returns a ServiceDecoratorHookHandler
// function that publishes the given data to an S3 Bucket
// using the given bucket and key.
func S3ArtifactPublisherDecorator(bucket gocf.Stringable,
	key gocf.Stringable,
	data map[string]interface{}) sparta.ServiceDecoratorHookHandler {
	// Setup the CF distro
	artifactDecorator := func(context map[string]interface{},
		serviceName string,
		template *gocf.Template,
		S3Bucket string,
		S3Key string,
		buildID string,
		awsSession *session.Session,
		noop bool,
		logger *logrus.Logger) error {
		// Ensure the custom action handler...
		// Scope the handler's permissions to objects inside the target bucket.
		sourceArnExpr := gocf.Join("",
			gocf.String("arn:aws:s3:::"),
			bucket.String(),
			gocf.String("/*"))
		configuratorResName, err := sparta.EnsureCustomResourceHandler(serviceName,
			cfCustomResources.S3ArtifactPublisher,
			sourceArnExpr,
			[]string{},
			template,
			S3Bucket,
			S3Key,
			logger)
		if err != nil {
			return err
		}
		// Create the invocation of the custom action...
		// The custom resource invokes the handler Lambda via ServiceToken.
		s3PublishResource := &cfCustomResources.S3ArtifactPublisherResource{}
		s3PublishResource.ServiceToken = gocf.GetAtt(configuratorResName, "Arn")
		s3PublishResource.Bucket = bucket.String()
		s3PublishResource.Key = key.String()
		s3PublishResource.Body = data
		// Name?
		// Resource name is derived from bucket+key so repeated decorations of
		// the same target reuse a stable logical ID.
		resourceInvokerName := sparta.CloudFormationResourceName("ArtifactS3",
			fmt.Sprintf("%v", bucket.String()),
			fmt.Sprintf("%v", key.String()))
		// Add it
		template.AddResource(resourceInvokerName, s3PublishResource)
		return nil
	}
	return sparta.ServiceDecoratorHookFunc(artifactDecorator)
}
|
package weixin.liuliangbao.flowcard.Service.impl;
import org.jeecgframework.core.common.service.impl.CommonServiceImpl;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import weixin.liuliangbao.flowcard.Entity.FlowCardDeliveryInfoEntity;
import weixin.liuliangbao.flowcard.Service.FlowCardDeliveryInfoServiceI;
import weixin.tenant.entity.FlowCardTradeRecordsEntity;
import weixin.tenant.service.FlowCardTradeRecordsServiceI;
import java.io.Serializable;
/**
 * Service for flow-card delivery information.  Wraps the generic CRUD
 * operations and invokes the configured SQL-enhancement hooks after each
 * write operation.
 *
 * Created by aa on 2015/12/18.
 */
@Service("flowCardDeliveryInfoService")
@Transactional
public class FlowCardDeliveryInfoServiceImpl extends CommonServiceImpl implements FlowCardDeliveryInfoServiceI {

    public <T> void delete(T entity) {
        super.delete(entity);
        // Run the SQL enhancement configured for the delete operation.
        this.doDelSql((FlowCardDeliveryInfoEntity) entity);
    }

    public <T> Serializable save(T entity) {
        Serializable t = super.save(entity);
        // Run the SQL enhancement configured for the create operation.
        this.doAddSql((FlowCardDeliveryInfoEntity) entity);
        return t;
    }

    public <T> void saveOrUpdate(T entity) {
        super.saveOrUpdate(entity);
        // Run the SQL enhancement configured for the update operation.
        this.doUpdateSql((FlowCardDeliveryInfoEntity) entity);
    }

    /**
     * Default button - SQL enhancement - create operation (no-op by default).
     *
     * @return true when the enhancement succeeds
     */
    public boolean doAddSql(FlowCardDeliveryInfoEntity t) {
        return true;
    }

    /**
     * Default button - SQL enhancement - update operation (no-op by default).
     *
     * @return true when the enhancement succeeds
     */
    public boolean doUpdateSql(FlowCardDeliveryInfoEntity t) {
        return true;
    }

    /**
     * Default button - SQL enhancement - delete operation (no-op by default).
     *
     * @return true when the enhancement succeeds
     */
    public boolean doDelSql(FlowCardDeliveryInfoEntity t) {
        return true;
    }
}
|
<filename>ViewPagerDemo/src/main/java/com/creadigol/inshort/Gcm/MyFirebaseInstanceIDService.java<gh_stars>0
package com.creadigol.inshort.Gcm;
/**
* Created by Vj on 2/8/2017.
*/
import android.content.Context;
import android.telephony.TelephonyManager;
import android.util.Log;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.creadigol.inshort.Utils.AppUrl;
import com.creadigol.inshort.Utils.MyApplication;
import com.google.firebase.iid.FirebaseInstanceId;
import com.google.firebase.iid.FirebaseInstanceIdService;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import java.util.Map;
//Class extending FirebaseInstanceIdService
// NOTE(review): FirebaseInstanceIdService is deprecated in recent Firebase
// SDKs (replaced by FirebaseMessagingService.onNewToken) — confirm the SDK
// version pinned by this project before upgrading.
public class MyFirebaseInstanceIDService extends FirebaseInstanceIdService {
    // Remaining retry budget for the registration upload; see onErrorResponse.
    private int countTimeOut = 3;
    private static final String TAG = "MyFirebaseIIDService";

    /**
     * Called if InstanceID token is updated. This may occur if the security of
     * the previous token had been compromised. Note that this is called when the InstanceID token
     * is initially generated so this is where you would retrieve the token.
     */
    // [START refresh_token]
    @Override
    public void onTokenRefresh() {
        //Getting registration token
        String refreshedToken = FirebaseInstanceId.getInstance().getToken();
        //Displaying token on logcat
        Log.e(TAG, "Refreshed token: " + refreshedToken);
        // NOTE(review): getDeviceId() returns the device IMEI and requires the
        // READ_PHONE_STATE permission — confirm it is granted before this
        // service can fire, otherwise a SecurityException is possible.
        TelephonyManager telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
        String number = telephonyManager.getDeviceId();
        // If you want to send messages to this application instance or
        // manage this apps subscriptions on the server side, send the
        // Instance ID token to your app server.
        sendRegistrationToServer(refreshedToken, number);
    }

    // Uploads the FCM token plus device IMEI to the backend, retrying up to
    // three times on network errors (see the error listener below).
    private void sendRegistrationToServer(final String token, final String number) {
        //You can implement this method to store the token on your server
        //Not required for current project
        StringRequest jsonObjectRequest = new StringRequest(Request.Method.POST, AppUrl.URL_SETGCM, new Response.Listener<String>() {
            @Override
            public void onResponse(String response) {
                try {
                    Log.e(TAG, "Response: " + response);
                    JSONObject jsonObject = new JSONObject(response);
                    int statusCode = jsonObject.optInt("status_code");
                    String massage = jsonObject.optString("massage");
                    // Server acknowledged (1) or rejected the token; no
                    // client-side action is taken in either case.
                    if (statusCode == 1) {
                    } else {
                    }
                } catch (JSONException e) {
                    // NOTE(review): malformed responses are silently swallowed
                    // here — consider at least logging the exception.
                }
                //mProgressDialog.dismiss();
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                // Retry until countTimeOut reaches 3, then reset the budget
                // and give up for this token refresh.
                if (countTimeOut >= 3) {
                    countTimeOut = 0;
                } else {
                    countTimeOut++;
                    sendRegistrationToServer(token, number);
                }
                //mProgressDialog.dismiss();
            }
        }) {
            @Override
            protected Map<String, String> getParams() throws AuthFailureError {
                Map<String, String> params = new HashMap<String, String>();
                params.put("gcmtoken", token);
                params.put("imei", number);
                Log.e(TAG, "reqSetGcmId params: " + params.toString());
                return params;
            }
        };
        MyApplication.getInstance().addToRequestQueue(jsonObjectRequest, TAG);
    }
}
<reponame>cn2oo8/molicode<gh_stars>10-100
/**
* Copyright(C) 2004-2017 shareyi.com All Right Reserved
*/
package com.shareyi.molicode.common.valid.group;
/**
 * <p>
 * Bean-validation group marking create operations.
 * </p>
 *
 * @author david
 * @date 2017-09-14 11:31
 */
public interface Creating {
}
|
# main.py COPYRIGHT Fujitsu Limited 2021
from argparse import ArgumentParser
from collections import OrderedDict
import copy
import os
import torch
from torchvision import transforms as T
import torchvision.datasets as datasets
from tqdm import tqdm
#from cac import auto_prune
import sys
sys.path.append('../../')
from auto_prune import auto_prune
from vgg11_bn import VGG11_BN
from schduler import WarmupCosineLR
#===================================================================================
# Command-line interface: data loading, training and auto-pruning knobs.
parser = ArgumentParser()
parser.add_argument('--workers', default=8, type=int,
                    help='number of data loading workers')
parser.add_argument('--use_gpu', action='store_true',
                    help='use gpu')
parser.add_argument('--use_DataParallel', action='store_true',
                    help='use DataParallel')
# for training
parser.add_argument('--data', type=str, default='./data',
                    help='path to dataset')
parser.add_argument('--batch_size', type=int, default=256)
parser.add_argument('--learning_rate', type=float, default=1e-2)
parser.add_argument('--momentum', type=float, default=0.9)
parser.add_argument('--weight_decay', type=float, default=1e-2)
# for auto pruning
parser.add_argument('--acc_control', type=float, default=1.0,
                    help='control parameter for pruned model accuracy')
parser.add_argument('--rates', nargs='*', type=float, default=[0.2, 0.1, 0.0],
                    help='candidates for pruning rates')
parser.add_argument('--max_search_times', type=int, default=1000,
                    help='maximum number of times for pruning rate search')
parser.add_argument('--epochs', type=int, default=300,
                    help='re-training epochs')
parser.add_argument('--model_path', type=str, default='./pretrained_cifar10_vgg11_bn.pt',
                    help='pre-trained model filepath')
parser.add_argument('--pruned_model_path', type=str, default='./pruned_cifar10_vgg11_bn.pt',
                    help='pruned model filepath')
#===================================================================================
def main():
    """Load CIFAR-10, evaluate the pretrained VGG11-BN model, run the
    auto-pruning search, and report model size/accuracy before vs. after
    pruning."""
    args = parser.parse_args()
    args.rates = ([float(f) for f in args.rates])
    print(f'args: {args}')
    device = 'cpu'
    if args.use_gpu:
        torch.backends.cudnn.benchmark = True
        device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    print('device: ', device)
    print('===== load data ===================')
    # CIFAR-10 per-channel statistics used for input normalization.
    norm_mean = (0.4914, 0.4822, 0.4465)
    norm_std = (0.2471, 0.2435, 0.2616)
    train_transform = T.Compose(
        [
            T.RandomCrop(32, padding=4),
            T.RandomHorizontalFlip(),
            T.ToTensor(),
            T.Normalize(norm_mean, norm_std),
        ]
    )
    val_transform = T.Compose(
        [
            T.ToTensor(),
            T.Normalize(norm_mean, norm_std),
        ]
    )
    # get cifar10 datasets
    dataset_path = args.data
    train_dataset = datasets.CIFAR10(
        root=dataset_path, train=True, download=True, transform=train_transform)
    val_dataset = datasets.CIFAR10(
        root=dataset_path, train=False, download=True, transform=val_transform)
    # make DataLoader
    batch_size = args.batch_size
    train_loader = torch.utils.data.DataLoader(
        train_dataset,
        batch_size=batch_size,
        num_workers=args.workers,
        shuffle=True,
        drop_last=True,
        pin_memory=True,
    )
    val_loader = torch.utils.data.DataLoader(
        val_dataset,
        batch_size=batch_size,
        num_workers=args.workers,
        drop_last=True,
        pin_memory=True,
    )
    # load model
    model = VGG11_BN()
    model.load_state_dict(torch.load(
        args.model_path, map_location=device), strict=True)
    if torch.cuda.device_count() > 1 and args.use_DataParallel:
        print('use {} GPUs.'.format(torch.cuda.device_count()))
        model = torch.nn.DataParallel(model)
    model.to(device)
    print('===== model: before pruning ==========')
    print(model)
    # Model information for pruning: maps state-dict layer names to the
    # VGG11_BN constructor argument controlling that layer's channel count
    # (conv and its batch-norm share one argument; the last classifier
    # layer is not pruned, hence 'arg': None).
    model_info = OrderedDict()
    model_info['features.0'] = {'arg': 'out_ch_conv1'}
    model_info['features.1'] = {'arg': 'out_ch_conv1'}
    model_info['features.4'] = {'arg': 'out_ch_conv2'}
    model_info['features.5'] = {'arg': 'out_ch_conv2'}
    model_info['features.8'] = {'arg': 'out_ch_conv3'}
    model_info['features.9'] = {'arg': 'out_ch_conv3'}
    model_info['features.11'] = {'arg': 'out_ch_conv4'}
    model_info['features.12'] = {'arg': 'out_ch_conv4'}
    model_info['features.15'] = {'arg': 'out_ch_conv5'}
    model_info['features.16'] = {'arg': 'out_ch_conv5'}
    model_info['features.18'] = {'arg': 'out_ch_conv6'}
    model_info['features.19'] = {'arg': 'out_ch_conv6'}
    model_info['features.22'] = {'arg': 'out_ch_conv7'}
    model_info['features.23'] = {'arg': 'out_ch_conv7'}
    model_info['features.25'] = {'arg': 'out_ch_conv8'}
    model_info['features.26'] = {'arg': 'out_ch_conv8'}
    model_info['classifier.0'] = {'arg': 'out_ch_fc1'}
    model_info['classifier.3'] = {'arg': 'out_ch_fc2'}
    model_info['classifier.6'] = {'arg': None}
    # load weight of trained model
    if torch.cuda.device_count() > 1 and args.use_DataParallel:
        weights = copy.deepcopy(model.module.state_dict())
    else:
        weights = copy.deepcopy(model.state_dict())
    # calculate accuracy with unpruned trained model
    Ab = validate(val_loader, model, device, epoch=1)
    print('Accuracy before pruning: ', Ab)
    # tune pruning rate
    print('===== start pruning rate tuning =====')
    optim_params = dict(lr=args.learning_rate,
                        momentum=args.momentum,
                        weight_decay=args.weight_decay,
                        nesterov=True)
    # Scheduler params are expressed in iterations, not epochs, because
    # update_lr='iter' below steps the scheduler once per batch.
    total_steps = args.epochs * len(train_loader)
    scheduler = WarmupCosineLR
    scheduler_params = dict(warmup_epochs=total_steps * 0.3,
                            max_epochs=total_steps)
    criterion = torch.nn.CrossEntropyLoss()
    # NOTE(review): residual_connections=True although VGG11-BN as listed in
    # model_info has no skip connections — confirm against auto_prune's docs.
    weights, Afinal, n_args_channels = auto_prune(VGG11_BN, model_info, weights, Ab,
                                                  train_loader, val_loader, criterion,
                                                  optim_type='SGD',
                                                  optim_params=optim_params,
                                                  lr_scheduler=scheduler,
                                                  scheduler_params=scheduler_params,
                                                  update_lr='iter',
                                                  use_gpu=args.use_gpu,
                                                  use_DataParallel=args.use_DataParallel,
                                                  acc_control=args.acc_control,
                                                  rates=args.rates,
                                                  max_search_times=args.max_search_times,
                                                  epochs=args.epochs,
                                                  model_path=args.model_path,
                                                  pruned_model_path=args.pruned_model_path,
                                                  residual_connections=True)
    print('===== model: after pruning ==========')
    print(model)
    print('===== Results =====')
    print('Model size before pruning (Byte):', os.path.getsize(args.model_path))
    if os.path.exists(args.pruned_model_path):
        print('Model size after pruning (Byte):',
              os.path.getsize(args.pruned_model_path))
        print('Compression rate : {:.3f}'.format(
            1-os.path.getsize(args.pruned_model_path)/os.path.getsize(args.model_path)))
    print('Acc. before pruning: {:.2f}'.format(Ab))
    print('Acc. after pruning : {:.2f}'.format(Afinal))
    print('Arguments name & number of channels for pruned model: ', n_args_channels)
def validate(val_loader, model, device, epoch):
    """Run one evaluation pass and return top-1 accuracy in percent.

    Args:
        val_loader: iterable yielding (images, targets) batches.
        model: network to evaluate; switched to eval mode here.
        device: device the batches are moved to for the forward pass.
        epoch: epoch number, used only to label the progress bar.

    Returns:
        float: hit / total * 100 over all batches. Returns 0.0 for an
        empty loader (the original raised UnboundLocalError in that case,
        which can happen when batch_size > len(dataset) with drop_last=True).
    """
    model.eval()
    hit = 0
    total = 0
    with torch.no_grad():
        with tqdm(val_loader, leave=False) as pbar:
            pbar.set_description('Epoch {} Validation'.format(epoch))
            for images, targets in pbar:
                images = images.to(device)
                targets = targets.to(device)
                outputs = model(images)
                # argmax over the class dimension -> predicted class per sample
                outClass = outputs.cpu().detach().numpy().argmax(axis=1)
                hit += (outClass == targets.cpu().numpy()).sum()
                total += len(targets)
                val_acc = hit / total * 100
                pbar.set_postfix({'valid Acc': val_acc})
    # Guard: avoid ZeroDivisionError / unbound val_acc on an empty loader.
    if total == 0:
        return 0.0
    return hit / total * 100
# Script entry point.
if __name__ == '__main__':
    main()
|
#!/usr/bin/env bash
# Wait-for wrapper: poll the probe command ($1) once a second until it
# succeeds, then replace this process with the remaining arguments.
set -e

probe="$1"
shift

while ! $probe; do
  >&2 echo "Waiting..."
  sleep 1
done

>&2 echo "Executing command"
exec "$@"
|
# Launch the stage-2 GHC build in interactive (GHCi) mode on Test.hs.
# -B.. points the compiler at the parent directory for its support files.
../ghc/dist-stage2/build/ghc/ghc \
    -B.. \
    --interactive \
    Test.hs
|
"""Package-level re-exports for the inference subpackage."""
from .inference_methods import (inference_qpbo, inference_lp,
                                inference_ad3, inference_ogm,
                                inference_dispatch, get_installed)
from .common import compute_energy

# Public API of this package.
__all__ = ["inference_qpbo", "inference_lp", "inference_ad3",
           "inference_dispatch", "get_installed", "compute_energy",
           "inference_ogm"]
|
import { JSONObject } from './json';

/**
 * Serializable description of a single node in a Mitosis component tree.
 */
export type MitosisNode = {
  /** Discriminator identifying this object as a Mitosis node. */
  '@type': '@builder.io/mitosis/node';
  /** Element or component name this node renders as. */
  name: string;
  /** Arbitrary metadata attached to the node. */
  meta: JSONObject;
  /**
   * Key-value store of string values for DOM attributes.
   * ```js
   * {
   *   defaultValue: 'initial text',
   *   width: '100px',
   * }
   * ```
   */
  properties: { [key: string]: string | undefined };
  /**
   * Key-value store of expression values for DOM attributes. These are always represented as strings.
   *
   * ```js
   * {
   *   disabled: "state.isDisabled",
   *   defaultValue: "`${props.text} + ' initial'`",
   *   width: "props.width * 10",
   *   height: "100",
   * }
   * ```
   */
  bindings: { [key: string]: string | undefined };
  /** Nested child nodes. */
  children: MitosisNode[];
};
|
<gh_stars>0
// Code generated by ogen, DO NOT EDIT.
package api
import (
"bytes"
"context"
"fmt"
"io"
"math"
"math/bits"
"net"
"net/http"
"net/url"
"regexp"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/go-faster/errors"
"github.com/go-faster/jx"
"github.com/google/uuid"
"github.com/ogen-go/ogen/conv"
ht "github.com/ogen-go/ogen/http"
"github.com/ogen-go/ogen/json"
"github.com/ogen-go/ogen/otelogen"
"github.com/ogen-go/ogen/uri"
"github.com/ogen-go/ogen/validate"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/trace"
)
// No-op definition for keeping imports.
var (
_ = context.Background()
_ = fmt.Stringer(nil)
_ = strings.Builder{}
_ = errors.Is
_ = sort.Ints
_ = http.MethodGet
_ = io.Copy
_ = json.Marshal
_ = bytes.NewReader
_ = strconv.ParseInt
_ = time.Time{}
_ = conv.ToInt32
_ = uuid.UUID{}
_ = uri.PathEncoder{}
_ = url.URL{}
_ = math.Mod
_ = bits.LeadingZeros64
_ = validate.Int{}
_ = ht.NewRequest
_ = net.IP{}
_ = otelogen.Version
_ = trace.TraceIDFromHex
_ = otel.GetTracerProvider
_ = metric.NewNoopMeterProvider
_ = regexp.MustCompile
_ = jx.Null
_ = sync.Pool{}
)
// encodeURI serializes v's min, max and filter fields into e.
// Generated by ogen — regenerate rather than hand-editing.
func (v TestObjectQueryParameterDeepObject) encodeURI(e uri.Encoder) error {
	if err := e.EncodeField("min", func(e uri.Encoder) error {
		return e.EncodeValue(conv.IntToString(v.Min))
	}); err != nil {
		return errors.Wrap(err, `field min`)
	}
	if err := e.EncodeField("max", func(e uri.Encoder) error {
		return e.EncodeValue(conv.IntToString(v.Max))
	}); err != nil {
		return errors.Wrap(err, `field max`)
	}
	if err := e.EncodeField("filter", func(e uri.Encoder) error {
		return e.EncodeValue(conv.StringToString(v.Filter))
	}); err != nil {
		return errors.Wrap(err, `field filter`)
	}
	return nil
}
// decodeURI populates v from d, rejecting unknown field names.
// Generated by ogen — regenerate rather than hand-editing.
func (v *TestObjectQueryParameterDeepObject) decodeURI(d uri.Decoder) error {
	if v == nil {
		return errors.New(`unable to decode TestObjectQueryParameterDeepObject to nil`)
	}
	return d.DecodeFields(func(name string, d uri.Decoder) error {
		switch name {
		case "min":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToInt(s)
			if err != nil {
				return err
			}
			v.Min = c
			return nil
		case "max":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToInt(s)
			if err != nil {
				return err
			}
			v.Max = c
			return nil
		case "filter":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToString(s)
			if err != nil {
				return err
			}
			v.Filter = c
			return nil
		default:
			return errors.Errorf(`unexpected field %q`, name)
		}
	})
}
// encodeURI serializes v's min, max and filter fields into e.
// Generated by ogen — regenerate rather than hand-editing.
func (v TestObjectQueryParameterFormObject) encodeURI(e uri.Encoder) error {
	if err := e.EncodeField("min", func(e uri.Encoder) error {
		return e.EncodeValue(conv.IntToString(v.Min))
	}); err != nil {
		return errors.Wrap(err, `field min`)
	}
	if err := e.EncodeField("max", func(e uri.Encoder) error {
		return e.EncodeValue(conv.IntToString(v.Max))
	}); err != nil {
		return errors.Wrap(err, `field max`)
	}
	if err := e.EncodeField("filter", func(e uri.Encoder) error {
		return e.EncodeValue(conv.StringToString(v.Filter))
	}); err != nil {
		return errors.Wrap(err, `field filter`)
	}
	return nil
}
// decodeURI populates v from d, rejecting unknown field names.
// Generated by ogen — regenerate rather than hand-editing.
func (v *TestObjectQueryParameterFormObject) decodeURI(d uri.Decoder) error {
	if v == nil {
		return errors.New(`unable to decode TestObjectQueryParameterFormObject to nil`)
	}
	return d.DecodeFields(func(name string, d uri.Decoder) error {
		switch name {
		case "min":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToInt(s)
			if err != nil {
				return err
			}
			v.Min = c
			return nil
		case "max":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToInt(s)
			if err != nil {
				return err
			}
			v.Max = c
			return nil
		case "filter":
			s, err := d.DecodeValue()
			if err != nil {
				return err
			}
			c, err := conv.ToString(s)
			if err != nil {
				return err
			}
			v.Filter = c
			return nil
		default:
			return errors.Errorf(`unexpected field %q`, name)
		}
	})
}
|
class AttributeLabelManager
{
    /** @var array Map of attribute name => human-readable label. */
    private $attributeLabels = [];

    /**
     * Register (or overwrite) the label for an attribute.
     * @param string $attribute The attribute name
     * @param string $label The label for the attribute
     */
    public function addLabel($attribute, $label)
    {
        $this->attributeLabels[$attribute] = $label;
    }

    /**
     * Look up the label registered for an attribute.
     * @param string $attribute The attribute name
     * @return string|null The label for the attribute, or null if not found
     */
    public function getLabel($attribute)
    {
        if (array_key_exists($attribute, $this->attributeLabels)) {
            return $this->attributeLabels[$attribute];
        }
        return null;
    }

    /**
     * Drop the label registered for an attribute (no-op when absent).
     * @param string $attribute The attribute name
     */
    public function removeLabel($attribute)
    {
        unset($this->attributeLabels[$attribute]);
    }

    /**
     * Dump every registered label.
     * @return array All attribute labels keyed by attribute name
     */
    public function getAllLabels()
    {
        return $this->attributeLabels;
    }
}
<reponame>shin-kinoshita/dbflute-core
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.chelper;
import org.dbflute.cbean.ConditionBean;
import org.dbflute.cbean.scoping.SpecifyQuery;
/**
 * The decorator for ScalarCondition (the old name: ScalarSubQuery).
 * Wraps an option holder and exposes decoration methods such as
 * {@link #partitionBy(SpecifyQuery)}.
 * @param <CB> The type of condition-bean.
 * @author jflute
 */
public class HpSLCDecorator<CB extends ConditionBean> {

    /** The customized option object this decorator writes into. (NotNull) */
    protected final HpSLCCustomized<CB> _option;

    /**
     * @param option The option holder to decorate. (NotNull)
     */
    public HpSLCDecorator(HpSLCCustomized<CB> option) {
        _option = option;
    }

    /**
     * Partition the scope of the scalar condition by the column specified
     * in the given query (e.g. the max purchase price per member).
     * Relations are unsupported inside partition-by: the alias-name problem
     * occurs (QyCall would need synchronizing), so specify plain columns only.
     * @param colCBLambda The callback for query to specify the partition column. (NotNull)
     */
    public void partitionBy(SpecifyQuery<CB> colCBLambda) {
        assertSpecifyQuery(colCBLambda);
        _option.setPartitionBySpecify(colCBLambda);
    }

    /**
     * Fail fast when a null specify-query callback is handed in.
     * @param specifyQuery The callback to validate.
     */
    protected void assertSpecifyQuery(SpecifyQuery<?> specifyQuery) {
        if (specifyQuery != null) {
            return;
        }
        throw new IllegalArgumentException("The argument 'specifyQuery' for ScalarCondition should not be null.");
    }
}
|
#!/bin/bash
# Build with SwiftPM, linking against libraries under /usr/local/lib,
# then run the resulting debug binary forwarding all CLI arguments.
swift build -Xlinker -L/usr/local/lib
./.build/x86_64-apple-macosx/debug/Flea "$@"
<gh_stars>0
import { Component, OnInit } from '@angular/core';
import { ServicingService } from '../services/addServicing.service';
import { Router } from '@angular/router';
import { QueueTableService } from '../services/queue-table.service';
import { NgxSpinnerService } from 'ngx-spinner';
import { NgbDateStruct, NgbDateParserFormatter } from '@ng-bootstrap/ng-bootstrap';
/**
 * Reports screen: filters service-queue records by date range, service type
 * and status, pages through the results, and opens a queue's detail view.
 *
 * Filter state survives navigation via localStorage keys
 * reports_startDate / reports_endDate / service_type / service_status;
 * the constructor restores whichever combination of them is present.
 */
@Component({
  selector: 'app-reports',
  templateUrl: './reports.component.html',
  styleUrls: ['./reports.component.scss']
})
export class ReportsComponent implements OnInit {
  public service_type = [];          // selectable service types (id = backend code)
  public status = [];                // selectable statuses
  public report: any = [];           // rows currently shown in the table
  key: string = 'id';                // column the table is sorted by
  reverse: boolean = false;          // sort direction
  model: NgbDateStruct;              // end-date picker model
  model1: NgbDateStruct;             // start-date picker model
  EndDateString: string;
  StrtDateString: string;
  MessageNoData: string;
  page:number = 1;
  user: any = {};                    // currently selected filter values
  SearchData:string
  InsuranceUsr:string;
  InsuranceCheck:boolean = false;
  DataPerPage:string;
  RecordCount:string;
  disableCust:boolean = false;
  public SvcID:string;

  constructor(private spinner: NgxSpinnerService,
    private _detailsTable: QueueTableService,
    private router: Router,
    private ngbDateParserFormatter: NgbDateParserFormatter,
    private service: ServicingService) {
    this.service_type = [
      { id: "15", type: 'Custody' },
      { id: "1", type: 'Servicing Pickup and Dropoff'},
      { id: "4", type: 'Internal Movement' },
      { id: "5", type: 'Home Delivery' },
      { id: "6", type: 'StockYard' },
      { id: "7", type: 'Test Drive' },
    ];
    this.user.service_type = this.service_type[0].id;
    this.status = [
      { id: 0, type: 'Active' },
      { id: 1, type: 'Closed' },
      { id: 2, type: 'Cancelled' }
    ];
    // Keep the saved filters only when returning from the detail page;
    // any other navigation path clears them.
    var prev_url = this._detailsTable.getPreviousUrl();
    var curr_url = this._detailsTable.getCurrentUrl();
    if(curr_url === '/pages/queue-details' && prev_url === '/pages/reports'){
    }
    else{
      localStorage.removeItem('startDate');
      localStorage.removeItem('endDate');
      localStorage.removeItem('service_status');
      localStorage.removeItem('service_type');
    }
    // Restore the date range: four cases depending on which of
    // reports_startDate / reports_endDate survived in localStorage.
    if(localStorage.getItem('reports_startDate') == null && localStorage.getItem('reports_endDate') == null){
      // Neither saved: default window is the last 14 days ending today (UTC).
      const date = new Date();
      this.model = {day:date.getUTCDate(),month:date.getUTCMonth() + 1,year: date.getUTCFullYear() };
      this.EndDateString = this.model.year + '-' + this.model.month + '-' + this.model.day;
      var dt = new Date();
      dt.setDate( dt.getDate() - 14 );
      this.model1 = { day: dt.getUTCDate(), month: dt.getUTCMonth() + 1, year: dt.getUTCFullYear()};
      this.StrtDateString = this.model1.year + '-' + this.model1.month + '-' + this.model1.day;
      if(localStorage.getItem('service_status') != null){
        this.user.status = JSON.parse(localStorage.getItem('service_status'))
        for(var i = 0; i<this.status.length ;i++){
          if(this.status[i].id === JSON.parse(localStorage.getItem('service_status'))){
            this.user.status = this.status[i];
          }
        }
      }
      if(localStorage.getItem('service_type') != null){
        this.user.service_type = JSON.parse(localStorage.getItem('service_type'))
      }
    }
    else if(localStorage.getItem('reports_startDate') == null){
      // Only the end date survived; start date falls back to today.
      var EndDate = JSON.parse(localStorage.getItem('reports_endDate'));
      this.model = JSON.parse(localStorage.getItem('reports_endDate'));
      this.EndDateString = this.ngbDateParserFormatter.format(EndDate);
      var date = new Date();
      this.model1 = {day:date.getUTCDate(),month:date.getUTCMonth() + 1,year: date.getUTCFullYear() };
      this.StrtDateString = this.model1.year + '-' + this.model1.month + '-' + this.model1.day;
      if(localStorage.getItem('service_status') != null){
        this.user.status = JSON.parse(localStorage.getItem('service_status'))
        for(var i = 0; i<this.status.length ;i++){
          if(this.status[i].id === localStorage.getItem('service_status')){
            this.user.status = this.status[i].id
          }
        }
      }
      if(localStorage.getItem('service_type') != null){
        this.user.service_type = JSON.parse(localStorage.getItem('service_type'))
      }
    }
    else if(localStorage.getItem('reports_endDate') == null){
      // Only the start date survived; end date falls back to today.
      var StartDate = JSON.parse(localStorage.getItem('reports_startDate'));
      this.model1 = JSON.parse(localStorage.getItem('reports_startDate'));
      this.StrtDateString = this.ngbDateParserFormatter.format(StartDate);
      var dt = new Date();
      this.model = { day: dt.getUTCDate(), month: dt.getUTCMonth() + 1, year: dt.getUTCFullYear()};
      this.EndDateString = this.model.year + '-' + this.model.month + '-' + this.model.day;
      if(localStorage.getItem('service_status') != null){
        this.user.status = JSON.parse(localStorage.getItem('service_status'))
        for(var i = 0; i<this.status.length ;i++){
          if(this.status[i].id === localStorage.getItem('service_status')){
            this.user.status = this.status[i].id
          }
        }
      }
      if(localStorage.getItem('service_type') != null){
        this.user.service_type = JSON.parse(localStorage.getItem('service_type'))
      }
    }
    else{
      // Both dates survived: restore the saved range as-is.
      var EndDate = JSON.parse(localStorage.getItem('reports_endDate'));
      var StartDate = JSON.parse(localStorage.getItem('reports_startDate'));
      this.model1 = JSON.parse(localStorage.getItem('reports_startDate'));
      this.model = JSON.parse(localStorage.getItem('reports_endDate'));
      this.EndDateString = this.ngbDateParserFormatter.format(EndDate);
      this.StrtDateString = this.ngbDateParserFormatter.format(StartDate);
      if(localStorage.getItem('service_status') != null){
        for(var i = 0; i<this.status.length ;i++){
          if(this.status[i].id === localStorage.getItem('service_status')){
            this.user.status = this.status[i].id
          }
        }
      }
      if(localStorage.getItem('service_type') != null){
        this.user.service_type = localStorage.getItem('service_type')
      }
    }
  }

  ngOnInit() {
    var prev_url = this._detailsTable.getPreviousUrl();
    var curr_url = this._detailsTable.getCurrentUrl();
    if(prev_url === '/pages/queue-details' && curr_url === '/pages/reports'){ }
    else{
      localStorage.removeItem('startDate');
      localStorage.removeItem('endDate');
    }
    this.InsuranceUsr = JSON.parse(sessionStorage.getItem('insurance'));
    // NOTE(review): these two lines overwrite whatever service_type/status
    // the constructor restored from localStorage — confirm this is intended.
    this.user.service_type = "1";
    this.user.status = "1";
    // Saved date range is per-session: drop it when the tab is closed/reloaded.
    window.onbeforeunload = function(e) {
      localStorage.removeItem('reports_startDate');
      localStorage.removeItem('reports_endDate');
    };
    if(sessionStorage.getItem('selectedsvc')){
      this.SvcID = sessionStorage.getItem('selectedsvc');
    }
    else{
      this.SvcID = JSON.parse(sessionStorage.getItem('globalsvcid'));
    }
    if(this.InsuranceUsr == "1"){
      this.InsuranceCheck = true;
    }
    else{
      this.InsuranceCheck = false;
    }
    this.user.status = this.status[0].id;
    this.Search(1);
  }

  /** End-date picker callback: update model, string form and localStorage. */
  onEndSelectDate(date: NgbDateStruct) {
    if (date != null) {
      this.model = date;
      this.EndDateString = this.ngbDateParserFormatter.format(date);
      localStorage.setItem('reports_endDate',JSON.stringify(this.model));
    }
  }

  /** Persist the chosen status id when it matches a known status. */
  service_status_changed(ev){
    for(var i = 0; i<this.status.length ;i++){
      if(this.status[i].id === ev){
        localStorage.setItem('service_status',ev);
      }
    }
  }

  /** Persist the chosen service-type id when it matches a known type. */
  service_type_changed(ev){
    for(var i = 0; i<this.service_type.length ;i++){
      if(this.service_type[i].id === ev){
        localStorage.setItem('service_type',JSON.stringify(ev));
      }
    }
  }

  /** Start-date picker callback: update model, string form and localStorage. */
  onStrtSelectDate(date: NgbDateStruct) {
    if (date != null) {
      this.model1 = date;
      this.StrtDateString = this.ngbDateParserFormatter.format(date);
      localStorage.setItem('reports_startDate',JSON.stringify(this.model1));
    }
  }

  /** Toggle table sorting on the given column. */
  sort(key){
    this.key = key;
    this.reverse = !this.reverse;
  }

  /** First day of the current month, as an NgbDateStruct. */
  setDefaultDate(): NgbDateStruct {
    var startDate = new Date();
    let startYear = startDate.getFullYear().toString();
    let startMonth = startDate.getMonth() + 1;
    let startDay = "1";
    return this.ngbDateParserFormatter.parse(startYear + "-" + startMonth.toString() + "-" + startDay);
  }

  /**
   * Fetch page p of the report matching the current filters.
   * Redirects to the login page when the backend reports a dead session.
   */
  Search(p:number) {
    this.spinner.show();
    this.page = p - 1 ;                    // backend pages are zero-based
    this.report = [];
    // Internal Movement / StockYard have no customer to filter on.
    if(this.user.service_type == "4" || this.user.service_type == "6"){
      this.disableCust = true;
    }
    else{
      this.disableCust = false;
    }
    this.MessageNoData = null ;
    const reportPara ={
      requesttype: 'getreportsv2',
      startdate: this.StrtDateString,
      enddate: this.EndDateString,
      servicetype: this.user.service_type,
      status: this.user.status,
      svcid:this.SvcID,
      pagenumber:this.page
    }
    const reqpara = JSON.stringify(reportPara)
    this.service.webServiceCall(reqpara).subscribe
    (res => {
      if (res[0].login === 0) {
        sessionStorage.removeItem('currentUser');
        this.router.navigate(['/auth/login']);
      }
      else {
        if (res[0].pagecount[0].hasOwnProperty('noqueues')) {
          this.MessageNoData = "No Data";
          this.spinner.hide();
        }
        else {
          this.RecordCount = res[0].pagecount[0].record_count;
          this.DataPerPage = res[0].pagecount[0].pagelimit;
          this.report = res[1].record;
          this.spinner.hide();
        }}
    });
  }

  /** Navigate to the queue-details page for the clicked row. */
  openQDetails(data:any){
    sessionStorage.removeItem('clickedOn');
    sessionStorage.setItem('QueueId',data.id)
    this._detailsTable.queueID = data.id;
    this.router.navigate(['/pages/queue-details']);
  }

  ngOnDestroy(){}
}
|
#!/bin/sh
# you will need to read the top level README, and run boostrap.py
# and buildout in order to make pyjsbuild
# Build the Hello example with pyjsbuild, forwarding any CLI flags.
options="$*"
#if [ -z $options ] ; then options="-O";fi
../../bin/pyjsbuild --print-statements -m $options Hello
|
<filename>src/AjaxClient.js<gh_stars>1-10
/**
* AjaxClient
* Simple XMLHttpRequest client.
* Now supported 'post' method,dataType 'json'
*/
export class AjaxClient {
constructor() {
}
getAsync(options) {
options.type = 'get';
this.ajax(options);
}
postAsync(options) {
options.type = 'post';
this.ajax(options);
}
async get(options) {
options.type = 'get';
if (options.contentType === 'application/json') {
if (this.typeOf(options.data) === 'String') {
} else {
const obj = options.data;
options.data = JSON.stringify(obj);
}
}
return new Promise((resolve) => {
options.success = (data,xhr) => {
resolve({ success: true, data: data,response:xhr });
};
options.error = (e,xhr) => {
resolve({
success: false,
cause: 'error',
error: e,
response:xhr,
});
};
options.timeout = (e) => {
resolve({
success: false,
cause: 'timeout',
error: e,
response:null,
});
};
this.ajax(options);
});
}
async post(options) {
options.type = 'post';
if (options.contentType === 'application/json') {
if (this.typeOf(options.data) === 'String') {
} else {
const obj = options.data;
options.data = JSON.stringify(obj);
}
}
return new Promise((resolve) => {
options.success = (response) => {
resolve({
success: true,
data: response
});
};
options.error = (e) => {
resolve({
success: false,
cause: 'error',
error: e,
});
};
options.timeout = (e) => {
resolve({
success: false,
cause: 'timeout',
error: e,
});
};
this.ajax(options);
});
}
ajax(options) {
//use XMLHttpRequest2 style
const xhr = new XMLHttpRequest();
if (!options) {
throw Error('Please specify options like #ajax(options)');
}
if (!options.url) {
throw Error('Please specify url.');
}
//use async mode
const ASYNC = true;
if (options.type && options.type.toLowerCase() === 'post') {
xhr.open('POST', options.url, ASYNC);
} else if (options.type && options.type.toLowerCase() === 'get') {
xhr.open('GET', options.url, ASYNC);
} else {
throw Error(`type:${options.type} is not supported`);
}
//Supported only 'json' method by now.
if (options.dataType && options.dataType === 'json') {
xhr.responseType = 'text';
} else if (options.dataType && options.dataType === 'text') {
xhr.responseType = 'text';
} else {
throw Error(`Please check dataType:${options.dataType}. "json" or "text" is supported as dataType now.`);
}
if (options.contentType) {
try {
xhr.setRequestHeader('Content-Type', options.contentType);
} catch (e) {
throw Error(`Invalid content type ${options.contentType}`);
}
} else {
if (options.type && options.type.toLowerCase() === 'post' || options.type && options.type.toLowerCase() === 'put') {
throw Error('Please specify contentType.');
}
}
//Original headers
if (options.headers) {
for (const key in options.headers) {
const value = options.headers[key];
xhr.setRequestHeader(key, value);
}
}
// todo add support "xhrFields" for ajaxclient2(using FETCH API)
// Note:in fetch API
// fetch(url, {
// mode: 'cors', // instead of "crossDomain: true" in jQuery
// credentials: 'include' // with credentials
// })
// Note:in jQuery API
// $.ajax({
// url: "some",
// crossDomain: true,
// xhrFields: {
// withCredentials: true
// }
// Note:in XHR
// xhr.withCredentials = true;
if (options.xhrFields) { // options.crossDomain is not mandatory on XHR
const { xhrFields } = options;
if (xhrFields.withCredentials === true) {
xhr.withCredentials = true;
}
}
if (options.timeoutMillis) {
xhr.timeout = options.timeoutMillis;
}
xhr.onload = evt => {
if (xhr.status == 200) {
let data = '';
if (options.dataType == 'json') {
data = JSON.parse(xhr.response);
} else {
data = xhr.response;
}
if (options.success) {
options.success(data, xhr);
}
} else {
//console.error("error:" + xhr.statusText);
if (options.error) {
options.error(evt, xhr);
}
}
};
if (options.timeout) {
xhr.ontimeout = (e) => {
options.timeout(e, xhr);
};
}
if (options.error) {
xhr.onerror = (e) => {
options.error(e, xhr);
}
}
if (options.type && options.type.toLowerCase() === 'post') {
if (options.data) {
if (options.contentType.startsWith('application/x-www-form-urlencoded')) {
const fnEncodeForm = (formData) => {
const params = [];
for (const name in formData) {
const value = formData[name];
const param = encodeURIComponent(name) + '=' + encodeURIComponent(value);
params.push(param);
}
return params.join('&').replace(/%20/g, '+');// encoded space(=%20) to '+'
};
const formData = fnEncodeForm(options.data);
xhr.send(formData);
} else {
xhr.send(options.data);
}
} else {
throw Error('.data is not specified.data must be specified on "POST" mode.');
}
} else if (options.type && options.type.toLowerCase() === 'get') {
xhr.send(null);
} else {
}
}
typeOf(obj) {
return Object.prototype.toString.call(obj)
.slice(8, -1);
}
}
|
#!/bin/bash
# testing compilation of generated kotlin models
# Regenerates the Kotlin models, drops in the prepared build.gradle,
# and runs a Gradle build to verify the generated code compiles.
rm -rf ../generated/kotlin
pushd ../
python3 generator.py
popd
cp -f ../build-files/kotlin-build.gradle ../generated/kotlin
mv ../generated/kotlin/kotlin-build.gradle ../generated/kotlin/build.gradle
pushd ../generated/kotlin
#gradle compileKotlin --info
gradle build
popd
|
#!/bin/bash
# Run a single dieharder RNG test (-d 101) against generator -g 209,
# with a fixed seed (-S) so the result is reproducible.
dieharder -d 101 -g 209 -S 3271649736
|
<reponame>ethan-choe/Proto-01<gh_stars>0
const Phaser = require('phaser');
const SerialPortReader = require('../SerialPortReader.js')

/**
 * End-of-game scene: shows the #end-scene DOM overlay and restarts the
 * game (StartScene) when the serial controller sends 'j'.
 */
class EndScene extends Phaser.Scene {
  constructor() {
    super('EndScene');
    SerialPortReader.addListener(this.onSerialMessage.bind(this));
  }

  // Remember the latest serial message; polled in update().
  onSerialMessage(msg) {
    this.serialMsg = msg;
  }

  create() {
    this.overlay = document.querySelector('#end-scene');
    this.overlay.classList.remove('hidden');
    this.cursors = {
      up: this.input.keyboard.addKey(Phaser.Input.Keyboard.KeyCodes.UP),
    };
  }

  update() {
    // NOTE(review): stopAll() runs every frame — presumably to silence
    // gameplay audio, but it also kills any sound started on this scene.
    this.sound.stopAll();
    if (this.serialMsg === 'j') {
      this.overlay.classList.add('hidden');
      // Transition to gameplay
      this.scene.start('StartScene')
    }
    // if (this.cursors.up.isDown) {
    //   this.overlay.classList.add('hidden');
    //   // Transition to gameplay
    //   this.scene.start('StartScene')
    // }
  }
}
module.exports = EndScene;
#!/usr/bin/env bash
# ----------------------------------------------------------------------
# Runs all crawlers.
# ----------------------------------------------------------------------
# Rename $1 to "$1.bkp", replacing any previous backup; no-op when $1
# does not exist. Variables are quoted so paths containing spaces or
# glob characters are handled safely (the original broke on them).
function make_backup_of() {
  BKP_FILENAME="${1}.bkp"
  if [ -f "$BKP_FILENAME" ]; then
    rm "$BKP_FILENAME"
  fi
  if [ -f "$1" ]; then
    mv "$1" "$BKP_FILENAME"
  fi
}
# Output file each scrapy crawler writes; backed up before every run.
DEFAULT_FILENAME=items.json

# NOTE(review): the relative cd's assume the script is started from its
# own directory; failures in one crawler do not stop the rest.
echo "**************************************************"
echo "*** BEGIN"
echo "**************************************************"
echo ""
echo "**************************************************"
echo "*** crawler cnpq"
cd cnpq
make_backup_of $DEFAULT_FILENAME
scrapy crawl --output=$DEFAULT_FILENAME --output-format=json cnpq
echo "**************************************************"
echo "*** crawler dfg.de"
cd ../dfg.de
make_backup_of $DEFAULT_FILENAME
python prizes.py
echo "**************************************************"
echo "*** crawler faperj"
cd ../faperj
make_backup_of $DEFAULT_FILENAME
scrapy crawl --output=$DEFAULT_FILENAME --output-format=json faperj
echo "**************************************************"
echo "*** crawler grant.gov"
cd ../grant.gov
make_backup_of grants_gov_ca.json
python spiders/cooperative_agreement.py
make_backup_of grants_gov_g.json
python spiders/grants.py
echo "**************************************************"
echo "*** crawler nsf"
cd ../nsf
make_backup_of $DEFAULT_FILENAME
scrapy crawl --output=$DEFAULT_FILENAME --output-format=json nsf
echo ""
echo "**************************************************"
echo "*** END"
echo "**************************************************"
|
#!/bin/bash
# Renders the MariaDB build template with environment-supplied parameters
# and applies it to the current OpenShift project.
# NOTE(review): all -p variables below (PROJECT_NAME, APP_NAME, MYSQL_*…)
# must be exported by the caller — none are defined in this script.
## oc new-project imagenes-wordpress --description="Crea las imagenes de mysql8 y wp" --display-name="Imagenes-Wordpress"
## ssh-keygen -f /home/origin/.ssh/id_rsa -b 2048 -t rsa -q -N '' -m pem -C 'ctrl.srv.lab'
## oc secrets new-sshauth github-secret-pc --ssh-privatekey='/home/emilio/.ssh/id_rsa'
## use oc create secret github-secret-jemiliolopez
## oc secrets link builder github-secret-pc
## ssh-keygen -f /home/origin/.ssh/id_rsa -b 2048 -t rsa -q -N '' -m pem -C 'ctrl.srv.lab'
## oc create secret github-secret-jemiliolopez --ssh-privatekey='/home/emilio/.ssh/id_rsa'
## oc secrets link builder github-secret-jemiliolopez
# oc apply -f wp-builds/PVs/os-nfs-wpp-pv08.json -f wp-builds/PVs/os-nfs-masterp-pv07.json -f wp-builds/PVs/os-nfs-slave1-pv06.json -f wp-builds/PVs/os-nfs-slave2-pv05.json
# sleep 5
# oc apply -f wp-builds/PVs/os-nfs-wpp-pv08.json -f wp-builds/PVs/os-nfs-masterp-pv07.json -f wp-builds/PVs/os-nfs-slave1-pv06.json -f wp-builds/PVs/os-nfs-slave2-pv05.json && sleep 10 && \
oc process -f ./../build/Test-is-mariadb-10.3.json \
  -p PROJECT_NAME="$PROJECT_NAME" \
  -p APP_NAME="$APP_NAME" \
  -p STRATEGY_TYPE="$STRATEGY_TYPE" \
  -p SOURCE_IMAGE_STREAM="$SOURCE_IMAGE_STREAM" \
  -p SSH_GITHUB="$SSH_GITHUB" \
  -p SOURCE_REPOSITORY_MYSQL_URL="$SOURCE_REPOSITORY_MYSQL_URL" \
  -p SOURCE_REPOSITORY_MYSQL_TAG="$SOURCE_REPOSITORY_MYSQL_TAG" \
  -p MYSQL_USER="$MYSQL_USER" \
  -p MYSQL_PASSWORD="$MYSQL_PASSWORD" \
  -p MYSQL_DATABASE="$MYSQL_DATABASE" \
  -p MYSQL_ROOT_PASSWORD="$MYSQL_ROOT_PASSWORD" \
  | oc apply -f -
|
"""Hashing module.

A simple module for hashing.

Contains the base class :py:class:`Hash` from which all
hash implementations inherit.

For now it contains only the MD5 hash class :py:class:`Md5`.
"""
from .hash import Hash
from .md5 import Md5

__all__ = ["Hash", "Md5"]
|
<gh_stars>1-10
package parse
import (
"fmt"
"funlang/ast"
"testing"
)
// TestParseExpression checks that a simple infix expression ("10+30")
// parses without errors into an *ast.BinaryExpression.
func TestParseExpression(t *testing.T) {
	parser := newParser("10+30")
	parser.advance()
	a := parser.parseExpression()
	if len(parser.errs.list) > 0 {
		t.Error("errors were not expected in this case", parser.errs.Error())
	}
	if _, ok := a.(*ast.BinaryExpression); !ok {
		t.Error("parsing failed for binary expression")
	}
}
// TestParsePrecendence verifies operator precedence: "10*30+50" must group
// as ((10 * 30) + 50), i.e. * binds tighter than +.
// NOTE(review): the name misspells "Precedence"; renaming would change the
// externally visible test name, so it is kept as-is.
func TestParsePrecendence(t *testing.T) {
	parser := newParser("10*30+50")
	parser.advance()
	a := parser.parseExpression()
	if len(parser.errs.list) > 0 {
		t.Error("errs were not expected in this case", parser.errs.Error())
	}
	if _, ok := a.(*ast.BinaryExpression); !ok {
		t.Error("parsing failed for binary expression")
	}
	if fmt.Sprint(a) != "((10 * 30) + 50)" {
		t.Error("parsing failed")
	}
}
// TestParseMemberExpression parses "x(10)".
// NOTE(review): this test makes no assertions — it only prints the result,
// so it can never fail on a regression. Also "x(10)" looks like a call
// expression rather than a member expression; confirm the intent.
func TestParseMemberExpression(t *testing.T) {
	parser := newParser("x(10)")
	parser.advance()
	fmt.Println(parser.parseExpression())
}
// TestParseExpression2 checks that a parenthesised sub-expression followed
// by a multiplication, "(10+32)*a", parses into an *ast.BinaryExpression.
func TestParseExpression2(t *testing.T) {
	parser := newParser("(10+32)*a")
	parser.advance()
	a := parser.parseExpression()
	if len(parser.errs.list) > 0 {
		t.Error("errs were not expected in this case", parser.errs.Error())
	}
	if _, ok := a.(*ast.BinaryExpression); !ok {
		t.Error("expected a binary expression")
	}
	t.Log(a)
}
// TestParseAssignExpression checks that "a = 10 + 20;" parses into an
// *ast.AssignExpression without errors.
func TestParseAssignExpression(t *testing.T) {
	parser := newParser("a = 10 + 20;")
	parser.advance()
	a := parser.parseExpression()
	if len(parser.errs.list) > 0 {
		t.Error("errs were not expected in this case", parser.errs.Error())
	}
	if _, ok := a.(*ast.AssignExpression); !ok {
		t.Error("expected a assign expression")
	}
	t.Log(a)
}
|
/**
 * Distance from a point to a plane defined by a (not necessarily unit)
 * normal vector and a point on the plane.
 *
 * distance = |(point - planePoint) . planeNormal| / |planeNormal|
 *
 * Fixed: the previous version multiplied the result by the magnitude of
 * (point - planePoint), which is not part of the formula and produced
 * wildly wrong distances for points far from the plane.
 *
 * Implemented with plain arithmetic (no Cesium dependency needed for
 * scalar inputs); the signature and return value are unchanged.
 *
 * @param {number} pointX/pointY/pointZ       the query point
 * @param {number} planeNormalX/Y/Z           the plane normal (any length > 0)
 * @param {number} planePointX/Y/Z            any point lying on the plane
 * @returns {number} the non-negative distance from the point to the plane
 */
function distanceToPlane(pointX, pointY, pointZ, planeNormalX, planeNormalY, planeNormalZ, planePointX, planePointY, planePointZ) {
    const dx = pointX - planePointX;
    const dy = pointY - planePointY;
    const dz = pointZ - planePointZ;
    const dot = dx * planeNormalX + dy * planeNormalY + dz * planeNormalZ;
    // Math.hypot avoids overflow/underflow of the naive sqrt(x*x + y*y + z*z).
    const normalMagnitude = Math.hypot(planeNormalX, planeNormalY, planeNormalZ);
    return Math.abs(dot) / normalMagnitude;
}
#!/bin/bash
# Out-of-tree build: configure + make in ./linux-build, then install the
# produced headers and static libraries into ./linux-host.
# Every step aborts the script on failure via `|| exit 1`.
rm -Rfv linux-host || exit 1
mkdir -p linux-host || exit 1
rm -Rfv linux-build || exit 1
mkdir -p linux-build || exit 1
chmod +x configure || exit 1
srcdir="`pwd`"
instdir="`pwd`/linux-host"
cd linux-build || exit 1
../configure || exit 1
make -j || exit 1
# NOTE(review): copies only headers/archives generated in the build dir;
# headers that live solely in the source tree would be missed — confirm.
mkdir ../linux-host/include || exit 1
cp -v *.h ../linux-host/include || exit 1
mkdir ../linux-host/lib || exit 1
cp -v *.a ../linux-host/lib || exit 1
def validate_and_add_movie(title, year):
    """Validate movie fields and, if valid, persist a new Movie record.

    Returns a ``(message, redirect)`` tuple; the boolean is True in every
    branch because each outcome flashes a message and redirects.

    NOTE(review): assumes ``year`` is a 4-character string (not an int) and
    that ``Movie``/``db`` come from the enclosing Flask app module — confirm
    against the callers.
    """
    if not title or not year:
        return "title and year are required.", True  # Flash message and redirect
    elif len(year) != 4 or len(title) > 60:
        return "info format is invalid", True  # Flash message and redirect
    else:
        movie = Movie(title=title, year=year)
        db.session.add(movie)
        db.session.commit()
        return "item created.", True  # Flash message and redirect
<reponame>alex-dorokhov/gdx-pay
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.pay.gwt.googlewallet;
import com.badlogic.gdx.pay.Information;
import com.badlogic.gdx.pay.PurchaseManager;
import com.badlogic.gdx.pay.PurchaseManagerConfig;
import com.badlogic.gdx.pay.PurchaseObserver;
/**
* The purchase manager implementation for Google Wallet for GWT.
*
* @author noblemaster
*/
public class PurchaseManagerGwtGoogleWallet implements PurchaseManager {

    /** Returns the fixed store identifier for Google Wallet on GWT. */
    @Override
    public String storeName() {
        return PurchaseManagerConfig.STORE_NAME_GWT_GOOGLEWALLET;
    }

    /** Not implemented yet: installation is a no-op. */
    @Override
    public void install (PurchaseObserver observer, PurchaseManagerConfig config, boolean autoFetchInformation) {
        // TODO Auto-generated method stub
    }

    /** Not implemented yet: always reports the manager as not installed. */
    @Override
    public boolean installed() {
        // TODO Auto-generated method stub
        return false;
    }

    /** Not implemented yet: nothing to release. */
    @Override
    public void dispose() {
        // TODO Auto-generated method stub
    }

    /** Not implemented yet: purchase requests are silently ignored. */
    @Override
    public void purchase(String identifier) {
        // TODO Auto-generated method stub
    }

    /** Not implemented yet: restore requests are silently ignored. */
    @Override
    public void purchaseRestore() {
        // TODO Auto-generated method stub
    }

    /** Product information is not available for this purchase manager. */
    @Override
    public Information getInformation(String identifier) {
        // not implemented yet for this purchase manager
        return Information.UNAVAILABLE;
    }
}
|
package io.cattle.platform.trigger;
import static io.cattle.platform.core.model.tables.HostTable.*;
import io.cattle.platform.core.addon.metadata.InstanceInfo;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.dao.ServiceDao;
import io.cattle.platform.core.dao.VolumeDao;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.DeploymentUnit;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.core.model.Volume;
import io.cattle.platform.engine.manager.LoopManager;
import io.cattle.platform.engine.model.Trigger;
import io.cattle.platform.loop.factory.LoopFactoryImpl;
import io.cattle.platform.object.ObjectManager;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
 * Trigger that kicks the deployment-unit reconcile loop for every
 * deployment unit affected by a changed resource (host, agent, deployment
 * unit, instance, instance metadata or volume).
 */
public class DeploymentUnitReconcileTrigger implements Trigger {

    LoopManager loopManager;
    ServiceDao serviceDao;
    VolumeDao volumeDao;
    ObjectManager objectManager;

    public DeploymentUnitReconcileTrigger(LoopManager loopManager, ServiceDao serviceDao, VolumeDao volumeDao, ObjectManager objectManager) {
        super();
        this.loopManager = loopManager;
        this.serviceDao = serviceDao;
        this.volumeDao = volumeDao;
        this.objectManager = objectManager;
    }

    /** Schedules a DU_RECONCILE loop run for each affected deployment unit. */
    @Override
    public void trigger(Long accountId, Long clusterId, Object resource, String source) {
        for (Long id : getDeploymentUnits(resource)) {
            loopManager.kick(LoopFactoryImpl.DU_RECONCILE, ServiceConstants.KIND_DEPLOYMENT_UNIT, id, resource);
        }
    }

    /**
     * Maps a changed resource to the ids of the deployment units it affects.
     * Hosts (directly or via their backing agent) map to every unit deployed
     * on them; other resource types map to their own deployment unit.
     */
    private Collection<Long> getDeploymentUnits(Object obj) {
        Host host = null;
        if (obj instanceof Host) {
            host = (Host) obj;
        }
        // An agent change affects the host it backs, if one exists.
        if (obj instanceof Agent) {
            Agent agent = (Agent) obj;
            host = objectManager.findAny(Host.class, HOST.AGENT_ID, agent.getId());
        }
        if (host != null) {
            return serviceDao.getServiceDeploymentUnitsOnHost(host);
        }
        if (obj instanceof DeploymentUnit) {
            return Arrays.asList(((DeploymentUnit) obj).getId());
        }
        if (obj instanceof Instance) {
            // NOTE(review): getDeploymentUnitId() may be null for standalone
            // instances, which would kick the loop with a null id — confirm.
            return Arrays.asList(((Instance) obj).getDeploymentUnitId());
        }
        if (obj instanceof InstanceInfo) {
            return Arrays.asList(((InstanceInfo) obj).getDeploymentUnitId());
        }
        if (obj instanceof Volume) {
            Volume vol = (Volume) obj;
            // A template-created volume not yet bound to a unit may still map
            // to deployment units through its volume template.
            if (vol.getDeploymentUnitId() == null && vol.getVolumeTemplateId() != null) {
                return volumeDao.findDeploymentUnitsForVolume(vol);
            }
            if (vol.getDeploymentUnitId() != null) {
                return Arrays.asList(vol.getDeploymentUnitId());
            }
        }
        return Collections.emptyList();
    }
}
/**
 * Extracts the base filename (without the ".map" extension) from a
 * sourceMappingURL comment such as "//# sourceMappingURL=foo.js.map".
 *
 * Fixed: the dot in ".map" is now escaped. The previous pattern
 * /=(.*).map/ treated it as "any character", so inputs like
 * "sourceMappingURL=bundlemap" falsely matched and returned "bundl".
 *
 * @param {string} sourceMappingURL  the raw sourceMappingURL comment
 * @returns {string} the filename before ".map", or an error message when
 *                   the input does not look like "...=<name>.map"
 */
function extractSourceMapFilename(sourceMappingURL) {
    const match = sourceMappingURL.match(/=(.*)\.map/);
    if (match) {
        return match[1];
    } else {
        return "Invalid sourceMappingURL format";
    }
}

// Test the function
const sourceMappingURL = "//# sourceMappingURL=TSpanElement.d.ts.map";
console.log(extractSourceMapFilename(sourceMappingURL)); // Output: TSpanElement.d.ts
package conf
import (
"github.com/hesen/blog/middleware"
)
// IgnoredAuthPath lists route patterns (regexps) that skip authentication.
var IgnoredAuthPath = &middleware.PathRegexp{ "login$", }
// [...]string{
// "login$",
// }
|
<reponame>seawindnick/javaFamily<gh_stars>0
package com.java.study.algorithm.zuo.dadvanced.advanced_class_04;
/**
 * Adjust the probability of numbers appearing in [0, x).
 *
 * Problem: Math.random() returns a uniform number in [0,1), so a value in
 * [0, x) occurs with probability x (0 &lt; x &le; 1). Given an integer k &gt; 0,
 * return a number in [0,1) such that a value in [0, x) occurs with
 * probability x^k.
 */
public class Code_02_ProbabilityXPowerK {

    /**
     * Returns the maximum of k independent uniform [0,1) samples:
     * P(max &lt; x) = P(all k samples &lt; x) = x^k.
     *
     * Fixed: the previous version returned the first sample below 0.5
     * (else the last sample), which does not produce the x^k distribution.
     * Returns 0 when k &lt; 1, matching the previous behavior for that input.
     */
    public static double ProbabilityXPowerK(int k) {
        double result = 0;
        for (int i = k; i > 0; i--) {
            result = Math.max(result, random());
        }
        return result;
    }

    /** Reference implementation of the same max-of-k approach. */
    public static double ProbabilityXPowerK2(int k) {
        if (k < 1) {
            return 0;
        }
        double res = -1;
        for (int i = 0; i != k; i++) {
            res = Math.max(res, Math.random());
        }
        return res;
    }

    /** Thin wrapper around Math.random(), kept so tests could stub it. */
    public static double random() {
        return Math.random();
    }

    /**
     * Monte-Carlo sanity check: with k = 4, P(result &lt; 0.5) = 0.5^4 ≈ 6.25%,
     * so lessHalf should be roughly 625 out of 10000.
     * (Loop counters use primitives instead of the boxed Integer/Double.)
     */
    public static void main(String[] args) {
        int lessHalf = 0;
        int moreHalf = 0;
        for (int i = 0; i < 10000; i++) {
            double result = ProbabilityXPowerK2(4);
            if (result < 0.5) {
                lessHalf++;
            } else {
                moreHalf++;
            }
        }
        System.out.println(lessHalf);
        System.out.println(moreHalf);
    }
}
<filename>components/builder-web/app/shared/channels/channels.component.ts<gh_stars>10-100
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component, Input, Output, EventEmitter } from '@angular/core';
import { MatDialog } from '@angular/material';
import { SimpleConfirmDialog } from '../../shared/dialog/simple-confirm/simple-confirm.dialog';
@Component({
  selector: 'hab-channels',
  template: require('./channels.component.html')
})
export class ChannelsComponent {
  // Channel names to display for the package.
  @Input() channels: string[];
  // Whether the current user is allowed to demote from channels at all.
  @Input() canDemote: boolean = false;
  // Emits the channel name once the user confirms a demotion.
  @Output() onDemote: EventEmitter<string> = new EventEmitter<string>();

  constructor(
    private confirmDialog: MatDialog,
  ) { }

  // A channel can be demoted when the user may demote and the channel is
  // not 'unstable' (the base channel every package belongs to).
  demotable(channel) {
    return this.canDemote && channel !== 'unstable';
  }

  // Swallow clicks on the surrounding element so they don't bubble up to
  // any row/link handler behind the channel chip.
  outerClick(e) {
    e.stopPropagation();
  }

  // Ask for confirmation, then emit the channel so the parent performs the
  // actual demotion.
  demote(channel) {
    this.confirmDialog
      .open(SimpleConfirmDialog, {
        width: '480px',
        data: {
          heading: 'Confirm demote',
          body: `Are you sure you want to remove this package from the ${channel} channel?`,
          action: 'demote it'
        }
      })
      .afterClosed()
      .subscribe((confirmed) => {
        if (confirmed) {
          this.onDemote.emit(channel);
        }
      });
  }
}
|
# Import necessary modules
from django.core.mail import send_mail
from staff.models import Staff


def send_email_to_staff(subject, message):
    """Send a single email to every active staff member.

    Improvements over the previous version:
    - The queryset is evaluated once into a recipient list; previously the
      database was queried a second time by ``.count()`` for the return
      value, which could disagree with the list actually emailed.
    - ``send_mail`` is skipped entirely when there are no active staff
      members, instead of being called with an empty recipient list.

    Args:
        subject: Email subject line.
        message: Plain-text email body.

    Returns:
        The number of staff members the email was addressed to.
    """
    recipients = [
        staff_member.email
        for staff_member in Staff.objects.filter(is_active=True)
    ]
    if recipients:
        send_mail(subject, message, 'sender@example.com', recipients)
    return len(recipients)
import { BaseColor } from './base-color.class';
/**
 * HSB and HSV are the same
 *
 * Hue = ranges from 0 to 360°
 * Saturation = ranges from 0 to 100%
 * Brightness or Value = ranges from 0 to 100%
 * Alpha = range from 0-1
 */
export declare class Hsva extends BaseColor {
    hue: number;
    saturation: number;
    value: number;
    alpha: number;
    constructor(hue: number, saturation: number, value: number, alpha: number);
    /** String form of the color; alpha is included when showAlphaChannel is true. */
    toString(showAlphaChannel?: boolean): string;
    getHue(): number;
    getSaturation(): number;
    getValue(): number;
    getAlpha(): number;
}
|
#!/bin/bash
# Name:mysql.sh
# This is a ShellScript For Auto DB Backup and Delete old Backup
#
# create mysql backup user:
# grant select,lock tables,show databases,show view,event,reload,super,file on *.* to 'mysqlbackuper'@'localhost' identified by 'PASSWORD';
# flush privileges;
#
# NOTE(review): with `set -e` any command returning non-zero aborts the whole
# script, including helper functions that `return 1` — confirm this is the
# intended failure behavior.
set -e
# All paths/credentials come from config.ini next to the project root.
readonly BASE_PATH=$(cd `dirname $0`; pwd)
readonly ROOT_PATH=${BASE_PATH%%/backup/*}
source ${ROOT_PATH}/bin/ini-file.sh
readonly CONFIG_INI_FILE="${ROOT_PATH}/config.ini"
readonly BACKUP_PATH=$(get_field_value ${CONFIG_INI_FILE} backup path)
readonly BACKUP_KEEP_DAY=$(get_field_value ${CONFIG_INI_FILE} backup keep_day)
readonly BACKUP_USER=$(get_field_value ${CONFIG_INI_FILE} backup user)
readonly BACKUP_USER_GROUP=$(get_field_value ${CONFIG_INI_FILE} backup user_group)
readonly BACKUP_SYNC_ENTABLED=$(get_field_value ${CONFIG_INI_FILE} backup remote_sync)
readonly MYSQL_BIN_DIR=$(get_field_value ${CONFIG_INI_FILE} mysql bin_path)
readonly DB_HOST=$(get_field_value ${CONFIG_INI_FILE} mysql host)
readonly DB_PORT=$(get_field_value ${CONFIG_INI_FILE} mysql port)
readonly DB_SOCK=$(get_field_value ${CONFIG_INI_FILE} mysql sock)
readonly DB_USER=$(get_field_value ${CONFIG_INI_FILE} mysql user)
readonly DB_PASSWD=$(get_field_value ${CONFIG_INI_FILE} mysql password)
readonly IGONRE_TABLES=$(get_field_value ${CONFIG_INI_FILE} mysql igonre_tables)
readonly BACKUP_DIR="${BACKUP_PATH}/mysql/"
readonly BACKUP_SYNC_DIR="${BACKUP_PATH}/sync/mysql/"
readonly LOG_FILE="${ROOT_PATH}/logs/backup/mysql.log"
# Build the command line for a mysql client binary ($1, e.g. "mysql" or
# "mysqldump") from the configured connection settings, and echo it.
# NOTE(review): the credential flags are deliberately skipped for
# "mysqldump" — presumably it authenticates via ~/.my.cnf; confirm, since
# otherwise dumps would run without credentials.
# NOTE(review): passing -p${DB_PASSWD} on the command line exposes the
# password in the process list.
function make_mysql_connect() {
    if [ "$1" == "" ] || [ "${MYSQL_BIN_DIR}" == "" ]; then
        return 1
    fi
    local command_name=$1
    local command="${MYSQL_BIN_DIR}${command_name}"
    if [ "$1" != "mysqldump" ]; then
        if [ "${DB_USER}" != "" ]; then
            command="${command} -u${DB_USER}"
        fi
        if [ "${DB_HOST}" != "" ]; then
            command="${command} -h${DB_HOST}"
        fi
        if [ "${DB_PORT}" != "" ]; then
            command="${command} -P${DB_PORT}"
        fi
        if [ "${DB_PASSWD}" != "" ]; then
            command="${command} -p${DB_PASSWD}"
        fi
        if [ "${DB_SOCK}" != "" ]; then
            command="${command} -S ${DB_SOCK}"
        fi
        command="${command} --show-warnings=false "
    fi
    echo ${command}
    return 0
}
# list mysql database
# Echoes all database names (whitespace-separated); returns 1 when the mysql
# binary directory is unset or the server query fails.
function list_all_database() {
    if [ "${MYSQL_BIN_DIR}" == "" ]; then
        return 1
    fi
    local connect
    connect=$(make_mysql_connect mysql)
    # Fixed two bugs: the original checked $? right after a `local`
    # declaration (always 0, so failures were never detected), and the
    # failure branch contained a fatal `reutrn` typo that would crash the
    # script if it were ever reached.
    local all_db
    if all_db=$(${connect} -Bse 'show databases'); then
        echo ${all_db}
        return 0
    else
        return 1
    fi
}
# Dump one database ($1) to ${BACKUP_DIR}<db>_<yesterday>.sql.gz.
# System schemas are skipped. Failures are logged and recorded in a
# per-day .error file; the function always returns 0 so the caller's loop
# continues with the remaining databases.
function backup() {
    if [ "$1" == "" ] || [ "${MYSQL_BIN_DIR}" == "" ]; then
        return 1
    fi
    if [ "${BACKUP_PATH}" == "" ]; then
        return 1
    fi
    if [ ! -d "${BACKUP_DIR}" ]; then
        mkdir -p ${BACKUP_DIR}
    fi
    local database_name=$1
    printf "find database: ${database_name} ..... " >> ${LOG_FILE}
    # filter: information_schema, performance_schema, test, mysql
    if [ "${database_name}" == "information_schema" ] || [ "${database_name}" == "performance_schema" ] \
        || [ "${database_name}" == "test" ] || [ "${database_name}" == "mysql" ]; then
        printf "[INGORE ]\n" >> ${LOG_FILE}
        return 0
    fi
    # todo ingore tables ...
    local connect=$(make_mysql_connect mysqldump)
    local commad="${connect} --routines --events ${database_name}"
    local yesterday=$(date -d "yesterday" +"%Y%m%d")
    # backup
    local flag="Y"
    local backup_file="${BACKUP_DIR}${database_name}_${yesterday}.sql.gz"
    # no safe
    # ${commad} | gzip > ${backup_file}
    # A plain pipe would hide mysqldump failures (only gzip's status is
    # visible), so each stage writes "N" into the flag file on error.
    local dump_flag_file="${ROOT_PATH}/runtime/backup.mysql.dumpflag"
    cat /dev/null > ${dump_flag_file} # truncate the dump flag file (temporarily holds the backup result status)
    (${commad} || echo "N" > ${dump_flag_file}) | (gzip || echo "N" > ${dump_flag_file} ) > ${backup_file}
    if [ -e "${dump_flag_file}" ] && [ -s "${dump_flag_file}" ]; then
        flag="N"
        read flag < ${dump_flag_file}
    fi
    if [ "${flag}" == "Y" ]; then
        chmod 600 ${backup_file} > /dev/null 2>&1
        printf "[SUCCESS]\n" >> ${LOG_FILE}
    else
        # Remove the partial dump and record the failed database name.
        rm -f ${backup_file} > /dev/null 2>&1
        echo ${database_name} >> ${BACKUP_DIR}.$(date +"%Y%m%d").error
        printf "[FAILD  ]\n" >> ${LOG_FILE}
        return 0
    fi
    return 0
}
# Bundle yesterday's per-database .sql.gz dumps into one <date>.tar.gz,
# move it to the remote-sync directory and hand ownership to the configured
# backup user. Returns 1 on any step failure.
function compress() {
    local yesterday=$(date -d "yesterday" +"%Y%m%d")
    local compress_file="${yesterday}.tar.gz"
    printf "compress backup to tar.gz ... " >> ${LOG_FILE}
    # count
    local count=$(ls ${BACKUP_DIR} | grep ${yesterday}.sql.gz | wc -l)
    if [ "${count}" -lt 1 ]; then
        printf "[NOFOUND]\n" >> ${LOG_FILE}
        return 1
    fi
    tar -zcf ${compress_file} ${BACKUP_DIR}*_${yesterday}.sql.gz > /dev/null 2>&1
    if [ "$?" == 0 ]; then
        printf "[SUCCESS]\n" >> ${LOG_FILE}
    else
        printf "[FAILD  ]\n" >> ${LOG_FILE}
        return 1
    fi
    # not found compress file.
    if [ ! -f ${compress_file} ]; then
        return 1
    fi
    printf "move backup compress file to sync dir ... " >> ${LOG_FILE}
    mv ${compress_file} ${BACKUP_SYNC_DIR} > /dev/null 2>&1
    if [ "$?" == 0 ]; then
        printf "[SUCCESS]\n" >> ${LOG_FILE}
    else
        printf "[FAILD  ]\n" >> ${LOG_FILE}
        return 1
    fi
    # write finished time, for local backup.
    echo $(date +"%Y%m%d") > ${BACKUP_SYNC_DIR}.finished
    # change the backup file permissions
    if [ "${BACKUP_USER}" != "" -a "${BACKUP_USER_GROUP}" != "" ]; then
        printf "change compress file own ..." >> ${LOG_FILE}
        # NOTE(review): `user.group` is the legacy chown separator; modern
        # chown prefers `user:group` — confirm target platform supports it.
        chown -R ${BACKUP_USER}.${BACKUP_USER_GROUP} ${BACKUP_SYNC_DIR} > /dev/null 2>&1
        if [ "$?" == 0 ]; then
            printf "[SUCCESS]\n" >> ${LOG_FILE}
        else
            printf "[FAILD  ]\n" >> ${LOG_FILE}
        fi
    fi
    return 0
}
# Simple file-based run lock. `lock start` refuses to proceed (exit 1) when
# a previous run is still active; `lock end` releases the lock.
function lock() {
    local lock_file="${ROOT_PATH}/runtime/.backup.mysql.lock"
    if [ "$1" == "" ]; then
        printf "Please Usage: lock [start | end]\n"
        return 1
    elif [ "$1" == "start" ]; then
        # lock
        if [ -f "${lock_file}" ]; then
            printf "The running!\n"
            exit 1
        fi
        # Fixed: the original ran `touch .lock`, creating a stray file in the
        # current working directory instead of ${lock_file} checked above —
        # so the lock never actually engaged and concurrent runs were possible.
        touch "${lock_file}"
    elif [ "$1" == "end" ]; then
        # delete lock file
        rm -f ${lock_file} > /dev/null 2>&1
    fi
}
# Delete backups older than BACKUP_KEEP_DAY days (default 7) and release the
# run lock. NOTE(review): the name shadows the shell's `clear` builtin;
# kept because the driver at the bottom of the script calls it by this name.
function clear() {
    if [ "${BACKUP_PATH}" == "" ]; then
        return 1
    fi
    local keep=${BACKUP_KEEP_DAY}
    if [ "${keep}" -lt 1 ]; then
        keep=7
    fi
    # find & delete
    find ${BACKUP_DIR} -name "*.sql.gz" -type f -mtime +${keep} -exec rm {} \; > /dev/null 2>&1
    if [ "${BACKUP_SYNC_ENTABLED}" != "false" ]; then
        find ${BACKUP_SYNC_DIR} -name "*.tar.gz" -type f -mtime +${keep} -exec rm {} \; > /dev/null 2>&1
    fi
    # delete lock file
    lock end
    return 0
}
# Validate the environment before backing up: ensure the log directory
# exists, the backup (and, when sync is enabled, sync) directories are
# present, then take the run lock. Returns 1 on any precondition failure.
function init() {
    # check log file dir
    local log_path=$(dirname ${LOG_FILE})
    if [ ! -d ${log_path} ]; then
        mkdir -p ${log_path} > /dev/null 2>&1
        if [ "$?" != 0 ]; then
            return 1
        fi
    fi
    # check backup path
    if [ "${BACKUP_PATH}" == "" ] || [ ! -d ${BACKUP_PATH} ]; then
        echo "[ERROR] not dir backup_path(=${BACKUP_PATH})" >> ${LOG_FILE}
        return 1
    fi
    # check backup down path
    if [ "${BACKUP_SYNC_ENTABLED}" != "false" ]; then
        if [ "${BACKUP_SYNC_DIR}" == "" ] || [ ! -d ${BACKUP_SYNC_DIR} ]; then
            echo "[ERROR] not dir sync_path(=${BACKUP_SYNC_DIR})" >> ${LOG_FILE}
            return 1
        fi
    fi
    # lock
    lock start
    return 0
}
# Backup Done.
# Driver: validate + lock, dump every database, record the finish date,
# optionally compress for remote sync, then clean old backups and unlock.
init
ALL_DATABASE=$(list_all_database)
# $? here is the exit status of list_all_database (command substitution).
if [ "$?" == 0 ]; then
    for db in ${ALL_DATABASE}
    do
        backup ${db}
    done
    # write finished time.
    echo $(date +"%Y%m%d") > ${BACKUP_DIR}.finished
    # Compress backup file
    if [ "${BACKUP_SYNC_ENTABLED}" != "false" ]; then
        compress
    fi
    # Clear runtime file
    clear
fi
exit 0
|
<gh_stars>1-10
#ifndef token_h
#define token_h

#include "array.h"
#include <gc/cord.h>

struct Module;
struct Token;

/**
 * This is attached to operation tokens and is called later to do the actual byte code
 * generation.
 */
typedef int (*f_inst_call)(struct Module *state, struct Token *type, array_t *params);

/**
 * A Token holds almost all of the data, since an assembler is so simple
 * we can actually put the values inside the token.
 */
typedef struct Token {
    // all tokens have this
    int id;
    CORD data;
    // different tokens have these set mutually exclusive
    f_inst_call call;
    // lightning apparently does lots of casting, so we just need the biggest numeric for storage
    unsigned long value;
} Token;

Token *Token_create(int id, CORD data, f_inst_call call);

/* Scanner helper macros.
 * NOTE(review): TK() relies on `ts`/`te` (token start/end pointers), and the
 * macros below rely on `pParser`, `state` and `tk` existing at the expansion
 * site — they are only usable inside the generated scanner loop. */
#define TK(I,C) (Token_create(I, CORD_from_char_star(te-ts,ts), C))
#define KW(T) Parse(pParser, TK_##T, TK(TK_##T,0), state);
#define SYM(T) Parse(pParser, TK_##T, TK(TK_##T,0), state);
#define IDENT() Parse(pParser, TK_IDENT, TK(TK_IDENT,0), state);
#define OP(T) Parse(pParser, TK_OP, TK(TK_OP,inst_##T), state);
#define TYPE(T) tk = TK(TK_TYPE,0); tk->value = OpType_##T; Parse(pParser, TK_TYPE, tk, state);
#define LABEL() Parse(pParser, TK_LABEL, TK(TK_LABEL,0), state);
#define NO_LEAF(name) if(state->current_is_leaf)\
die(state, "You cannot do " # name " in a function declared %%leaf.");
#define REG(T) tk = TK(TK_REG,0); tk->value = JIT_##T; Parse(pParser, TK_REG, tk, state);

#endif
|
import cv2
import numpy as np
def visualize_persons(img, person_dicts):
    """Draw a labelled green bounding box on ``img`` for each person.

    Args:
        img: The image to annotate (OpenCV/numpy BGR image).
        person_dicts: Iterable of dicts, each with a ``'bbox'`` entry of
            (x_min, y_min, x_max, y_max) and a ``'label'`` string.
            NOTE(review): assumes the coordinates are integers, as required
            by cv2 drawing functions — confirm upstream.

    Returns:
        The annotated image.
    """
    box_color = (0, 255, 0)  # green in BGR
    box_thickness = 2
    for person in person_dicts:
        x_min, y_min, x_max, y_max = person['bbox']
        # Rectangle around the detection.
        img = cv2.rectangle(img, (x_min, y_min), (x_max, y_max), box_color, box_thickness)
        # Label text sits just above the top-left corner of the box.
        label_origin = (x_min, y_min - 5)
        img = cv2.putText(img, person['label'], label_origin, cv2.FONT_HERSHEY_SIMPLEX, 0.5, box_color, 1)
    return img
def add(x, y):
    """Add x and y and return the result."""
    return x + y

def subtract(x, y):
    """Subtract y from x and return the result."""
    return x - y

def multiply(x, y):
    """Multiply x and y and return the result."""
    return x * y

def divide(x, y):
    """Divide x by y (true division) and return the result.

    Raises ZeroDivisionError when y is 0.
    """
    return x / y

def exponentiate(x, y):
    """Raise x to the power of y and return the result."""
    return x ** y

def modulo(x, y):
    """Return x modulo y.

    Raises ZeroDivisionError when y is 0.
    """
    return x % y
<reponame>vadi2/codeql
package org.springframework.security.config.annotation.web.builders;
import org.springframework.security.config.annotation.AbstractConfiguredSecurityBuilder;
import org.springframework.security.config.annotation.SecurityBuilder;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.web.DefaultSecurityFilterChain;
import org.springframework.security.web.util.matcher.RequestMatcher;
import org.springframework.security.config.Customizer;
import org.springframework.security.config.annotation.web.configurers.ExpressionUrlAuthorizationConfigurer;
import org.springframework.security.config.annotation.web.AbstractRequestMatcherRegistry;
/**
 * NOTE(review): this appears to be a minimal stub of Spring Security's
 * HttpSecurity (methods return {@code this} or {@code null} and do no work),
 * presumably used only for static-analysis modeling — confirm before use.
 */
public final class HttpSecurity extends AbstractConfiguredSecurityBuilder<DefaultSecurityFilterChain, HttpSecurity>
        implements SecurityBuilder<DefaultSecurityFilterChain>, HttpSecurityBuilder<HttpSecurity> {

    /** Stub: ignores the matcher and returns this builder. */
    public HttpSecurity requestMatcher(RequestMatcher requestMatcher) {
        return this;
    }

    /** Stub: ignores the customizer and returns this builder. */
    public HttpSecurity authorizeRequests(
            Customizer<ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry> authorizeRequestsCustomizer)
            throws Exception {
        return this;
    }

    /** Stub: always returns null. */
    public ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry authorizeRequests()
            throws Exception {
        return null;
    }

    /** Stub: ignores the customizer and returns this builder. */
    public HttpSecurity requestMatchers(Customizer<RequestMatcherConfigurer> requestMatcherCustomizer) {
        return this;
    }

    /** Stub: always returns null. */
    public RequestMatcherConfigurer requestMatchers() {
        return null;
    }

    public final class MvcMatchersRequestMatcherConfigurer extends RequestMatcherConfigurer {
    }

    public class RequestMatcherConfigurer extends AbstractRequestMatcherRegistry<RequestMatcherConfigurer> {
    }
}
|
package com.acgist.snail.gui.javafx.window.main;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.acgist.snail.config.SystemConfig;
import com.acgist.snail.gui.javafx.Desktops;
import com.acgist.snail.gui.javafx.window.Window;
import com.acgist.snail.gui.javafx.window.statistics.StatisticsWindow;
import javafx.scene.input.KeyCode;
import javafx.scene.input.KeyEvent;
import javafx.stage.Modality;
import javafx.stage.Stage;
/**
 * <p>Main window</p>
 *
 * @author acgist
 */
public final class MainWindow extends Window<MainController> {

    private static final Logger LOGGER = LoggerFactory.getLogger(MainWindow.class);

    // Eagerly created singleton instance (see static initializer below).
    private static final MainWindow INSTANCE;

    public static final MainWindow getInstance() {
        return INSTANCE;
    }

    static {
        LOGGER.debug("初始化主窗口");
        INSTANCE = new MainWindow();
    }

    private MainWindow() {
    }

    @Override
    public void start(Stage stage) throws Exception {
        // Build the non-modal main window from the FXML layout, then wire up
        // the icon and the keyboard shortcuts.
        this.buildWindow(stage, SystemConfig.getName(), 1000, 600, "/fxml/main.fxml", Modality.NONE);
        this.icon();
        this.help();
        this.statistics();
    }

    @Override
    public void show() {
        super.maximize();
        super.show();
    }

    /**
     * <p>F1: open the help/support page</p>
     */
    private void help() {
        this.stage.addEventHandler(KeyEvent.KEY_RELEASED, event -> {
            if(event.getCode() == KeyCode.F1) {
                Desktops.browse(SystemConfig.getSupport());
            }
        });
    }

    /**
     * <p>F12: open the statistics window</p>
     */
    private void statistics() {
        this.stage.addEventHandler(KeyEvent.KEY_RELEASED, event -> {
            if(event.getCode() == KeyCode.F12) {
                StatisticsWindow.getInstance().show();
            }
        });
    }

}
|
<reponame>Leyla-Li/data-structures-and-algorithms<filename>401-challenges/linkedList/__test__/linked-list.test.js
'use strict';
const LinkedList = require('../linked-list');
const linkedList = new LinkedList();
// NOTE(review): all cases share one module-level `linkedList`, so the tests
// are order-dependent (e.g. the toString case relies on the two earlier
// inserts) — confirm this coupling is intended.
describe('test for linkLists methods', ()=>{
  it('can insert() a node at head', ()=>{
    let value = 10;
    linkedList.insert(value);
    expect(linkedList.head.value).toEqual(10);
  });
  it('can use includes() to see if the value in a linked list', ()=>{
    let value = 10;
    expect(linkedList.includes(value)).toEqual(true);
  });
  it('can use toString() to print out all the values', ()=>{
    let value = 10;
    linkedList.insert(value);
    // Two head-inserts of 10 ⇒ list is 10 -> 10, rendered as "1010".
    expect(linkedList.toString()).toEqual(`10${value}`);
  });
  it('can use append(value) to add new node with value to the end of the list',()=>{
    let value = 62;
    linkedList.append(value);
    console.log(linkedList.toString());
    expect(linkedList.includes(value)).toEqual(true);
  });
  it('can use insertBefore(value, newVal) to add a new node with the value of newVal before the node with the value of value', ()=>{
    let value = 10;
    let newVal = 55;
    linkedList.insertBefore(value, newVal);
    expect(linkedList.includes(55)).toEqual(true);
  });
  it('can use insertAfter(value, newVal) to add a new node with the value of newVal after the node with the value of value', ()=>{
    let value = 10;
    let newVal = 12;
    linkedList.insertAfter(value, newVal);
    console.log(linkedList);
    expect(linkedList.includes(12)).toEqual(true);
  });
  it('can use valueAtK() to check the value of the node at position that is k from the tail node of the linked list', ()=>{
    // console.log(linkedList);
    // 62 was appended above, so it is the tail (k = 0 from the tail).
    expect(linkedList.valueAtK(0)).toEqual(62);
  });
});
|
(function () {
  'use strict';

  const Beacon = cordova.require('fr.milky.cordova.smartbeacon.Beacon');
  const Constructor = BeaconList;
  const _super = Array.prototype;

  // Factory-style "class": returns a plain array decorated with BeaconList
  // methods (ES5 cannot subclass Array), so callers must not use `new`.
  // Elements are coerced to Beacon instances on construction and in add().
  function BeaconList(array) {
    // Already decorated — return as-is.
    if (array.__isBeaconList__) return array;
    if (this && this instanceof BeaconList) throw new Error('With this class, you souldnt use the `new` operator (Array inheritance in ES5 does not work)');
    if (array && !Array.isArray(array)) throw new TypeError('Argument must be an array or nothing');
    array = array || [];
    // array will be an other instance now
    const beaconList = array.map(function (beacon) {
      if (!(beacon instanceof Beacon)) return new Beacon(beacon);
      return beacon;
    });
    // Copy every BeaconList.prototype method onto the array instance as a
    // non-enumerable own property (overrides the Array methods of the same
    // name, e.g. sort/map/filter, so they return BeaconLists).
    const propertyNames = Object.getOwnPropertyNames(BeaconList.prototype);
    for (let index = 0; index < propertyNames.length; index++) {
      const key = propertyNames[index];
      if (!BeaconList.prototype.hasOwnProperty(key)) continue;
      Object.defineProperty(beaconList, key, {
        enumerable: false,
        value: BeaconList.prototype[key]
      });
    }
    // Marker checked at the top of the factory.
    Object.defineProperty(beaconList, '__isBeaconList__', {
      enumerable: false, writable: false, configurable: false,
      value: true
    });
    return beaconList;
  }

  BeaconList.prototype = Object.create(Object.prototype, {
    // Coerce to Beacon and push; returns the new length (Array#push).
    add: {
      enumerable: false, writable: false, configurable: false,
      value: function add(beacon) {
        if (!(beacon instanceof Beacon)) beacon = new Beacon(beacon);
        return this.push(beacon);
      }
    },
    // Remove by Beacon equality (see indexOf below); returns the removed item.
    remove: {
      enumerable: false, writable: false, configurable: false,
      value: function remove(beacon) {
        return this.splice(this.indexOf(beacon), 1)[0];
      }
    },
    // The Array methods below are wrapped so their results are BeaconLists.
    sort: {
      enumerable: false, writable: false, configurable: false,
      value: function sort(iteratee) {
        return Constructor.create(_super.sort.apply(this, arguments));
      }
    },
    concat: {
      enumerable: false, writable: false, configurable: false,
      value: function concat(iterable) {
        return Constructor.create(_super.concat.apply(this, arguments));
      }
    },
    splice: {
      enumerable: false, writable: false, configurable: false,
      value: function splice(startIndex, length, iterable) {
        return Constructor.create(_super.splice.apply(this, arguments));
      }
    },
    map: {
      enumerable: false, writable: false, configurable: false,
      value: function map(iteratee, context) {
        return Constructor.create(_super.map.apply(this, arguments));
      }
    },
    filter: {
      enumerable: false, writable: false, configurable: false,
      value: function filter(iteratee, context) {
        return Constructor.create(_super.filter.apply(this, arguments));
      }
    },
    // Returns the beacons of this list that are NOT present in `array`.
    reject: {
      enumerable: false, writable: false, configurable: false,
      value: function reject(array) {
        const list = Constructor.create(array);
        return this.filter(function filterRejectIteratee(beacon) {
          return !list.includes(beacon);
        });
      }
    },
    // Uses Beacon#equals instead of identity, unlike Array#indexOf.
    indexOf: {
      enumerable: false, writable: false, configurable: false,
      value: function indexOf(beacon) {
        for (let index = 0; index < this.length; index++) {
          if (this[index].equals(beacon)) return index;
        }
        return -1;
      }
    },
    includes: {
      enumerable: false, writable: false, configurable: false,
      value: function includes(beacon) {
        return this.indexOf(beacon) > -1;
      }
    },
    // Fresh decorated copy of this list.
    toBeaconList: {
      enumerable: false, writable: false, configurable: false,
      value: function toBeaconList() {
        return Constructor(this.toArray());
      }
    },
    // Shallow plain-array copy.
    toArray: {
      enumerable: false, writable: false, configurable: false,
      value: function toArray() {
        return this.slice();
      }
    }
  });

  // Explicit factory alias for internal use by the wrapped Array methods.
  BeaconList.create = function create(array) {
    return Constructor(array);
  };

  module.exports = BeaconList;
})();
def most_frequent(lst):
    """Return the most frequent element of ``lst``.

    Ties are broken in favour of the element that appears first in the
    list, matching the original behaviour. Counting is done in a single
    O(n) pass instead of the previous O(n^2) ``list.count`` per item, and
    the locals no longer shadow the built-in ``max``.

    Raises:
        IndexError: if ``lst`` is empty (same as the original's ``lst[0]``).
    """
    if not lst:
        # Preserve the original failure mode (indexing an empty list).
        raise IndexError("list index out of range")
    counts = {}
    for item in lst:
        counts[item] = counts.get(item, 0) + 1
    # max() returns the first element (in list order) with the highest
    # count, which preserves the original tie-breaking.
    return max(lst, key=counts.get)
<gh_stars>1-10
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
// Mongoose schema for a booked gig: who booked it, how to reach them,
// where and when it takes place.
const gigSchema = new Schema({
  gigName: { type: String },
  gigType: { type: String },
  userName: { type: String },
  phoneNum: { type: String, required: false },
  email: { type: String },
  // When the booking record was created (defaults to now).
  dateBooked: { type: Date, default: Date.now },
  date: { type: String },
  // Address lines.
  addOne: { type: String, required: false },
  addTwo: { type: String, required: false },
  city: { type: String, required: false },
  endTime: { type: String, required: false },
  startTime: { type: String, required: false },
  usState: { type: String },
  zip: { type: String, required: false },
  // NOTE(review): overlaps with the string `date` field above — confirm
  // which one callers actually use.
  gigDate: Date, //'2002-12-09'
  userNotes: { type: String, required: false },
});
const Gig = mongoose.model("Gig", gigSchema);
module.exports = Gig;
|
#!/bin/bash
#$ -cwd
#Abort on any error,
#set -e
#echo Running on $HOSTNAME
#echo Current PATH is $PATH
#source $HOME/.bash_profile
################################################################
# Runs Augustus gene prediction for the configured species, then hands the
# resulting GFF to the gene-model-testing pipeline. All configuration is
# done through the variable block below; everything is exported for the
# downstream Gene_model_testing_Master.sh script.
# Variables: FILLL IN DOWN TO THE END OF VARIABLES
Phy_dir=$HOME/scratch/tree_health/ITS_ratio/WORKED_Phytophthora_infestans.ASM14294v1.31
known_fa="${Phy_dir}/P_infestans_genes_for_Pete_Thorpe_translated.fasta"
#known_fa="${Phy_dir}/Pi_T30_4.AA.fasta"
known_fa_nucl="${Phy_dir}/Pi_T30_4nt.fa"
Aug_species="P_austrocedrae_pcbio_complete_20170601"
prefix="PHSU"
# default name
test_fa="aa.fa"
min_len_gene="20"
threads=6
python_directory=$HOME/public_scripts/gene_model_testing
Working_directory=/home/pt40963/scratch/phy_genomes/ppseu/Ppseu_${Aug_species}
test_gff="${Working_directory}/Ppseu_${Aug_species}.gff"
# for the repeat masking and GFF I used a altered gene name version
genome_test="P.seudo.v1.genome.fasta"
genome="/home/pt40963/scratch/phy_genomes/ppseu/P.seudo.v1.genome.fasta"
#genome="${Phy_dir}/Phytophthora_infestans.ASM14294v1.31.fa"
# FOR HGT
# tax_filter_out is the phylum your beast lives in, or clade if you want to get a more refined HGT result
# tax_filter_up_to e.g. metazoan = tax_filter_up_to
# for aphid: #tax_filter_out=6656 #tax_filter_up_to=33208
# for nematodes, #tax_filter_out=6231 ,#tax_filter_up_to=33208
# for Phytophthora
T_30_4=403677
species_tx_id=4787
tax_filter_out=4783
# Stramenopiles - heterokonts
tax_filter_up_to=33634
# If you want to run transrate to get the RNAseq read mapping to gene
# fill these out. Else, just let it fail, as it is the last step.
left_reads="/home/pt40963/scratch/pcbio/RNAseq/R1.fq.gz"
right_reads="/home/pt40963/scratch/pcbio/RNAseq/R2.fq.gz"
# END OF USER VARIABLES. NOTHING TO FILL IN FROM HERE.
#######################################################################
export Phy_dir
export known_fa
export known_fa_nucl
export prefix
export test_fa
export min_len_gene
export threads
export python_directory
export Working_directory
export test_gff
export genome
export T_30_4
export species_tx_id
export tax_filter_out
export tax_filter_up_to
export left_reads
export right_reads
export genome_test
# WARNING: destroys any previous run in the working directory.
rm -rf ${Working_directory}
mkdir ${Working_directory}
cd ${Working_directory}
# run Augustus with the desired models.
echo "running Augustus"
aug="$HOME/scratch/Downloads/augustus-3.2.1/bin/augustus
 --singlestrand=true
 --genemodel=complete
 --protein=on
 --start=on
 --alternatives-from-evidence=true
 --stop=on
 --cds=on
 --introns=on
 --gff3=off
 --extrinsicCfgFile=/home/pt40963/Downloads/augustus-3.0.3/config/species/Myzus_cerasi/extrinsic.Myzus_cerasi.cfg
 --hintsfile=/home/pt40963/scratch/pcbio/filtered_assembly/mapping_hybrid/hints_RNAseq.gff
 --outfile=${test_gff}
 --noInFrameStop=true
 --species=${Aug_species}
 ${genome}"
echo ${aug}
eval ${aug}
wait
echo "Augustus done"
# Hand off to the downstream testing pipeline (reads the exports above).
/${python_directory}/Gene_model_testing_Master.sh
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.