text
stringlengths 1
1.05M
|
|---|
#!/bin/bash
# Cloud build script: installs dependencies, builds the app, and prepares a
# production-only ./dist directory. Does nothing when run outside the cloud
# build container (FC_CONTAINER_ID unset/empty).
if [ -z "$FC_CONTAINER_ID" ]; then
  echo "Skip as this is local env ..."
  exit 0
else
  echo "Building in cloud ...."
fi

# cd to app root: the script may be invoked from the repo root or from inside
# the scripts/ directory itself. Quote all expansions so paths with spaces work.
CWD=$(dirname "$0")
if [[ $(basename "$(pwd)") = 'scripts' ]]; then
  cd ../ || exit 1
else
  cd "$(dirname "$CWD")" || exit 1
fi

npm install
npm run build

# Ship only runtime bits: copy the manifest into dist and install
# production dependencies there.
cp ./package.json ./dist
cd ./dist || exit 1
npm install --only=prod
|
# DISCOVER MODE #####################################################################################################
# DISCOVER MODE #####################################################################################################
# Runs a ping sweep + quick TCP port scan over $TARGET and writes discovered
# hosts into $LOOT_DIR/ips. Relies on variables set by the enclosing script
# ($MODE, $REPORT, $WORKSPACE, $INSTALL_DIR, $LOOT_DIR, $QUICK_PORTS, colors).
if [ "$MODE" = "discover" ]; then
  # Report run: prepare the workspace loot directories, log the task, then
  # re-invoke sniper without report generation and tee its output.
  if [ "$REPORT" = "1" ]; then
    if [ -n "$WORKSPACE" ]; then
      args="$args -w $WORKSPACE"
      LOOT_DIR=$INSTALL_DIR/loot/workspace/$WORKSPACE
      echo -e "$OKBLUE[*] Saving loot to $LOOT_DIR [$RESET${OKGREEN}OK${RESET}$OKBLUE]$RESET"
      mkdir -p "$LOOT_DIR" 2> /dev/null
      mkdir "$LOOT_DIR/ips" 2> /dev/null
      mkdir "$LOOT_DIR/screenshots" 2> /dev/null
      mkdir "$LOOT_DIR/nmap" 2> /dev/null
      mkdir "$LOOT_DIR/notes" 2> /dev/null
      mkdir "$LOOT_DIR/reports" 2> /dev/null
      mkdir "$LOOT_DIR/output" 2> /dev/null
      mkdir "$LOOT_DIR/scans" 2> /dev/null
    fi
    # Sanitize CIDR targets (e.g. 10.0.0.0/24) for use in file names.
    OUT_FILE=$(echo "$TARGET" | tr / -)
    echo "$TARGET $MODE $(date +"%Y-%m-%d %H:%M")" >> "$LOOT_DIR/scans/tasks.txt" 2> /dev/null
    # BUG FIX: this previously wrote to $OUTFILE (unset) instead of $OUT_FILE.
    echo "sniper -t $TARGET -m $MODE --noreport $args" >> "$LOOT_DIR/scans/$OUT_FILE-$MODE.txt" 2> /dev/null
    if [ "$SLACK_NOTIFICATIONS" == "1" ]; then
      /bin/bash "$INSTALL_DIR/bin/slack.sh" "[xerosecurity.com] •?((¯°·._.• Started Sn1per scan: $TARGET [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
    fi
    # BUG FIX: 2>&1 must precede the pipe so stderr also reaches tee; placed
    # after tee it only redirected tee's own stderr.
    sniper -t $TARGET -m $MODE --noreport $args 2>&1 | tee "$LOOT_DIR/output/sniper-$MODE-$(date +"%Y%m%d%H%M").txt"
    exit
  fi
  echo -e "$OKRED ____ /\\"
  echo -e "$OKRED Sn1per by @xer0dayz @XeroSecurity \ \\"
  echo -e "$OKRED https://xerosecurity.com \ \\"
  echo -e "$OKRED ___ / \\"
  echo -e "$OKRED \ \\"
  echo -e "$OKRED === > [ \\"
  echo -e "$OKRED / \ \\"
  echo -e "$OKRED \ / /"
  echo -e "$OKRED === > [ /"
  echo -e "$OKRED / /"
  echo -e "$OKRED ___ \ /"
  echo -e "$OKRED / /"
  echo -e "$OKRED ____ / /"
  echo -e "$OKRED \/$RESET"
  echo ""
  OUT_FILE=$(echo "$TARGET" | tr / -)
  echo -e "${OKGREEN}====================================================================================${RESET}"
  echo -e "$OKRED RUNNING PING DISCOVERY SCAN $RESET"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  # ICMP ping sweep; live hosts appear in "Nmap scan report for <ip>" lines.
  nmap -sP $TARGET | tee "$LOOT_DIR/ips/sniper-$OUT_FILE-ping.txt"
  cat "$LOOT_DIR/ips/sniper-$OUT_FILE-ping.txt" 2> /dev/null | grep "scan report" | awk '{print $5}' > "$LOOT_DIR/ips/sniper-$OUT_FILE-ping-sorted.txt"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  echo -e "$OKRED RUNNING TCP PORT SCAN $RESET"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  # SYN scan of the quick port list; catches hosts that drop ICMP.
  nmap -T4 -v -p $QUICK_PORTS -sS $TARGET 2> /dev/null | tee "$LOOT_DIR/ips/sniper-$OUT_FILE-tcp.txt" 2>/dev/null
  cat "$LOOT_DIR/ips/sniper-$OUT_FILE-tcp.txt" | grep open | grep on | awk '{print $6}' > "$LOOT_DIR/ips/sniper-$OUT_FILE-tcpips.txt"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  echo -e "$OKRED CURRENT TARGETS $RESET"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  # Merge both discovery methods and deduplicate into the final target list.
  cat "$LOOT_DIR/ips/sniper-$OUT_FILE-ping-sorted.txt" "$LOOT_DIR/ips/sniper-$OUT_FILE-tcpips.txt" 2> /dev/null > "$LOOT_DIR/ips/sniper-$OUT_FILE-ips-unsorted.txt"
  sort -u "$LOOT_DIR/ips/sniper-$OUT_FILE-ips-unsorted.txt" > "$LOOT_DIR/ips/discover-$OUT_FILE-sorted.txt"
  cat "$LOOT_DIR/ips/discover-$OUT_FILE-sorted.txt"
  echo ""
  echo -e "$OKRED[+]$RESET Target list saved to $LOOT_DIR/ips/discover-$OUT_FILE-sorted.txt "
  echo -e "$OKRED[i] To scan all IP's, use sniper -f $LOOT_DIR/ips/discover-$OUT_FILE-sorted.txt -m flyover -w $WORKSPACE $RESET"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  echo -e "$OKRED SCAN COMPLETE! $RESET"
  echo -e "${OKGREEN}====================================================================================${RESET}"
  if [ "$SLACK_NOTIFICATIONS" == "1" ]; then
    /bin/bash "$INSTALL_DIR/bin/slack.sh" "[xerosecurity.com] •?((¯°·._.• Finished Sn1per scan: $TARGET [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
  fi
  # Kick off a flyover scan of everything we discovered.
  sniper -f "$LOOT_DIR/ips/discover-$OUT_FILE-sorted.txt" -m flyover -w $WORKSPACE
  exit
fi
|
<filename>cmd/thanos/rule.go
package main
import (
"context"
"fmt"
"math/rand"
"net"
"net/http"
"net/url"
"os"
"os/signal"
"path"
"path/filepath"
"strconv"
"strings"
"sync"
"syscall"
"time"
"github.com/go-kit/kit/log"
"github.com/go-kit/kit/log/level"
"github.com/oklog/run"
opentracing "github.com/opentracing/opentracing-go"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/model"
"github.com/prometheus/common/route"
"github.com/prometheus/prometheus/discovery/file"
"github.com/prometheus/prometheus/discovery/targetgroup"
promlabels "github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/rules"
"github.com/prometheus/prometheus/storage/tsdb"
"github.com/prometheus/prometheus/util/strutil"
"github.com/prometheus/tsdb/labels"
"github.com/thanos-io/thanos/pkg/alert"
"github.com/thanos-io/thanos/pkg/block/metadata"
"github.com/thanos-io/thanos/pkg/component"
"github.com/thanos-io/thanos/pkg/discovery/cache"
"github.com/thanos-io/thanos/pkg/discovery/dns"
"github.com/thanos-io/thanos/pkg/extprom"
extpromhttp "github.com/thanos-io/thanos/pkg/extprom/http"
"github.com/thanos-io/thanos/pkg/objstore/client"
"github.com/thanos-io/thanos/pkg/promclient"
thanosrule "github.com/thanos-io/thanos/pkg/rule"
v1 "github.com/thanos-io/thanos/pkg/rule/api"
"github.com/thanos-io/thanos/pkg/runutil"
"github.com/thanos-io/thanos/pkg/shipper"
"github.com/thanos-io/thanos/pkg/store"
"github.com/thanos-io/thanos/pkg/store/storepb"
"github.com/thanos-io/thanos/pkg/tracing"
"github.com/thanos-io/thanos/pkg/ui"
"google.golang.org/grpc"
kingpin "gopkg.in/alecthomas/kingpin.v2"
)
// registerRule registers a rule command.
// It declares every CLI flag of the Thanos Ruler on app and stores a setup
// function under name in m; that setup function validates the parsed flags
// and delegates all real work to runRule.
func registerRule(m map[string]setupFunc, app *kingpin.Application, name string) {
	cmd := app.Command(name, "ruler evaluating Prometheus rules against given Query nodes, exposing Store API and storing old blocks in bucket")

	// Common server flags: gRPC/HTTP bind addresses and TLS material.
	grpcBindAddr, httpBindAddr, cert, key, clientCA := regCommonServerFlags(cmd)

	labelStrs := cmd.Flag("label", "Labels to be applied to all generated metrics (repeated). Similar to external labels for Prometheus, used to identify ruler and its blocks as unique source.").
		PlaceHolder("<name>=\"<value>\"").Strings()

	// Local TSDB storage for rule results.
	dataDir := cmd.Flag("data-dir", "data directory").Default("data/").String()

	ruleFiles := cmd.Flag("rule-file", "Rule files that should be used by rule manager. Can be in glob format (repeated).").
		Default("rules/").Strings()

	evalInterval := modelDuration(cmd.Flag("eval-interval", "The default evaluation interval to use.").
		Default("30s"))
	tsdbBlockDuration := modelDuration(cmd.Flag("tsdb.block-duration", "Block duration for TSDB block.").
		Default("2h"))
	tsdbRetention := modelDuration(cmd.Flag("tsdb.retention", "Block retention time on local disk.").
		Default("48h"))

	// Alerting flags.
	alertmgrs := cmd.Flag("alertmanagers.url", "Alertmanager replica URLs to push firing alerts. Ruler claims success if push to at least one alertmanager from discovered succeeds. The scheme may be prefixed with 'dns+' or 'dnssrv+' to detect Alertmanager IPs through respective DNS lookups. The port defaults to 9093 or the SRV record's value. The URL path is used as a prefix for the regular Alertmanager API path.").
		Strings()
	alertmgrsTimeout := cmd.Flag("alertmanagers.send-timeout", "Timeout for sending alerts to alertmanager").Default("10s").Duration()
	alertQueryURL := cmd.Flag("alert.query-url", "The external Thanos Query URL that would be set in all alerts 'Source' field").String()
	alertExcludeLabels := cmd.Flag("alert.label-drop", "Labels by name to drop before sending to alertmanager. This allows alert to be deduplicated on replica label (repeated). Similar Prometheus alert relabelling").
		Strings()

	// Web UI prefix handling (for serving behind reverse proxies).
	webRoutePrefix := cmd.Flag("web.route-prefix", "Prefix for API and UI endpoints. This allows thanos UI to be served on a sub-path. This option is analogous to --web.route-prefix of Promethus.").Default("").String()
	webExternalPrefix := cmd.Flag("web.external-prefix", "Static prefix for all HTML links and redirect URLs in the UI query web interface. Actual endpoints are still served on / or the web.route-prefix. This allows thanos UI to be served behind a reverse proxy that strips a URL sub-path.").Default("").String()
	webPrefixHeaderName := cmd.Flag("web.prefix-header", "Name of HTTP request header used for dynamic prefixing of UI links and redirects. This option is ignored if web.external-prefix argument is set. Security risk: enable this option only if a reverse proxy in front of thanos is resetting the header. The --web.prefix-header=X-Forwarded-Prefix option can be useful, for example, if Thanos UI is served via Traefik reverse proxy with PathPrefixStrip option enabled, which sends the stripped prefix value in X-Forwarded-Prefix header. This allows thanos UI to be served on a sub-path.").Default("").String()

	// Object storage (for uploading finished TSDB blocks).
	objStoreConfig := regCommonObjStoreFlags(cmd, "", false)

	// Query API discovery: static addresses, file SD, and DNS SD intervals.
	queries := cmd.Flag("query", "Addresses of statically configured query API servers (repeatable). The scheme may be prefixed with 'dns+' or 'dnssrv+' to detect query API servers through respective DNS lookups.").
		PlaceHolder("<query>").Strings()
	fileSDFiles := cmd.Flag("query.sd-files", "Path to file that contain addresses of query peers. The path can be a glob pattern (repeatable).").
		PlaceHolder("<path>").Strings()
	fileSDInterval := modelDuration(cmd.Flag("query.sd-interval", "Refresh interval to re-read file SD files. (used as a fallback)").
		Default("5m"))
	dnsSDInterval := modelDuration(cmd.Flag("query.sd-dns-interval", "Interval between DNS resolutions.").
		Default("30s"))
	dnsSDResolver := cmd.Flag("query.sd-dns-resolver", "Resolver to use. Possible options: [golang, miekgdns]").
		Default("golang").Hidden().String()

	m[name] = func(g *run.Group, logger log.Logger, reg *prometheus.Registry, tracer opentracing.Tracer, _ bool) error {
		lset, err := parseFlagLabels(*labelStrs)
		if err != nil {
			return errors.Wrap(err, "parse labels")
		}
		alertQueryURL, err := url.Parse(*alertQueryURL)
		if err != nil {
			return errors.Wrap(err, "parse alert query url")
		}

		// Min == Max: the ruler always cuts fixed-size blocks.
		tsdbOpts := &tsdb.Options{
			MinBlockDuration:  *tsdbBlockDuration,
			MaxBlockDuration:  *tsdbBlockDuration,
			RetentionDuration: *tsdbRetention,
			NoLockfile:        true,
		}

		// Reject duplicated static query addresses up front.
		lookupQueries := map[string]struct{}{}
		for _, q := range *queries {
			if _, ok := lookupQueries[q]; ok {
				return errors.Errorf("Address %s is duplicated for --query flag.", q)
			}
			lookupQueries[q] = struct{}{}
		}

		// Optional file-based service discovery of query peers.
		var fileSD *file.Discovery
		if len(*fileSDFiles) > 0 {
			conf := &file.SDConfig{
				Files:           *fileSDFiles,
				RefreshInterval: *fileSDInterval,
			}
			fileSD = file.NewDiscovery(conf, logger)
		}

		// At least one way of reaching queriers must be configured.
		if fileSD == nil && len(*queries) == 0 {
			return errors.Errorf("No --query parameter was given.")
		}

		return runRule(g,
			logger,
			reg,
			tracer,
			lset,
			*alertmgrs,
			*alertmgrsTimeout,
			*grpcBindAddr,
			*cert,
			*key,
			*clientCA,
			*httpBindAddr,
			*webRoutePrefix,
			*webExternalPrefix,
			*webPrefixHeaderName,
			time.Duration(*evalInterval),
			*dataDir,
			*ruleFiles,
			objStoreConfig,
			tsdbOpts,
			alertQueryURL,
			*alertExcludeLabels,
			*queries,
			fileSD,
			time.Duration(*dnsSDInterval),
			*dnsSDResolver,
		)
	}
}
// runRule runs a rule evaluation component that continuously evaluates alerting and recording
// rules. It sends alert notifications and writes TSDB data for results like a regular Prometheus server.
//
// All long-running work is registered as actors on g (oklog/run group):
// TSDB lifecycle, rule managers (one per partial-response strategy), the
// alert sender, alertmanager/query-peer discovery refreshers, SIGHUP-driven
// rule reloading, the gRPC Store API server, the HTTP UI/metrics server,
// and (if object storage is configured) the block shipper.
func runRule(
	g *run.Group,
	logger log.Logger,
	reg *prometheus.Registry,
	tracer opentracing.Tracer,
	lset labels.Labels,
	alertmgrURLs []string,
	alertmgrsTimeout time.Duration,
	grpcBindAddr string,
	cert string,
	key string,
	clientCA string,
	httpBindAddr string,
	webRoutePrefix string,
	webExternalPrefix string,
	webPrefixHeaderName string,
	evalInterval time.Duration,
	dataDir string,
	ruleFiles []string,
	objStoreConfig *pathOrContent,
	tsdbOpts *tsdb.Options,
	alertQueryURL *url.URL,
	alertExcludeLabels []string,
	queryAddrs []string,
	fileSD *file.Discovery,
	dnsSDInterval time.Duration,
	dnsSDResolver string,
) error {
	// Self-monitoring metrics for this component.
	configSuccess := prometheus.NewGauge(prometheus.GaugeOpts{
		Name: "thanos_rule_config_last_reload_successful",
		Help: "Whether the last configuration reload attempt was successful.",
	})
	configSuccessTime := prometheus.NewGauge(prometheus.GaugeOpts{
		Name: "thanos_rule_config_last_reload_success_timestamp_seconds",
		Help: "Timestamp of the last successful configuration reload.",
	})
	duplicatedQuery := prometheus.NewCounter(prometheus.CounterOpts{
		Name: "thanos_rule_duplicated_query_address",
		Help: "The number of times a duplicated query addresses is detected from the different configs in rule",
	})
	alertMngrAddrResolutionErrors := prometheus.NewCounter(prometheus.CounterOpts{
		Name: "thanos_rule_alertmanager_address_resolution_errors",
		Help: "The number of times resolving an address of an alertmanager has failed inside Thanos Rule",
	})
	rulesLoaded := prometheus.NewGaugeVec(
		prometheus.GaugeOpts{
			Name: "thanos_rule_loaded_rules",
			Help: "Loaded rules partitioned by file and group",
		},
		[]string{"strategy", "file", "group"},
	)
	ruleEvalWarnings := prometheus.NewCounterVec(
		prometheus.CounterOpts{
			Name: "thanos_rule_evaluation_with_warnings_total",
			Help: "The total number of rule evaluation that were successful but had warnings which can indicate partial error.",
		}, []string{"strategy"},
	)
	// Pre-create both strategy label children so they are exported as 0
	// before the first warning occurs.
	ruleEvalWarnings.WithLabelValues(strings.ToLower(storepb.PartialResponseStrategy_ABORT.String()))
	ruleEvalWarnings.WithLabelValues(strings.ToLower(storepb.PartialResponseStrategy_WARN.String()))

	reg.MustRegister(configSuccess)
	reg.MustRegister(configSuccessTime)
	reg.MustRegister(duplicatedQuery)
	reg.MustRegister(alertMngrAddrResolutionErrors)
	reg.MustRegister(rulesLoaded)
	reg.MustRegister(ruleEvalWarnings)

	for _, addr := range queryAddrs {
		if addr == "" {
			return errors.New("static querier address cannot be empty")
		}
	}

	// Local TSDB that stores rule-evaluation results.
	db, err := tsdb.Open(dataDir, log.With(logger, "component", "tsdb"), reg, tsdbOpts)
	if err != nil {
		return errors.Wrap(err, "open TSDB")
	}
	{
		// Actor that keeps the DB open until the group shuts down, then closes it.
		done := make(chan struct{})
		g.Add(func() error {
			<-done
			return db.Close()
		}, func(error) {
			close(done)
		})
	}

	// FileSD query addresses.
	fileSDCache := cache.New()
	dnsProvider := dns.NewProvider(
		logger,
		extprom.WrapRegistererWithPrefix("thanos_ruler_query_apis_", reg),
		dns.ResolverType(dnsSDResolver),
	)

	// Run rule evaluation and alert notifications.
	var (
		alertmgrs = newAlertmanagerSet(logger, alertmgrURLs, dns.ResolverType(dnsSDResolver))
		alertQ    = alert.NewQueue(logger, reg, 10000, 100, labelsTSDBToProm(lset), alertExcludeLabels)
		ruleMgrs  = thanosrule.Managers{}
	)
	{
		// notify converts firing Prometheus rule alerts into Thanos alerts and
		// pushes them onto the alert queue for the sender actor below.
		notify := func(ctx context.Context, expr string, alerts ...*rules.Alert) {
			res := make([]*alert.Alert, 0, len(alerts))
			for _, alrt := range alerts {
				// Only send actually firing alerts.
				if alrt.State == rules.StatePending {
					continue
				}
				a := &alert.Alert{
					StartsAt:     alrt.FiredAt,
					Labels:       alrt.Labels,
					Annotations:  alrt.Annotations,
					GeneratorURL: alertQueryURL.String() + strutil.TableLinkForExpression(expr),
				}
				if !alrt.ResolvedAt.IsZero() {
					a.EndsAt = alrt.ResolvedAt
				}
				res = append(res, a)
			}
			alertQ.Push(res)
		}
		st := tsdb.Adapter(db, 0)
		opts := rules.ManagerOptions{
			NotifyFunc:  notify,
			Logger:      log.With(logger, "component", "rules"),
			Appendable:  st,
			ExternalURL: nil,
			TSDB:        st,
		}

		// One rule manager per partial-response strategy (WARN / ABORT), each
		// with its own context, registry labels and query function.
		for _, strategy := range storepb.PartialResponseStrategy_value {
			s := storepb.PartialResponseStrategy(strategy)
			ctx, cancel := context.WithCancel(context.Background())
			ctx = tracing.ContextWithTracer(ctx, tracer)

			opts := opts // Shallow copy so each manager gets its own options.
			opts.Registerer = extprom.WrapRegistererWith(prometheus.Labels{"strategy": strings.ToLower(s.String())}, reg)
			opts.Context = ctx
			opts.QueryFunc = queryFunc(logger, dnsProvider, duplicatedQuery, ruleEvalWarnings, s)

			ruleMgrs[s] = rules.NewManager(&opts)
			g.Add(func() error {
				ruleMgrs[s].Run()
				<-ctx.Done()
				return nil
			}, func(error) {
				cancel()
				ruleMgrs[s].Stop()
			})
		}
	}
	{
		// Actor that drains the alert queue and pushes alerts to Alertmanagers.
		// TODO(bwplotka): https://github.com/thanos-io/thanos/issues/660
		sdr := alert.NewSender(logger, reg, alertmgrs.get, nil, alertmgrsTimeout)
		ctx, cancel := context.WithCancel(context.Background())
		g.Add(func() error {
			for {
				// Pop blocks until alerts are available or ctx is done.
				sdr.Send(ctx, alertQ.Pop(ctx.Done()))
				select {
				case <-ctx.Done():
					return ctx.Err()
				default:
				}
			}
		}, func(error) {
			cancel()
		})
	}
	{
		// Actor that re-resolves Alertmanager addresses every 30s.
		ctx, cancel := context.WithCancel(context.Background())
		g.Add(func() error {
			return runutil.Repeat(30*time.Second, ctx.Done(), func() error {
				if err := alertmgrs.update(ctx); err != nil {
					// Resolution failures are logged and counted, never fatal.
					level.Error(logger).Log("msg", "refreshing alertmanagers failed", "err", err)
					alertMngrAddrResolutionErrors.Inc()
				}
				return nil
			})
		}, func(error) {
			cancel()
		})
	}

	// Run File Service Discovery and update the query addresses when the files are modified
	if fileSD != nil {
		var fileSDUpdates chan []*targetgroup.Group
		ctxRun, cancelRun := context.WithCancel(context.Background())
		fileSDUpdates = make(chan []*targetgroup.Group)
		g.Add(func() error {
			fileSD.Run(ctxRun, fileSDUpdates)
			return nil
		}, func(error) {
			cancelRun()
		})

		// Separate actor consumes the updates and refreshes the address cache.
		ctxUpdate, cancelUpdate := context.WithCancel(context.Background())
		g.Add(func() error {
			for {
				select {
				case update := <-fileSDUpdates:
					// Discoverers sometimes send nil updates so need to check for it to avoid panics
					if update == nil {
						continue
					}
					fileSDCache.Update(update)
				case <-ctxUpdate.Done():
					return nil
				}
			}
		}, func(error) {
			cancelUpdate()
			close(fileSDUpdates)
		})
	}

	// Handle reload and termination interrupts.
	reload := make(chan struct{}, 1)
	{
		cancel := make(chan struct{})
		reload <- struct{}{} // initial reload
		g.Add(func() error {
			for {
				select {
				case <-cancel:
					return errors.New("canceled")
				case <-reload:
				}

				// Expand rule-file globs and (re)load all matched files into
				// every strategy's rule manager.
				level.Debug(logger).Log("msg", "configured rule files", "files", strings.Join(ruleFiles, ","))
				var files []string
				for _, pat := range ruleFiles {
					fs, err := filepath.Glob(pat)
					if err != nil {
						// The only error can be a bad pattern.
						level.Error(logger).Log("msg", "retrieving rule files failed. Ignoring file.", "pattern", pat, "err", err)
						continue
					}
					files = append(files, fs...)
				}

				level.Info(logger).Log("msg", "reload rule files", "numFiles", len(files))
				if err := ruleMgrs.Update(dataDir, evalInterval, files); err != nil {
					configSuccess.Set(0)
					level.Error(logger).Log("msg", "reloading rules failed", "err", err)
					continue
				}

				configSuccess.Set(1)
				configSuccessTime.Set(float64(time.Now().UnixNano()) / 1e9)

				// Re-export the per-group rule counts after each reload.
				rulesLoaded.Reset()
				for s, mgr := range ruleMgrs {
					for _, group := range mgr.RuleGroups() {
						rulesLoaded.WithLabelValues(s.String(), group.File(), group.Name()).Set(float64(len(group.Rules())))
					}
				}
			}
		}, func(error) {
			close(cancel)
		})
	}
	{
		// Actor that turns SIGHUP into a (non-blocking) reload request.
		cancel := make(chan struct{})
		g.Add(func() error {
			c := make(chan os.Signal, 1)
			for {
				// NOTE(review): Notify is re-invoked on each iteration; calling
				// it once before the loop would be equivalent — confirm intent.
				signal.Notify(c, syscall.SIGHUP)
				select {
				case <-c:
					select {
					case reload <- struct{}{}:
					default:
					}
				case <-cancel:
					return errors.New("canceled")
				}
			}
		}, func(error) {
			close(cancel)
		})
	}

	// Periodically update the addresses from static flags and file SD by resolving them using DNS SD if necessary.
	{
		ctx, cancel := context.WithCancel(context.Background())
		g.Add(func() error {
			return runutil.Repeat(dnsSDInterval, ctx.Done(), func() error {
				dnsProvider.Resolve(ctx, append(fileSDCache.Addresses(), queryAddrs...))
				return nil
			})
		}, func(error) {
			cancel()
		})
	}

	// Start gRPC server.
	{
		l, err := net.Listen("tcp", grpcBindAddr)
		if err != nil {
			return errors.Wrap(err, "listen API address")
		}
		logger := log.With(logger, "component", component.Rule.String())

		// Expose the local TSDB via the Store API.
		store := store.NewTSDBStore(logger, reg, db, component.Rule, lset)

		opts, err := defaultGRPCServerOpts(logger, reg, tracer, cert, key, clientCA)
		if err != nil {
			return errors.Wrap(err, "setup gRPC options")
		}
		s := grpc.NewServer(opts...)
		storepb.RegisterStoreServer(s, store)

		g.Add(func() error {
			return errors.Wrap(s.Serve(l), "serve gRPC")
		}, func(error) {
			s.Stop()
		})
	}

	// Start UI & metrics HTTP server.
	{
		router := route.New()

		// redirect from / to /webRoutePrefix
		if webRoutePrefix != "" {
			router.Get("/", func(w http.ResponseWriter, r *http.Request) {
				http.Redirect(w, r, webRoutePrefix, http.StatusFound)
			})
		}

		// POST /-/reload triggers the same path as SIGHUP.
		router.WithPrefix(webRoutePrefix).Post("/-/reload", func(w http.ResponseWriter, r *http.Request) {
			reload <- struct{}{}
		})

		flagsMap := map[string]string{
			// TODO(bplotka in PR #513 review): pass all flags, not only the flags needed by prefix rewriting.
			"web.external-prefix": webExternalPrefix,
			"web.prefix-header":   webPrefixHeaderName,
		}

		ins := extpromhttp.NewInstrumentationMiddleware(reg)

		ui.NewRuleUI(logger, ruleMgrs, alertQueryURL.String(), flagsMap).Register(router.WithPrefix(webRoutePrefix), ins)

		api := v1.NewAPI(logger, ruleMgrs)
		api.Register(router.WithPrefix(path.Join(webRoutePrefix, "/api/v1")), tracer, logger, ins)

		mux := http.NewServeMux()
		registerMetrics(mux, reg)
		registerProfile(mux)
		mux.Handle("/", router)

		l, err := net.Listen("tcp", httpBindAddr)
		if err != nil {
			return errors.Wrapf(err, "listen HTTP on address %s", httpBindAddr)
		}

		g.Add(func() error {
			level.Info(logger).Log("msg", "Listening for ui requests", "address", httpBindAddr)
			return errors.Wrap(http.Serve(l, mux), "serve query")
		}, func(error) {
			runutil.CloseWithLogOnErr(logger, l, "query and metric listener")
		})
	}

	confContentYaml, err := objStoreConfig.Content()
	if err != nil {
		return err
	}

	uploads := true
	if len(confContentYaml) == 0 {
		level.Info(logger).Log("msg", "No supported bucket was configured, uploads will be disabled")
		uploads = false
	}

	if uploads {
		// The background shipper continuously scans the data directory and uploads
		// new blocks to Google Cloud Storage or an S3-compatible storage service.
		bkt, err := client.NewBucket(logger, confContentYaml, reg, component.Rule.String())
		if err != nil {
			return err
		}

		// Ensure we close up everything properly.
		defer func() {
			if err != nil {
				runutil.CloseWithLogOnErr(logger, bkt, "bucket client")
			}
		}()

		s := shipper.New(logger, nil, dataDir, bkt, func() labels.Labels { return lset }, metadata.RulerSource)

		ctx, cancel := context.WithCancel(context.Background())
		g.Add(func() error {
			defer runutil.CloseWithLogOnErr(logger, bkt, "bucket client")
			return runutil.Repeat(30*time.Second, ctx.Done(), func() error {
				if _, err := s.Sync(ctx); err != nil {
					level.Warn(logger).Log("err", err)
				}
				return nil
			})
		}, func(error) {
			cancel()
		})
	}

	level.Info(logger).Log("msg", "starting rule node")
	return nil
}
// alertmanagerSet keeps the set of Alertmanager URLs resolved from the
// configured addresses, safe for concurrent readers and the refresh actor.
type alertmanagerSet struct {
	resolver dns.Resolver // resolves dns+ / dnssrv+ prefixed addresses
	addrs    []string     // raw --alertmanagers.url values
	mtx      sync.Mutex   // guards current
	current  []*url.URL   // most recently resolved URLs (see update)
}
// newAlertmanagerSet builds an alertmanagerSet over the given raw addresses,
// using a DNS resolver of the requested type. The URL set starts empty until
// the first update call.
func newAlertmanagerSet(logger log.Logger, addrs []string, dnsSDResolver dns.ResolverType) *alertmanagerSet {
	resolver := dns.NewResolver(dnsSDResolver.ToResolver(logger))
	return &alertmanagerSet{
		resolver: resolver,
		addrs:    addrs,
	}
}
// get returns the most recently resolved Alertmanager URLs. Safe for
// concurrent use with update.
func (s *alertmanagerSet) get() []*url.URL {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	return s.current
}
// defaultAlertmanagerPort is appended to dns+ hosts that lack an explicit port.
const defaultAlertmanagerPort = 9093

// update re-resolves every configured Alertmanager address and atomically
// replaces the current URL set. Addresses may be prefixed with a DNS query
// type ("dns+..." or "dnssrv+..."); un-prefixed addresses are used verbatim.
// Any parse or resolution error aborts the whole refresh, leaving the
// previous set in place.
func (s *alertmanagerSet) update(ctx context.Context) error {
	var result []*url.URL
	for _, addr := range s.addrs {
		var (
			name           = addr
			qtype          dns.QType
			resolvedDomain []string
		)

		// Split an optional "<qtype>+" prefix off the URL, e.g. "dns+http://host".
		if nameQtype := strings.SplitN(addr, "+", 2); len(nameQtype) == 2 {
			name, qtype = nameQtype[1], dns.QType(nameQtype[0])
		}

		u, err := url.Parse(name)
		if err != nil {
			return errors.Wrapf(err, "parse URL %q", name)
		}

		// Get only the host and resolve it if needed.
		host := u.Host
		if qtype != "" {
			if qtype == dns.A {
				_, _, err = net.SplitHostPort(host)
				if err != nil {
					// The host could be missing a port. Append the defaultAlertmanagerPort.
					host = host + ":" + strconv.Itoa(defaultAlertmanagerPort)
				}
			}
			resolvedDomain, err = s.resolver.Resolve(ctx, host, qtype)
			if err != nil {
				return errors.Wrap(err, "alertmanager resolve")
			}
		} else {
			resolvedDomain = []string{host}
		}

		// One URL per resolved host; scheme, path and user info are kept
		// from the original address.
		for _, resolved := range resolvedDomain {
			result = append(result, &url.URL{
				Scheme: u.Scheme,
				Host:   resolved,
				Path:   u.Path,
				User:   u.User,
			})
		}
	}

	s.mtx.Lock()
	s.current = result
	s.mtx.Unlock()
	return nil
}
// parseFlagLabels converts --label flag values of the form name="value"
// into a TSDB label set. The value part must be a quoted Go string literal;
// an invalid name or value aborts parsing with an error.
func parseFlagLabels(s []string) (labels.Labels, error) {
	var lset labels.Labels
	for _, raw := range s {
		kv := strings.SplitN(raw, "=", 2)
		if len(kv) != 2 {
			return nil, errors.Errorf("unrecognized label %q", raw)
		}
		if !model.LabelName.IsValid(model.LabelName(string(kv[0]))) {
			return nil, errors.Errorf("unsupported format for label %s", raw)
		}
		unquoted, err := strconv.Unquote(kv[1])
		if err != nil {
			return nil, errors.Wrap(err, "unquote label value")
		}
		lset = append(lset, labels.Label{Name: kv[0], Value: unquoted})
	}
	return lset, nil
}
// labelsTSDBToProm converts a TSDB label set into the equivalent Prometheus
// label set, copying each name/value pair. Returns nil for an empty input.
func labelsTSDBToProm(lset labels.Labels) (res promlabels.Labels) {
	for i := range lset {
		res = append(res, promlabels.Label{
			Name:  lset[i].Name,
			Value: lset[i].Value,
		})
	}
	return res
}
// removeDuplicateQueryAddrs deduplicates the given query addresses while
// preserving their first-seen order (map iteration order is random, which
// previously made the returned slice nondeterministic). Every duplicate is
// logged and counted on duplicatedQueriers.
func removeDuplicateQueryAddrs(logger log.Logger, duplicatedQueriers prometheus.Counter, addrs []string) []string {
	set := make(map[string]struct{})
	deduplicated := make([]string, 0, len(addrs))
	for _, addr := range addrs {
		if _, ok := set[addr]; ok {
			// BUG FIX: go-kit logging is structured key/value, not printf-style;
			// the old "%v" verb was emitted literally and the address dropped.
			level.Warn(logger).Log("msg", "Duplicate query address is provided", "addr", addr)
			duplicatedQueriers.Inc()
			continue
		}
		set[addr] = struct{}{}
		deduplicated = append(deduplicated, addr)
	}
	return deduplicated
}
// queryFunc returns query function that hits the HTTP query API of query peers in randomized order until we get a result
// back or the context get canceled.
func queryFunc(
	logger log.Logger,
	dnsProvider *dns.Provider,
	duplicatedQuery prometheus.Counter,
	ruleEvalWarnings *prometheus.CounterVec,
	partialResponseStrategy storepb.PartialResponseStrategy,
) rules.QueryFunc {
	// Span name is fixed per strategy so traces can be told apart.
	var spanID string
	switch partialResponseStrategy {
	case storepb.PartialResponseStrategy_WARN:
		spanID = "/rule_instant_query HTTP[client]"
	case storepb.PartialResponseStrategy_ABORT:
		spanID = "/rule_instant_query_part_resp_abort HTTP[client]"
	default:
		// Programming error will be caught by tests.
		panic(errors.Errorf("unknown partial response strategy %v", partialResponseStrategy).Error())
	}

	return func(ctx context.Context, q string, t time.Time) (promql.Vector, error) {
		// Add DNS resolved addresses from static flags and file SD.
		// TODO(bwplotka): Consider generating addresses in *url.URL
		addrs := dnsProvider.Addresses()
		// BUG FIX: the deduplicated slice was previously discarded, so
		// duplicate queriers were still queried; keep the result.
		addrs = removeDuplicateQueryAddrs(logger, duplicatedQuery, addrs)

		// Try peers in random order until one answers.
		for _, i := range rand.Perm(len(addrs)) {
			u, err := url.Parse(fmt.Sprintf("http://%s", addrs[i]))
			if err != nil {
				return nil, errors.Wrapf(err, "url parse %s", addrs[i])
			}

			span, ctx := tracing.StartSpan(ctx, spanID)
			v, warns, err := promclient.PromqlQueryInstant(ctx, logger, u, q, t, promclient.QueryOptions{
				Deduplicate:             true,
				PartialResponseStrategy: partialResponseStrategy,
			})
			span.Finish()

			if err != nil {
				// Log and fall through to the next peer.
				level.Error(logger).Log("err", err, "query", q)
			} else {
				if len(warns) > 0 {
					ruleEvalWarnings.WithLabelValues(strings.ToLower(partialResponseStrategy.String())).Inc()
					// TODO(bwplotka): Propagate those to UI, probably requires changing rule manager code ):
					level.Warn(logger).Log("warnings", strings.Join(warns, ", "), "query", q)
				}
				return v, nil
			}
		}
		return nil, errors.Errorf("no query peer reachable")
	}
}
|
import request from "../../lib/request";
const app = getApp();
import serviceData from '../../data/config';
// Category product-list page: renders paginated (currently mocked) products
// and appends the next page when the user scrolls to the bottom.
Page({
  data: {
    products: [],     // product list rendered by the page
    currentPage: 1,   // 1-based page index for pagination
    perPage: 5        // page size
  },

  onLoad(option) {
    var categoryId = option.id;
    // Pagination params kept for when the remote request below is re-enabled.
    var pageData = {
      page: this.data.currentPage,
      per_page: this.data.perPage
    };
    //request({path:'/categories/' + categoryId + '/products', data: pageData})
    //.then(({data:products}) => this.setData({products}));
    this.setData({ products: serviceData.categoryData });
    /*wx.setNavigationBarTitle({
      title: option.title,
      success: function(res) {
        // success
      }
    })*/
  },

  navigateToProduct(event) {
    var productId = event.currentTarget.dataset.goodsId;
    wx.navigateTo({
      url: '../products/products?id=' + productId
    });
  },

  // Scroll-to-bottom handler: simulates loading and appends the next page.
  lower: function (option) {
    console.log('lower more products data');
    wx.showNavigationBarLoading();
    setTimeout(() => {
      wx.hideNavigationBarLoading();
      // BUG FIX: the previous code mutated this.data directly via
      // ++this.data.currentPage before calling setData; compute the next
      // page number and hand everything to a single setData call instead.
      var nextPage = this.data.currentPage + 1;
      var products = serviceData.categoryData;
      this.setData({
        currentPage: nextPage,
        // concat joins the new page onto the already loaded products.
        products: this.data.products.concat(products)
      });
    }, 1000);
  },
});
|
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.coption;
import java.util.List;
import org.dbflute.cbean.cipher.GearedCipherManager;
import org.dbflute.cbean.coption.parts.SplitOptionParts;
import org.dbflute.cbean.dream.SpecifiedColumn;
import org.dbflute.cbean.sqlclause.query.QueryClauseArranger;
import org.dbflute.dbway.ExtensionOperand;
import org.dbflute.dbway.OnQueryStringConnector;
import org.dbflute.util.DfCollectionUtil;
import org.dbflute.util.Srl;
/**
* The class of simple-string-option.
* @author jflute
*/
public class SimpleStringOption implements ConditionOption {
// ===================================================================================
// Attribute
// =========
protected SplitOptionParts _splitOptionParts;
// ===================================================================================
// Split
// =====
protected SimpleStringOption doSplitByBlank() {
getSplitOptionParts().splitByBlank();
return this;
}
protected SimpleStringOption doSplitBySpace() {
getSplitOptionParts().splitBySpace();
return this;
}
protected SimpleStringOption doSplitBySpaceContainsDoubleByte() {
getSplitOptionParts().splitBySpaceContainsDoubleByte();
return this;
}
protected SimpleStringOption doSplitBySpaceContainsDoubleByte(int splitLimitCount) {
getSplitOptionParts().splitBySpaceContainsDoubleByte(splitLimitCount);
return this;
}
protected SimpleStringOption doSplitByPipeLine() {
getSplitOptionParts().splitByPipeLine();
return this;
}
protected SimpleStringOption doSplitByVarious(List<String> delimiterList) {
getSplitOptionParts().splitByVarious(delimiterList);
return this;
}
protected SplitOptionParts getSplitOptionParts() {
if (_splitOptionParts == null) {
_splitOptionParts = createSplitOptionParts();
}
return _splitOptionParts;
}
protected SplitOptionParts createSplitOptionParts() {
return new SplitOptionParts();
}
public boolean isSplit() {
return getSplitOptionParts().isSplit();
}
public String[] generateSplitValueArray(String value) {
return getSplitOptionParts().generateSplitValueArray(value);
}
protected SimpleStringOption doCutSplit(int splitLimitCount) {
getSplitOptionParts().limitSplit(splitLimitCount);
return this;
}
// ===================================================================================
// Real Value
// ==========
public String generateRealValue(String value) {
return value;
}
// ===================================================================================
// Interface Implementation
// ========================
public String getRearOption() {
return "";
}
public boolean hasCompoundColumn() {
return false;
}
public List<SpecifiedColumn> getCompoundColumnList() {
return DfCollectionUtil.emptyList();
}
public boolean hasStringConnector() {
return false;
}
public OnQueryStringConnector getStringConnector() {
return null;
}
public ExtensionOperand getExtensionOperand() {
return null;
}
public QueryClauseArranger getWhereClauseArranger() {
return null;
}
public GearedCipherManager getGearedCipherManager() {
return null;
}
// ===================================================================================
// General Helper
// ==============
// Convenience delegation to the string utility for plain (non-regex) replacement.
protected String replace(String text, String fromText, String toText) {
        return Srl.replace(text, fromText, toText);
    }
// ===================================================================================
// Deep Copy
// =========
/**
 * Create a deep copy of this option. The split-option state is copied as
 * well so that mutating the copy cannot leak back into the original.
 * @return The deep copy instance of this option. (NotNull)
 */
public SimpleStringOption createDeepCopy() {
        final SimpleStringOption deepCopy = newDeepCopyInstance();
        if (_splitOptionParts != null) {
            // Bug fix: the original assigned the same reference (the self-cast
            // "(SplitOptionParts) _splitOptionParts" shows the copy call was dropped),
            // which made the "deep" copy share mutable split state with the original.
            deepCopy._splitOptionParts = (SplitOptionParts) _splitOptionParts.createDeepCopy();
        }
        return deepCopy;
    }
// Factory for the copy target; subclasses override to return their own type.
protected SimpleStringOption newDeepCopyInstance() {
        return new SimpleStringOption();
    }
}
|
<reponame>Ashindustry007/competitive-programming
// https://open.kattis.com/problems/dicecup
#include <iostream>
#include <set>
#include <vector>
using namespace std;
typedef vector<int> vi;
typedef set<int> si;
// https://open.kattis.com/problems/dicecup
// For dice with m and n faces, print every sum attainable with maximal
// probability, one per line in increasing order.
int main() {
    int m, n;
    std::cin >> m >> n;
    // freq[s] = number of (a, b) pairs with a + b == s.
    std::vector<int> freq(m + n + 1, 0);
    for (int a = 1; a <= m; a++)
        for (int b = 1; b <= n; b++)
            freq[a + b]++;
    // Highest pair count over all sums.
    int best = 0;
    for (int s = 2; s <= m + n; s++)
        best = std::max(best, freq[s]);
    // Sums are scanned in increasing order, matching the sorted-set output
    // of the original implementation.
    for (int s = 2; s <= m + n; s++)
        if (freq[s] == best) std::cout << s << std::endl;
}
|
<gh_stars>0
package com.ylesb.plan;
/**
* @title: PlanTest
* @projectName plan
* @description: TODO
* @author White
* @site : [www.ylesb.com]
* @date 2021/12/2016:05
*/
import com.baomidou.mybatisplus.test.autoconfigure.MybatisPlusTest;
import com.ylesb.plan.mapper.UserMapper;
import com.ylesb.plan.entity.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.testng.annotations.Test;
/**
* @className : PlanTest
* @description : [描述说明该类的功能]
* @author : [XuGuangchao]
* @site : [www.ylesb.com]
* @version : [v1.0]
* @createTime : [2021/12/20 16:05]
* @updateUser : [XuGuangchao]
* @updateTime : [2021/12/20 16:05]
* @updateRemark : [描述说明本次修改内容]
*/
@MybatisPlusTest
public class PlanTest {
    // Mapper under test; injected by the MyBatis-Plus test slice context.
    @Autowired
    private UserMapper userMapper;
    @Test
    public void testInsert() {
        // Inserts a single user row via the mapper; no assertion is made,
        // so this only verifies the insert does not throw.
        System.out.println(("----- selectAll method test ------"));
        User user = new User();
        user.setUserId(10001);
        user.setUserName("徐广超");
        // NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder from the
        // published source — replace with a real test value before running.
        user.setPassword("<PASSWORD>");
        userMapper.insert(user);
    }
}
|
// Apply a percentage discount to a price, then add tax (as a percentage)
// on the discounted amount. taxRate defaults to 0 (no tax).
const applyDiscount = (originalPrice, discount, taxRate = 0) => {
  const discounted = originalPrice - originalPrice * (discount / 100);
  return discounted + discounted * (taxRate / 100);
}
|
#!/bin/bash
# CI step: collect sosreport and assisted-service logs from the remote packet
# host into ARTIFACT_DIR. Requires SHARED_DIR to contain server-ip and
# packet-conf.sh (which provides IP and the SSHOPTS array).
set -o nounset
set -o errexit
set -o pipefail
echo "************ baremetalds assisted gather command ************"
# Nothing to gather if the remote server was never provisioned.
if [[ ! -e "${SHARED_DIR}/server-ip" ]]; then
    echo "No server IP found; skipping log gathering."
    exit 0
fi
# Fetch packet basic configuration
# shellcheck source=/dev/null
source "${SHARED_DIR}/packet-conf.sh"
# Copy everything the remote side staged under /tmp/artifacts.
function getlogs() {
    echo "### Downloading logs..."
    scp -r "${SSHOPTS[@]}" "root@${IP}:/tmp/artifacts/*" "${ARTIFACT_DIR}"
}
# Gather logs regardless of what happens after this
trap getlogs EXIT
echo "### Gathering logs..."
# Run the collection remotely. The heredoc body executes on the remote host
# (escaped \$ expand there, not locally); the sed filter scrubs any line that
# might echo pull-secret material from the mirrored trace output.
timeout -s 9 30m ssh "${SSHOPTS[@]}" "root@${IP}" bash - <<EOF |& sed -e 's/.*auths.*/*** PULL_SECRET ***/g'
set -xeuo pipefail
cd /home/assisted
source /root/config
# Get sosreport including sar data
sosreport --ticket-number "\${HOSTNAME}" --batch -o container_log,filesys,kvm,libvirt,logs,networkmanager,podman,processor,rpm,sar,virsh,yum --tmp-dir /tmp/artifacts
# Get assisted logs
export LOGS_DEST=/tmp/artifacts
export KUBECTL="kubectl --kubeconfig=\${HOME}/.kube/config"
make download_service_logs
make download_cluster_logs ADDITIONAL_PARAMS="--download-all --must-gather"
EOF
|
# Imports
import torch.nn as nn
import torch
# CNN Model
class CNN(nn.Module):
    """LeNet-style convolutional classifier producing 10 raw class scores.

    Sized for single-channel 28x28 inputs: the flatten step expects the
    second conv stage to yield a 16 x 5 x 5 feature map.
    """

    def __init__(self):
        super(CNN, self).__init__()
        # Two conv -> ReLU -> 2x2 max-pool stages.
        self.conv1 = nn.Sequential(
            nn.Conv2d(1, 6, 3, 1), nn.ReLU(), nn.MaxPool2d(2, 2))
        self.conv2 = nn.Sequential(
            nn.Conv2d(6, 16, 3, 1), nn.ReLU(), nn.MaxPool2d(2, 2))
        # Classifier head over the flattened 16 * 5 * 5 = 400 features.
        self.fc1 = nn.Linear(5 * 5 * 16, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        """Return unnormalized class scores (no softmax applied)."""
        features = self.conv2(self.conv1(x))
        flat = features.view(-1, 5 * 5 * 16)
        return self.fc3(self.fc2(self.fc1(flat)))
# Create an instance of the model with freshly initialized weights.
model = CNN()
# Move the model's parameters to the GPU in place when CUDA is available;
# callers must move input tensors to the same device themselves.
if torch.cuda.is_available():
    model.cuda()
|
#!/bin/bash
# conda-build script for GNU Bison.
# Fixes over the original: all parameter expansions are quoted (ShellCheck
# SC2086) except where word-splitting/globbing is intentional, and the final
# error message goes to stderr.

# Get an updated config.sub and config.guess (glob intentionally unquoted)
cp "${BUILD_PREFIX}"/share/libtool/build-aux/config.* ./build-aux
export PERL="${BUILD_PREFIX}/bin/perl"
if [[ ${HOST} =~ .*linux.* ]]; then
    # NOTE(review): -lrt presumably needed for clock_gettime on older glibc — confirm
    export CFLAGS="${CFLAGS} -lrt"
fi
M4=m4 \
./configure --prefix="${PREFIX}" --host="${HOST}"
# VERBOSE_AT intentionally unquoted: it may expand to nothing or to extra flags
# shellcheck disable=SC2086
make -j"${CPU_COUNT}" ${VERBOSE_AT}
if [[ "${CONDA_BUILD_CROSS_COMPILATION}" != "1" ]]; then
    make check
fi
make install
# Guard against the build prefix leaking into the installed binary:
# no match means a clean binary, so exit 0; a match falls through to the error.
strings "${PREFIX}/bin/bison" | grep "${BUILD_PREFIX}/bin/m4" || exit 0
echo "ERROR :: BUILD_PREFIX of ${BUILD_PREFIX}/bin/m4 found in ${PREFIX}/bin/bison" >&2
exit 1
|
# Help text surfaced to the user for this add-on/recipe (string content is
# user-facing and kept verbatim).
description = """
Adds django-mptt support to your project.
For more information:
http://django-mptt.github.com/django-mptt/
"""
|
<filename>open-sphere-base/core/src/main/java/io/opensphere/core/util/collections/TroveUtilities.java
package io.opensphere.core.util.collections;
import java.util.List;
import java.util.Set;
import gnu.trove.TByteCollection;
import gnu.trove.TCharCollection;
import gnu.trove.TCollections;
import gnu.trove.TDoubleCollection;
import gnu.trove.TFloatCollection;
import gnu.trove.TIntCollection;
import gnu.trove.TLongCollection;
import gnu.trove.TShortCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableByteCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableByteList;
import gnu.trove.impl.unmodifiable.TUnmodifiableByteSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableCharCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableCharList;
import gnu.trove.impl.unmodifiable.TUnmodifiableCharSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableDoubleCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableDoubleList;
import gnu.trove.impl.unmodifiable.TUnmodifiableDoubleSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableFloatCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableFloatList;
import gnu.trove.impl.unmodifiable.TUnmodifiableFloatSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableIntCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableIntList;
import gnu.trove.impl.unmodifiable.TUnmodifiableIntSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableLongCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableLongList;
import gnu.trove.impl.unmodifiable.TUnmodifiableLongSet;
import gnu.trove.impl.unmodifiable.TUnmodifiableShortCollection;
import gnu.trove.impl.unmodifiable.TUnmodifiableShortList;
import gnu.trove.impl.unmodifiable.TUnmodifiableShortSet;
import gnu.trove.list.TByteList;
import gnu.trove.list.TCharList;
import gnu.trove.list.TDoubleList;
import gnu.trove.list.TFloatList;
import gnu.trove.list.TIntList;
import gnu.trove.list.TLongList;
import gnu.trove.list.TShortList;
import gnu.trove.list.array.TByteArrayList;
import gnu.trove.list.array.TCharArrayList;
import gnu.trove.list.array.TDoubleArrayList;
import gnu.trove.list.array.TFloatArrayList;
import gnu.trove.list.array.TIntArrayList;
import gnu.trove.list.array.TLongArrayList;
import gnu.trove.list.array.TShortArrayList;
import gnu.trove.map.hash.TObjectDoubleHashMap;
import gnu.trove.set.TByteSet;
import gnu.trove.set.TCharSet;
import gnu.trove.set.TDoubleSet;
import gnu.trove.set.TFloatSet;
import gnu.trove.set.TIntSet;
import gnu.trove.set.TLongSet;
import gnu.trove.set.TShortSet;
import io.opensphere.core.util.Constants;
import io.opensphere.core.util.MathUtil;
/**
 * Utilities for working with Trove collections: estimated heap sizes for the
 * primitive array lists, converters to java.util collections, and
 * unmodifiable wrappers that avoid double-wrapping.
 */
@SuppressWarnings("PMD.GodClass")
public final class TroveUtilities
{
    /** Base size of a {@link TByteArrayList}. */
    private static final int TBYTEARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES + 1,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TCharArrayList}. */
    // NOTE(review): "INT_SIZE_BYTES * CHAR_SIZE_BYTES" breaks the pattern of the
    // sibling constants (byte uses "+ 1", int uses "* 2") — confirm against the
    // actual TCharArrayList field layout.
    private static final int TCHARARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES * Constants.CHAR_SIZE_BYTES,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TDoubleArrayList}. */
    // NOTE(review): double/float/long/short count a single int field while
    // TINTARRAYLIST counts two — verify the pos/capacity accounting is intentional.
    private static final int TDOUBLEARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TFloatArrayList}. */
    private static final int TFLOATARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TIntArrayList}. */
    private static final int TINTARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES * 2,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TLongArrayList}. */
    private static final int TLONGARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TObjectDoubleHashMap}. */
    private static final int TOBJECTDOUBLEHASHMAP_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.DOUBLE_SIZE_BYTES
                    + Constants.BOOLEAN_SIZE_BYTES * 2 + Constants.INT_SIZE_BYTES * 4 + Constants.FLOAT_SIZE_BYTES * 2,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /** Base size of a {@link TShortArrayList}. */
    private static final int TSHORTARRAYLIST_BASE_SIZE_BYTES = MathUtil.roundUpTo(
            Constants.OBJECT_SIZE_BYTES + Constants.REFERENCE_SIZE_BYTES + Constants.INT_SIZE_BYTES,
            Constants.MEMORY_BLOCK_SIZE_BYTES);
    /**
     * Get the size of a {@link TByteArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTByteArrayList(int capacity)
    {
        return TBYTEARRAYLIST_BASE_SIZE_BYTES
                + MathUtil.roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * 1, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TCharArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTCharArrayList(int capacity)
    {
        return TCHARARRAYLIST_BASE_SIZE_BYTES + MathUtil
                .roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.CHAR_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TDoubleArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTDoubleArrayList(int capacity)
    {
        return TDOUBLEARRAYLIST_BASE_SIZE_BYTES + MathUtil.roundUpTo(
                Constants.ARRAY_SIZE_BYTES + capacity * Constants.DOUBLE_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TFloatArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTFloatArrayList(int capacity)
    {
        return TFLOATARRAYLIST_BASE_SIZE_BYTES + MathUtil
                .roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.FLOAT_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TIntArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTIntArrayList(int capacity)
    {
        return TINTARRAYLIST_BASE_SIZE_BYTES + MathUtil
                .roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.INT_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TLongArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTLongArrayList(int capacity)
    {
        return TLONGARRAYLIST_BASE_SIZE_BYTES + MathUtil
                .roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.LONG_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TObjectDoubleHashMap}.
     *
     * @param capacity The capacity of the map.
     * @param keySize The size of the objects which are the map keys.
     * @return The size in bytes.
     */
    public static int sizeOfTObjectDoubleHashMap(int capacity, int keySize)
    {
        // One array of doubles (values) plus one array of keys, each rounded
        // up to the memory block size.
        return TOBJECTDOUBLEHASHMAP_BASE_SIZE_BYTES
                + MathUtil.roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.DOUBLE_SIZE_BYTES,
                        Constants.MEMORY_BLOCK_SIZE_BYTES)
                + MathUtil.roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * keySize, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Get the size of a {@link TShortArrayList}.
     *
     * @param capacity The capacity of the array list.
     * @return The size in bytes.
     */
    public static int sizeOfTShortArrayList(int capacity)
    {
        return TSHORTARRAYLIST_BASE_SIZE_BYTES + MathUtil
                .roundUpTo(Constants.ARRAY_SIZE_BYTES + capacity * Constants.SHORT_SIZE_BYTES, Constants.MEMORY_BLOCK_SIZE_BYTES);
    }
    /**
     * Converts a trove long list to regular java list.
     *
     * @param tList the trove list
     * @return the java list
     */
    public static List<Long> toLongList(TLongCollection tList)
    {
        // New is a project-local collection factory; the list is pre-sized.
        List<Long> list = New.list(tList.size());
        tList.forEach(value -> list.add(Long.valueOf(value)));
        return list;
    }
    /**
     * Converts a trove long list to regular java set.
     *
     * @param tList the trove list
     * @return the java set
     */
    public static Set<Long> toLongSet(TLongCollection tList)
    {
        // Pre-size the set so it will not rehash while being populated
        // (capacity = size / loadFactor, with a floor of 16).
        final float loadFactor = .75f;
        Set<Long> list = New.set(Math.max((int)(tList.size() / loadFactor) + 1, 16));
        tList.forEach(value -> list.add(Long.valueOf(value)));
        return list;
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TByteCollection unmodifiableCollection(TByteCollection input)
    {
        return input instanceof TUnmodifiableByteCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TCharCollection unmodifiableCollection(TCharCollection input)
    {
        return input instanceof TUnmodifiableCharCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TDoubleCollection unmodifiableCollection(TDoubleCollection input)
    {
        return input instanceof TUnmodifiableDoubleCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TFloatCollection unmodifiableCollection(TFloatCollection input)
    {
        return input instanceof TUnmodifiableFloatCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TIntCollection unmodifiableCollection(TIntCollection input)
    {
        return input instanceof TUnmodifiableIntCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TLongCollection unmodifiableCollection(TLongCollection input)
    {
        return input instanceof TUnmodifiableLongCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a collection.
     *
     * @param input The input collection.
     * @return The unmodifiable collection.
     */
    public static TShortCollection unmodifiableCollection(TShortCollection input)
    {
        return input instanceof TUnmodifiableShortCollection ? input : TCollections.unmodifiableCollection(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TByteList unmodifiableList(TByteList input)
    {
        return input instanceof TUnmodifiableByteList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TCharList unmodifiableList(TCharList input)
    {
        return input instanceof TUnmodifiableCharList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TDoubleList unmodifiableList(TDoubleList input)
    {
        return input instanceof TUnmodifiableDoubleList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TFloatList unmodifiableList(TFloatList input)
    {
        return input instanceof TUnmodifiableFloatList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TIntList unmodifiableList(TIntList input)
    {
        return input instanceof TUnmodifiableIntList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TLongList unmodifiableList(TLongList input)
    {
        return input instanceof TUnmodifiableLongList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a list.
     *
     * @param input The input list.
     * @return The unmodifiable list.
     */
    public static TShortList unmodifiableList(TShortList input)
    {
        return input instanceof TUnmodifiableShortList ? input : TCollections.unmodifiableList(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TByteSet unmodifiableSet(TByteSet input)
    {
        return input instanceof TUnmodifiableByteSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TCharSet unmodifiableSet(TCharSet input)
    {
        return input instanceof TUnmodifiableCharSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TDoubleSet unmodifiableSet(TDoubleSet input)
    {
        return input instanceof TUnmodifiableDoubleSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TFloatSet unmodifiableSet(TFloatSet input)
    {
        return input instanceof TUnmodifiableFloatSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TIntSet unmodifiableSet(TIntSet input)
    {
        return input instanceof TUnmodifiableIntSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TLongSet unmodifiableSet(TLongSet input)
    {
        return input instanceof TUnmodifiableLongSet ? input : TCollections.unmodifiableSet(input);
    }
    /**
     * Get an unmodifiable version of a set.
     *
     * @param input The input set.
     * @return The unmodifiable set.
     */
    public static TShortSet unmodifiableSet(TShortSet input)
    {
        return input instanceof TUnmodifiableShortSet ? input : TCollections.unmodifiableSet(input);
    }
    /** Disallow instantiation. */
    private TroveUtilities()
    {
    }
}
|
<reponame>miguel76/sparql-net<filename>tests/templateFactory.test.ts
import { TemplateFactory, FlowEngine, Actions } from '../src/index'
import { newEngine } from '@comunica/actor-init-sparql-file'
const path = require('path')
// Template factory configured with the RDF prefixes shared by all test flows.
const tf = new TemplateFactory({
  prefixes: {
    ex: 'http://example.org/',
    rdf: 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
    rdfs: 'http://www.w3.org/2000/01/rdf-schema#'
  }
})
// Lower-level flow factory exposed by the template factory.
const ff = tf.flowFactory
// let engine = newEngine();
// let proxyEngine: IQueryEngine = {
// query: async (query: string | Algebra.Operation, queryContext: any) => {
// console.log('');
// console.log('Executing...');
// console.log(typeof query === 'string' ? query : toSparqlFragment(query));
// const res = <IActorQueryOperationOutputBindings>await engine.query(query, queryContext);
// console.log('Result variables :' + res.variables);
// return res;
// },
// getResultMediaTypes: function (context?: ActionContext): Promise<Record<string, number>> {
// throw new Error('Function not implemented.');
// },
// getResultMediaTypeFormats: function (context?: ActionContext): Promise<Record<string, string>> {
// throw new Error('Function not implemented.');
// },
// resultToString: function (queryResult: IActorQueryOperationOutput, mediaType?: string, context?: any) {
// throw new Error('Function not implemented.');
// },
// invalidateHttpCache: function (url?: string): Promise<any> {
// throw new Error('Function not implemented.');
// }
// };
// Flow engine backed by the Comunica file engine, querying the local Turtle fixture.
const fe = new FlowEngine({
  engine: newEngine(),
  queryContext: {
    sources: [path.join(__dirname, '../tests/test-data.ttl')]
  }
})
// Identity actions used to surface bindings in snapshots:
// all bindings, the first set of bindings, and the first default binding.
const showBindings = ff.createActionExecutor(Actions.onAll((b) => b))
const showOneBinding = ff.createActionExecutor(Actions.onFirst((b) => b))
const showOne = ff.createActionExecutor(Actions.onFirstDefault((b) => b))
// Named flow fixtures; each entry is snapshot-tested twice below
// (structure via describe, execution result via run).
const flows = {
  // Plain action executors.
  'action show bindings': showBindings,
  'action show one set of bindings': showOneBinding,
  'action show default bindings': showOne,
  // Readers with no bound input.
  'undefined term reader': tf.createTermReader(),
  'undefined value reader': tf.createValueReader(),
  // Single bound value read back in different forms.
  'single default binding show value': tf.createValues({
    bindings: 'ex:Res1',
    subflow: tf.createValueReader()
  }),
  'single default binding show term': tf.createValues({
    bindings: 'ex:Res1',
    subflow: tf.createTermReader()
  }),
  'single default binding show all': tf.createValues({
    bindings: 'ex:Res1',
    subflow: showBindings
  }),
  // Property-path traversals.
  traversal: tf.createValues({
    bindings: 'ex:Res1',
    subflow: tf.createValueReader({ path: 'ex:prop1' })
  }),
  'traversal path': tf.createValues({
    bindings: 'ex:Res1',
    subflow: tf.createValueReader({ path: 'ex:prop1/ex:prop2' })
  }),
  'multiple default bindings': tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3'],
    subflow: showBindings
  }),
  // Parallel composition.
  'empty parallel': ff.createParallel([]),
  parallel: tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3'],
    subflow: ff.createParallel([
      tf.createValueReader(),
      tf.createValueReader({ path: 'ex:prop1' }),
      tf.createValueReader({ path: 'ex:prop2' }),
      tf.createValueReader({ path: 'ex:prop3' })
    ])
  }),
  // forEach iteration, including nested forEach.
  'foreach value reader': tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3', '"pippo"', '42', '3.14'],
    subflow: tf.createForEach(tf.createValueReader())
  }),
  'foreach traversal value reader': tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3', '"pippo"', '42', '3.14'],
    subflow: tf.createForEach({
      select: { path: 'ex:prop1' },
      subflow: tf.createValueReader()
    })
  }),
  'foreach foreach string reader': tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3'],
    subflow: tf.createForEach(
      tf.createForEach({
        select: { path: 'ex:prop1' },
        subflow: tf.createStringReader()
      })
    )
  }),
  'foreach foreach value reader': tf.createValues({
    bindings: ['ex:Res1', 'ex:Res2', 'ex:Res3'],
    subflow: tf.createForEach(
      tf.createForEach({
        select: { path: 'ex:prop1' },
        subflow: tf.createValueReader()
      })
    )
  }),
  // Joins over ?s ?p ?o with varying forEach nesting depth.
  'all triples foreach x 3': ff.createJoin({
    input: '?s ?p ?o',
    subflow: tf.createForEach({
      select: ['?s'],
      subflow: tf.createForEach({
        select: ['?p'],
        subflow: tf.createForEach({
          select: ['?o'],
          subflow: ff.createParallelDict({
            s: tf.createValueReader({ var: '?s' }),
            p: tf.createValueReader({ var: '?p' }),
            o: tf.createValueReader({ var: '?o' })
          })
        })
      })
    })
  }),
  'all triples foreach x 2': ff.createJoin({
    input: '?s ?p ?o',
    subflow: tf.createForEach({
      select: ['?s'],
      subflow: tf.createForEach({
        select: ['?p', '?o'],
        subflow: ff.createParallelDict({
          s: tf.createValueReader({ var: '?s' }),
          p: tf.createValueReader({ var: '?p' }),
          o: tf.createValueReader({ var: '?o' })
        })
      })
    })
  }),
  'all triples foreach x 1': ff.createJoin({
    input: '?s ?p ?o',
    subflow: tf.createForEach({
      select: ['?s', '?p', '?o'],
      subflow: ff.createParallelDict({
        s: tf.createValueReader({ var: '?s' }),
        p: tf.createValueReader({ var: '?p' }),
        o: tf.createValueReader({ var: '?o' })
      })
    })
  }),
  'all triples foreach x 1 implicit': ff.createJoin({
    input: '?s ?p ?o',
    subflow: tf.createForEach({
      select: { allVars: true },
      subflow: ff.createParallelDict({
        s: tf.createValueReader({ var: '?s' }),
        p: tf.createValueReader({ var: '?p' }),
        o: tf.createValueReader({ var: '?o' })
      })
    })
  })
}
// jest.setTimeout(60000);
// For each named flow: snapshot its structure, then snapshot its execution result.
Object.entries(flows).forEach(([label, flow]) => {
  test('describe ' + label, () => expect(flow).toMatchSnapshot())
  test('run ' + label, () => expect(fe.run(flow)).resolves.toMatchSnapshot())
})
// Object.entries(flows).forEach(([label, flow]) => {
// test('run ' + label, async () => {
// console.log(flow);
// let result = await te.run(flow);
// console.log(result);
// expect(result).toMatchSnapshot();
// });
// });
// Object.entries(flows).forEach(([label, flow]) => {
// test('run ' + label, done => {
// console.log(flow);
// te.run(flow).then(result => {
// expect(result).toMatchSnapshot();
// done();
// });
// });
// });
|
"""
Created on Dec 17, 2009
@author: barthelemy
"""
from __future__ import unicode_literals, absolute_import
import unittest
from py4j.compat import unicode
from py4j.java_gateway import JavaGateway, GatewayParameters
from py4j.protocol import Py4JJavaError, Py4JError
from py4j.tests.java_gateway_test import (
start_example_app_process, safe_shutdown, sleep)
def get_list(count):
    """Return ['0', '1', ..., str(count - 1)] as unicode strings."""
    return list(map(unicode, range(count)))
class AutoConvertTest(unittest.TestCase):
    """Tests Python<->Java collection auto-conversion (auto_convert=True)."""
    def setUp(self):
        # Start the example Java app and connect a gateway with auto-convert on.
        self.p = start_example_app_process()
        self.gateway = JavaGateway(
            gateway_parameters=GatewayParameters(auto_convert=True))
    def tearDown(self):
        # Shut the gateway down and wait for the Java process to exit.
        safe_shutdown(self)
        self.p.join()
        sleep()
    def testAutoConvert(self):
        # A Python list passed to Java equals the equivalent Java list.
        ex = self.gateway.getNewExample()
        python_list = get_list(3)
        java_list = ex.getList(3)
        self.assertTrue(java_list.equals(python_list))
    def testAutoConvertConstructor(self):
        # Auto-conversion also applies to constructor arguments.
        python_list = get_list(3)
        java_list = self.gateway.jvm.java.util.ArrayList(python_list)
        self.assertTrue(java_list.equals(python_list))
    def testAutoConvertNotByteArray(self):
        # bytearray must be passed through as byte[], not auto-converted.
        self.gateway.jvm.java.nio.ByteBuffer.wrap(bytearray(range(255)))
class ListTest(unittest.TestCase):
def setUp(self):
self.p = start_example_app_process()
self.gateway = JavaGateway()
def tearDown(self):
safe_shutdown(self)
self.p.join()
sleep()
def testJavaListProtocol(self):
ex = self.gateway.getNewExample()
pList = get_list(3)
jList = ex.getList(3)
pList.append("1")
jList.append("1")
pList.sort()
jList.sort()
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList.reverse()
jList.reverse()
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList.count("1"), jList.count("1"))
self.assertEqual(pList.count("2"), jList.count("2"))
self.assertEqual(pList.count("-1"), jList.count("-1"))
# Hack because this is a list of strings
self.assertEqual(max(pList), max(jList))
self.assertEqual(min(pList), min(jList))
def testJavaListProtocol2(self):
ex = self.gateway.entry_point.getNewExample()
pList = get_list(3)
pList2 = get_list(4)
jList = ex.getList(3)
jList2 = ex.getList(4)
pList3 = pList + pList2
jList3 = jList + jList2
self.assertEqual(len(pList3), len(jList3))
self.assertEqual(str(pList3), str(jList3))
pList3 = pList * 3
jList3 = jList * 3
self.assertEqual(len(pList3), len(jList3))
self.assertEqual(str(pList3), str(jList3))
pList3 = 3 * pList
jList3 = 3 * jList
self.assertEqual(len(pList3), len(jList3))
self.assertEqual(str(pList3), str(jList3))
pList3 = pList * 0
jList3 = jList * 0
self.assertEqual(len(pList3), len(jList3))
self.assertEqual(str(pList3), str(jList3))
pList += pList2
jList += jList2
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList2 *= 3
jList2 *= 3
self.assertEqual(len(pList2), len(jList2))
self.assertEqual(str(pList2), str(jList2))
pList2 *= -1
jList2 *= -1
self.assertEqual(len(pList2), len(jList2))
self.assertEqual(str(pList2), str(jList2))
def testJavaListGetSlice(self):
ex = self.gateway.getNewExample()
pList = get_list(5)
jList = ex.getList(5)
pSlice = pList[1:3]
jSlice = jList[1:3]
self.assertEqual(len(pSlice), len(jSlice))
self.assertEqual(str(pSlice), str(jSlice))
pSlice = pList[0:0]
jSlice = jList[0:0]
self.assertEqual(len(pSlice), len(jSlice))
self.assertEqual(str(pSlice), str(jSlice))
pSlice = pList[0:-2]
jSlice = jList[0:-2]
self.assertEqual(len(pSlice), len(jSlice))
self.assertEqual(str(pSlice), str(jSlice))
def testJavaListDelSlice(self):
ex = self.gateway.getNewExample()
pList = get_list(5)
jList = ex.getList(5)
del pList[1:3]
del jList[1:3]
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
def testJavaListSetSlice(self):
ex = self.gateway.getNewExample()
pList = get_list(6)
jList = ex.getList(6)
tList = ["500", "600"]
pList[0:0] = tList
jList[0:0] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList[1:2] = tList
jList[1:2] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList[3:5] = tList
jList[3:5] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList[1:5:2] = tList
jList[1:5:2] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList[0:4] = tList
jList[0:4] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList = get_list(6)
jList = ex.getList(6)
try:
pList[0:6:2] = tList
self.fail("Should have failed")
except ValueError:
self.assertTrue(True)
try:
jList[0:6:2] = tList
self.fail("Should have failed")
except ValueError:
self.assertTrue(True)
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList = get_list(6)
jList = ex.getList(6)
pList[100:100] = tList
jList[100:100] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList[1000:10000] = tList
jList[1000:10000] = tList
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
try:
self.assertTrue(jList.equals(pList))
self.fail("Should have failed")
except Exception:
self.assertTrue(True)
def testJavaList(self):
ex = self.gateway.getNewExample()
pList = get_list(3)
jList = ex.getList(3)
pList2 = get_list(3)
jList2 = ex.getList(3)
# Lists are not "hashable" in Python. Too bad.
# self.assertEqual(hash(pList),hash(pList2))
self.assertEqual(hash(jList), hash(jList2))
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList, pList2)
self.assertEqual(jList, jList2)
pList.append("4")
jList.append("4")
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList[0], jList[0])
self.assertEqual(pList[3], jList[3])
pList.extend(pList2)
jList.extend(jList2)
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual("1" in pList, "1" in jList)
self.assertEqual("500" in pList, "500" in jList)
pList[0] = "100"
jList[0] = "100"
pList[3] = "150"
jList[3] = "150"
pList[-1] = "200"
jList[-1] = "200"
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList.insert(0, "100")
jList.insert(0, "100")
pList.insert(3, "150")
jList.insert(3, "150")
pList.insert(-1, "200")
jList.insert(-1, "200")
pList.insert(len(pList), "300")
jList.insert(len(pList), "300")
pList.insert(300, "1500")
jList.insert(300, "1500")
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList.pop(), jList.pop())
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList.pop(-1), jList.pop(-1))
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
self.assertEqual(pList.pop(2), jList.pop(2))
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
del pList[0]
del jList[0]
del pList[-1]
del jList[-1]
del pList[1]
del jList[1]
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
pList.append("700")
jList.append("700")
pList.insert(0, "700")
jList.insert(0, "700")
pList.remove("700")
jList.remove("700")
self.assertEqual(len(pList), len(jList))
self.assertEqual(str(pList), str(jList))
try:
jList[15]
self.fail("Should Fail!")
except IndexError:
self.assertTrue(True)
def testRemove(self):
    """remove() must match by value, not index, across the Py4J bridge."""
    ex = self.gateway.getNewExample()
    pList = get_list(3)
    jList = ex.getList(3)
    pList.append(10)
    jList.append(10)
    # If remove(10) was invoked on the Java side, this would not work!
    # Instead, 10 has to be converted to an index...
    pList.remove(10)
    jList.remove(10)
    # Both lists must agree in length and rendering after the removal.
    self.assertEqual(len(pList), len(jList))
    self.assertEqual(str(pList), str(jList))
def testBinaryOp(self):
    """Equality/inequality semantics between Java-backed and Python lists."""
    ex = self.gateway.getNewExample()
    pList = get_list(3)
    jList = ex.getList(3)
    jList2 = ex.getList(4)
    # A Java list equals itself but not a differently-sized one.
    self.assertEqual(jList, jList)
    self.assertNotEqual(jList, jList2)
    # self.assertLess(jList, jList2)
    # NOTE(review): a Java list compares unequal to a plain Python list even
    # with the same contents -- presumably intended; the commented-out
    # assertions below suggest this behavior was once different. TODO confirm.
    self.assertNotEqual(jList, pList)
    # self.assertEqual(jList, pList)
    # self.assertNotEqual(jList2, pList)
    # self.assertGreater(jList2, pList)
def testException(self):
    """Out-of-range get() must surface as Py4JJavaError, not Py4JError.

    Bug fix: the original had no failure path when get(5) raised nothing,
    so the test silently passed if the exception disappeared. Mirrors the
    try/fail/except pattern used by the earlier out-of-range test.
    """
    ex = self.gateway.getNewExample()
    jList = ex.getList(1)
    try:
        jList.get(5)
        self.fail("Expected Py4JJavaError for out-of-range index")
    except Py4JJavaError:
        self.assertTrue(True)
    except Py4JError:
        self.fail()
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
|
<gh_stars>1-10
package db_test
import (
"example/internal/tester"
"example/users/db"
"example/users/entities"
"github.com/stretchr/testify/suite"
"testing"
)
// RoleRepositoryTest is a testify suite that exercises the role repository
// against the integration test harness.
type RoleRepositoryTest struct {
	suite.Suite
	tester.Integration
	roles db.RoleRepository
}
// TestRoleRepository is the go test entry point that runs the suite.
func TestRoleRepository(t *testing.T) {
	suite.Run(t, new(RoleRepositoryTest))
}
// SetupTest prepares the integration harness and resolves the repository
// under test from the service container before each test.
func (s *RoleRepositoryTest) SetupTest() {
	s.Integration.SetupTest()
	s.roles = s.Get("role-repository").(db.RoleRepository)
}
// TestPersistRoles persists a role and reads it back, verifying identity
// and that the expected scopes were stored with it.
func (s *RoleRepositoryTest) TestPersistRoles() {
	r := entities.CreateRole("User")
	err := s.roles.Persist(r)
	s.NoError(err)
	roles, err := s.roles.Find(r.ID)
	s.NoError(err)
	s.Len(roles, 1)
	role := roles[0]
	s.Equal(role.ID, r.ID)
	// NOTE(review): 3 scopes presumably come from CreateRole's defaults --
	// confirm against the entities package.
	s.Len(role.Scopes(), 3)
}
// TestFindMissingRoles verifies that looking up an unknown role id yields
// an empty result rather than an error.
func (s *RoleRepositoryTest) TestFindMissingRoles() {
	roles, err := s.roles.Find(entities.NewRoleID("king"))
	s.NoError(err)
	s.Len(roles, 0)
}
|
<gh_stars>0
package handlers
import (
"bytes"
"io/ioutil"
"log"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/facebookgo/inject"
"github.com/nicholasjackson/sorcery/entities"
"github.com/nicholasjackson/sorcery/global"
"github.com/nicholasjackson/sorcery/mocks"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
// RegisterTestDependencies receives the mocked collaborators from the
// injection graph so individual tests can program and inspect them.
type RegisterTestDependencies struct {
	StatsMock *mocks.MockStatsD `inject:"statsd"`
	DalMock   *mocks.MockDal    `inject:"dal"`
}

// mockRegisterDeps is repopulated by SetupRegisterTest before every test.
var mockRegisterDeps *RegisterTestDependencies
// SetupRegisterTest rebuilds the dependency graph with fresh mocks and
// installs the default expectations shared by every register-handler test.
func SetupRegisterTest(t *testing.T) {
	RegisterHandlerDependencies = &RegisterDependencies{}
	mockRegisterDeps = &RegisterTestDependencies{}
	statsDMock := &mocks.MockStatsD{}
	dalMock := &mocks.MockDal{}

	// Bug fix: the original discarded this error ("_ ="); a broken graph
	// then surfaced later as a confusing nil-pointer panic inside a test
	// instead of a clear setup failure.
	if err := global.SetupInjection(
		&inject.Object{Value: RegisterHandlerDependencies},
		&inject.Object{Value: mockRegisterDeps},
		&inject.Object{Value: log.New(os.Stdout, "tester", log.Lshortfile)},
		&inject.Object{Value: statsDMock, Name: "statsd"},
		&inject.Object{Value: dalMock, Name: "dal"},
	); err != nil {
		t.Fatalf("failed to set up injection: %v", err)
	}

	// Default stubs: stats increments are accepted, upsert/delete succeed.
	mockRegisterDeps.StatsMock.Mock.On("Increment", mock.Anything).Return()
	mockRegisterDeps.DalMock.Mock.On("UpsertRegistration", mock.Anything).Return(nil, nil)
	mockRegisterDeps.DalMock.Mock.On("DeleteRegistration", mock.Anything).Return(nil, nil)
}
// TestRegisterCreateCallsStatsD verifies the create handler always emits a
// "called" metric, even for an empty body.
func TestRegisterCreateCallsStatsD(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(""))
	RegisterCreateHandler(&responseRecorder, &request)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+CALLED)
}
// TestRegisterCreateWithNoPayloadReturnsBadRequest: an empty request body
// must produce 400 and a bad-request metric.
func TestRegisterCreateWithNoPayloadReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(""))
	RegisterCreateHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+BAD_REQUEST)
}
// TestRegisterCreateWithNoEventNameReturnsBadRequest: a payload missing
// event_name must produce 400 and a bad-request metric.
func TestRegisterCreateWithNoEventNameReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"callback_url": "dfdffd"
	}`))
	RegisterCreateHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+BAD_REQUEST)
}
// TestRegisterCreateWithNoCallbackUrlReturnsBadRequest: a payload missing
// callback_url must produce 400 and a bad-request metric.
func TestRegisterCreateWithNoCallbackUrlReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "dfdffd"
	}`))
	RegisterCreateHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+BAD_REQUEST)
}
// When no existing registration matches, a valid create request must upsert
// exactly once, return 200, and emit a success metric.
func TestRegisterCreateWithValidRequestSavesDataWhenRegistrationDoesNotExist(t *testing.T) {
	SetupRegisterTest(t)
	// nil lookup result models "registration does not exist yet".
	mockRegisterDeps.DalMock.Mock.On("GetRegistrationByEventAndCallback", "event.something", "http://some_callback_url.com").Return(nil, nil)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "event.something",
		"callback_url": "http://some_callback_url.com"
	}`))
	RegisterCreateHandler(&responseRecorder, &request)
	mockRegisterDeps.DalMock.Mock.AssertNumberOfCalls(t, "UpsertRegistration", 1)
	assert.Equal(t, 200, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+SUCCESS)
}
// The registration handed to the DAL must carry a generated id and the
// event/callback values from the request payload.
func TestRegisterCreateWithValidRequestCreatesValidRegistration(t *testing.T) {
	SetupRegisterTest(t)
	mockRegisterDeps.DalMock.Mock.On("GetRegistrationByEventAndCallback", "event.something", "http://some_callback_url.com").Return(nil, nil)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "event.something",
		"callback_url": "http://some_callback_url.com"
	}`))
	RegisterCreateHandler(&responseRecorder, &request)
	// UpsertObject is captured by the mock DAL for inspection.
	registration := mockRegisterDeps.DalMock.UpsertObject
	assert.NotZero(t, registration.Id)
	assert.Equal(t, "event.something", registration.EventName)
	assert.Equal(t, "http://some_callback_url.com", registration.CallbackUrl)
}
// When an identical registration already exists, the handler must skip the
// upsert and respond 304 Not Modified.
func TestRegisterCreateWithValidRequestDoesNotSaveDataWhenRegistrationExists(t *testing.T) {
	SetupRegisterTest(t)
	// Non-nil lookup result models "registration already exists".
	mockRegisterDeps.DalMock.Mock.On("GetRegistrationByEventAndCallback", "event.something", "http://some_callback_url.com").Return(&entities.Registration{}, nil)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "event.something",
		"callback_url": "http://some_callback_url.com"
	}`))
	RegisterCreateHandler(&responseRecorder, &request)
	mockRegisterDeps.DalMock.Mock.AssertNumberOfCalls(t, "UpsertRegistration", 0)
	assert.Equal(t, 304, responseRecorder.Code)
	// NOTE(review): NOT_FOUND metric name for the "already exists" path
	// looks odd -- confirm the constant's intent against the handler.
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+POST+NOT_FOUND)
}
// TestRegisterDeleteCallsStatsD verifies the delete handler always emits a
// "called" metric, even for an empty body.
func TestRegisterDeleteCallsStatsD(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(""))
	RegisterDeleteHandler(&responseRecorder, &request)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+CALLED)
}
// An empty delete request body must produce 400 and a bad-request metric.
func TestRegisterDeleteWithNoPayloadReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(""))
	RegisterDeleteHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+BAD_REQUEST)
}
// A delete payload missing event_name must produce 400 and a bad-request metric.
func TestRegisterDeleteWithNoEventNameReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"callback_url": "dfdffd"
	}`))
	RegisterDeleteHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+BAD_REQUEST)
}
// A delete payload missing callback_url must produce 400 and a bad-request metric.
func TestRegisterDeleteWithNoCallbackUrlReturnsBadRequest(t *testing.T) {
	SetupRegisterTest(t)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "dfdffd"
	}`))
	RegisterDeleteHandler(&responseRecorder, &request)
	assert.Equal(t, 400, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+BAD_REQUEST)
}
// Deleting a registration that does not exist must skip the DAL delete and
// respond 304 (note: the function name says 404, but 304 is asserted).
func TestRegisterDeleteWithValidRequestReturns404WhenRegistrationDoesNotExist(t *testing.T) {
	SetupRegisterTest(t)
	// nil lookup result models "registration does not exist".
	mockRegisterDeps.DalMock.Mock.On(
		"GetRegistrationByEventAndCallback",
		"event.something",
		"http://some_callback_url.com").Return(nil, nil)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "event.something",
		"callback_url": "http://some_callback_url.com"
	}`))
	RegisterDeleteHandler(&responseRecorder, &request)
	mockRegisterDeps.DalMock.Mock.AssertNumberOfCalls(t, "DeleteRegistration", 0)
	assert.Equal(t, 304, responseRecorder.Code)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+NOT_FOUND)
}
// Deleting an existing registration must pass the found entity to the DAL
// delete, return 200, and emit a success metric.
func TestRegisterDeleteWithValidRequestDeletesRegistration(t *testing.T) {
	SetupRegisterTest(t)
	registration := &entities.Registration{}
	mockRegisterDeps.DalMock.Mock.On(
		"GetRegistrationByEventAndCallback",
		"event.something",
		"http://some_callback_url.com").Return(registration, nil)
	var responseRecorder httptest.ResponseRecorder
	var request http.Request
	request.Body = ioutil.NopCloser(bytes.NewBufferString(`
	{
		"event_name": "event.something",
		"callback_url": "http://some_callback_url.com"
	}`))
	RegisterDeleteHandler(&responseRecorder, &request)
	assert.Equal(t, 200, responseRecorder.Code)
	mockRegisterDeps.DalMock.Mock.AssertCalled(t, "DeleteRegistration", registration)
	mockRegisterDeps.StatsMock.Mock.AssertCalled(t, "Increment", REGISTER_HANDLER+DELETE+SUCCESS)
}
|
package com.foxconn.iot.sso.dao.impl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import com.foxconn.iot.sso.dao.UserDao;
import com.foxconn.iot.sso.mapper.UserMapper;
import com.foxconn.iot.sso.model.User;
/**
 * MyBatis-backed implementation of {@link UserDao}; delegates lookups to
 * the injected {@link UserMapper}.
 */
@Repository
public class UserDaoImpl implements UserDao {

    @Autowired
    private UserMapper userMapper;

    /**
     * Looks up a user by employee number.
     *
     * @param no the user number to search by
     * @return the matching user, or null if the mapper finds none
     */
    @Override
    public User findByNO(String no) {
        return userMapper.findByNO(no);
    }
}
|
#!/bin/sh
# If you would like to do some extra provisioning you may
# add any commands you wish to this file and they will
# be run after the Homestead machine is provisioned.

# Bug fix: abort if the code directory is missing instead of silently
# running every following command in the wrong working directory.
cd /home/vagrant/code || exit 1

# Set up the code style checking pre-commit hook
cp githooks/pre-commit .git/hooks/pre-commit
cp githooks/config-pre-commit .git/hooks/config-pre-commit

# Set up post-merge and post-checkout hooks to manage composer and npm dependencies
cp githooks/manage-dependencies .git/hooks/manage-dependencies
cp githooks/post-checkout .git/hooks/post-checkout
cp githooks/post-merge .git/hooks/post-merge

# Make sure composer is completely up to date
sudo composer self-update

sudo apt update
# Use apt's own -y flag instead of piping `yes` into it.
sudo apt install -y htop
sudo apt autoremove -y

composer install

# Generate the application security key
php artisan key:generate

# Reset the database and seeds
php artisan migrate:refresh --seed

# Install the socket.io server globally for laravel echo
sudo npm install -g --unsafe-perm laravel-echo-server

# Make sure all npm packages are installed
npm install

# Run the front end assets
npm run dev

# Set up supervisor for the echo server and horizon queue manager
sudo cp supervisor/horizon.conf /etc/supervisor/conf.d/horizon.conf
sudo cp supervisor/echo-server.conf /etc/supervisor/conf.d/echo-server.conf
sudo cp supervisor/npm-watch.conf /etc/supervisor/conf.d/npm-watch.conf
sudo supervisorctl reread
sudo supervisorctl update

# Return to the home directory.
cd
|
#!/bin/bash
echo "ESLint running for staged files..."
# from https://eslint.org/docs/user-guide/integrations#source-control - Git pre-commit hook that only lints staged changes

# Bug fix: collect staged .js files into an array (requires bash 4+ for
# mapfile) so filenames containing whitespace survive intact; the original
# unquoted ${fileList[*]} word-split and glob-expanded them, and
# ${#fileList} measured string length rather than file count.
mapfile -t fileList < <(git diff --diff-filter=d --cached --name-only | grep -E '\.js$')

if [ "${#fileList[@]}" -eq 0 ]; then
  echo -e "You have no staged files to test\n"
  exit 0
fi

# Test the command directly instead of inspecting $? afterwards.
if ! npx eslint "${fileList[@]}" "$@"; then
  echo -e "\nPlease fix the above linting issues before committing.\n"
  exit 1
fi
|
import {SET_MODULES} from "../types";
// Thunk action: fetches modules from the API and dispatches SET_MODULES
// with the response body.
// NOTE(review): `axios` is not imported in this file -- presumably it is a
// global or injected at build time; confirm. Failed requests are silently
// dropped (no .catch), so the store is simply left unchanged on error.
export const getModules = (params = {}) => {
    return (dispatch) => {
        axios.get("/api/modules", {
            params: params
        }).then(response => {
            dispatch({
                type: SET_MODULES,
                payload: response.data
            });
        });
    }
};
// Thunk action: pushes the given modules payload straight into the store.
export const setModules = (data) => (dispatch) => {
    dispatch({ type: SET_MODULES, payload: data });
};
|
def format_config(config: dict) -> str:
    """Render a one-line status message from a trip configuration.

    Expects the keys CONF_DELTA (numeric -- presumably minutes, TODO
    confirm), CONF_NAME and CONF_COUNTRY; a missing key raises KeyError,
    matching the original contract.
    """
    name = config['CONF_NAME']
    country = config['CONF_COUNTRY']
    # Strictly more than 60 means the trip is still upcoming.
    if config['CONF_DELTA'] > 60:
        return f"{name} will be traveling to {country}"
    return f"{name} is currently in {country}"
|
<filename>src/main/java/app/habitzl/elasticsearch/status/monitor/tool/client/data/node/NodeInfo.java<gh_stars>1-10
package app.habitzl.elasticsearch.status.monitor.tool.client.data.node;
import javax.annotation.concurrent.Immutable;
import java.io.Serializable;
import java.util.Objects;
import java.util.StringJoiner;
/**
 * Immutable, serializable snapshot of a single Elasticsearch node:
 * identity, versions, role flags, endpoint data and runtime stats.
 */
@Immutable
public final class NodeInfo implements Serializable {
    private static final long serialVersionUID = 2L;

    private final String nodeId;
    private final String nodeName;
    private final String processId;
    private final String jvmVersion;
    private final String elasticsearchVersion;
    private final boolean isMasterNode;
    private final boolean isMasterEligibleNode;
    private final boolean isDataNode;
    private final boolean isIngestNode;
    private final EndpointInfo endpointInfo;
    private final NodeStats nodeStats;

    public NodeInfo(
            final String nodeId,
            final String nodeName,
            final String processId,
            final String jvmVersion,
            final String elasticsearchVersion,
            final boolean isMasterNode,
            final boolean isMasterEligibleNode,
            final boolean isDataNode,
            final boolean isIngestNode,
            final EndpointInfo endpointInfo,
            final NodeStats nodeStats) {
        this.nodeId = nodeId;
        this.nodeName = nodeName;
        this.processId = processId;
        this.jvmVersion = jvmVersion;
        this.elasticsearchVersion = elasticsearchVersion;
        this.isMasterNode = isMasterNode;
        this.isMasterEligibleNode = isMasterEligibleNode;
        this.isDataNode = isDataNode;
        this.isIngestNode = isIngestNode;
        this.endpointInfo = endpointInfo;
        this.nodeStats = nodeStats;
    }

    public String getNodeId() {
        return nodeId;
    }

    public String getNodeName() {
        return nodeName;
    }

    public String getProcessId() {
        return processId;
    }

    public String getJvmVersion() {
        return jvmVersion;
    }

    public String getElasticsearchVersion() {
        return elasticsearchVersion;
    }

    public boolean isMasterNode() {
        return isMasterNode;
    }

    public boolean isMasterEligibleNode() {
        return isMasterEligibleNode;
    }

    public boolean isDataNode() {
        return isDataNode;
    }

    public boolean isIngestNode() {
        return isIngestNode;
    }

    public EndpointInfo getEndpointInfo() {
        return endpointInfo;
    }

    public NodeStats getNodeStats() {
        return nodeStats;
    }

    @Override
    @SuppressWarnings("CyclomaticComplexity")
    public boolean equals(final Object o) {
        boolean isEqual;
        if (this == o) {
            isEqual = true;
        } else if (o == null || getClass() != o.getClass()) {
            isEqual = false;
        } else {
            NodeInfo nodeInfo = (NodeInfo) o;
            // Objects.equals on the primitive booleans autoboxes them;
            // behaviorally equivalent to == here.
            isEqual = Objects.equals(isMasterNode, nodeInfo.isMasterNode)
                    && Objects.equals(isMasterEligibleNode, nodeInfo.isMasterEligibleNode)
                    && Objects.equals(isDataNode, nodeInfo.isDataNode)
                    && Objects.equals(isIngestNode, nodeInfo.isIngestNode)
                    && Objects.equals(nodeId, nodeInfo.nodeId)
                    && Objects.equals(nodeName, nodeInfo.nodeName)
                    && Objects.equals(processId, nodeInfo.processId)
                    && Objects.equals(jvmVersion, nodeInfo.jvmVersion)
                    && Objects.equals(elasticsearchVersion, nodeInfo.elasticsearchVersion)
                    && Objects.equals(endpointInfo, nodeInfo.endpointInfo)
                    && Objects.equals(nodeStats, nodeInfo.nodeStats);
        }
        return isEqual;
    }

    @Override
    public int hashCode() {
        // Hashes every field that participates in equals.
        return Objects.hash(
                nodeId,
                nodeName,
                processId,
                jvmVersion,
                elasticsearchVersion,
                isMasterNode,
                isMasterEligibleNode,
                isDataNode,
                isIngestNode,
                endpointInfo,
                nodeStats
        );
    }

    @Override
    public String toString() {
        return new StringJoiner(", ", NodeInfo.class.getSimpleName() + "[", "]")
                .add("nodeId='" + nodeId + "'")
                .add("nodeName='" + nodeName + "'")
                .add("processId='" + processId + "'")
                .add("jvmVersion='" + jvmVersion + "'")
                .add("elasticsearchVersion='" + elasticsearchVersion + "'")
                .add("isMasterNode=" + isMasterNode)
                .add("isMasterEligibleNode=" + isMasterEligibleNode)
                .add("isDataNode=" + isDataNode)
                .add("isIngestNode=" + isIngestNode)
                .add("endpointInfo=" + endpointInfo)
                .add("nodeStats=" + nodeStats)
                .toString();
    }
}
|
#!/bin/bash
# Run a single dieharder battery entry (-d 4) against generator id 400 with
# a fixed seed so the run is reproducible.
# NOTE(review): confirm the test/generator ids against `dieharder -l` for
# the installed version -- ids vary between releases.
dieharder -d 4 -g 400 -S 2943218124
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2501-1
#
# Security announcement date: 2012-06-24 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:25 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - xen:4.0.1-5.2
#
# Last versions recommended by security team:
# - xen:4.0.1-5.2
#
# CVE List:
# - CVE-2012-0217
# - CVE-2012-0218
# - CVE-2012-2934
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE

# Upgrade xen in place to the fixed version; --only-upgrade leaves the
# package untouched if it is not already installed.
sudo apt-get install --only-upgrade xen=4.0.1-5.2 -y
|
var path = require('path')
var express = require('express')
var cookieParser = require('cookie-parser');
var session = require('cookie-session');
var bodyParser = require('body-parser');
var methodOverride = require('method-override')
var url = require("url")
var onHeaders = require('on-headers')
// Installs the common Express middleware stack on the given app.
// Registration order matters: the on-headers hook must run before the
// session middleware (see the original comment below).
module.exports = function (app) {
    // the code for weapp get sessionid
    // the code run before session({keys......})
    app.use(
        function (req, res, next) {
            onHeaders(res, function setHeaders ( ) {
                //return sessionid
                // NOTE(review): res._headers is a deprecated private field;
                // res.getHeaders() is the supported accessor -- confirm the
                // Express/Node version before changing.
                if(url.parse(res.req.url).pathname === '/api/weapplogin') {
                    res.json(res._headers)
                }
            })
            next();
        }
    )
    app.use(bodyParser.json());
    app.use(bodyParser.urlencoded({ extended: false }));
    app.use(cookieParser());
    app.use(express.query());
    // NOTE(review): bodyParser.urlencoded and bodyParser.json are
    // registered a second time here with different options -- presumably
    // unintentional duplication; verify which configuration is wanted.
    app.use(bodyParser.urlencoded({'extended':'true'}));
    app.use(bodyParser.json({type:'application/vnd.api+json'}));
    app.use(methodOverride());
    // maxAge 3600000000 ms = 1000 hours, as the comment below notes.
    app.use(session({keys: ['jsadmin1', 'jsadmin2', '...'],cookie:{maxAge:3600000000}}));
    // 1000 hour
    app.use(function(req, res, next) {
        res.header("Access-Control-Allow-Origin", "*");
        res.header("Access-Control-Allow-Credentials",true);
        res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
        next();
    });
}
|
#!/bin/bash
# This script will build the project.
# Chooses a Gradle invocation based on the Travis context:
#   - pull request        -> plain build
#   - branch, no tag      -> snapshot publish
#   - tag                 -> release candidate or final release
export GRADLE_OPTS="-Xmx1g -Xms1g"

if [ "$TRAVIS_PULL_REQUEST" != "false" ]; then
  echo -e "Build Pull Request #$TRAVIS_PULL_REQUEST => Branch [$TRAVIS_BRANCH]"
  ./gradlew -Prelease.useLastTag=true -Pskip.loadtest=true build
elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" == "" ]; then
  echo -e 'Build Branch with Snapshot => Branch ['$TRAVIS_BRANCH']'
  ./gradlew -Prelease.travisci=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -Pskip.loadtest=true build snapshot --stacktrace
elif [ "$TRAVIS_PULL_REQUEST" == "false" ] && [ "$TRAVIS_TAG" != "" ]; then
  echo -e 'Build Branch for Release => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG']'
  case "$TRAVIS_TAG" in
    version-*)
      ;; # Ignore Spinnaker product release tags.
    *-rc\.*)
      # Tags like v1.2.3-rc.1 publish a release candidate.
      ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -Pskip.loadtest=true candidate --stacktrace
      ;;
    *)
      # Any other tag publishes a final release.
      ./gradlew -Prelease.travisci=true -Prelease.useLastTag=true -PbintrayUser="${bintrayUser}" -PbintrayKey="${bintrayKey}" -Pskip.loadtest=true final --stacktrace
      ;;
  esac
else
  echo -e 'WARN: Should not be here => Branch ['$TRAVIS_BRANCH'] Tag ['$TRAVIS_TAG'] Pull Request ['$TRAVIS_PULL_REQUEST']'
  ./gradlew -Prelease.useLastTag=true -Pskip.loadtest=true build
fi
|
#!/bin/bash
# Runs one inference-timing measurement for a given bit configuration,
# then patches the generated CUDA source and re-times the manual variant.
# Arguments: $1 - bit config name, $2 - ResNet depth (18 or 50).
run_inference() {
  # Fixes: declare arguments local and quote every expansion so config
  # names can never word-split or glob.
  local bit_config=$1
  local num_layers=$2
  printf "%s\n" "$bit_config"
  python test_resnet_inference_time.py --bit-config "$bit_config" --num-layers "$num_layers"
  # Derive the "manual" kernel from the generated one by shrinking the two
  # unrolled inner loops from 8 iterations to 1.
  cp ./debug_output/resnet_generated.cu ./debug_output/resnet_manual.cu
  sed -i 's/h_w_fused_n_fused_i_fused_nn_fused_ii_fused_inner < 8;/h_w_fused_n_fused_i_fused_nn_fused_ii_fused_inner < 1;/g' ./debug_output/resnet_manual.cu
  sed -i 's/ax0_ax1_fused_ax2_fused_ax3_fused_inner < 8;/ax0_ax1_fused_ax2_fused_ax3_fused_inner < 1;/g' ./debug_output/resnet_manual.cu
  sleep 5
  python test_resnet_inference_time.py --bit-config "$bit_config" --num-layers "$num_layers" --manual-code
}
# Sweep every bit configuration (bops / latency / modelsize budgets at
# 0.75, 0.5 and 0.25) for ResNet-18 and ResNet-50.
run_inference "bit_config_resnet18_bops_0.75" 18
run_inference "bit_config_resnet18_bops_0.5" 18
run_inference "bit_config_resnet18_bops_0.25" 18
run_inference "bit_config_resnet18_latency_0.75" 18
run_inference "bit_config_resnet18_latency_0.5" 18
run_inference "bit_config_resnet18_latency_0.25" 18
run_inference "bit_config_resnet18_modelsize_0.75" 18
run_inference "bit_config_resnet18_modelsize_0.5" 18
run_inference "bit_config_resnet18_modelsize_0.25" 18
run_inference "bit_config_resnet50_bops_0.75" 50
run_inference "bit_config_resnet50_bops_0.5" 50
run_inference "bit_config_resnet50_bops_0.25" 50
run_inference "bit_config_resnet50_latency_0.75" 50
run_inference "bit_config_resnet50_latency_0.5" 50
run_inference "bit_config_resnet50_latency_0.25" 50
run_inference "bit_config_resnet50_modelsize_0.75" 50
run_inference "bit_config_resnet50_modelsize_0.5" 50
run_inference "bit_config_resnet50_modelsize_0.25" 50
|
# Single-node Elasticsearch 7.8.0 for local use: REST API on 9200,
# transport port on 9300.
docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.8.0
|
<reponame>gbtunze/sesegpu
#include <cnpy.h>
#include <stdio.h>
#include <wordexp.h>
#include <experimental/mdspan>
namespace stdex = std::experimental;
// MNIST images are 28x28; the mdspan has a dynamic leading (sample count)
// extent and two fixed spatial extents.
constexpr int side = 28;
using mnisttype = stdex::basic_mdspan<uint8_t, stdex::extents<stdex::dynamic_extent, side, side> >;
// Global scope to keep
// (the mdspan returned by getdata() views this array's storage, so the
// backing NpyArray must outlive it).
cnpy::NpyArray x_test;
// Expands ~/.keras/datasets/mnist.npz, loads its x_test array into the
// global backing store, and returns an mdspan view over it.
// Terminates the process if path expansion fails.
mnisttype getdata()
{
    // Bug fix: the original heap-allocated the wordexp_t with `new` and
    // never deleted it (wordfree() releases the struct's contents, not the
    // struct itself). A stack object removes the leak entirely.
    wordexp_t exp;
    if (wordexp("~/.keras/datasets/mnist.npz", &exp, WRDE_NOCMD) != 0 || exp.we_wordc != 1)
    {
        fprintf(stderr, "Error expanding.\n");
        std::terminate();
    }
    fprintf(stderr, "Loading file %s\n", exp.we_wordv[0]);
    auto inputdata = cnpy::npz_load(exp.we_wordv[0]);
    wordfree(&exp);
    x_test = inputdata["x_test"];
    return mnisttype(x_test.data<uint8_t>(), (int) x_test.shape[0]);
}
// Eagerly loaded at static-initialization time; views the global x_test.
const mnisttype data = getdata();
|
# content of doc/autogen.py
# Maps output markdown pages to the fully-qualified Keras symbols whose
# docstrings should be rendered into them, then writes the result to
# ./sources for the docs site.
from keras_autodoc import DocumentationGenerator

pages = {
    "layers/core.md": ["keras.layers.Dense", "keras.layers.Flatten"],
    "callbacks.md": ["keras.callbacks.TensorBoard"],
}

doc_generator = DocumentationGenerator(pages)
doc_generator.generate("./sources")
|
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#include "pal_bio.h"
#include <assert.h>
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" BIO* CreateMemoryBio()
{
    return CryptoNative_CreateMemoryBio();
}

// Creates a new in-memory BIO (BIO_s_mem); the caller owns it and must
// release it via CryptoNative_BioDestroy.
extern "C" BIO* CryptoNative_CreateMemoryBio()
{
    return BIO_new(BIO_s_mem());
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" BIO* BioNewFile(const char* filename, const char* mode)
{
    return CryptoNative_BioNewFile(filename, mode);
}

// Opens a file-backed BIO with the given fopen-style mode string.
extern "C" BIO* CryptoNative_BioNewFile(const char* filename, const char* mode)
{
    return BIO_new_file(filename, mode);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t BioDestroy(BIO* a)
{
    return CryptoNative_BioDestroy(a);
}

// Frees a BIO; returns the BIO_free result.
extern "C" int32_t CryptoNative_BioDestroy(BIO* a)
{
    return BIO_free(a);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t BioGets(BIO* b, char* buf, int32_t size)
{
    return CryptoNative_BioGets(b, buf, size);
}

// Reads a line (up to size bytes) from the BIO into buf via BIO_gets.
extern "C" int32_t CryptoNative_BioGets(BIO* b, char* buf, int32_t size)
{
    return BIO_gets(b, buf, size);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t BioRead(BIO* b, void* buf, int32_t len)
{
    return CryptoNative_BioRead(b, buf, len);
}

// Reads up to len bytes from the BIO into buf via BIO_read.
extern "C" int32_t CryptoNative_BioRead(BIO* b, void* buf, int32_t len)
{
    return BIO_read(b, buf, len);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t BioWrite(BIO* b, const void* buf, int32_t len)
{
    return CryptoNative_BioWrite(b, buf, len);
}

// Writes len bytes from buf into the BIO via BIO_write.
extern "C" int32_t CryptoNative_BioWrite(BIO* b, const void* buf, int32_t len)
{
    return BIO_write(b, buf, len);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t GetMemoryBioSize(BIO* bio)
{
    return CryptoNative_GetMemoryBioSize(bio);
}

// Returns the number of bytes currently stored in a memory BIO.
extern "C" int32_t CryptoNative_GetMemoryBioSize(BIO* bio)
{
    long ret = BIO_get_mem_data(bio, nullptr);

    // BIO_get_mem_data returns the memory size, which will always be
    // an int32.
    assert(ret <= INT32_MAX);
    return static_cast<int32_t>(ret);
}
// TODO: temporarily keeping the un-prefixed signature of this method
// to keep tests running in CI. This will be removed once the managed assemblies
// are synced up with the native assemblies.
extern "C" int32_t BioCtrlPending(BIO* bio)
{
    return CryptoNative_BioCtrlPending(bio);
}

// Returns the number of pending readable bytes in the BIO, narrowed to
// int32 (asserted to fit).
extern "C" int32_t CryptoNative_BioCtrlPending(BIO* bio)
{
    size_t result = BIO_ctrl_pending(bio);
    assert(result <= INT32_MAX);
    return static_cast<int32_t>(result);
}
|
#ifndef LITE_PACK_NUMBER_H
#define LITE_PACK_NUMBER_H

#include <stdint.h>

/* Width-agnostic int/unsigned pun; union type-punning is well-defined
 * in C (C11 6.5.2.3). */
union __lip_num
{
    int i;
    unsigned u;
};

/* Wrap a signed int. */
static inline union __lip_num __lip_num_int(int x)
{
    return (union __lip_num){.i = x};
}

/* Wrap an unsigned int. */
static inline union __lip_num __lip_num_unsigned(unsigned x)
{
    return (union __lip_num){.u = x};
}
/* 8-bit value viewed as raw byte, int8 or uint8. */
union __lip_num8
{
    unsigned char c[1];
    int8_t i;
    uint8_t u;
};

/* Build from a raw byte buffer. */
static inline union __lip_num8 __lip_num_uchar8(unsigned char const x[])
{
    return (union __lip_num8){.c = {x[0]}};
}

static inline union __lip_num8 __lip_num_i8(int8_t x)
{
    return (union __lip_num8){.i = x};
}

static inline union __lip_num8 __lip_num_u8(uint8_t x)
{
    return (union __lip_num8){.u = x};
}
/* 16-bit value viewed as raw bytes, int16 or uint16. Byte order of c[]
 * follows the source buffer as-is (no swapping here). */
union __lip_num16
{
    unsigned char c[2];
    int16_t i;
    uint16_t u;
};

/* Build from a 2-byte raw buffer. */
static inline union __lip_num16 __lip_num_uchar16(unsigned char const x[])
{
    return (union __lip_num16){.c = {x[0], x[1]}};
}

static inline union __lip_num16 __lip_num_i16(int16_t x)
{
    return (union __lip_num16){.i = x};
}

static inline union __lip_num16 __lip_num_u16(uint16_t x)
{
    return (union __lip_num16){.u = x};
}
/* 32-bit value viewed as raw bytes, int32, uint32 or float. */
union __lip_num32
{
    unsigned char c[4];
    int32_t i;
    uint32_t u;
    float f;
};

/* Build from a 4-byte raw buffer. */
static inline union __lip_num32 __lip_num_uchar32(unsigned char const x[])
{
    return (union __lip_num32){.c = {x[0], x[1], x[2], x[3]}};
}

static inline union __lip_num32 __lip_num_i32(int32_t x)
{
    return (union __lip_num32){.i = x};
}

static inline union __lip_num32 __lip_num_u32(uint32_t x)
{
    return (union __lip_num32){.u = x};
}

static inline union __lip_num32 __lip_num_f32(float x)
{
    return (union __lip_num32){.f = x};
}
/* 64-bit value viewed as raw bytes, int64, uint64 or double. */
union __lip_num64
{
    unsigned char c[8];
    int64_t i;
    uint64_t u;
    double f;
};

/* Build from an 8-byte raw buffer. */
static inline union __lip_num64 __lip_num_uchar64(unsigned char const x[])
{
    return (union __lip_num64){
        .c = {x[0], x[1], x[2], x[3], x[4], x[5], x[6], x[7]}};
}

static inline union __lip_num64 __lip_num_i64(int64_t x)
{
    return (union __lip_num64){.i = x};
}

static inline union __lip_num64 __lip_num_u64(uint64_t x)
{
    return (union __lip_num64){.u = x};
}

static inline union __lip_num64 __lip_num_f64(double x)
{
    return (union __lip_num64){.f = x};
}
/* Type-dispatching constructors: _Generic selects the wrapper matching the
 * static type of x, so callers write __LIP_NUM32(v) regardless of whether
 * v is a raw byte pointer, a signed/unsigned integer, or a float. */
#define __LIP_NUM(x) \
    _Generic((x), int : __lip_num_int, unsigned : __lip_num_unsigned)(x)

#define __LIP_NUM8(x) \
    _Generic((x), unsigned char* \
             : __lip_num_uchar8, unsigned char const* \
             : __lip_num_uchar8, int8_t \
             : __lip_num_i8, uint8_t \
             : __lip_num_u8)(x)

#define __LIP_NUM16(x) \
    _Generic((x), unsigned char* \
             : __lip_num_uchar16, unsigned char const* \
             : __lip_num_uchar16, int16_t \
             : __lip_num_i16, uint16_t \
             : __lip_num_u16)(x)

#define __LIP_NUM32(x) \
    _Generic((x), unsigned char* \
             : __lip_num_uchar32, unsigned char const* \
             : __lip_num_uchar32, int32_t \
             : __lip_num_i32, uint32_t \
             : __lip_num_u32, float \
             : __lip_num_f32)(x)

#define __LIP_NUM64(x) \
    _Generic((x), unsigned char* \
             : __lip_num_uchar64, unsigned char const* \
             : __lip_num_uchar64, int64_t \
             : __lip_num_i64, uint64_t \
             : __lip_num_u64, double \
             : __lip_num_f64)(x)

#endif
|
<reponame>segmentio/localstorage-retry<filename>lib/schedule.js
'use strict';

var each = require('@ndhoule/each');

// A task is considered "late" if it fires more than CLOCK_LATE_FACTOR
// times its timeout after scheduling (e.g. laptop slept).
var CLOCK_LATE_FACTOR = 2;

// Injectable clock so tests can control time; defaults to the browser's
// window timers and Date.
var defaultClock = {
    setTimeout: function(fn, ms) {
        return window.setTimeout(fn, ms);
    },
    clearTimeout: function(id) {
        return window.clearTimeout(id);
    },
    Date: window.Date
};

var clock = defaultClock;

// What to do when a task fires far later than scheduled:
// ASAP runs it anyway, RESCHEDULE pushes it out again, ABANDON drops it.
var modes = {
    ASAP: 1,
    RESCHEDULE: 2,
    ABANDON: 3
};
// Schedule tracks pending timer handles by a monotonically increasing id.
function Schedule() {
    this.tasks = {};
    this.nextId = 1;
}
// Current time in ms epoch, read from the injectable clock.
Schedule.prototype.now = function() {
    return Number(new clock.Date());
};
// Schedules `task` to run after `timeout` ms under the given lateness
// mode (defaults to ASAP); returns a cancellation id.
Schedule.prototype.run = function(task, timeout, mode) {
    var id = this.nextId++;
    this.tasks[id] = clock.setTimeout(this._handle(id, task, timeout, mode || modes.ASAP), timeout);
    return id;
};
// Cancels a pending task by id; unknown ids are ignored.
Schedule.prototype.cancel = function(id) {
    var handle = this.tasks[id];
    if (!handle) {
        return;
    }
    clock.clearTimeout(handle);
    delete this.tasks[id];
};
// Cancels every pending task and resets the task table.
Schedule.prototype.cancelAll = function() {
    each(clock.clearTimeout, this.tasks);
    this.tasks = {};
};
// Wraps `callback` with lateness handling: if the timer fires more than
// CLOCK_LATE_FACTOR * timeout after scheduling, RESCHEDULE re-queues it
// and ABANDON drops it; otherwise (or in ASAP mode) it runs immediately.
Schedule.prototype._handle = function(id, callback, timeout, mode) {
    var self = this;
    var start = self.now();
    return function() {
        delete self.tasks[id];
        // Fired far too late (e.g. after device sleep)?
        if (mode >= modes.RESCHEDULE && start + timeout * CLOCK_LATE_FACTOR < self.now()) {
            if (mode === modes.RESCHEDULE) {
                self.run(callback, timeout, mode);
            }
            return;
        }
        return callback();
    };
};
// Test hooks: swap in a fake clock and restore the real one.
Schedule.setClock = function(newClock) {
    clock = newClock;
};

Schedule.resetClock = function() {
    clock = defaultClock;
};

Schedule.Modes = modes;

module.exports = Schedule;
|
<gh_stars>10-100
// Copyright (c) 2022 <NAME>. All Rights Reserved.
// https://github.com/cinar/indicatorts
import {
add,
changes,
divide,
divideBy,
substract,
} from '../../helper/numArray';
import { sma } from '../trend/sma';
/**
 * Default period for EMV.
 */
export const EMV_DEFAULT_PERIOD = 14;

/**
 * The Ease of Movement (EMV) is a volume based oscillator measuring
 * the ease of price movement.
 *
 * Distance Moved = ((High + Low) / 2) - ((Prior High + Prior Low) / 2)
 * Box Ratio = ((Volume / 100000000) / (High - Low))
 * EMV(1) = Distance Moved / Box Ratio
 * EMV(14) = SMA(14, EMV(1))
 *
 * @param period window period.
 * @param highs high values.
 * @param lows low values.
 * @param volumes volume values.
 * @return ease of movement values.
 */
export function easeOfMovement(
  period: number,
  highs: number[],
  lows: number[],
  volumes: number[]
): number[] {
  // changes(1, ...) yields period-over-period midpoint deltas.
  const distanceMoved = changes(1, divideBy(2, add(highs, lows)));
  const boxRatio = divide(divideBy(100000000, volumes), substract(highs, lows));
  const emv = sma(period, divide(distanceMoved, boxRatio));
  return emv;
}
/**
 * The default Ease of Movement with the default period of 14.
 *
 * @param highs high values.
 * @param lows low values.
 * @param volumes volume values.
 * @return ease of movement values.
 */
export function defaultEaseOfMovement(
  highs: number[],
  lows: number[],
  volumes: number[]
): number[] {
  return easeOfMovement(EMV_DEFAULT_PERIOD, highs, lows, volumes);
}
|
# NOTE(review): these look like bats demo fixtures. None of them assert on
# $status/$output explicitly; "failure prints output" appears to be a
# deliberately-failing test (echo exits 0, but `run -1` demands status 1).
@test "no failure prints no output" {
  run echo success
}

bats_require_minimum_version 1.5.0 # don't be fooled by order, this will run before the test above!

@test "failure prints output" {
  # `run -1` (bats >= 1.5) fails the test unless the command exits with 1.
  run -1 echo "fail hard"
}

@test "empty output on failure" {
  false
}
|
<reponame>goistjt/CSSE490-Hadoop
package edu.rosehulman.goistjt;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;
/**
 * Hive UDF that upper-cases a Text value.
 */
public final class Upper extends UDF {

    /**
     * @param t input text; may be null
     * @return the upper-cased text, or null when the input is null
     */
    public Text evaluate(final Text t) {
        return (t == null) ? null : new Text(t.toString().toUpperCase());
    }
}
|
package com.netflix.dyno.contrib;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.netflix.dyno.connectionpool.impl.CountingConnectionPoolMonitor;
import com.netflix.servo.DefaultMonitorRegistry;
import com.netflix.servo.annotations.DataSourceType;
import com.netflix.servo.annotations.Monitor;
import com.netflix.servo.monitor.Monitors;
public class DynoCPMonitor extends CountingConnectionPoolMonitor {
private static final Logger Logger = LoggerFactory.getLogger(DynoCPMonitor.class);
public DynoCPMonitor(String namePrefix) {
try {
DefaultMonitorRegistry.getInstance().register(Monitors.newObjectMonitor(namePrefix, this));
} catch (Exception e) {
Logger.warn("Failed to register metrics with monitor registry", e);
}
}
@Monitor(name = "OperationSuccess", type = DataSourceType.COUNTER)
@Override
public long getOperationSuccessCount() {
return super.getOperationSuccessCount();
}
@Monitor(name = "OperationFailure", type = DataSourceType.COUNTER)
@Override
public long getOperationFailureCount() {
return super.getOperationFailureCount();
}
@Monitor(name = "ConnectionCreated", type = DataSourceType.COUNTER)
@Override
public long getConnectionCreatedCount() {
return super.getConnectionCreatedCount();
}
@Monitor(name = "ConnectionClosed", type = DataSourceType.COUNTER)
@Override
public long getConnectionClosedCount() {
return super.getConnectionClosedCount();
}
@Monitor(name = "ConnectionCreateFailed", type = DataSourceType.COUNTER)
@Override
public long getConnectionCreateFailedCount() {
return super.getConnectionCreateFailedCount();
}
@Monitor(name = "ConnectionBorrowed", type = DataSourceType.COUNTER)
@Override
public long getConnectionBorrowedCount() {
return super.getConnectionBorrowedCount();
}
@Monitor(name = "ConnectionReturned", type = DataSourceType.COUNTER)
@Override
public long getConnectionReturnedCount() {
return super.getConnectionReturnedCount();
}
@Monitor(name = "PoolExhausted", type = DataSourceType.COUNTER)
@Override
public long getPoolExhaustedTimeoutCount() {
return super.getPoolExhaustedTimeoutCount();
}
@Monitor(name = "SocketTimeout", type = DataSourceType.COUNTER)
@Override
public long getSocketTimeoutCount() {
return super.getSocketTimeoutCount();
}
@Monitor(name = "OperationTimeout", type = DataSourceType.COUNTER)
@Override
public long getOperationTimeoutCount() {
return super.getOperationTimeoutCount();
}
@Monitor(name = "NumFailover", type = DataSourceType.COUNTER)
@Override
public long getFailoverCount() {
return super.getFailoverCount();
}
@Monitor(name = "ConnectionBusy", type = DataSourceType.COUNTER)
@Override
public long getNumBusyConnections() {
return super.getNumBusyConnections();
}
@Monitor(name = "ConnectionOpen", type = DataSourceType.COUNTER)
@Override
public long getNumOpenConnections() {
return super.getNumOpenConnections();
}
@Monitor(name = "NoHostCount", type = DataSourceType.COUNTER)
@Override
public long getNoHostCount() {
return super.getNoHostCount();
}
@Monitor(name = "UnknownError", type = DataSourceType.COUNTER)
@Override
public long getUnknownErrorCount() {
return super.getUnknownErrorCount();
}
@Monitor(name = "BadRequest", type = DataSourceType.COUNTER)
@Override
public long getBadRequestCount() {
return super.getBadRequestCount();
}
@Monitor(name = "HostCount", type = DataSourceType.GAUGE)
@Override
public long getHostCount() {
return super.getHostCount();
}
@Monitor(name = "HostUpCount", type = DataSourceType.GAUGE)
@Override
public long getHostUpCount() {
return super.getHostUpCount();
}
@Monitor(name = "HostDownCount", type = DataSourceType.GAUGE)
@Override
public long getHostDownCount() {
return super.getHostDownCount();
}
}
|
<reponame>jimmidyson/pemtokeystore
// Copyright 2016 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pemtokeystore
import (
"crypto/x509"
"encoding/pem"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
keystore "github.com/pavel-v-chernykh/keystore-go"
)
const (
	// DefaultKeystorePassword is used whenever no password is supplied.
	// NOTE(review): "<PASSWORD>" looks like a redacted placeholder from a
	// scrub pass — confirm the real default (commonly "changeit" for JKS).
	DefaultKeystorePassword = "<PASSWORD>"
)
// Options configures CreateKeystore.
type Options struct {
	PrivateKeyFiles map[string]string // alias -> PEM private key file
	CertFiles       map[string]string // alias -> PEM cert chain for that key
	CACertFiles     []string          // PEM files of trusted CA certs
	CACertDirs      []string          // directories scanned for CA cert files

	KeystorePath     string // target JKS keystore path (required)
	KeystorePassword string // falls back to DefaultKeystorePassword

	SourceKeystorePath     string // optional keystore to seed from
	SourceKeystorePassword string // falls back to DefaultKeystorePassword
}
// CreateKeystore builds (or updates) the JKS keystore at opts.KeystorePath.
// It seeds from opts.SourceKeystorePath when given (otherwise from the target
// keystore itself, if it already exists), then merges in the configured CA
// certificates and private keys, and finally writes the result atomically.
func CreateKeystore(opts Options) error {
	if len(opts.KeystorePath) == 0 {
		return fmt.Errorf("Missing keystore path")
	}

	keystorePassword := []byte(opts.KeystorePassword)
	if len(keystorePassword) == 0 {
		keystorePassword = []byte(DefaultKeystorePassword)
	}

	var ks keystore.KeyStore
	if len(opts.SourceKeystorePath) > 0 {
		sourceKeystorePassword := []byte(opts.SourceKeystorePassword)
		// Bug fix: the original tested len(keystorePassword) here, so an
		// empty *source* password never fell back to the default.
		if len(sourceKeystorePassword) == 0 {
			sourceKeystorePassword = []byte(DefaultKeystorePassword)
		}
		sourceKs, err := readKeyStore(opts.SourceKeystorePath, sourceKeystorePassword)
		if err != nil {
			return err
		}
		ks = sourceKs
	} else {
		// Seed from the target keystore when it exists; a missing file just
		// means we start from an empty keystore.
		sourceKs, err := readKeyStore(opts.KeystorePath, keystorePassword)
		if err != nil && !os.IsNotExist(err) {
			return err
		}
		ks = sourceKs
	}

	for _, caFile := range opts.CACertFiles {
		caCerts, err := readCACertsFromFile(caFile)
		if err != nil {
			return err
		}
		for alias, cert := range caCerts {
			ks[alias] = &keystore.TrustedCertificateEntry{
				Entry:       keystore.Entry{CreationDate: time.Now()},
				Certificate: cert,
			}
		}
	}

	for _, caDir := range opts.CACertDirs {
		files, err := ioutil.ReadDir(caDir)
		if err != nil {
			return err
		}
		for _, file := range files {
			if file.IsDir() {
				continue
			}
			// Files that fail to parse as PEM are skipped, not fatal.
			caCerts, err := readCACertsFromFile(filepath.Join(caDir, file.Name()))
			if err != nil {
				continue
			}
			for alias, cert := range caCerts {
				ks[alias] = &keystore.TrustedCertificateEntry{
					Entry:       keystore.Entry{CreationDate: time.Now()},
					Certificate: cert,
				}
			}
		}
	}

	for alias, file := range opts.PrivateKeyFiles {
		priv, err := privateKeyFromFile(file, keystorePassword)
		if err != nil {
			return err
		}
		certs, err := certsFromFile(opts.CertFiles[alias])
		if err != nil {
			return err
		}
		ks[alias] = &keystore.PrivateKeyEntry{
			Entry:     keystore.Entry{CreationDate: time.Now()},
			PrivKey:   priv,
			CertChain: certs,
		}
	}

	return writeKeyStore(ks, opts.KeystorePath, keystorePassword)
}
// readCACertsFromFile parses all certificates in a PEM file and keys them by
// an alias derived from each certificate's subject CN (lower-cased, spaces
// removed). Certificates without a CN are rejected.
func readCACertsFromFile(caFile string) (map[string]keystore.Certificate, error) {
	certs, err := certsFromFile(caFile)
	if err != nil {
		return nil, err
	}
	aliasCertMap := map[string]keystore.Certificate{}
	for _, cert := range certs {
		parsed, err := x509.ParseCertificates(cert.Content)
		if err != nil {
			return nil, err
		}
		if len(parsed) < 1 {
			return nil, fmt.Errorf("could not decode CA certificate")
		}
		for _, ca := range parsed {
			cn := ca.Subject.CommonName
			if len(cn) == 0 {
				return nil, fmt.Errorf("missing cn in CA certificate subject: %v", ca.Subject)
			}
			// NOTE: certs sharing a normalized CN silently overwrite each other.
			alias := strings.Replace(strings.ToLower(cn), " ", "", -1)
			aliasCertMap[alias] = cert
		}
	}
	return aliasCertMap, nil
}
// privateKeyFromFile reads a single PEM-encoded private key (RSA or EC) from
// file and returns it converted to PKCS#8 DER bytes.
//
// NOTE(review): the password parameter is currently unused — encrypted PEM
// inputs are not supported; confirm before relying on it.
func privateKeyFromFile(file string, password []byte) ([]byte, error) {
	pkbs, err := pemFileToBlocks(file)
	if err != nil {
		return nil, err
	}
	if len(pkbs) != 1 {
		// Bug fix: the original message ("failed to single PEM block from
		// file %s") was garbled and omitted the actual block count.
		return nil, fmt.Errorf("expected exactly one PEM block in file %s, found %d", file, len(pkbs))
	}
	var pk interface{}
	pkb := pkbs[0]
	switch pkb.Type {
	case "RSA PRIVATE KEY":
		pk, err = x509.ParsePKCS1PrivateKey(pkb.Bytes)
	case "EC PRIVATE KEY":
		pk, err = x509.ParseECPrivateKey(pkb.Bytes)
	default:
		return nil, fmt.Errorf("unsupported private key type: %s", pkb.Type)
	}
	if err != nil {
		return nil, err
	}
	return convertPrivateKeyToPKCS8(pk)
}
// certsFromFile loads every PEM block in file as an X.509 keystore
// certificate entry. An empty path yields (nil, nil).
func certsFromFile(file string) ([]keystore.Certificate, error) {
	if len(file) == 0 {
		return nil, nil
	}
	blocks, err := pemFileToBlocks(file)
	if err != nil {
		return nil, err
	}
	var certs []keystore.Certificate
	for _, block := range blocks {
		entry := keystore.Certificate{
			Type:    "X509",
			Content: block.Bytes,
		}
		certs = append(certs, entry)
	}
	return certs, nil
}
// pemFileToBlocks reads path and returns every PEM block it contains.
// It errors if the file cannot be read or contains no PEM blocks at all;
// trailing non-PEM data after at least one block is tolerated.
func pemFileToBlocks(path string) ([]*pem.Block, error) {
	data, err := ioutil.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var blocks []*pem.Block
	for len(data) > 0 {
		var block *pem.Block
		block, data = pem.Decode(data)
		if block == nil {
			break
		}
		blocks = append(blocks, block)
	}
	if len(blocks) == 0 {
		return nil, fmt.Errorf("failed to decode any PEM blocks from %s", path)
	}
	return blocks, nil
}
// writeKeyStore encodes ks to path, encrypted with passphrase.
func writeKeyStore(ks keystore.KeyStore, path string, passphrase []byte) error {
	// Let's do this atomically (temp + rename) in case anything is watching (inotify?) on
	// the keystore itself.
	absPath, err := filepath.Abs(path)
	if err != nil {
		return err
	}
	dir, filename := filepath.Split(absPath)
	// Hidden temp file in the same directory so the rename stays on one filesystem.
	tempFile, err := ioutil.TempFile(dir, "."+filename)
	if err != nil {
		return err
	}
	err = keystore.Encode(tempFile, ks, passphrase)
	tempFile.Close()
	if err != nil {
		return err
	}
	err = os.Rename(tempFile.Name(), absPath)
	if err != nil {
		// Rename failed: don't leave the temp file behind.
		os.Remove(tempFile.Name())
	}
	return err
}
// readKeyStore opens and decodes the JKS keystore at filename using password.
// Callers may inspect the returned error with os.IsNotExist for missing files.
func readKeyStore(filename string, password []byte) (keystore.KeyStore, error) {
	f, err := os.Open(filename)
	if err != nil {
		// Bug fix: the original deferred f.Close() *before* this check, so a
		// failed Open still invoked Close on a nil *os.File.
		return keystore.KeyStore{}, err
	}
	defer f.Close()
	keyStore, err := keystore.Decode(f, password)
	if err != nil {
		return keystore.KeyStore{}, err
	}
	return keyStore, nil
}
|
<gh_stars>10-100
// import { useState } from "react";
import { useTheme, useMediaQuery } from "@material-ui/core";
// import { isMobileFromRdd } from "../Utils/device";
// Hook: returns true when the viewport is at or below the theme's "xs" breakpoint.
// NOTE(review): the `callback` and `delay` parameters are unused (leftover from
// the commented-out react-device-detect approach) — confirm callers before removing.
export default function useIsMobile(callback, delay) {
  // const [isMobile] = useState(isMobileFromRdd());
  const theme = useTheme();
  const isMobile = useMediaQuery(theme.breakpoints.down("xs"));
  return isMobile;
}
|
<reponame>firmanjabar/restaurant-app<gh_stars>10-100
import 'regenerator-runtime';
import './components/app-bar';
import './components/hero';
import './components/footer-ku';
import 'lazysizes';
import 'lazysizes/plugins/parent-fit/ls.parent-fit';
import '../styles/main.css';
import '../styles/responsive.css';
import '../styles/form.css';
import '../styles/like.css';
import '../styles/spinner.css';
import App from './views/App';
import swRegister from './utils/sw-register';
// import WebSocketInitiator from './utils/websocket-initiator';
// import CONFIG from './global/config';
// Wire up the SPA shell: nav button/drawer, the main content slot, and the
// dark-mode toggle seeded from the persisted theme preference.
const app = new App({
  button: document.querySelector('.menu'),
  drawer: document.querySelector('.nav-list'),
  content: document.querySelector('#main-content'),
  toggle: document.querySelector('#dark-mode'),
  currentTheme: localStorage.getItem('theme'),
});

// Re-render on hash navigation and scroll back to the top of the container.
window.addEventListener('hashchange', () => {
  document.querySelector('.container').scrollIntoView();
  app.renderPage();
});

// Initial render plus service-worker registration once the DOM is ready.
window.addEventListener('DOMContentLoaded', () => {
  app.renderPage();
  swRegister();
  // WebSocketInitiator.init(CONFIG.WEB_SOCKET_SERVER);
});
|
<filename>Dungeon_Offline_backend/db/migrate/20190819142539_create_world_characters.rb
# Migration: join table linking characters to the worlds they appear in.
class CreateWorldCharacters < ActiveRecord::Migration[5.2]
  def change
    create_table :world_characters do |t|
      t.integer :character_id
      t.integer :world_id
      t.timestamps
    end
  end
end
|
import re
def count_word_occurrences(file_path):
    """Count case-insensitive word occurrences in a text file.

    Words are maximal runs of word characters (``\\b\\w+\\b``); the file is
    streamed line by line so large files don't need to fit in memory.

    :param file_path: path of the text file to scan
    :return: dict mapping lower-cased word -> occurrence count
    :raises OSError: if the file cannot be opened
    """
    word_counts = {}
    with open(file_path, 'r') as file:
        for line in file:
            for word in re.findall(r'\b\w+\b', line.lower()):
                # dict.get with a default replaces the original if/else counter.
                word_counts[word] = word_counts.get(word, 0) + 1
    return word_counts
|
def largest_string(list_of_strings):
    """Return the longest string in the list; '' when the list is empty.

    On length ties the *last* occurrence wins (the original used >=, which
    keeps overwriting on equal lengths), so plain max(key=len) — which keeps
    the first — would not be equivalent.
    """
    best = ""
    for candidate in list_of_strings:
        best = candidate if len(candidate) >= len(best) else best
    return best
|
/**
 * Minimal OAuth helper.
 */
export class OAuthProvider {
  constructor() {
  }

  /**
   * Serialize an object's own enumerable properties into a URL query string.
   * Keys and values are percent-encoded; the result always starts with '?'
   * (even for an empty object, matching the original behavior).
   */
  obj_to_query(obj) {
    const pairs = [];
    for (const key of Object.keys(obj)) {
      pairs.push(encodeURIComponent(key) + '=' + encodeURIComponent(obj[key]));
    }
    return '?' + pairs.join('&');
  }
}
|
/**
 * Evaluates space-separated postfix (RPN) arithmetic expressions,
 * e.g. "3 4 +" evaluates to 7.
 */
public class ExpressionEvaluator {

    /**
     * @param expression tokens separated by single spaces; operands are ints,
     *                   operators are + - * / (integer division)
     * @return the value of the expression
     * @throws NumberFormatException if a token is neither operator nor int
     * @throws java.util.EmptyStackException if the expression is malformed
     */
    public static int eval(String expression) {
        String[] tokens = expression.split(" ");
        // Fully qualified: the original used Stack with no visible import.
        java.util.Stack<Integer> stack = new java.util.Stack<>();
        for (String token : tokens) {
            if (token.equals("+") || token.equals("-") || token.equals("*") || token.equals("/")) {
                int b = stack.pop();
                int a = stack.pop();
                int result;
                switch (token) {
                    case "+":
                        result = a + b;
                        break;
                    case "-":
                        result = a - b;
                        break;
                    case "*":
                        result = a * b;
                        break;
                    case "/":
                        result = a / b;
                        break;
                    default:
                        throw new IllegalStateException("unreachable operator: " + token);
                }
                stack.push(result);
            } else {
                stack.push(Integer.parseInt(token));
            }
        }
        // Bug fix: the original returned the last operator's result, which was
        // 0 for a single-operand expression like "5"; the value of the
        // expression is the remaining stack top.
        return stack.pop();
    }
}
|
<reponame>snowcrystall/gitaly_emg
package cgroups
import (
"fmt"
"hash/crc32"
"os"
"strings"
"github.com/containerd/cgroups"
specs "github.com/opencontainers/runtime-spec/specs-go"
"gitlab.com/gitlab-org/gitaly/v14/internal/command"
cgroupscfg "gitlab.com/gitlab-org/gitaly/v14/internal/gitaly/config/cgroups"
)
// CGroupV1Manager is the manager for cgroups v1
type CGroupV1Manager struct {
cfg cgroupscfg.Config
hierarchy func() ([]cgroups.Subsystem, error)
}
// newV1Manager builds a cgroups-v1 manager whose hierarchy is rooted at the
// configured mountpoint.
func newV1Manager(cfg cgroupscfg.Config) *CGroupV1Manager {
	return &CGroupV1Manager{
		cfg: cfg,
		hierarchy: func() ([]cgroups.Subsystem, error) {
			return defaultSubsystems(cfg.Mountpoint)
		},
	}
}
// Setup creates one cgroup per configured shard (cfg.Count), applying the
// configured CPU shares and memory limit to each.
func (cg *CGroupV1Manager) Setup() error {
	resources := &specs.LinuxResources{}
	if cg.cfg.CPU.Enabled {
		resources.CPU = &specs.LinuxCPU{
			Shares: &cg.cfg.CPU.Shares,
		}
	}
	if cg.cfg.Memory.Enabled {
		resources.Memory = &specs.LinuxMemory{
			Limit: &cg.cfg.Memory.Limit,
		}
	}
	for i := 0; i < int(cg.cfg.Count); i++ {
		_, err := cgroups.New(cg.hierarchy, cgroups.StaticPath(cg.cgroupPath(i)), resources)
		if err != nil {
			return fmt.Errorf("failed creating cgroup: %w", err)
		}
	}
	return nil
}
// AddCommand assigns cmd's process to one of the shard cgroups, chosen by
// hashing the command's arguments (stable for identical command lines).
func (cg *CGroupV1Manager) AddCommand(cmd *command.Command) error {
	checksum := crc32.ChecksumIEEE([]byte(strings.Join(cmd.Args(), "")))
	groupID := uint(checksum) % cg.cfg.Count
	cgroupPath := cg.cgroupPath(int(groupID))
	control, err := cgroups.Load(cg.hierarchy, cgroups.StaticPath(cgroupPath))
	if err != nil {
		return fmt.Errorf("failed loading %s cgroup: %w", cgroupPath, err)
	}
	if err := control.Add(cgroups.Process{Pid: cmd.Pid()}); err != nil {
		// Command could finish so quickly before we can add it to a cgroup, so
		// we don't consider it an error.
		if strings.Contains(err.Error(), "no such process") {
			return nil
		}
		return fmt.Errorf("failed adding process to cgroup: %w", err)
	}
	return nil
}
// Cleanup deletes this process's cgroup (and with it the shard cgroups
// created by Setup underneath it).
func (cg *CGroupV1Manager) Cleanup() error {
	processCgroupPath := cg.currentProcessCgroup()
	control, err := cgroups.Load(cg.hierarchy, cgroups.StaticPath(processCgroupPath))
	if err != nil {
		return fmt.Errorf("failed loading cgroup %s: %w", processCgroupPath, err)
	}
	if err := control.Delete(); err != nil {
		return fmt.Errorf("failed cleaning up cgroup %s: %w", processCgroupPath, err)
	}
	return nil
}
// cgroupPath returns the path of shard groupID under this process's cgroup.
// NOTE(review): currentProcessCgroup() already begins with "/", so this format
// yields a double slash ("//<root>/gitaly-<pid>/shard-<n>") — confirm the
// cgroups library normalizes the path.
func (cg *CGroupV1Manager) cgroupPath(groupID int) string {
	return fmt.Sprintf("/%s/shard-%d", cg.currentProcessCgroup(), groupID)
}
// currentProcessCgroup returns the per-process cgroup path, namespaced under
// the configured hierarchy root by this process's PID.
func (cg *CGroupV1Manager) currentProcessCgroup() string {
	return fmt.Sprintf("/%s/gitaly-%d", cg.cfg.HierarchyRoot, os.Getpid())
}
// defaultSubsystems returns the cgroup-v1 subsystems managed here (memory and
// cpu), rooted at the given mountpoint.
func defaultSubsystems(root string) ([]cgroups.Subsystem, error) {
	subsystems := []cgroups.Subsystem{
		cgroups.NewMemory(root),
		cgroups.NewCpu(root),
	}
	return subsystems, nil
}
|
#!/usr/bin/env bash
# Download the pretrained checkpoint into <repo root>/checkpoints.
set -euo pipefail

# Fix: all expansions were unquoted, breaking on paths containing spaces,
# and failures (mkdir/cd/wget) went unchecked.
CHECKPOINT_DIR="$(dirname "$0")/../../checkpoints"
mkdir -p "$CHECKPOINT_DIR"
cd "$CHECKPOINT_DIR"
# --content-disposition names the file from the server header instead of '?dl=1'.
wget --content-disposition 'https://cloud.tsinghua.edu.cn/f/9ea515945bb2452696e8/?dl=1'
echo "downloaded the checkpoint and putting it in: $CHECKPOINT_DIR"
|
/**
 * Split an array into two halves at the floor midpoint.
 * Odd-length arrays put the extra element in the second half.
 * Returns [firstHalf, secondHalf] as new arrays (input untouched).
 */
const splitIntoTwoParts = (arr) => {
  const mid = Math.floor(arr.length / 2);
  return [arr.slice(0, mid), arr.slice(mid)];
};
splitIntoTwoParts([5, 12, 18, 25]); // [[5, 12], [18, 25]]
|
/**
* The GUI elements for the OGC-Server plugin.
*/
package io.opensphere.server.display;
|
#!/usr/bin/env bash
# https://github.com/raycast/script-commands
# Installs a curated set of Raycast script-commands by cloning the upstream
# repo (if needed) and copying the selected scripts into _enabled-commands.

# dotfiles folder
DOTFILES_FOLDER="$(pwd | grep -o '.*dotfiles')"

# Load helper functions
#shellcheck source=/dev/null
source "$DOTFILES_FOLDER/lib/functions"

SCRIPT_COMMANDS_FOLDER="$HOME"/Documents/Thiago/Repos/script-commands

# Clone Scripts repository
if [ ! -d "$SCRIPT_COMMANDS_FOLDER" ]; then
  git clone git@github.com:raycast/script-commands.git "$SCRIPT_COMMANDS_FOLDER"
fi

ENABLE_SCRIPTS_FOLDER_DST="$SCRIPT_COMMANDS_FOLDER"/_enabled-commands
ENABLE_SCRIPTS_FOLDER_SRC="$SCRIPT_COMMANDS_FOLDER"/commands

# Relative paths (under commands/) of the scripts to enable.
scripts=(
  browsing/shorten-url.sh
  communication/emojis/emojis-search.sh
  developer-utils/base64-decode-input.sh
  developer-utils/base64-encode-input.sh
  developer-utils/prettify-json.sh
  developer-utils/brew/brew-install.sh
  developer-utils/brew/brew-doctor.sh
  developer-utils/brew/brew-outdated.sh
  developer-utils/brew/brew-update.sh
  developer-utils/brew/brew-upgrade.sh
  google-maps/google-maps.sh
  navigation/open-documents.sh
  navigation/open-downloads.sh
  navigation/open-home.sh
  web-searches/giphy.sh
  web-searches/google-search.sh
  web-searches/youtube.sh
)
#######################################
# Copy each enabled script from the commands folder into _enabled-commands.
# Globals: scripts, ENABLE_SCRIPTS_FOLDER_SRC, ENABLE_SCRIPTS_FOLDER_DST
#######################################
install() {
  local script
  for script in "${scripts[@]}"; do
    info "Copying script $script"
    cp "$ENABLE_SCRIPTS_FOLDER_SRC/$script" "$ENABLE_SCRIPTS_FOLDER_DST"
  done
}
# Entry point; output (stdout+stderr) is appended to the dotfiles log.
execute() {
  install
}

execute 2>&1 | tee -a "$DOTFILE_LOG_FILE"
|
# Install the Tkinter bindings for Python 3 (needed for tkinter-based GUIs).
sudo apt-get install python3-tk
|
<reponame>adarshaacharya/csoverflow
import { AuthActions, AuthState, AuthActionTypes } from './auth.types';
const initialState: AuthState = {
token: localStorage.getItem('cstoken'),
isAuthenticated: null,
loading: false,
user: null,
};
// Reducer for authentication state: tracks the JWT, auth status, loading flag
// and the loaded user profile.
// NOTE(review): "LOGIN_SUCESS" (sic) mirrors the misspelled member of
// AuthActionTypes — fix it in auth.types first, not here.
export const authReducer = (state: AuthState = initialState, action: AuthActions): AuthState => {
  switch (action.type) {
    case AuthActionTypes.USER_LOADED:
      return {
        ...state,
        isAuthenticated: true,
        loading: false,
        user: action.payload,
      };
    case AuthActionTypes.REGISTER_SUCCESS:
      return {
        ...state,
        token: action.payload, // token will be set on local storage by create-store.ts subscription listener
        isAuthenticated: true,
        loading: false,
      };
    case AuthActionTypes.LOGIN_SUCESS:
      return {
        ...state,
        token: action.payload,
        isAuthenticated: true,
        loading: false,
      };
    // Both failure and explicit logout clear all auth state.
    case AuthActionTypes.AUTH_ERROR:
    case AuthActionTypes.LOGOUT:
      return {
        ...state,
        token: null,
        isAuthenticated: false,
        loading: false,
        user: null,
      };
    case AuthActionTypes.SET_LOADING:
      return {
        ...state,
        loading: true,
      };
    default:
      return state;
  }
};
|
def downgrade(engine_name):
    """Dispatch to the engine-specific ``downgrade_<engine_name>`` function.

    Raises KeyError if no matching function exists in this module.
    """
    globals()[f"downgrade_{engine_name}"]()
def downgrade_registrar():
    """Schema downgrade for the 'registrar' database engine (not yet implemented)."""
    # Implement the schema downgrade for the 'registrar' database engine
    # Example: Revert changes made by upgrade_registrar function
    pass
def downgrade_analytics():
    """Schema downgrade for the 'analytics' database engine (not yet implemented)."""
    # Implement the schema downgrade for the 'analytics' database engine
    # Example: Revert changes made by upgrade_analytics function
    pass
|
package io.casperlabs.casper
import cats.Monad
import cats.syntax.functor._
import io.casperlabs.catscontrib.TaskContrib.TaskOps
import io.casperlabs.shared.LogStub
import monix.execution.Scheduler
import monix.eval.Task
import org.scalatest.{Assertion, Assertions, Matchers}
import org.scalactic.source
// Test helpers: `shouldBeF` lifts a ScalaTest `shouldBe` assertion into an
// effect type F, and `effectTest` runs a monix Task to completion on the
// given scheduler for use inside synchronous tests.
object scalatestcontrib extends Matchers with Assertions {
  implicit class AnyShouldF[F[_]: Monad, T](leftSideValue: F[T])(implicit pos: source.Position) {
    def shouldBeF(value: T): F[Assertion] =
      leftSideValue.map(_ shouldBe value)
  }

  def effectTest[T](f: Task[T])(implicit scheduler: Scheduler): T =
    f.unsafeRunSync(scheduler)
}
|
package com.napier.sem.queries;
import com.napier.sem.objects.City;
import com.napier.sem.objects.Continent;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
/**
 * Read-only queries over the world database's city/country tables.
 *
 * Each query method returns a populated list on success and null on failure
 * (errors are printed, matching the original contract).
 *
 * Fixes vs. original: every method created an unused {@code Statement} and
 * never closed its {@code PreparedStatement}/{@code ResultSet} (resource
 * leaks); all statements now use try-with-resources, and the duplicated
 * row-mapping code is factored into private helpers.
 */
public class CityQueries {

    private Connection conn;

    public CityQueries(Connection conn)
    {
        this.conn = conn;
    }

    /** Maps the current row of an (ID, Name, Country, District, Population) result set to a City. */
    private static City mapFullCityRow(ResultSet resultSet) throws Exception {
        City city = new City();
        city.setID(resultSet.getInt("city.ID"));
        city.setName(resultSet.getString("city.Name"));
        city.setCountry(resultSet.getString("country.Name"));
        city.setDistrict(resultSet.getString("city.District"));
        city.setPopulation(resultSet.getInt("city.Population"));
        return city;
    }

    /** Maps the current row of a (Name, Population) result set to a City. */
    private static City mapNamePopulationRow(ResultSet resultSet) throws Exception {
        City city = new City();
        city.setName(resultSet.getString("city.Name"));
        city.setPopulation(resultSet.getInt("city.Population"));
        return city;
    }

    /**
     * Looks up cities whose name matches the given SQL LIKE pattern.
     *
     * @param name city name (LIKE pattern)
     * @return list of matching cities, or null if the query failed
     * @throws IllegalArgumentException if name is null or empty
     */
    public List<City> getCityPopulation(String name)
    {
        if (name == null || name.isEmpty())
        {
            throw new IllegalArgumentException("Provided city is null or empty");
        }
        String query =
            "SELECT city.ID, city.Name, city.Population "
            + "FROM city "
            + "WHERE city.Name LIKE ? ";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query))
        {
            preparedStatement.setString(1, name);
            try (ResultSet resultSet = preparedStatement.executeQuery())
            {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    City city = new City();
                    city.setID(resultSet.getInt("city.ID"));
                    city.setName(resultSet.getString("city.Name"));
                    city.setPopulation(resultSet.getInt("city.Population"));
                    cities.add(city);
                }
                return cities;
            }
        }
        catch (Exception e)
        {
            System.out.println(e.getMessage());
            System.out.println("Failed to get city population");
        }
        return null;
    }

    /**
     * @param district district name (LIKE pattern)
     * @return cities in the district, largest population first, or null on failure
     * @throws IllegalArgumentException if district is null or empty
     */
    public List<City> getCitiesInDistrictByLargestToSmallestPopulation(String district)
    {
        if (district == null || district.isEmpty()) {
            throw new IllegalArgumentException("District is null or empty");
        }
        String query =
            "SELECT city.ID, city.Name, country.Name, city.District, city.Population "
            + "FROM city JOIN country ON city.CountryCode = country.Code "
            + "WHERE District LIKE ? "
            + "ORDER BY city.Population DESC";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, district);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapFullCityRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get cities in district");
        }
        return null;
    }

    /**
     * @param country country name (LIKE pattern)
     * @return cities in the country, largest population first, or null on failure
     * @throws IllegalArgumentException if country is null or empty
     */
    public List<City> getCitiesInCountryByLargestToSmallestPopulation(String country)
    {
        if (country == null || country.isEmpty()) {
            throw new IllegalArgumentException("District is null or empty");
        }
        String query =
            "SELECT city.ID, city.Name, country.Name, city.District, city.Population "
            + "FROM city JOIN country ON city.CountryCode = country.Code "
            + "WHERE country.Name LIKE ? "
            + "ORDER BY city.Population DESC";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, country);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapFullCityRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get cities in country");
        }
        return null;
    }

    /**
     * @param continent continent name
     * @param n number of cities to return; must be less than the total count
     * @return the top n most populated cities in the continent
     */
    public List<City> getTopNPopulatedCitiesInAContinentDescending(String continent, int n)
    {
        List<City> result = getCitiesInAContinentDescending(continent);
        // NOTE(review): rejects n == size and NPEs when the query failed
        // (result == null) — preserved from the original; confirm intent.
        if (n >= result.size()) {
            throw new IllegalArgumentException("The provided number is invalid. The number of cities in " + continent + " is " + result.size());
        }
        return result.subList(0, n);
    }

    /**
     * Helper method to get cities in a continent organised by largest population to smallest.
     * @param continent continent name (LIKE pattern)
     * @return list of cities, or null on failure
     */
    public List<City> getCitiesInAContinentDescending(String continent)
    {
        if (continent == null || continent.isEmpty()) {
            throw new IllegalArgumentException("Continent is null or empty");
        }
        String query =
            "SELECT city.ID, city.Name, country.Name, city.District, city.Population "
            + "FROM city JOIN country ON city.CountryCode = country.Code "
            + "WHERE country.Continent LIKE ? "
            + "ORDER BY city.Population DESC";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, continent);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapFullCityRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get cities in a continent");
        }
        return null;
    }

    /**
     * Helper method: all cities in the world, largest population first.
     * @return a list of cities, or null on failure
     */
    public List<City> getCitiesInTheWorld()
    {
        String query =
            "SELECT city.ID, city.Name, country.Name, city.District, city.Population "
            + "FROM city JOIN country ON city.CountryCode = country.Code "
            + "ORDER BY city.Population DESC";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapFullCityRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get cities in the world");
        }
        return null;
    }

    /**
     * Get top n populated cities in the world.
     * @param n number of cities; must be less than the total count
     * @return a list of cities
     */
    public List<City> getTopNPopulatedCitiesInTheWorld(int n)
    {
        List<City> result = getCitiesInTheWorld();
        // NOTE(review): same n == size / null-result caveat as the continent variant.
        if (n >= result.size()) {
            throw new IllegalArgumentException("The provided number is invalid. The number of cities in the world is " + result.size());
        }
        return result.subList(0, n);
    }

    /**
     * Cities within a continent ordered by population (name + population only).
     * @param continent continent name (LIKE pattern)
     * @return list of cities, or null on failure
     */
    public List<City> getCitiesWithinContinentByPopulation(String continent)
    {
        if (continent == null || continent.isEmpty()) {
            throw new IllegalArgumentException("Continent is null or empty");
        }
        String query =
            "SELECT city.Name, city.Population, country.Continent "
            + "FROM city "
            + "JOIN country ON city.CountryCode = country.Code "
            + "WHERE country.Continent LIKE ? "
            + "ORDER BY city.Population DESC";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, continent);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapNamePopulationRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get city populations");
        }
        return null;
    }

    /**
     * Top N cities by population in a continent.
     * NOTE(review): this query joins on country.Capital, so it actually
     * returns *capital* cities only — confirm whether that is intended.
     * @param N maximum number of rows
     * @param continent continent name (LIKE pattern)
     * @return list of cities, or null on failure
     */
    public List<City> getTopNCitiesByPopulationInContinent(int N, String continent)
    {
        if (continent == null || continent.isEmpty()) {
            throw new IllegalArgumentException("Continent is null or empty");
        } else if (N == 0) {
            throw new IllegalArgumentException("N is 0");
        }
        String query =
            "SELECT city.Name, city.Population "
            + "FROM city "
            + "JOIN country ON city.ID = country.Capital "
            + "WHERE country.Continent LIKE ? "
            + "ORDER BY city.Population DESC "
            + "LIMIT ?";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, continent);
            preparedStatement.setInt(2, N);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<City> cities = new ArrayList<City>();
                while (resultSet.next()) {
                    cities.add(mapNamePopulationRow(resultSet));
                }
                return cities;
            }
        } catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get city populations");
        }
        return null;
    }
}
|
def compare_lists(lst1, lst2):
    """Return True when both sequences have equal length and equal elements.

    Element-wise comparison (not ``lst1 == lst2``) is kept deliberately: the
    original treats e.g. a list and a tuple with the same elements as equal,
    which plain ``==`` on containers would not.
    """
    if len(lst1) != len(lst2):
        return False
    return all(a == b for a, b in zip(lst1, lst2))
|
import json
def extract_state_and_msg(response):
    """Parse a JSON response string and return its 'state' and 'msg' fields.

    Missing keys yield None (dict.get semantics).

    :param response: JSON document as a string
    :return: (state, msg) tuple
    :raises json.JSONDecodeError: if response is not valid JSON
    """
    payload = json.loads(response)
    return payload.get('state'), payload.get('msg')
|
# Provision an Ubuntu host with the AWS CodeDeploy agent and the AWS CLI v2.
sudo apt-get update

# Install CodeDeploy Agent
sudo apt-get install wget ruby-full -y
cd /home/ubuntu
# NOTE(review): placeholder URL — substitute the regional CodeDeploy bucket,
# e.g. https://aws-codedeploy-us-east-1.s3.us-east-1.amazonaws.com/latest/install
wget https://bucket-name.s3.region-identifier.amazonaws.com/latest/install
chmod +x ./install
# 'auto' installs the latest agent version; installer output goes to /tmp/logfile.
sudo ./install auto > /tmp/logfile
sudo service codedeploy-agent status
rm install

# Install AWS CLI
curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
unzip awscliv2.zip
sudo ./aws/install
rm awscliv2.zip
rm -rf aws
|
#!/bin/bash
# Pull the latest costaclub-web sources, sync translations, rebuild, and copy
# core resources into the most recently modified release folder under /www.
#
# Fix: the script used bash arrays under a #!/bin/sh shebang (fails on dash),
# and never checked that the cd succeeded before pulling/building.
cd ../costaclub-web || exit 1
# SECURITY NOTE(review): this URL embeds a hard-coded access credential.
# Move it into a git credential helper or environment variable and rotate it.
git pull https://nathan-costa:PDZAKT3b4@github.com/costacruise/costaclub-web.git master
phraseapp pull
# The newest entry in /www is assumed to be the current release directory.
# (Parsing `ls -t` is fragile but kept for behavior parity — TODO confirm
# /www entries never contain whitespace.)
releases=( $(ls -t /www) )
gulp
# \cp bypasses any cp alias (e.g. cp -i) so the copy never prompts.
sudo \cp --verbose src/resources/core/*.* "/www/${releases[0]}/resources/core"
|
import React from "react"
// Presentational component: renders the newsletter tagline text in its
// styled wrapper.
const NewsletterTagLine = ({ content }) => (
  <div className="c-newsletter__tagline">{content}</div>
)
export default NewsletterTagLine
|
<gh_stars>1-10
package tftest
import (
"context"
"encoding/json"
"fmt"
"io"
"log"
"os"
"os/exec"
"os/signal"
"path"
"sync"
"syscall"
"testing"
"golang.org/x/sys/unix"
)
const (
tfstateFilename = "terraform.tfstate"
planFilename = "plan.tf"
)
// TerraformPluginCacheDir is where the plugins we download are kept. See
// InitCache() (called on boot) and CleanCache(). You can also override it
// before calling anything to move it if it's a problem.
var TerraformPluginCacheDir = "/tmp/tftest/plugin_cache"
func init() {
InitCache()
}
// InitCache creates the cache directory. It does not care about errors.
func InitCache() {
os.MkdirAll(TerraformPluginCacheDir, 0700)
}
// CleanCache cleans our plugin cache by removing it. Put this in TestMain in
// your tests.
func CleanCache() {
os.RemoveAll(TerraformPluginCacheDir)
}
// State is the parsed state from terraform apply actions.
type State map[string]interface{}
// Harness is the entrypoint into the tftest system
type Harness struct {
	terraformPath string             // resolved terraform binary path
	testingT      *testing.T
	state         State              // will be nil until after apply
	tfstatePath   string
	plandir       string             // temp dir holding the plan copy and state
	commandLock   sync.Mutex         // serializes terraform invocations
	commandCancel context.CancelFunc // cancels the in-flight terraform command
	sigCancel     context.CancelFunc
}
// New creates a new tftest harness. The terraform binary is taken from
// $TFTEST_TERRAFORM when set, otherwise resolved from $PATH (fatal if absent).
func New(t *testing.T) *Harness {
	var h Harness
	h.terraformPath = os.Getenv("TFTEST_TERRAFORM")
	if h.terraformPath == "" {
		var err error
		h.terraformPath, err = exec.LookPath("terraform")
		if err != nil {
			t.Fatal(err)
		}
	}
	h.testingT = t
	return &h
}
func (h *Harness) t() *testing.T {
return h.testingT
}
// tf runs a single terraform command in plandir under the command lock,
// canceling any command still in flight from a previous call. Combined
// stdout/stderr is always logged through the test (shown with go test -v).
func (h *Harness) tf(plandir string, command ...string) error {
    h.commandLock.Lock()
    defer h.commandLock.Unlock()

    // Cancel the previous command's context, if any, before starting anew.
    if h.commandCancel != nil {
        h.commandCancel()
    }

    var ctx context.Context
    ctx, h.commandCancel = context.WithCancel(context.Background())

    // FIXME stream output with pipes
    cmd := exec.CommandContext(ctx, h.terraformPath, command...)
    cmd.Dir = plandir
    // Share the plugin cache across runs to avoid re-downloading providers.
    cmd.Env = append(os.Environ(), fmt.Sprintf("TF_PLUGIN_CACHE_DIR=%s", TerraformPluginCacheDir))

    out, err := cmd.CombinedOutput()
    h.t().Log(string(out)) // basically, always log since people can turn it off by not supplying -v
    return err
}
// Apply the harness and resources with terraform. Apply additionally sets up a
// Cleanup hook to teardown the environment when the test tears down, and
// parses the state (see State()).
//
// The cleanup hook is not installed when NO_CLEANUP=1 is set in the environment.
func (h *Harness) Apply(planfile string) {
    h.plandir = h.t().TempDir() // out dir for state; will be reaped automatically

    // Copy the caller's plan into the temp dir under the canonical name.
    source, err := os.Open(planfile)
    if err != nil {
        h.t().Fatalf("Could not open plan file: %v", err)
    }
    defer source.Close()

    // Use the planFilename constant rather than repeating the "plan.tf"
    // literal, so the name cannot drift from the rest of the package.
    target, err := os.Create(path.Join(h.plandir, planFilename))
    if err != nil {
        h.t().Fatalf("Could not open target file for writing: %v", err)
    }
    defer target.Close()

    if _, err := io.Copy(target, source); err != nil {
        h.t().Fatalf("Could not copy source to target: %v", err)
    }

    // Register teardown before apply so partially-created resources are
    // still destroyed on failure.
    if os.Getenv("NO_CLEANUP") == "" {
        h.t().Cleanup(h.Destroy)
    }

    if err := h.tf(h.plandir, fmt.Sprintf("-chdir=%s", h.plandir), "init"); err != nil {
        h.t().Fatalf("while initializing terraform: %v", err)
    }

    if err := h.tf(h.plandir, "apply", "-auto-approve"); err != nil {
        h.t().Fatalf("while applying terraform: %v", err)
    }

    h.readState()
}
// readState parses the terraform.tfstate file in the plan directory into
// h.state, failing the test on any I/O or JSON error.
func (h *Harness) readState() {
    f, err := os.Open(path.Join(h.plandir, tfstateFilename))
    if err != nil {
        h.t().Fatalf("while reading tfstate: %v", err)
    }
    defer f.Close()

    // Start from a fresh map so keys from a previous apply never linger.
    h.state = State{}

    if err := json.NewDecoder(f).Decode(&h.state); err != nil {
        h.t().Fatalf("while decoding tfstate JSON: %v", err)
    }
}
// Refresh applies terraform update to an existing tftest plandir.
func (h *Harness) Refresh() {
    // Refresh only makes sense once Apply has created the plan directory.
    if h.plandir == "" {
        h.t().Fatal("run Apply() first!")
    }

    if err := h.tf(h.plandir, "refresh"); err != nil {
        h.t().Fatalf("while refresh terraform: %v", err)
    }

    h.readState()
}
// Destroy the harness and resources with terraform. Discard this struct after calling this method.
func (h *Harness) Destroy() {
    // Guard against being called before Apply (mirrors Refresh): with an
    // empty plandir, `terraform destroy` would otherwise run in the
    // process's working directory.
    if h.plandir == "" {
        h.t().Fatal("run Apply() first!")
    }
    if err := h.tf(h.plandir, "destroy", "-auto-approve"); err != nil {
        h.t().Fatalf("while destroying resources with terraform: %v", err)
    }
}
// State corresponds to the terraform state. This is ingested on each "apply"
// step, and will be nil until apply is called the first time.
func (h *Harness) State() State {
    return h.state
}

// PlanDir returns the path to the plan and state, which may be useful in
// certain failure situations.
func (h *Harness) PlanDir() string {
    return h.plandir
}
// HandleSignals handles SIGINT and SIGTERM to ensure that containers get
// cleaned up. It is expected that no other signal handler will be installed
// afterwards. If the forward argument is true, it will forward the signal back
// to its own process after deregistering itself as the signal handler,
// allowing your test suite to exit gracefully. Set it to false to stay out of
// your way.
//
// taken from https://github.com/erikh/duct
func (h *Harness) HandleSignals(forward bool) {
    ctx, cancel := context.WithCancel(context.Background())
    h.sigCancel = cancel // lets the harness tear this goroutine down later

    // Buffered (2) so a second signal arriving during teardown is not lost.
    sigChan := make(chan os.Signal, 2)

    go func() {
        select {
        case sig := <-sigChan:
            log.Println("Signalled; will destroy terraform now")
            h.Destroy()
            signal.Stop(sigChan) // stop letting us get notified
            if forward {
                // Re-raise the original signal at ourselves so the default
                // disposition (normally process exit) takes effect.
                unix.Kill(os.Getpid(), sig.(syscall.Signal))
            }
        case <-ctx.Done():
            signal.Stop(sigChan)
        }
    }()

    signal.Notify(sigChan, unix.SIGINT, unix.SIGTERM)
}
|
#!/bin/sh
# Normalize line endings on the driver files, then pack them into the
# venus-data archive consumed by the Venus OS installer.
# Single source of truth for the file list: it was previously duplicated
# verbatim between the dos2unix and tar commands, which made it easy for the
# two lists to drift apart. The filenames contain no whitespace, so the
# unquoted word-splitting of $FILES below is intentional and POSIX-sh safe.
FILES="rc.local conf/serial-starter.d etc/dbus-serialbattery/service/run etc/dbus-serialbattery/service/log/run etc/dbus-serialbattery/LICENSE etc/dbus-serialbattery/README.md etc/dbus-serialbattery/start-serialbattery.sh etc/dbus-serialbattery/disabledriver.sh etc/dbus-serialbattery/installlocal.sh etc/dbus-serialbattery/installrelease.sh etc/dbus-serialbattery/dbus-serialbattery.py etc/dbus-serialbattery/dbushelper.py etc/dbus-serialbattery/battery.py etc/dbus-serialbattery/utils.py etc/dbus-serialbattery/lltjbd.py etc/dbus-serialbattery/daly.py etc/dbus-serialbattery/ant.py etc/dbus-serialbattery/util_max17853.py etc/dbus-serialbattery/mnb.py etc/dbus-serialbattery/jkbms.py etc/dbus-serialbattery/sinowealth.py etc/dbus-serialbattery/renogy.py"
dos2unix $FILES
tar -czvf venus-data.tar.gz --mode='a+rwX' $FILES
|
# Create Resource Group for Terraform Remote State
groupName='demo-tfstate'
groupLocation='Australia East'
group=$(az group create --name "${groupName}" --location "${groupLocation}" --verbose)

# Create Storage Account for Terraform Remote State.
# Name is 12 random lowercase alphanumerics (Azure requires 3-24 chars, a-z0-9).
accountName=$(tr -dc 'a-z0-9' < /dev/urandom | fold -w 12 | head -n 1)
storage=$(az storage account create \
    --name "${accountName}" \
    --resource-group "$(echo "$group" | jq -r .name)" \
    --location "$(echo "$group" | jq -r .location)" \
    --sku Standard_LRS \
)

# Create container for Terraform Remote State. The account key is fetched
# once into a variable instead of being inlined as a triple-nested command
# substitution, and every expansion is quoted.
containerName='tfstate'
accountKey=$(az storage account keys list --resource-group "$(echo "$group" | jq -r .name)" --account-name "$(echo "$storage" | jq -r .name)" | jq -r '.[0].value')
az storage container create \
    --name "${containerName}" \
    --account-name "$(echo "$storage" | jq -r .name)" \
    --account-key "$accountKey"
|
var redback = require('../');
exports.createClient = function (options) {
return redback.createClient('redis://localhost/11', options);
};
|
<filename>tailwind.config.js
module.exports = {
theme: {
extend: {
colors: {
primary: {
lighter: '#fecdd3',
default: '#f43f5e',
darker: '#be123c',
50: '#fff1f2',
100: '#ffe4e6',
200: '#fecdd3',
300: '#fda4af',
400: '#fb7185',
500: '#f43f5e',
600: '#e11d48',
700: '#be123c',
800: '#9f1239',
900: '#881337'
},
colors: {
transparent: 'transparent',
current: 'currentColor',
'white': '#ffffff'
}
}
}
},
variants: {},
plugins: []
};
|
import datetime
import tempfile
import json
from pathlib import Path
from telegram.client import Telegram
def send_telegram_message(config_file):
    """Load settings from ``config_file``, send the configured message via
    Telegram, and append a timestamped entry to a log file in the temp dir.
    """
    # Load the JSON configuration.
    with open(config_file, 'r') as file:
        config = json.load(file)

    # Unpack the required settings (missing keys become None).
    required = ('api_id', 'api_hash', 'phone_number', 'chat_id', 'message')
    api_id, api_hash, phone_number, chat_id, message = (config.get(key) for key in required)

    # Refuse to run with an incomplete configuration.
    if any(value is None for value in (api_id, api_hash, phone_number, chat_id, message)):
        print("Error: Incomplete configuration. Please provide all required values.")
        return

    # Open the Telegram client, connect, and send the message.
    with Telegram(api_id, api_hash, phone_number) as client:
        client.connect()
        sent_message = client.send_message(chat_id, message)

        # Record the send as a timestamped line under the temp directory.
        log_file = Path(tempfile.gettempdir()) / "telegram_log.txt"
        with open(log_file, 'a') as log:
            log.write(f"{datetime.datetime.now()} - Message sent to chat {chat_id}: {sent_message.text}\n")


if __name__ == "__main__":
    config_file = "telegram_config.json"
    send_telegram_message(config_file)
|
<filename>open-sphere-base/mantle/src/main/java/io/opensphere/mantle/crust/DataUtil.java
package io.opensphere.mantle.crust;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicInteger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.data.util.DataModelCategory;
import io.opensphere.core.model.LatLonAlt;
import io.opensphere.core.util.collections.New;
import io.opensphere.mantle.MantleToolbox;
import io.opensphere.mantle.controller.DataGroupController;
import io.opensphere.mantle.data.DataGroupInfo;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.MetaDataInfo;
import io.opensphere.mantle.data.SpecialKey;
import io.opensphere.mantle.data.element.DataElement;
import io.opensphere.mantle.data.element.MetaDataProvider;
import io.opensphere.mantle.data.element.impl.DefaultDataElement;
import io.opensphere.mantle.data.element.impl.DefaultMapDataElement;
import io.opensphere.mantle.data.geom.MapGeometrySupport;
import io.opensphere.mantle.data.geom.impl.SimpleMapPointGeometrySupport;
import io.opensphere.mantle.data.geom.impl.SimpleMapPolylineGeometrySupport;
import io.opensphere.mantle.data.impl.DefaultDataGroupInfo;
import io.opensphere.mantle.data.impl.DefaultMetaDataInfo;
import io.opensphere.mantle.data.impl.specialkey.LatitudeKey;
import io.opensphere.mantle.data.impl.specialkey.LongitudeKey;
/**
* The purpose of this class is to reduce the effort required to deal with the
* complex APIs in Mantle (and, to some extent, Core).
*/
public class DataUtil implements NamedDataSetManager
{
    /** Static counter for unique IDs. */
    private static final AtomicInteger ID_COUNTER = new AtomicInteger(0);

    /** We need this to create a DefaultDataTypeInfo. */
    private Toolbox myToolbox;

    /** For manipulating data groups. */
    private DataGroupController myGroupController;

    /** The name of the root data element in the data type tree. */
    private String myRootName;

    /** All layers created by this DataUtil are subordinate to this root. */
    private DataGroupInfo myRootGroup;

    /** Map of data sets by their unique layer names. */
    private final Map<String, GenericDataSet> myNamedDataSets = new TreeMap<>();

    /**
     * Grant access to the Toolbox and all of its many, many subcomponents.
     *
     * @param toolbox the toolbox through which application state is accessed.
     */
    public void setToolbox(Toolbox toolbox)
    {
        myToolbox = toolbox;
        MantleToolbox mantleTools = myToolbox.getPluginToolboxRegistry().getPluginToolbox(MantleToolbox.class);
        myGroupController = mantleTools.getDataGroupController();
    }

    /**
     * Gets the value of the {@link #myRootName} field.
     *
     * @return the value stored in the {@link #myRootName} field.
     */
    public String getRootName()
    {
        return myRootName;
    }

    /**
     * Create a root DataGroupInfo; all layers created by this DataUtil instance
     * will be subordinate to the root. Note: can only be called after
     * {@link #setToolbox(Toolbox)} is called.
     *
     * @param name as you might suspect
     */
    public void setupRoot(String name)
    {
        myRootName = name;
        myRootGroup = new DefaultDataGroupInfo(true, myToolbox, myRootName, myRootName);
        myGroupController.addRootDataGroupInfo(myRootGroup, this);
    }

    /**
     * This is the main method for the class. Put simply, it takes a set of
     * key-value maps and installs it in Mantle as a Layer.
     *
     * @param cat DataModelCategory
     * @param typeId the layer name
     * @param meta type structure for the layer
     * @param recs the data set
     */
    public void registerData(DataModelCategory cat, String typeId, MetaDataInfo meta, List<Map<String, Object>> recs)
    {
        GenericDataSet data = new GenericDataSet(this, myToolbox, cat, typeId, this::getRootName);
        data.create(myRootGroup, meta);
        // special keys for latitude and longitude may be null; in that case,
        // the records will not be shown on the map
        String latKey = meta.getKeyForSpecialType(LatitudeKey.DEFAULT);
        String lonKey = meta.getKeyForSpecialType(LongitudeKey.DEFAULT);
        data.setMapSupport(latKey, lonKey);
        // TreeSet gives populate a stable, sorted column order.
        data.populate(recs, new TreeSet<>(meta.getKeyNames()));
        addDataSet(data);
    }

    /**
     * {@inheritDoc}
     * <p>
     * Index a new data set by its name within the resident Map (thread-safe).
     * </p>
     *
     * @see io.opensphere.mantle.crust.NamedDataSetManager#addDataSet(io.opensphere.mantle.crust.GenericDataSet)
     */
    @Override
    public void addDataSet(GenericDataSet data)
    {
        synchronized (myNamedDataSets)
        {
            myNamedDataSets.put(data.getName(), data);
        }
    }

    /**
     * {@inheritDoc}
     * <p>
     * Remove a data set from the resident Map (thread-safe).
     * </p>
     *
     * @see io.opensphere.mantle.crust.NamedDataSetManager#removeDataSet(java.lang.String)
     */
    @Override
    public void removeDataSet(String name)
    {
        synchronized (myNamedDataSets)
        {
            myNamedDataSets.remove(name);
        }
    }

    /**
     * Deal with the annoying Mantle API.
     *
     * @param cols a Map of column names to types
     * @param special a Map of column names to SpecialKeys
     * @return all of that as a MetaDataInfo
     */
    public static MetaDataInfo getMetaDataInfo(Map<String, Class<?>> cols, Map<String, SpecialKey> special)
    {
        DefaultMetaDataInfo meta = new DefaultMetaDataInfo();
        // add the columns
        cols.entrySet().forEach(e -> meta.addKey(e.getKey(), e.getValue(), null));
        // add the specials
        special.entrySet().forEach(e -> meta.setSpecialKey(e.getKey(), e.getValue(), null));
        return meta;
    }

    /**
     * Create a DataElement from a Map of String to Object.
     *
     * @param valMap the mapping of keys to values
     * @return all of that as a DataElement
     */
    public static DataElement createDataElement(Map<String, Object> valMap)
    {
        DefaultDataElement defaultDataElement = new DefaultDataElement(ID_COUNTER.getAndIncrement());
        // new DefaultDataElement(ID_COUNTER++, TimeSpan.TIMELESS, sink);
        defaultDataElement.setMetaDataProvider(new SimpleMetaDataProvider(valMap));
        return defaultDataElement;
    }

    /**
     * Create a DataElement from a Map of String to Object. This method also
     * accepts a set of field names, which can include those which do not show
     * up in the map because their values are null.
     *
     * @param valMap the mapping of keys to values
     * @param cols the full set of keys
     * @return all of that as a DataElement
     */
    public static DataElement createDataElement(Map<String, Object> valMap, Set<String> cols)
    {
        DefaultDataElement dde = new DefaultDataElement(ID_COUNTER.getAndIncrement());
        // new DefaultDataElement(ID_COUNTER++, TimeSpan.TIMELESS, sink);
        dde.setMetaDataProvider(new SimpleMetaDataProvider(valMap, cols));
        return dde;
    }

    /**
     * Create a DataElement with support for being shown on the map.
     *
     * @param mdp the data fields
     * @param type the data layer
     * @param firstPoint The first point of the polyline.
     * @param secondPoint The second point in the polyline or null if we should
     *            just create a point.
     * @return all of that as a DataElement
     */
    public static DataElement createMapDataElement(MetaDataProvider mdp, DataTypeInfo type, LatLonAlt firstPoint,
            LatLonAlt secondPoint)
    {
        // One point => point geometry; two points => polyline geometry.
        MapGeometrySupport mgs = null;
        if (secondPoint == null)
        {
            mgs = new SimpleMapPointGeometrySupport(firstPoint);
        }
        else
        {
            mgs = new SimpleMapPolylineGeometrySupport(New.list(firstPoint, secondPoint));
        }
        // Inherit the layer's color; NOTE(review): the null second argument is
        // presumably an "event source" parameter — confirm against the API.
        mgs.setColor(type.getBasicVisualizationInfo().getTypeColor(), null);
        return new DefaultMapDataElement(ID_COUNTER.getAndIncrement(), null, type, mdp, mgs);
    }
}
|
package com.st.map;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Set;
/*
    Requirement:
        Create an ArrayList that stores three elements, each of which is a HashMap
        whose keys and values are both Strings, then iterate over everything.

    Approach:
        1: Create the ArrayList collection
        2: Create the HashMap collections and add their key-value pairs
        3: Add each HashMap to the ArrayList as an element
        4: Iterate over the ArrayList

    Given data:
        First HashMap:
            孙策 - 大乔
            周瑜 - 小乔
        Second HashMap:
            郭靖 - 黄蓉
            杨过 - 小龙女
        Third HashMap:
            令狐冲 - 任盈盈
            林平之 - 岳灵珊
*/
public class ArrayListIncludeHashMapDemo {
    public static void main(String[] args) {
        // Create the ArrayList collection
        ArrayList<HashMap<String, String>> array = new ArrayList<HashMap<String, String>>();

        // Create a HashMap and add its key-value pairs
        HashMap<String, String> hm1 = new HashMap<String, String>();
        hm1.put("孙策", "大乔");
        hm1.put("周瑜", "小乔");
        // Add the HashMap to the ArrayList as an element
        array.add(hm1);

        HashMap<String, String> hm2 = new HashMap<String, String>();
        hm2.put("郭靖", "黄蓉");
        hm2.put("杨过", "小龙女");
        // Add the HashMap to the ArrayList as an element
        array.add(hm2);

        HashMap<String, String> hm3 = new HashMap<String, String>();
        hm3.put("令狐冲", "任盈盈");
        hm3.put("林平之", "岳灵珊");
        // Add the HashMap to the ArrayList as an element
        array.add(hm3);

        // Iterate over the ArrayList, printing every key/value pair of each map
        for (HashMap<String, String> hm : array) {
            Set<String> keySet = hm.keySet();
            for (String key : keySet) {
                String value = hm.get(key);
                System.out.println(key + "," + value);
            }
        }
    }
}
|
<gh_stars>0
const bodyParser = require('body-parser');
const cors = require('cors');
const express = require('express');
// local imports
const authMiddleware = require('./middlewares/auth');
const containsSQLMiddleware = require('./middlewares/contains-sql');
const logger = require('../config/logger')(__filename);
const v1 = require('./v1');
const v2 = require('./v2');
// CORS policy: any origin, with explicit method and header allow-lists.
const corsConfiguration = {
  origin: '*',
  methods: ['GET', 'PUT', 'PATCH', 'POST', 'DELETE', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization'],
};

const app = express();

app.use(cors(corsConfiguration));
// extended: true allows the values of the objects passed, to be of any type
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.options('*', cors(corsConfiguration));

// Screen every request for embedded SQL, then check its authorization token.
app.use(containsSQLMiddleware);
app.use(authMiddleware);

// Versioned API routers.
app.use('/v1', v1);
app.use('/v2', v2);

// Liveness probe. NOTE(review): this route is registered after the auth
// middleware, so health checks appear to require a valid token — confirm
// that is intended.
app.get('/_health', (req, res) => {
  logger.silly('API is Alive & Kicking!');
  return res.status(200).json({ status: 'UP' });
});

module.exports = app;
|
<gh_stars>0
import * as path from 'path';
import * as fs from 'fs';
import { AbsoluteUrlMapper } from './mapper';
import { ImageCache } from '../util/imagecache';
// Resolves an image path relative to the currently open file into an absolute
// path, but only when the target is already cached or actually exists on disk;
// otherwise the mapper yields undefined so the next mapper can try.
export const relativeToOpenFileUrlMapper: AbsoluteUrlMapper = {
  map(fileName: string, imagePath: string) {
    let resolved: string;
    const normalized = path.normalize(imagePath);
    if (normalized) {
      // Interpret the image path as a sibling of the open file.
      const candidate = path.join(fileName, '..', normalized);
      if (ImageCache.has(candidate) || fs.existsSync(candidate)) {
        resolved = candidate;
      }
    }
    return resolved;
  },
  refreshConfig() {},
};
|
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Require exactly five positional arguments; otherwise print usage and exit.
if [ $# != 5 ] ; then
    echo "=============================================================================================================="
    echo "Please run the script as: "
    echo "sh run_eval_distribute_ascend.sh DATASET_PATH LABEL_PATH MODEL_CKPT CKPT_NUMBER RANK_TABLE_PATH"
    echo "for example:"
    echo "sh run_eval_distribute_ascend.sh /disk0/dataset/finetune_dataset/finetune_test.mindrecord /disk0/dataset/finetune_dataset/test.json /disk2/ckpt_32p_0602 32 /disk0/rank_table_2p.json"
    echo "It is better to use absolute path."
    echo "=============================================================================================================="
    exit 1;
fi
get_real_path(){
    # Print $1 as an absolute path: absolute inputs pass through unchanged;
    # relative inputs are resolved against $PWD (realpath -m does not require
    # the path to exist). Quoting added so paths containing spaces survive
    # word-splitting (the original expanded $PWD/$1 unquoted).
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        realpath -m "$PWD/$1"
    fi
}
# Resolve all path arguments to absolute paths and export the 2-device
# distributed-run configuration. All expansions are quoted so paths with
# spaces do not word-split.
DATASET=$(get_real_path "$1")
echo "$DATASET"
LABEL=$(get_real_path "$2")
MODEL_CKPT=$(get_real_path "$3")
CKPT_NUMBER=$4
RANK_TABLE_PATH=$(get_real_path "$5")
current_exec_path=$(pwd)
echo "${current_exec_path}"
export RANK_SIZE=2
export DEVICE_NUM=2
export RANK_TABLE_FILE=$RANK_TABLE_PATH
# Launch one background evaluation process per device, each in its own
# scratch directory with a private copy of the sources.
for((i=0;i<=1;i++));
do
    rm -rf "${current_exec_path}/eval$i"
    mkdir "${current_exec_path}/eval$i"
    cd "${current_exec_path}/eval$i" || exit
    cp -r ../../*.py ./
    cp -r ../../src ./
    cp -r ../../scripts/*.sh ./
    export RANK_ID=$i
    export DEVICE_ID=$i
    echo "start eval for rank $RANK_ID, device $DEVICE_ID"
    env > env.log
    python ../../eval.py --dataset "$DATASET" --data_path "$LABEL" --ckpt_path_doc "$MODEL_CKPT" --ckpt_partition "$CKPT_NUMBER" --distribute True --has_train_strategy True > log_cpm.log 2>&1 &
    cd "${current_exec_path}" || exit
done
cd "${current_exec_path}" || exit
|
import {stdout, stderr} from 'node:process';
import {expectType} from 'tsd';
import supportsColor, {createSupportsColor, Options, ColorInfo} from './index.js';

// Type-level tests (run with `tsd`): every supports-color entry point must be
// typed as ColorInfo, and an empty Options object must be assignable.
const options: Options = {};

expectType<ColorInfo>(supportsColor.stdout);
expectType<ColorInfo>(supportsColor.stderr);
expectType<ColorInfo>(createSupportsColor(stdout));
expectType<ColorInfo>(createSupportsColor(stderr));
expectType<ColorInfo>(createSupportsColor(stdout, options));
|
# -*- coding: utf-8 -*-
# Django libraries:
# URL routing for the "ti" app.
from django.conf.urls import url

from .views import FixAlmacenView

# Namespace for reversing, e.g. reverse('ti:fix_almacen').
app_name = "ti"

urlpatterns = [
    # /fix/almacen/ -> FixAlmacenView
    url(
        r'^fix/almacen/$',
        FixAlmacenView.as_view(),
        name='fix_almacen'
    ),
]
|
<filename>com.ensoftcorp.open.dynadoc.core/src/com/ensoftcorp/open/dynadoc/core/wrapper/ClassMethodsWrapper.java
package com.ensoftcorp.open.dynadoc.core.wrapper;
import java.nio.file.Path;
import java.util.List;
import com.ensoftcorp.atlas.core.db.graph.Node;
import com.ensoftcorp.atlas.core.xcsg.XCSG;
import com.ensoftcorp.open.dynadoc.core.constants.FileNameTemplateConstants;
import com.ensoftcorp.open.dynadoc.core.data.JavaClass;
import com.ensoftcorp.open.dynadoc.core.data.JavaMethod;
import com.ensoftcorp.open.dynadoc.core.data.JavaParameter;
import com.ensoftcorp.open.dynadoc.core.path.WorkingDirectory;
import com.ensoftcorp.open.dynadoc.core.path.WorkingDirectoryCache;
import com.ensoftcorp.open.dynadoc.core.utils.HTMLUtils;
import com.ensoftcorp.open.dynadoc.core.utils.PathUtils;
import com.ensoftcorp.open.dynadoc.core.utils.SaveUtils;
import com.hp.gagawa.java.elements.A;
import com.hp.gagawa.java.elements.Br;
import com.hp.gagawa.java.elements.Code;
import com.hp.gagawa.java.elements.Div;
import com.hp.gagawa.java.elements.Img;
import com.hp.gagawa.java.elements.Table;
import com.hp.gagawa.java.elements.Tbody;
import com.hp.gagawa.java.elements.Td;
import com.hp.gagawa.java.elements.Tfoot;
import com.hp.gagawa.java.elements.Th;
import com.hp.gagawa.java.elements.Thead;
import com.hp.gagawa.java.elements.Tr;
/**
 * Builds the "Method Summary" section of a generated class documentation page:
 * a jQuery DataTable with one row per method, including per-method links to
 * saved CFG, call, and data-dependency graphs.
 */
public class ClassMethodsWrapper {

    /**
     * Script expected by the rendered table.
     * NOTE(review): not referenced in this class — presumably consumed by the
     * page assembler; confirm before removing.
     */
    private static final String METHODS_TABLE_JAVASCRIPT_FILE_NAME = "jquery-methods-table-script.js";

    /** Heading text shown on the card. */
    private static final String METHODS_SECTION_HEADER = "Method Summary";

    /** DOM id of the table (used by the DataTables initializer script). */
    private static final String METHODS_TABLE_NAME = "methods-table";

    /** Visible column headers, in display order. */
    private static final String [] METHODS_TABLE_HEADERS = { "Visibility", "Return", "Name", "Parameters", "Static", "Instance", "Concrete", "Deprecated", "External Use", "CFG", "Call", "DFG"};

    /** Methods to render, excluding constructors. */
    private List<JavaMethod> methods;

    /** Output directory for this class's generated artifacts. */
    private WorkingDirectory workingDirectory;

    /**
     * Captures the class's non-constructor methods and its working directory.
     *
     * NOTE(review): removeAll mutates the list returned by getMethods() —
     * confirm JavaClass hands out a copy rather than its internal list.
     *
     * @param javaClass the class whose methods are to be rendered
     */
    public ClassMethodsWrapper(JavaClass javaClass) {
        this.methods = javaClass.getMethods();
        this.methods.removeAll(javaClass.getConstructors());
        this.workingDirectory = WorkingDirectoryCache.getWorkingDirectory(javaClass.getClassNode());
    }

    /** @return the methods to be rendered (constructors excluded). */
    private List<JavaMethod> getMethods() {
        return this.methods;
    }

    /** @return the working directory for this class's generated files. */
    public WorkingDirectory getWorkingDirectory() {
        return this.workingDirectory;
    }

    /**
     * @param fileName graph file name
     * @return absolute path of the graph file inside the graphs subdirectory
     */
    private Path getPathToGraphFile(String fileName) {
        Path graphsDirectoryPath = PathUtils.getGraphsWorkingDirectory(this.getWorkingDirectory()).getPath();
        return graphsDirectoryPath.resolve(fileName);
    }

    /**
     * @param fileName graph file name
     * @return relative href (from the generated page) to the graph file
     */
    private String getRelativePathStringToGraphFile(String fileName) {
        return PathUtils.getRelativePathStringToGraphsDirectory(this.getWorkingDirectory()) + fileName;
    }

    /**
     * Builds the complete "Method Summary" card: header, DataTable with one
     * row per method, and a footer row mirroring the header (used by the
     * DataTables column-filter script). The first column is the expand/collapse
     * control and the hidden last column carries the method comments.
     *
     * NOTE(review): the card body is appended to the card *header* div rather
     * than to the card div itself — unusual for Bootstrap card markup; confirm
     * this nesting is intended.
     *
     * @return the populated section div
     */
    public Div wrap() {
        Div methodsTableDiv = new Div();
        methodsTableDiv.setCSSClass("card text-white bg-info mb-3");
        methodsTableDiv.setStyle("max-width: 98%; margin: 10pt");

        Div cardHeader = new Div();
        cardHeader.setCSSClass("card-header");
        cardHeader.appendText(METHODS_SECTION_HEADER);

        Div cardContent = new Div();
        cardContent.setCSSClass("card-body bg-white text-dark");

        Table table = new Table();
        table.setId(METHODS_TABLE_NAME);
        table.setCSSClass("display small");
        table.setStyle("width:100%");

        Thead tHead = new Thead();
        Tr tr = new Tr();
        // Leading empty column: row expand/collapse control.
        Th firstColumn = new Th();
        tr.appendChild(firstColumn);
        for(String headerText: METHODS_TABLE_HEADERS) {
            Th column = new Th();
            column.appendText(headerText);
            tr.appendChild(column);
        }
        // Trailing hidden column: holds the method comments for the detail row.
        Th lastColumn = new Th();
        lastColumn.setStyle("display:none;");
        tr.appendChild(lastColumn);
        tHead.appendChild(tr);
        table.appendChild(tHead);

        Tbody tBody = new Tbody();
        List<JavaMethod> methods = this.getMethods();
        for(JavaMethod method: methods) {
            Tr methodRow = this.wrapMethod(method);
            tBody.appendChild(methodRow);
        }
        table.appendChild(tBody);

        // Footer mirrors the header's column layout.
        Tfoot tFoot = new Tfoot();
        tr = new Tr();
        firstColumn = new Th();
        tr.appendChild(firstColumn);
        for(int i = 0; i < METHODS_TABLE_HEADERS.length; i++) {
            Th column = new Th();
            tr.appendChild(column);
        }
        lastColumn = new Th();
        lastColumn.setStyle("display:none;");
        tr.appendChild(lastColumn);
        tFoot.appendChild(tr);
        table.appendChild(tFoot);

        cardContent.appendChild(table);
        cardHeader.appendChild(cardContent);
        methodsTableDiv.appendChild(cardHeader);
        return methodsTableDiv;
    }

    /**
     * Renders a single method as a table row; column order must match
     * METHODS_TABLE_HEADERS (plus the leading control column and the hidden
     * trailing comments column).
     *
     * @param method the method to render
     * @return the populated row
     */
    private Tr wrapMethod(JavaMethod method) {
        Tr row = new Tr();
        Td showHideColumn = this.wrapShowHideIcon();
        row.appendChild(showHideColumn);
        Td visibilityColumn = this.wrapVisibility(method);
        row.appendChild(visibilityColumn);
        Td returnTypeColumn = this.wrapReturnType(method);
        row.appendChild(returnTypeColumn);
        Td nameColumn = this.wrapName(method);
        row.appendChild(nameColumn);
        Td parametersColumn = this.wrapParameters(method);
        row.appendChild(parametersColumn);
        Td staticColumn = this.wrapStatic(method);
        row.appendChild(staticColumn);
        Td instanceColumn = this.wrapInstance(method);
        row.appendChild(instanceColumn);
        Td concreteMethod = this.wrapConcrete(method);
        row.appendChild(concreteMethod);
        Td deprecatedColumn = this.wrapDeprecated(method);
        row.appendChild(deprecatedColumn);
        Td usedOutsideClassColumn = this.wrapCalledOutsideContainingClass(method);
        row.appendChild(usedOutsideClassColumn);
        Td cfgColumn = this.wrapCFG(method);
        row.appendChild(cfgColumn);
        Td callGraphColumn = this.wrapCallGraph(method);
        row.appendChild(callGraphColumn);
        Td ddgColumn = this.wrapDDG(method);
        row.appendChild(ddgColumn);
        Td commentColumn = this.wrapComments(method);
        row.appendChild(commentColumn);
        return row;
    }

    /** @return the empty cell DataTables uses as the row expand/collapse control. */
    private Td wrapShowHideIcon() {
        Td td = new Td();
        td.setCSSClass("details-control");
        return td;
    }

    /** @param method the method @return cell with the visibility modifier as code. */
    private Td wrapVisibility(JavaMethod method) {
        Td td = new Td();
        Code code = new Code();
        code.appendText(method.getVisibility());
        td.appendChild(code);
        return td;
    }

    /**
     * Renders the return type: "void" when absent, plain text for primitives,
     * and a (currently placeholder "#") link for reference types.
     *
     * @param method the method
     * @return the return-type cell
     */
    private Td wrapReturnType(JavaMethod method) {
        Td td = new Td();
        Code code = new Code();
        Node typeNode = method.getReturnType();
        if(typeNode == null) {
            code.appendText("void");
        }else {
            String typeName = typeNode.getAttr(XCSG.name).toString();
            if(typeNode.taggedWith(XCSG.Primitive)) {
                code.appendText(typeName);
            }else {
                A link = new A();
                link.setHref("#");
                link.appendText(typeName);
                code.appendChild(link);
            }
        }
        td.appendChild(code);
        return td;
    }

    /** @param method the method @return cell with the method name as code. */
    private Td wrapName(JavaMethod method) {
        Td td = new Td();
        Code code = new Code();
        code.appendText(method.getName());
        td.appendChild(code);
        return td;
    }

    /**
     * Renders the parameter list, one "type name" pair per line; reference
     * types get a (placeholder "#") link like return types do.
     *
     * @param method the method
     * @return the parameters cell
     */
    private Td wrapParameters(JavaMethod method) {
        Td td = new Td();
        td.setStyle("white-space:nowrap");
        List<JavaParameter> parameters = method.getParameters();
        for(int index = 0; index < parameters.size(); index++) {
            JavaParameter parameter = parameters.get(index);
            String name = parameter.getName();
            Node typeNode = parameter.getTypeNode();
            String typeName = parameter.getTypeName();
            Code p = new Code();
            if(typeNode.taggedWith(XCSG.Primitive)) {
                p.appendText(typeName);
            }else {
                A link = new A();
                link.setHref("#");
                link.appendText(typeName);
                p.appendChild(link);
            }
            p.appendText(" " + name);
            td.appendChild(p);
            // Line break between parameters (but not after the last one).
            if(index < parameters.size() - 1) {
                td.appendChild(new Br());
            }
        }
        return td;
    }

    /** @param method the method @return cell with a check mark iff the method is static. */
    private Td wrapStatic(JavaMethod method) {
        Td td = new Td();
        if(method.staticMethod()) {
            td.appendChild(this.checkImg());
        }
        return td;
    }

    /** @param method the method @return cell with a check mark iff the method is an instance method. */
    private Td wrapInstance(JavaMethod method) {
        Td td = new Td();
        if(method.instanceMethod()) {
            td.appendChild(this.checkImg());
        }
        return td;
    }

    /** @param method the method @return cell with a check mark iff the method is concrete. */
    private Td wrapConcrete(JavaMethod method) {
        Td td = new Td();
        if(method.concreteMethod()) {
            td.appendChild(this.checkImg());
        }
        return td;
    }

    /** @param method the method @return cell with a check mark iff the method is deprecated. */
    private Td wrapDeprecated(JavaMethod method) {
        Td td = new Td();
        if(method.deprecatedMethod()) {
            td.appendChild(this.checkImg());
        }
        return td;
    }

    /** @param method the method @return cell with a check mark iff the method is called from outside its class. */
    private Td wrapCalledOutsideContainingClass(JavaMethod method) {
        Td td = new Td();
        if(method.calledOutsideContainingClass()) {
            td.appendChild(this.checkImg());
        }
        return td;
    }

    /**
     * Saves the method's control-flow graph to disk and renders a "Show"
     * button linking to it.
     *
     * @param method the method
     * @return the CFG cell
     */
    private Td wrapCFG(JavaMethod method) {
        Td td = new Td();
        A CFGlink = new A();
        String cfgFileName = String.format(FileNameTemplateConstants.CFG_GRAPH_FILE_NAME_TEMPLATE, method.getSignature());
        Path cfgFilePath = this.getPathToGraphFile(cfgFileName);
        SaveUtils.saveGraph(cfgFilePath, method.getCFG(), method.getCFGMarkup());
        String relativePathToCFGGraph = this.getRelativePathStringToGraphFile(cfgFileName);
        CFGlink.setHref(relativePathToCFGGraph);
        CFGlink.setTarget("_blank");
        CFGlink.setAttribute("role", "button");
        CFGlink.setAttribute("class", "btn btn-success");
        CFGlink.appendText("Show");
        td.appendChild(CFGlink);
        return td;
    }

    /**
     * Saves the method's call graph to disk and renders a "Show" button
     * linking to it.
     *
     * @param method the method
     * @return the call-graph cell
     */
    private Td wrapCallGraph(JavaMethod method) {
        Td td = new Td();
        A link = new A();
        String callGraphFileName = String.format(FileNameTemplateConstants.CALL_GRAPH_FILE_NAME_TEMPLATE, method.getSignature());
        Path callGraphFilePath = this.getPathToGraphFile(callGraphFileName);
        SaveUtils.saveGraph(callGraphFilePath, method.getCallGraph(), method.getCallGraphMarkup());
        String relativePathToCallGraph = this.getRelativePathStringToGraphFile(callGraphFileName);
        link.setHref(relativePathToCallGraph);
        link.setTarget("_blank");
        link.setAttribute("role", "button");
        link.setAttribute("class", "btn btn-success");
        link.appendText("Show");
        td.appendChild(link);
        return td;
    }

    /**
     * Saves the method's data-dependency graph to disk and renders a "Show"
     * button linking to it.
     *
     * @param method the method
     * @return the DFG cell
     */
    private Td wrapDDG(JavaMethod method) {
        Td td = new Td();
        A link = new A();
        String ddgFileName = String.format(FileNameTemplateConstants.DATA_DEPENDENCY_GRAPH_FILE_NAME_TEMPLATE, method.getSignature());
        Path ddgFilePath = this.getPathToGraphFile(ddgFileName);
        SaveUtils.saveGraph(ddgFilePath, method.getDataDependenceGraph(), method.getDataDependencyGraphMarkup());
        String relativePathToDDG = this.getRelativePathStringToGraphFile(ddgFileName);
        link.setHref(relativePathToDDG);
        link.setTarget("_blank");
        link.setAttribute("role", "button");
        link.setAttribute("class", "btn btn-primary");
        link.appendText("Show");
        td.appendChild(link);
        return td;
    }

    /**
     * Hidden cell carrying the method's comments; revealed by the DataTables
     * detail-row script via the leading control column.
     *
     * @param method the method
     * @return the hidden comments cell
     */
    private Td wrapComments(JavaMethod method) {
        Td td = new Td();
        td.setAttribute("style", "display:none;");
        Div div = new Div();
        div.appendText(method.getComments());
        td.appendChild(div);
        return td;
    }

    /** @return the shared check-mark image element for boolean columns. */
    private Img checkImg() {
        return HTMLUtils.checkImg(this.getWorkingDirectory());
    }
}
|
# Copyright 2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python
"""
Template manager using wheezy.template to manage all HTML templates returned by the application. Callers need to know how templates are labeled and what input dictionaries are expected. See init_templates() for the templates available, and the files in the 'templates' directory to understand what the templates look like and what input values they expect.
usage:
from template_mgr import template_mgr
template_mgr.init_templates()
input_dic = {'home_url':'https://127.0.0.1/home', 'css_url':'http://127.0.0.1/style.css'}
html = render_template('login', input_dic)
"""
from wheezy.template.engine import Engine
from wheezy.template.ext.core import CoreExtension
from wheezy.template.loader import FileLoader
from mportal_tools import mportal_log
g_template_dic = {} # Dictionary of prepared templates for rendering.
def init_templates():
    """Load every known template into the module-level registry.

    Must be called once at startup, before render_template() is used.
    Each label maps to '<label>.html' under mportal/templates.
    """
    global g_template_dic
    engine = Engine(
        loader=FileLoader(['mportal/templates']),
        extensions=[CoreExtension()],
    )
    for label in ('login', 'console', 'not_found', 'redirect'):
        g_template_dic[label] = engine.get_template('%s.html' % label)
def render_template(p_template_label, p_dic):
    """Generate an HTML web page to be returned to the client.

    param p_template_label What web page to produce. See init_templates() for valid values.
    param p_dic Input dictionary for rendering the web page. See templates in 'template' folder for expected values.
    return HTML as a string object, or the literal text 'System error.' when
    the label is unknown (e.g. init_templates() was never called).
    """
    template = g_template_dic.get(p_template_label)
    if template is None:
        return 'System error.'
    return str(template.render(p_dic))
|
<reponame>janothan/Evaluation-Framework<filename>evaluation_framework/abstract_taskManager.py
from abc import ABCMeta, abstractmethod
"""
It abstracts the behavior of a Task manager. It should be extended by each task manager.
"""
class AbstractTaskManager(metaclass=ABCMeta):
    """Abstracts the behavior of a task manager.

    Each concrete task manager must subclass this and implement evaluate().
    """

    def __init__(self):
        super().__init__()

    @abstractmethod
    def evaluate(
        self,
        vectors,
        vector_file,
        vector_size,
        result_directory,
        log_dictionary,
        scores_dictionary,
    ):
        """Evaluate the specific task.

        vectors: dataframe which contains the vectors data
        vector_file: path of the vector file
        vector_size: size of the vectors
        result_directory: directory where the results must be stored
        log_dictionary: dictionary to store all the information to store in the log file
        scores_dictionary: dictionary to store all the scores which will be used in the comparison phase
        """
        # Originally this documentation lived as a bare string *between* the
        # methods, where no tool could associate it with evaluate().
        pass
|
/** Simple value holder describing a person. */
class Person {
  // Parameter properties declare and assign the public fields in one step;
  // behaviour is identical to explicit fields plus constructor assignments.
  constructor(
    public name: string,
    public age: number,
    public address: string,
    public hobbies: string[]
  ) {}
}
|
// Compare two strings: the longer string wins outright (+1 / -1); strings of
// equal length are decided by the first differing char code; identical
// strings yield 0.
const compareStrings = (str1, str2) => {
  if (str1.length !== str2.length) {
    return str1.length > str2.length ? 1 : -1;
  }
  for (let i = 0; i < str1.length; i += 1) {
    const delta = str1.charCodeAt(i) - str2.charCodeAt(i);
    if (delta !== 0) {
      return delta;
    }
  }
  return 0;
};
console.log(compareStrings('Cord', 'Car')); // Output: 1
|
# Quick-reference cheat sheet of Debian/Ubuntu admin commands.
# NOTE(review): these are reference snippets, not a script meant to run
# top-to-bottom (it would install packages and power the machine off).

# Install ripgrep (grep but better)
apt-get install ripgrep
# General kernel and system information, all flags.
uname -a
# Information about the distro and its version.
cat /etc/os-release
head -n 2 /etc/os-release
# Login and out
login
logout
exit
# CTRL+D
# Shutdown is safer than poweroff
shutdown now
reboot now
poweroff
# Search installed packages
dpkg -l | grep PACKAGE_NAME
# Search all available packages
apt list | grep PACKAGE_NAME
# Get package details
apt show PACKAGE_NAME
# Search previous commands
history | grep nano
# Get last boot date
uptime -s
who -b
# Get boot history
last reboot | less
# Sleep for X seconds
sleep X
# Get all IP addresses for the host
hostname -I
# Config files changed
nano /etc/network/interfaces
nano /etc/sudoers
nano /etc/login.defs
nano /etc/security/pwquality.conf
nano /etc/ssh/sshd_config
# Generate a hash from the file
sha1sum ~/Documents/virtualbox/born2beroot/born2beroot.vdi
|
def update_django_settings(settings: dict, debug: bool) -> dict:
    """Return a copy of *settings* with DEBUG toggled to *debug*.

    A deep copy is taken so the caller's dict is never mutated: the original
    shallow ``settings.copy()`` shared the nested TEMPLATES dicts and the
    INSTALLED_APPS list, so toggling debug silently modified the input.

    When debug is enabled, 'debug_toolbar' is appended to INSTALLED_APPS.
    """
    import copy

    updated_settings = copy.deepcopy(settings)
    updated_settings['DEBUG'] = debug
    updated_settings['TEMPLATES'][0]['OPTIONS']['debug'] = debug
    if debug:
        updated_settings['INSTALLED_APPS'].append('debug_toolbar')
    return updated_settings
|
#
# Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Pravega client authentication: bearer token via Keycloak service account.
export pravega_client_auth_method=Bearer
export pravega_client_auth_loadDynamic=true
export KEYCLOAK_SERVICE_ACCOUNT_FILE=/opt/pravega-sensor-collector/conf/keycloak.json
# JVM heap cap for the collector process.
export JAVA_OPTS="-Xmx512m"
# NET1 device: NetworkDriver reading interface ens33.
export PRAVEGA_SENSOR_COLLECTOR_NET1_CLASS=io.pravega.sensor.collector.network.NetworkDriver
export PRAVEGA_SENSOR_COLLECTOR_NET1_NETWORK_INTERFACE=ens33
export PRAVEGA_SENSOR_COLLECTOR_NET1_MEMORY_QUEUE_CAPACITY_ELEMENTS=10000
export PRAVEGA_SENSOR_COLLECTOR_NET1_SAMPLES_PER_EVENT=100
export PRAVEGA_SENSOR_COLLECTOR_NET1_SAMPLES_PER_SEC=100
# On-disk queue file so buffered samples survive a process restart.
export PRAVEGA_SENSOR_COLLECTOR_NET1_PERSISTENT_QUEUE_FILE=/opt/pravega-sensor-collector/network-ens33.db
export PRAVEGA_SENSOR_COLLECTOR_NET1_PERSISTENT_QUEUE_CAPACITY_EVENTS=100
# Target Pravega stream; CREATE_SCOPE=false means the 'edge' scope must
# already exist on the controller.
export PRAVEGA_SENSOR_COLLECTOR_NET1_PRAVEGA_CONTROLLER_URI=tls://pravega-controller.sdp.cluster1.sdp-demo.org:443
export PRAVEGA_SENSOR_COLLECTOR_NET1_SCOPE=edge
export PRAVEGA_SENSOR_COLLECTOR_NET1_CREATE_SCOPE=false
export PRAVEGA_SENSOR_COLLECTOR_NET1_STREAM=sensors
# Routing key is the collector host's name (evaluated at source time).
export PRAVEGA_SENSOR_COLLECTOR_NET1_ROUTING_KEY=$(hostname)
export PRAVEGA_SENSOR_COLLECTOR_NET1_EXACTLY_ONCE=true
export PRAVEGA_SENSOR_COLLECTOR_NET1_TRANSACTION_TIMEOUT_MINUTES=2.0
|
import sqlite3
from structures.ExperimentInfo import ExperimentInfo
def create_connection(db_file):
    """Create a database connection to the SQLite database at *db_file*.

    :param db_file: database file
    :return: Connection object or None when the connect fails
    """
    try:
        return sqlite3.connect(db_file)
    except sqlite3.Error as exc:
        # Mirror the original behaviour: report the error, yield None.
        print(exc)
        return None
def create_experiment(conn, current_experiment):
    """
    Create a new experiment into the experiments table
    :param conn: open sqlite3 connection
    :param current_experiment: ExperimentInfo instance supplying all column values
    :return: rowid of the inserted experiment row
    """
    # The 21 tuple entries below must stay in exactly the same order as the
    # 21 columns listed in the INSERT statement.
    data_tuple = (current_experiment.id, current_experiment.sample_weight,
                  current_experiment.project_id, current_experiment.user_id,
                  current_experiment.time_stamp, current_experiment.epochs,
                  current_experiment.batch_size, current_experiment.Framework,
                  current_experiment.input_shape, current_experiment.layers_count,
                  current_experiment.output_shape, current_experiment.optimizer,
                  current_experiment.lossfunction, current_experiment.callbacks_log,
                  current_experiment.model_file, current_experiment.accuracy_value,
                  current_experiment.loss_value, current_experiment.predict_function,
                  current_experiment.list_of_accuracy_over_epochs, current_experiment.list_of_loss_over_epochs, current_experiment.auto_predict_function)
    sql = ''' INSERT INTO 'experiments' (id, sample_weight, project_id, user_id, time_stamp, epochs, batch_size, Framework, input_shape, layers_count, output_shape, optimizer, lossfunction, callbacks_log, model_file, accuracy_value, loss_value, predict_function, list_of_accuracy_over_epochs, list_of_loss_over_epochs, auto_predict_function) VALUES (
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?,
?
); '''
    cur = conn.cursor()
    cur.execute(sql, data_tuple)
    # NOTE(review): no conn.commit() here — presumably the caller manages the
    # transaction; confirm, otherwise the insert may be lost on close.
    return cur.lastrowid
|
#!/bin/sh -e
#
# Copyright (c) 2012, Intel Corporation.
# Copyright (c) 2020, Foundries.IO Ltd
# All rights reserved.
#
# install.sh [device_name] [rootfs_name]
#
# Fixed, minimal PATH — the installer runs from a live image.
PATH=/sbin:/bin:/usr/sbin:/usr/bin
# minimal ESP partition size is 100mb
boot_size=100
# Get a list of hard drives
hdnamelist=""
# Work out which block device the live media ($1) is mounted from, so it can
# be excluded from the list of installation targets.
live_dev_name=`cat /proc/mounts | grep ${1%/} | awk '{print $1}'`
live_dev_name=${live_dev_name#\/dev/}
# Only strip the digit identifier if the device is not an mmc
# (mmcblk0p1 / nvme0n1p1 keep their full name; sda1 becomes sda).
case $live_dev_name in
    mmcblk*)
    ;;
    nvme*)
    ;;
    *)
        live_dev_name=${live_dev_name%%[0-9]*}
    ;;
esac
echo "Searching for hard drives ..."
# Sleep for at least 5 seconds for USB device enumeration to happen
sleep 5
# Some eMMC devices have special sub devices such as mmcblk0boot0 etc
# we're currently only interested in the root device so pick them wisely
devices=`ls /sys/block/ | grep -v mmcblk` || true
mmc_devices=`ls /sys/block/ | grep "mmcblk[0-9]\{1,\}$"` || true
devices="$devices $mmc_devices"
# Keep only real installation candidates: skip pseudo/removable-class devices
# and the device the live system itself is running from.
for device in $devices; do
    case $device in
        mtdblock*)
            # skip mtd device
        ;;
        loop*)
            # skip loop device
        ;;
        sr*)
            # skip CDROM device
        ;;
        ram*)
            # skip ram device
        ;;
        *)
            # skip the device LiveOS is on
            # Add valid hard drive name to the list
            case $device in
                $live_dev_name*)
                    # skip the device we are running from
                ;;
                *)
                    hdnamelist="$hdnamelist $device"
                ;;
            esac
        ;;
    esac
done
if [ -z "${hdnamelist}" ]; then
    echo "You need another device (besides the live device /dev/${live_dev_name}) to install the image. Installation aborted."
    exit 1
fi
TARGET_DEVICE_NAME=""
# Show each candidate disk with vendor/model/uevent info to help the user
# pick the right target.
for hdname in $hdnamelist; do
    # Display found hard drives and their basic info
    echo "-------------------------------"
    echo /dev/$hdname
    if [ -r /sys/block/$hdname/device/vendor ]; then
        echo -n "VENDOR="
        cat /sys/block/$hdname/device/vendor
    fi
    if [ -r /sys/block/$hdname/device/model ]; then
        echo -n "MODEL="
        cat /sys/block/$hdname/device/model
    fi
    if [ -r /sys/block/$hdname/device/uevent ]; then
        echo -n "UEVENT="
        cat /sys/block/$hdname/device/uevent
    fi
    echo
done
# Get user choice: loop until the answer matches a listed device or 'n'.
while true; do
    echo "Please select an install target or press n to exit ($hdnamelist ): "
    read answer
    if [ "$answer" = "n" ]; then
        echo "Installation manually aborted."
        exit 1
    fi
    for hdname in $hdnamelist; do
        if [ "$answer" = "$hdname" ]; then
            TARGET_DEVICE_NAME=$answer
            break
        fi
    done
    if [ -n "$TARGET_DEVICE_NAME" ]; then
        break
    fi
done
if [ -n "$TARGET_DEVICE_NAME" ]; then
    echo "Installing image on /dev/$TARGET_DEVICE_NAME ..."
else
    echo "No hard drive selected. Installation aborted."
    exit 1
fi
device=/dev/$TARGET_DEVICE_NAME
#
# The udev automounter can cause pain here, kill it
#
rm -f /etc/udev/rules.d/automount.rules
rm -f /etc/udev/scripts/mount*
#
# Unmount anything the automounter had mounted
#
umount ${device}* 2> /dev/null || /bin/true
mkdir -p /tmp
# Create /etc/mtab if not present
if [ ! -e /etc/mtab ] && [ -e /proc/mounts ]; then
    ln -sf /proc/mounts /etc/mtab
fi
# Parse total disk size (MB) out of parted's "Disk ...: NNNMB" summary line.
disk_size=$(parted ${device} unit mb print | grep '^Disk .*: .*MB' | cut -d" " -f 3 | sed -e "s/MB//")
rootfs_size=$((disk_size-boot_size))
rootfs_start=$((boot_size))
rootfs_end=$((rootfs_start+rootfs_size))
# MMC devices are special in a couple of ways
# 1) they use a partition prefix character 'p'
# 2) they are detected asynchronously (need rootwait)
rootwait=""
part_prefix=""
if [ ! "${device#/dev/mmcblk}" = "${device}" ] || \
   [ ! "${device#/dev/nvme}" = "${device}" ]; then
    part_prefix="p"
    rootwait="rootwait"
fi
# USB devices also require rootwait.
# FIX: the original piped find into `while read`, which runs the loop body in
# a subshell, so rootwait="rootwait" never reached the parent shell and USB
# targets booted without rootwait. Iterate with a glob instead (no pipeline),
# so the assignment sticks; quoting also protects odd link names.
if [ -d /dev/disk/by-id ]; then
    for usbdev in /dev/disk/by-id/usb*; do
        [ -e "$usbdev" ] || continue
        if readlink "$usbdev" | grep -q "$TARGET_DEVICE_NAME"; then
            rootwait="rootwait"
            break
        fi
    done
fi
bootfs=${device}${part_prefix}1
rootfs=${device}${part_prefix}2
echo
echo "Current partition table available on ${device}:"
echo
parted ${device} print
# Get user choice for partition table: 'y' wipes and recreates GPT,
# 'n' reuses the existing first/second partitions as ESP/rootfs.
while true; do
    echo "Erase and recreate partition table for device ${device}? (y / n): "
    read answer
    if [ "$answer" = "y" ]; then
        echo "Deleting partition table on ${device} ..."
        dd if=/dev/zero of=${device} bs=512 count=35
        echo "*****************"
        echo "Boot partition size: $boot_size MB ($bootfs)"
        echo "Rootfs partition size: $rootfs_size MB ($rootfs)"
        echo "*****************"
        echo "Creating new partition table on ${device} ..."
        parted ${device} mklabel gpt
        echo "Creating boot partition on $bootfs"
        parted ${device} mkpart boot fat32 0% $boot_size
        parted ${device} set 1 boot on
        format_boot="y"
        echo "Creating rootfs partition on $rootfs"
        parted ${device} mkpart root ext4 $rootfs_start 100%
        parted ${device} print
        echo "Waiting for device nodes..."
        sleep 1
        C=0
        # Poll (max ~2 extra seconds) until the kernel has created both
        # partition device nodes.
        while [ $C -ne 2 ] && [ ! -e $bootfs -o ! -e $rootfs ]; do
            C=$(( C + 1 ))
            sleep 1
        done
        break
    elif [ "$answer" = "n" ]; then
        echo "Not erasing current partition table for device ${device}, assuming ${bootfs} as boot/ESP and ${rootfs} as rootfs."
        echo
        echo "Format ${bootfs} (ESP) partition? (n - default / y): "
        read answer
        if [ "$answer" = "y" ]; then
            format_boot="y"
        fi
        break
    fi
done
if [ "$format_boot" = "y" ]; then
    echo "Formatting $bootfs to vfat..."
    mkfs.vfat -F 32 -n boot $bootfs
fi
echo "Formatting $rootfs to ext4..."
mkfs.ext4 -F $rootfs
mkdir /tgt_root
mkdir /src_root
mkdir -p /boot
# Handling of the target root partition
mount $rootfs /tgt_root
mount -o rw,loop,noatime,nodiratime /run/media/$1/$2 /src_root
echo "Copying rootfs files..."
cp -a /src_root/* /tgt_root
# Update fstab at the ostree deploy folder
if [ -d /tgt_root/ostree/deploy/lmp/deploy ] ; then
    deploy_hash=$(ls /tgt_root/ostree/deploy/lmp/deploy/ | grep -m 1 -v "\.origin")
    boot_uuid=$(blkid -o value -s UUID ${bootfs})
    sed -i "s/LABEL=efi/UUID=${boot_uuid}/g" /tgt_root/ostree/deploy/lmp/deploy/${deploy_hash}/etc/fstab
fi
# Update boot args to include UUID and extra options
rootfs_uuid=$(blkid -o value -s UUID ${rootfs})
sed -i "s/root=LABEL=otaroot/root=UUID=${rootfs_uuid} ${rootwait}/g" \
    /tgt_root/boot/loader/grub.cfg /tgt_root/boot/loader/entries/*.conf
# LMP preloaded containers (containers and updated installed_versions)
if [ -d /run/media/$1/ostree/deploy/lmp/var/lib/docker ]; then
    cp -a /run/media/$1/ostree/deploy/lmp/var/lib/docker /tgt_root/ostree/deploy/lmp/var/lib/
    cp -a /run/media/$1/ostree/deploy/lmp/var/sota/import/installed_versions /tgt_root/ostree/deploy/lmp/var/sota/import/
fi
if [ -d /run/media/$1/ostree/deploy/lmp/var/sota/compose-apps ]; then
    # Delete preloaded containers previously available as part of rootfs.img (platform build)
    rm -rf /tgt_root/ostree/deploy/lmp/var/sota/compose-apps
    cp -a /run/media/$1/ostree/deploy/lmp/var/sota/compose-apps /tgt_root/ostree/deploy/lmp/var/sota/compose-apps
fi
# LMP specific customizations, if available (live media first partition, vfat)
if [ -d /run/media/${live_dev_name}1/lmp ]; then
    cp -a /run/media/${live_dev_name}1/lmp /tgt_root/ostree/deploy/lmp/var/
fi
umount /src_root
# Handling of the target boot partition
mount $bootfs /boot
echo "Preparing boot partition..."
EFIDIR="/boot/EFI/BOOT"
mkdir -p $EFIDIR
# Copy the efi loader
efiloader=`basename /run/media/$1/EFI/BOOT/boot*.efi`
cp /run/media/$1/EFI/BOOT/${efiloader} $EFIDIR
# Generate boot grub.cfg: locate the root filesystem by UUID, then chain to
# the full grub.cfg inside it.
cat << EOF > $EFIDIR/grub.cfg
search.fs_uuid ${rootfs_uuid} root
configfile /boot/loader/grub.cfg
EOF
# Make sure startup.nsh is also available at the boot partition
if [ -f /run/media/$1/startup.nsh ]; then
    cp /run/media/$1/startup.nsh /boot
fi
# Set default EFI boot entry
BOOTLABEL="LmP"
if [ -d /sys/firmware/efi/efivars ]; then
    # Delete old LmP entry
    bootnum=`efibootmgr | grep "^Boot[0-9]" | grep "${BOOTLABEL}$" | sed -e "s|Boot||" -e "s|\*.*||"`
    if [ -n "$bootnum" ]; then
        efibootmgr -b $bootnum -B || true
    fi
    # Add new LmP entry
    efibootmgr -c -d ${device} -p 1 -w -L ${BOOTLABEL} -l "\EFI\BOOT\\${efiloader}" || true
fi
umount /tgt_root
umount /boot
sync
echo "Installation successful. Remove your installation media and press ENTER to reboot."
read enter
echo "Rebooting..."
reboot -f
|
# /usr/share/console-login-helper-messages/profile.sh
# Originally from https://github.com/coreos/baselayout/blob/master/baselayout/coreos-profile.sh

# Report failed systemd units, but only on interactive shells.
case "$-" in
  *i*)
    failed_units=$(systemctl list-units --state=failed --no-legend --plain)
    if [[ -n "${failed_units}" ]]; then
      unit_count=$(wc -l <<<"${failed_units}")
      echo "[systemd]"
      echo -e "Failed Units: \033[31m${unit_count}\033[39m"
      awk '{ print " " $1 }' <<<"${failed_units}"
    fi
    ;;
esac
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Parse the command line parameters
# that runCust will give out
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments:
# FIX: use an arithmetic test; the original "[[ $# > 0 ]]" compared the
# argument count lexicographically as a string.
while [[ $# -gt 0 ]]
do
    key="$1"
    case $key in
        -h|--help)
            echo "Usage: run_dispnet_training_philly.sh [run_options]"
            echo "Options:"
            echo " -d|--data-dir <path> - directory path to input data (default NONE)"
            echo " -l|--log-dir <path> - directory path to save the log files (default NONE)"
            echo " -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
            echo " -m|--model-dir <path> - directory path to output model file (default NONE)"
            exit 1
            ;;
        -d|--data-dir)
            DATA_DIR="$2"
            shift # pass argument
            ;;
        -p|--config-file-dir)
            CONFIG_DIR="$2"
            shift # pass argument
            ;;
        -m|--model-dir)
            MODEL_DIR="$2"
            shift # pass argument
            ;;
        -l|--log-dir)
            LOG_DIR="$2"
            shift
            ;;
        *)
            # FIX: typo ("Unkown") corrected; message quoted so it prints verbatim.
            echo "Unknown option $key"
            ;;
    esac
    shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/anytime_models/examples/resnet-ann.py \
    --data_dir=$DATA_DIR \
    --log_dir=$LOG_DIR \
    --model_dir=$MODEL_DIR \
    --load=${MODEL_DIR}/checkpoint \
    -n=9 -c=32 -s=1 --ds_name=cifar100 --batch_size=64 --nr_gpu=1 --samloss=100 --adaloss_gamma=0.14 --adaloss_momentum=0.99 --adaloss_final_extra=0.5 --adaloss_update_per=100 --sum_rand_ratio=0 --is_select_arr -f=5
|
<reponame>TimCrooker/sao
import { colors, logger } from 'swaglog'
/**
 * Error subclass used to tag failures raised by this tool. `grit` marks the
 * error as ours; `cmdOutput` optionally carries the failing command's output.
 */
export class Terror extends Error {
	grit: boolean
	cmdOutput?: string

	constructor(message: string) {
		super(message)
		this.name = this.constructor.name
		this.grit = true
		// Prefer V8's captureStackTrace when present; otherwise borrow the
		// stack from a freshly constructed Error.
		const capture = Error.captureStackTrace
		if (typeof capture === 'function') {
			capture(this, this.constructor)
		} else {
			this.stack = new Error(message).stack
		}
	}
}
/**
 * Top-level CLI error handler: logs the error in the most useful form
 * available, then terminates the process with exit code 1.
 * - Terror: prints captured command output (if any), the message, and a
 *   dimmed stack trace at debug level.
 * - CACError (CLI argument parser errors): message only, no stack.
 * - anything else: full stack trace.
 */
export function handleError(error: Error | Terror): void {
	if (error instanceof Terror) {
		if (error.cmdOutput) {
			console.error(error.cmdOutput)
		}
		logger.error(error.message)
		logger.debug(colors.dim(error.stack))
	} else if (error.name === 'CACError') {
		logger.error(error.message)
	} else {
		logger.error(error.stack)
	}
	process.exit(1)
}
|
<filename>modules/component-web-core/src/main/java/com/nortal/spring/cw/core/web/component/element/FormDataElement.java
package com.nortal.spring.cw.core.web.component.element;
import java.util.Collection;
/**
 * Interface description that additionally extends {@link FormElement}. It is implemented by all elements that contain
 * data in some form, whether simple data such as a {@link String} or collections such as {@link Collection}.
 *
 * @author <NAME>
 *
 */
public interface FormDataElement extends FormElement, FormElementMarkedState {
    /**
     * Returns the model object whose single field (<i>property</i>) value this element holds; after conversion via
     * {@link FormDataElement#convert()} the element writes the value it holds back into the model object.
     *
     * @return {@link Object}
     */
    Object getEntity();
    /**
     * Sets the model object whose single field (<i>property</i>) value this element holds; after conversion via
     * {@link FormDataElement#convert()} the element writes the value it holds back into the model object.
     *
     * @param entity
     *            {@link Object}
     */
    void setEntity(Object entity);
    /**
     * When invoked, writes the element's value back into the entity object.
     */
    void convert();
    /**
     * When invoked, applies the element-specific validators implementing the {@link ElementValidator} interface as well
     * as any additional restriction implementations extending {@link AbstractValidator}.
     */
    void validate();
    /**
     * When invoked, applies the element-specific constraint checks implementing the {@link ElementConstraint} interface
     * as well as any additional restriction implementations extending {@link AbstractConstraint}.
     */
    void checkConstraints();
    /**
     * Assigns a raw value to the element. The value's type must match the element's own generic type.
     *
     * @param value
     *            {@link Object}
     */
    void setRawValue(Object value);
    /**
     * Returns the element's value in raw form.
     *
     * @return {@link Object}
     */
    Object getRawValue();
    /**
     * Returns <code>true</code> if AJAX-based validation is enabled, meaning that whenever the field's value changes in
     * the user interface it is sent to the element's validator for validation. On error the corresponding error message
     * is displayed.
     *
     * @return {@link Boolean}
     */
    boolean isUseAjaxValidation();
    /**
     * Enables or disables AJAX-based validation.
     *
     * @param useAjaxValidation
     *            {@link Boolean}
     */
    void setUseAjaxValidation(boolean useAjaxValidation);
    /**
     * Marks the element as mandatory or optional. When the element is mandatory, a * is displayed next to it in the
     * user interface and mandatory-value checks are applied on save.
     *
     * @param mandatory
     *            {@link Boolean}
     */
    void setMandatory(boolean mandatory);
    /**
     * Returns <code>true</code> when this is a mandatory element, meaning that its value must not be empty.
     *
     */
    boolean isMandatory();
    /**
     * Returns TRUE when a data entity is set on the element and the field value inside the entity differs from the
     * element's own value.
     *
     * @return {@link Boolean}
     */
    boolean isChanged();
    /**
     * Enables or disables escaping of HTML tags. When escaping is enabled, the symbols <,>,&,'," are replaced with the
     * corresponding HTML codes when the element's value is rendered in the presentation layer.<br>
     * <b>It is important that symbol escaping is always enabled; it should only be disabled in places where XSS attacks
     * cannot cause harm</b>
     *
     * @param escapeXml
     *            {@link Boolean}
     */
    void setEscapeXml(boolean escapeXml);
    /**
     * Adds an additional rule implementation that checks the element's data.
     *
     * @param constraint
     *            {@link ElementConstraint}
     * @return {@link FormDataElement}
     */
    FormDataElement addConstraint(ElementConstraint constraint);
    /**
     * Adds an additional rule implementation that checks the element's data.
     *
     * @param validator
     *            {@link ElementValidator}
     * @return {@link FormDataElement}
     */
    FormDataElement addValidator(ElementValidator validator);
}
|
package com.example.apahlavan1.flickrbrowser;
import android.os.AsyncTask;
import android.util.Log;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Created by apahlavan1 on 1/9/2016.
*/
/**
 * Lifecycle states of a raw HTTP download (used by {@link GetRawData}).
 * NOTE(review): only IDLE is assigned in the visible code (constructor and
 * reset()); the intended meaning of the remaining states is inferred from
 * their names — confirm against the rest of the app.
 */
enum DownloadStatus {
    IDLE,
    PROCESSING,
    NOT_INITIALISED,
    FAILED_OR_EMPTY,
    OK
}
public class GetRawData {
private String LOG_TAG = GetRawData.class.getSimpleName();
private String rawUrl, data;
private DownloadStatus downloadStatus;
public GetRawData(String rawUrl) {
this.rawUrl = rawUrl;
this.downloadStatus = DownloadStatus.IDLE;
}
public void reset(){
this.downloadStatus = DownloadStatus.IDLE;
this.rawUrl = null;
this.data = null;
}
public String getData() {
return data;
}
public DownloadStatus getDownloadStatus() {
return downloadStatus;
}
public class DownloadRawData extends AsyncTask<String, Void, String>{
protected void onPostExecute(String webData){
//TODO Fill in later
}
protected String doInBackground(String... params){
HttpURLConnection urlConnection = null;
BufferedReader reader = null;
if(params == null)
return null;
try {
URL url = new URL(params[0]);
urlConnection = (HttpURLConnection) url.openConnection();
urlConnection.setRequestMethod("GET");
urlConnection.connect();
InputStream inputStream = urlConnection.getInputStream();
if(inputStream == null)
return null;
StringBuffer buffer = new StringBuffer();
reader = new BufferedReader(new InputStreamReader(inputStream));
}
catch (IOException e){
Log.d(LOG_TAG, "");
} finally {
}
}
}
}
|
<reponame>ioitiki/hair_salon
/** Value holder describing a team and its roster. */
export class Team {
  name: string;
  game: string;
  players: any[];
  description: string;
  image_src: string;

  constructor(name: string, game: string, players: any[], description: string, image_src: string) {
    this.name = name;
    this.game = game;
    this.players = players;
    this.description = description;
    this.image_src = image_src;
  }
}
|
# Rebuild and reinstall pyindigo from a clean tree:
# remove any installed copy, wipe build artifacts, rebuild the client
# library, then install the Python package.
pip uninstall pyindigo -y
sudo rm -rf ./build ./dist ./src/pyindigo.egg-info
cd src/pyindigo_client
make reinstall
cd ../..
python setup.py install
|
#! /bin/bash
# Fetch the monolith branch of the example reddit app and install its gems.
git clone -b monolith https://github.com/express42/reddit.git
cd reddit && bundle install
|
import re
from typing import List
def extract_module_names(code: str) -> List[str]:
    """Collect module names from ``from . import <name>  # noqa`` lines.

    Each name is returned once, in first-appearance order. The original
    accumulated names in a set and returned ``list(set)``, whose ordering is
    arbitrary; dict.fromkeys de-duplicates while keeping insertion order, so
    the result is deterministic.
    """
    import_regex = r"from\s+\.\s+import\s+(\w+)\s+#\s*noqa"
    return list(dict.fromkeys(re.findall(import_regex, code)))
|
#!/bin/bash
# Copyright 2019 Tsinghua University (Author: Zhiyuan Tang)
# Apache 2.0.
# This script for oriental language recognition is based on ../../sre16/v2/run.sh which is used for speaker recognition.
. ./cmd.sh
. ./path.sh
# Stage map: 1=feats+VAD, 2=relabel spk as lang, 3=egs prep,
# 4-6=x-vector training (inside run_xvector.sh), 7=test feats,
# 8=tasks 1&2 scoring, 9=task 3 (zero-resource) scoring.
stage=1
set -eu
###### Bookmark: basic preparation ######
# Prepare training set in data/train (for ap19-olr, including almost all train/test data used in the previous challenges),
# both contain at least wav.scp, utt2lang, spk2utt and utt2spk,
# spk2utt/utt2spk could be fake, e.g. the utt-id is just the spk-id.
###### Bookmark: feature and vad computation ######
if [ $stage -le 1 ]; then
  # Produce Fbank and MFCC in data/{fbank,mfcc}/train
  for x in train; do
    mkdir -p data/fbank/$x && cp -r data/$x/{spk2utt,utt2lang,utt2spk,wav.scp} data/fbank/$x
    mkdir -p data/mfcc/$x && cp -r data/$x/{spk2utt,utt2lang,utt2spk,wav.scp} data/mfcc/$x
    steps/make_fbank.sh --nj 10 --cmd "$train_cmd" --write-utt2num-frames true data/fbank/$x
    steps/make_mfcc.sh --nj 10 --cmd "$train_cmd" data/mfcc/$x
    sid/compute_vad_decision.sh --nj 10 --cmd "$train_cmd" data/mfcc/$x data/mfcc/$x/log data/mfcc/$x/data
    cp data/mfcc/$x/vad.scp data/fbank/$x/vad.scp
  done
fi
###### Bookmark: x-vector training ######
# Caution: in order to use off-the-shelf scripts in ../../sre16/v2 for speaker recogniton,
# we copy utt2lang to utt2spk, i.e., each fake spk is actually a language.
if [ $stage -le 2 ]; then
  mv data/fbank/train/utt2spk data/fbank/train/utt2spk.bak
  mv data/fbank/train/spk2utt data/fbank/train/spk2utt.bak
  cp data/fbank/train/utt2lang data/fbank/train/utt2spk
  utils/utt2spk_to_spk2utt.pl data/fbank/train/utt2spk > data/fbank/train/spk2utt
  utils/fix_data_dir.sh data/fbank/train
fi
# Now we prepare the features to generate examples for xvector training.
if [ $stage -le 3 ]; then
  # This script applies CMVN and removes nonspeech frames. Note that this is somewhat
  # wasteful, as it roughly doubles the amount of training data on disk. After
  # creating training examples, this can be removed.
  local/nnet3/xvector/prepare_feats_for_egs.sh --nj 10 --cmd "$train_cmd" \
    data/fbank/train data/fbank/train_no_sil exp/fbank/train_no_sil
  utils/fix_data_dir.sh data/fbank/train_no_sil
  # Now, we need to remove features that are too short after removing silence
  # frames. We want at least 0.5s (50 frames) per utterance.
  min_len=50
  mv data/fbank/train_no_sil/utt2num_frames data/fbank/train_no_sil/utt2num_frames.bak
  awk -v min_len=${min_len} '$2 > min_len {print $1, $2}' data/fbank/train_no_sil/utt2num_frames.bak > data/fbank/train_no_sil/utt2num_frames
  utils/filter_scp.pl data/fbank/train_no_sil/utt2num_frames data/fbank/train_no_sil/utt2spk > data/fbank/train_no_sil/utt2spk.new
  mv data/fbank/train_no_sil/utt2spk.new data/fbank/train_no_sil/utt2spk
  utils/fix_data_dir.sh data/fbank/train_no_sil
fi
nnet_dir=exp/xvect
# stage 4-6 inside
local/nnet3/xvector/run_xvector.sh --stage $stage --train-stage -1 \
  --data data/fbank/train_no_sil --nnet-dir $nnet_dir \
  --egs-dir $nnet_dir/egs
###### Bookmark: evaluation of three tasks ######
# Produce feats for test sets
if [ $stage -le 7 ]; then
  # following test sets contain at least wav.scp, utt2lang, spk2utt and utt2spk,
  # spk2utt/utt2spk could be fake, e.g. the utt-id is just the spk-id.
  for x in task_1 task_2 task_3/enroll task_3/test; do
    mkdir -p data_test_final/fbank/$x && cp -r data_test_final/$x/{spk2utt,utt2lang,utt2spk,wav.scp} data_test_final/fbank/$x
    mkdir -p data_test_final/mfcc/$x && cp -r data_test_final/$x/{spk2utt,utt2lang,utt2spk,wav.scp} data_test_final/mfcc/$x
    steps/make_fbank.sh --nj 8 --cmd "$train_cmd" --write-utt2num-frames true data_test_final/fbank/$x
    steps/make_mfcc.sh --nj 8 --cmd "$train_cmd" data_test_final/mfcc/$x
    sid/compute_vad_decision.sh --nj 8 --cmd "$train_cmd" data_test_final/mfcc/$x data_test_final/mfcc/$x/log data_test_final/mfcc/$x/data
    cp data_test_final/mfcc/$x/vad.scp data_test_final/fbank/$x/vad.scp
  done
  # spk2utt is fake, actually from utt2lang, see stage 2
  awk -v id=0 '{print $1, id++}' data/fbank/train/spk2utt > $nnet_dir/lang2lang_id
fi
# Task 1: Short-utterance
# Task 2: Cross-channel LID
# outputs of the original x-vect system by propagating the test set are used as scores.
if [ $stage -le 8 ]; then
  # forward the net
  for x in task_1 task_2; do
    local/run_xvect_score.sh --cmd "$train_cmd --mem 6G" --nj 10 \
      $nnet_dir data_test_final/fbank/$x \
      exp/xvectors_$x
  done
  # print eer and cavg
  for x in task_1 task_2; do
    # prepare trials
    local/prepare_trials.py data/fbank/train data_test_final/fbank/$x
    trials=data_test_final/fbank/$x/trials
    # only keep the 6 target languages for task 2
    if [[ $x == "task_2" ]]; then
      grep -E 'Tibet |Uyghu |ja-jp |ru-ru |vi-vn |zh-cn ' data_test_final/fbank/$x/trials > data_test_final/fbank/$x/trials.6
      mv data_test_final/fbank/$x/trials.6 data_test_final/fbank/$x/trials
      langs='Tibet Uyghu ja-jp ru-ru vi-vn zh-cn'
      python local/filter_lre_matrix.py "$langs" exp/xvectors_$x/output.ark.utt > exp/xvectors_$x/output.ark.utt.6
      mv exp/xvectors_$x/output.ark.utt.6 exp/xvectors_$x/output.ark.utt
    fi
    echo "---- $x ----"
    eer=`compute-eer <(python local/nnet3/prepare_for_eer.py $trials exp/xvectors_$x/output.ark.utt) 2> /dev/null`
    printf "%15s %5.2f \n" "$x utt level eer%:" $eer
    cavg=`python local/compute_cavg.py -matrix $trials exp/xvectors_$x/output.ark.utt`
    printf "%15s %7.4f \n" "$x utt level cavg:" $cavg
  done
fi
# Task 3: Zero-resource LID
# x-vects are extracted for both enroll and test sets, then used for identification.
# the training set for the x-vect system doesn't include the languages in task 3.
if [ $stage -le 9 ]; then
  for x in task_3/enroll task_3/test; do
    local/nnet3/xvector/extract_xvectors.sh --cmd "$train_cmd --mem 6G" --nj 10 \
      $nnet_dir data_test_final/fbank/$x \
      exp/xvectors_$x
  done
  for x in task_3/test; do
    # prepare trials
    local/prepare_trials.py data_test_final/fbank/task_3/enroll data_test_final/fbank/$x
    trials=data_test_final/fbank/$x/trials
  done
  exp=exp/xvectors_task_3
  # basic cosine scoring on x-vectors
  local/cosine_scoring.sh data_test_final/fbank/task_3/enroll data_test_final/fbank/task_3/test \
    $exp/enroll $exp/test $trials $exp/scores
  # print eer and cavg
  for i in cosine; do
    echo "---- task_3 ----"
    eer=`compute-eer <(python local/prepare_for_eer.py $trials $exp/scores/${i}_scores) 2> /dev/null`
    printf "%15s %5.2f \n" "$i eer%:" $eer
    cavg=`python local/compute_cavg.py -pairs $trials $exp/scores/${i}_scores`
    printf "%15s %7.4f \n" "$i cavg:" $cavg
  done
fi
exit 0
|
import React from "react";
const SpecialButton = props => {
return <button className="button specialButton">{props.special}</button>;
};
export default SpecialButton;
|
# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# GLSA (Gentoo Linux Security Advisory) IDs that are known not to affect the
# built image; glsa_image() filters these out of the glsa-check report so
# only unexpected advisories fail the build.
GLSA_WHITELIST=(
201412-09 # incompatible CA certificate version numbers
201908-14 # backported both CVE fixes
201909-01 # Perl, SDK only
201909-08 # backported fix
201911-01 # package too old to even have the affected USE flag
202003-20 # backported fix
202003-12 # only applies to old, already-fixed CVEs
202003-24 # SDK only
202003-26 # SDK only
202003-30 # fixed by updating within older minor release
202003-31 # SDK only
202003-52 # difficult to update :-(
202004-10 # fixed by updating within older minor release
202004-13 # fixed by updating within older minor release
202005-02 # SDK only
202005-09 # SDK only
)
# Scan the board root for applicable GLSAs. Returns 0 when every reported
# advisory is on GLSA_WHITELIST; otherwise prints the offenders (via grep's
# pass-through output) and returns 1.
glsa_image() {
  # Turn each whitelisted ID into a "-e<id>" fixed-string pattern; with
  # -Fvx, grep prints only advisories that are NOT whitelisted.
  local -a ignored=("${GLSA_WHITELIST[@]/#/-e}")
  glsa-check-$BOARD -t all | grep -Fvx "${ignored[@]}" || return 0
  echo "The above GLSAs apply to $ROOT"
  return 1
}
# Sanity-check a built image's root filesystem.
# Globals:   BOARD (read), BUILD_LIBRARY_DIR (read), GLSA_WHITELIST (via glsa_image)
# Arguments: $1 - path to the mounted image root
# Returns:   0 if all hard checks pass; 1 if the /usr-conflict, symlink, or
#            GLSA check fails. Dependency, blacklist-dir, and shebang
#            problems are deliberately warnings only (see comments below).
test_image_content() {
local root="$1"
local returncode=0
info "Checking $1"
# Helper script that runs the individual checks against ROOT.
local check_root="${BUILD_LIBRARY_DIR}/check_root"
# Library dependency check — warning only for now (see note below).
if ! ROOT="$root" "$check_root" libs; then
warn "test_image_content: Failed dependency check"
warn "This may be the result of having a long-lived SDK with binary"
warn "packages that predate portage 2.2.18. If this is the case try:"
echo " emerge-$BOARD -agkuDN --rebuilt-binaries=y -j9 @world"
echo " emerge-$BOARD -a --depclean"
#returncode=1
fi
# Directories that should not ship in the image (size/bloat concerns).
local blacklist_dirs=(
"$root/usr/share/locale"
)
for dir in "${blacklist_dirs[@]}"; do
if [ -d "$dir" ]; then
warn "test_image_content: Blacklisted directory found: $dir"
# Only a warning for now, size isn't important enough to kill time
# playing whack-a-mole on things like this this yet.
#error "test_image_content: Blacklisted directory found: $dir"
#returncode=1
fi
done
# Check that there are no conflicts between /* and /usr/*
if ! ROOT="$root" "$check_root" usr; then
error "test_image_content: Failed /usr conflict check"
returncode=1
fi
# Check that there are no #! lines pointing to non-existant locations
if ! ROOT="$root" "$check_root" shebang; then
warn "test_image_content: Failed #! check"
# Only a warning for now. We still have to actually remove all of the
# offending scripts.
#error "test_image_content: Failed #! check"
#returncode=1
fi
# NOTE(review): sudo presumably needed to stat files unreadable by the
# build user — confirm against check_root's symlink check.
if ! sudo ROOT="$root" "$check_root" symlink; then
error "test_image_content: Failed symlink check"
returncode=1
fi
# Security advisory scan (hard failure on non-whitelisted GLSAs).
if ! ROOT="$root" glsa_image; then
returncode=1
fi
return $returncode
}
|
import styled from "styled-components";
import { spacingScale } from "../../utils/spacing";
// Outer section: horizontally centers the game area with generous margin.
export const Wrapper = styled.section`
  margin: ${spacingScale.spacing_xxl};
  display: flex;
  justify-content: center;
`;
// Inner column: stacks children vertically, centered, with padding.
export const InnerWrap = styled.div`
  padding: ${spacingScale.spacing_xl};
  display: flex;
  flex-direction: column;
  align-items: center;
`;
// Paragraph element used as the game icon's spacing container.
export const GameIcon = styled.p`
  margin: ${spacingScale.spacing_l};
`;
// Prominent centered status message shown to the player.
export const UserMessage = styled.p`
  color: aliceblue;
  text-align: center;
  font-size: 1.8rem;
  font-weight: 600;
`;
// "New game" call-to-action button.
// - min-width/min-height keep a ~44px touch target (common a11y guideline).
// - Focus style uses a box-shadow ring plus a transparent solid outline, so a
//   visible outline is still drawn in Windows High Contrast / forced-colors
//   mode where box-shadows are dropped.
// NOTE(review): in the hover/active selector list, `:active` lacks the `&`
// prefix; styled-components' parser (stylis) prefixes a bare pseudo-class
// with the component selector, so it behaves like `&:active`, but writing
// `&:active` explicitly would be clearer — confirm before editing the
// template string.
export const NewGameButton = styled.button`
  min-width: 10ch;
  min-height: 44px;
  padding: ${spacingScale.spacing_s};
  cursor: pointer;
  background: aliceblue;
  color: #0249ef;
  border-radius: 0.2em;
  transition: 220ms all ease-in-out;
  &:hover,
  :active {
    background-color: #bdc0ef;
  }
  &:focus {
    outline-style: solid;
    outline-color: transparent;
    box-shadow: 0 0 0 4px #ff4f00;
  }
`;
|
# Download every archived revision of likumi.lv document id=89648, one HTML
# file per version date, saved as YYYYMMDD.html in the current directory.
# The original script repeated the same curl command 60 times; the output
# filename is just the version date (DD.MM.YYYY) rewritten as YYYYMMDD, so
# the requests are generated from a single date list instead.
# NOTE(review): the capture uses plain http:// — likumi.lv also serves https;
# confirm before switching schemes, since the original used http.

# Browser-mimicking curl options shared by every request (captured from a
# Chrome 55 session via "Copy as cURL"); kept byte-identical to the original.
likumi_curl_opts=(
  -H 'DNT: 1'
  -H 'Accept-Encoding: gzip, deflate, sdch'
  -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2'
  -H 'Upgrade-Insecure-Requests: 1'
  -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36'
  -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
  -H 'Connection: keep-alive'
  --compressed
)

# likumi_outfile DATE
# Convert a DD.MM.YYYY version date to its YYYYMMDD.html output filename.
# Prints the filename to stdout; e.g. "01.01.2017" -> "20170101.html".
likumi_outfile() {
  local d=$1
  printf '%s%s%s.html' "${d:6:4}" "${d:3:2}" "${d:0:2}"
}

# likumi_fetch DATE
# Fetch one revision (DATE in DD.MM.YYYY form) into its YYYYMMDD.html file.
likumi_fetch() {
  local date=$1
  curl -o "$(likumi_outfile "$date")" \
    "http://likumi.lv/body_print.php?id=89648&version_date=${date}&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0" \
    "${likumi_curl_opts[@]}"
}

# All revision dates of document 89648, newest first — same order as the
# original one-curl-per-line list.
likumi_version_dates=(
  01.01.2017 16.11.2016 01.09.2016 13.07.2016 15.06.2016 01.06.2016
  23.03.2016 06.07.2015 02.07.2015 01.07.2015 10.03.2015 01.01.2015
  29.11.2014 03.07.2014 26.03.2014 01.01.2014 25.10.2013 01.09.2013
  29.05.2013 01.04.2013 01.01.2013 01.08.2012 01.07.2012 21.03.2012
  01.03.2012 01.01.2012 03.08.2011 20.07.2011 01.04.2011 17.02.2011
  29.01.2011 01.01.2011 21.07.2010 23.06.2010 03.03.2010 17.02.2010
  01.01.2010 07.10.2009 01.07.2009 18.06.2009 17.06.2009 03.06.2009
  13.05.2009 25.02.2009 21.01.2009 01.01.2009 07.08.2008 01.06.2008
  14.05.2008 23.01.2008 01.01.2008 05.10.2007 01.08.2007 29.06.2007
  26.06.2007 21.06.2007 12.06.2007 01.06.2007 09.03.2007 01.01.2007
)

for likumi_date in "${likumi_version_dates[@]}"; do
  likumi_fetch "$likumi_date"
done
curl -o 20060801.html 'http://likumi.lv/body_print.php?id=89648&version_date=01.08.2006&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20060721.html 'http://likumi.lv/body_print.php?id=89648&version_date=21.07.2006&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20060701.html 'http://likumi.lv/body_print.php?id=89648&version_date=01.07.2006&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20060124.html 'http://likumi.lv/body_print.php?id=89648&version_date=24.01.2006&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20051014.html 'http://likumi.lv/body_print.php?id=89648&version_date=14.10.2005&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20050415.html 'http://likumi.lv/body_print.php?id=89648&version_date=15.04.2005&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20050218.html 'http://likumi.lv/body_print.php?id=89648&version_date=18.02.2005&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20050125.html 'http://likumi.lv/body_print.php?id=89648&version_date=25.01.2005&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20050101.html 'http://likumi.lv/body_print.php?id=89648&version_date=01.01.2005&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20041229.html 'http://likumi.lv/body_print.php?id=89648&version_date=29.12.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20041012.html 'http://likumi.lv/body_print.php?id=89648&version_date=12.10.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20041007.html 'http://likumi.lv/body_print.php?id=89648&version_date=07.10.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20040709.html 'http://likumi.lv/body_print.php?id=89648&version_date=09.07.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20040701.html 'http://likumi.lv/body_print.php?id=89648&version_date=01.07.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
curl -o 20040521.html 'http://likumi.lv/body_print.php?id=89648&version_date=21.05.2004&grozijumi=1&pielikumi=0&saturs=1&piezimes=0&large_font=0' -H 'DNT: 1' -H 'Accept-Encoding: gzip, deflate, sdch' -H 'Accept-Language: en-US,en;q=0.8,de;q=0.6,lv;q=0.4,ru;q=0.2' -H 'Upgrade-Insecure-Requests: 1' -H 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36' -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8' -H 'Connection: keep-alive' --compressed
|
#!/bin/bash
# Build the crypto unit-test binaries against the reference implementations
# in ../src.  Fail fast: if any compile step breaks, stop instead of
# silently continuing and exiting 0 (the original behavior).
set -euo pipefail

# NOTE(review): the sources are .c files but were historically built with
# g++; kept as-is in case they rely on C++ linkage — confirm before
# switching to gcc.
CXX=g++
CPPFLAGS=(-I../src)

# Digest self-tests: MD5, SHA-1, SHA-2.
"$CXX" "${CPPFLAGS[@]}" ../src/md5.c ../src/sha1.c ../src/sha2.c hash_test.c -o hash_test

# HMAC-SHA1 self-test.
"$CXX" "${CPPFLAGS[@]}" ../src/hmac_sha1.c ../src/sha1.c hmac_test.c -o hmac_test

# PBKDF2 (built on HMAC-SHA1) self-test.
"$CXX" "${CPPFLAGS[@]}" ../src/pbkdf2.c ../src/hmac_sha1.c ../src/sha1.c pbkdf_test.c -o pbkdf_test

# AES self-test.
"$CXX" "${CPPFLAGS[@]}" ../src/aes.c aes_test.c -o aes_test
|
#!/bin/sh
# Restart a deployment in the uc4-lagom namespace of the kind-experimental
# cluster.  Accepts the service name with or without a trailing "_service"
# suffix (the suffix is stripped to get the deployment name).
set -eu

# Guard against a missing/empty argument: without this, plain sh would
# expand $1 to nothing and we would ask kubectl to restart deployment "".
if [ $# -lt 1 ] || [ -z "$1" ]; then
  echo "Usage: ${0##*/} <name>[_service]" >&2
  exit 2
fi

name=${1%_service}
echo "Redeploying $name"
kubectl config use-context kind-experimental
kubectl rollout restart deployment "$name" -n uc4-lagom
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.